{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.943820224719101,
  "eval_steps": 500,
  "global_step": 385,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 2.5e-06, "loss": 2.3199, "step": 1 },
    { "epoch": 0.03, "learning_rate": 5e-06, "loss": 2.6164, "step": 2 },
    { "epoch": 0.04, "learning_rate": 7.500000000000001e-06, "loss": 2.7792, "step": 3 },
    { "epoch": 0.05, "learning_rate": 1e-05, "loss": 2.6204, "step": 4 },
    { "epoch": 0.06, "learning_rate": 9.999830024102874e-06, "loss": 2.3813, "step": 5 },
    { "epoch": 0.08, "learning_rate": 9.99932010796822e-06, "loss": 2.5856, "step": 6 },
    { "epoch": 0.09, "learning_rate": 9.998470286265415e-06, "loss": 2.3747, "step": 7 },
    { "epoch": 0.1, "learning_rate": 9.997280616774147e-06, "loss": 2.5818, "step": 8 },
    { "epoch": 0.12, "learning_rate": 9.995751180380468e-06, "loss": 2.5867, "step": 9 },
    { "epoch": 0.13, "learning_rate": 9.993882081071307e-06, "loss": 2.2268, "step": 10 },
    { "epoch": 0.14, "learning_rate": 9.991673445927399e-06, "loss": 2.6059, "step": 11 },
    { "epoch": 0.15, "learning_rate": 9.989125425114639e-06, "loss": 2.0894, "step": 12 },
    { "epoch": 0.17, "learning_rate": 9.986238191873874e-06, "loss": 2.2201, "step": 13 },
    { "epoch": 0.18, "learning_rate": 9.983011942509131e-06, "loss": 2.316, "step": 14 },
    { "epoch": 0.19, "learning_rate": 9.979446896374264e-06, "loss": 2.6564, "step": 15 },
    { "epoch": 0.21, "learning_rate": 9.975543295858035e-06, "loss": 2.465, "step": 16 },
    { "epoch": 0.22, "learning_rate": 9.971301406367644e-06, "loss": 2.4582, "step": 17 },
    { "epoch": 0.23, "learning_rate": 9.966721516310683e-06, "loss": 2.2573, "step": 18 },
    { "epoch": 0.24, "learning_rate": 9.961803937075516e-06, "loss": 2.3717, "step": 19 },
    { "epoch": 0.26, "learning_rate": 9.956549003010122e-06, "loss": 2.6225, "step": 20 },
    { "epoch": 0.27, "learning_rate": 9.950957071399357e-06, "loss": 2.0773, "step": 21 },
    { "epoch": 0.28, "learning_rate": 9.945028522440654e-06, "loss": 2.2186, "step": 22 },
    { "epoch": 0.3, "learning_rate": 9.938763759218186e-06, "loss": 2.7806, "step": 23 },
    { "epoch": 0.31, "learning_rate": 9.93216320767545e-06, "loss": 2.5508, "step": 24 },
    { "epoch": 0.32, "learning_rate": 9.925227316586316e-06, "loss": 2.2016, "step": 25 },
    { "epoch": 0.33, "learning_rate": 9.917956557524511e-06, "loss": 2.2435, "step": 26 },
    { "epoch": 0.35, "learning_rate": 9.910351424831545e-06, "loss": 2.5108, "step": 27 },
    { "epoch": 0.36, "learning_rate": 9.902412435583127e-06, "loss": 2.2933, "step": 28 },
    { "epoch": 0.37, "learning_rate": 9.89414012955398e-06, "loss": 2.1846, "step": 29 },
    { "epoch": 0.39, "learning_rate": 9.885535069181163e-06, "loss": 2.9822, "step": 30 },
    { "epoch": 0.4, "learning_rate": 9.876597839525814e-06, "loss": 2.231, "step": 31 },
    { "epoch": 0.41, "learning_rate": 9.867329048233387e-06, "loss": 2.2585, "step": 32 },
    { "epoch": 0.42, "learning_rate": 9.857729325492329e-06, "loss": 2.3573, "step": 33 },
    { "epoch": 0.44, "learning_rate": 9.847799323991234e-06, "loss": 2.2582, "step": 34 },
    { "epoch": 0.45, "learning_rate": 9.837539718874466e-06, "loss": 2.2804, "step": 35 },
    { "epoch": 0.46, "learning_rate": 9.826951207696258e-06, "loss": 2.864, "step": 36 },
    { "epoch": 0.48, "learning_rate": 9.816034510373287e-06, "loss": 2.1242, "step": 37 },
    { "epoch": 0.49, "learning_rate": 9.804790369135719e-06, "loss": 2.2738, "step": 38 },
    { "epoch": 0.5, "learning_rate": 9.793219548476754e-06, "loss": 2.6035, "step": 39 },
    { "epoch": 0.51, "learning_rate": 9.781322835100639e-06, "loss": 2.1281, "step": 40 },
    { "epoch": 0.53, "learning_rate": 9.769101037869187e-06, "loss": 2.392, "step": 41 },
    { "epoch": 0.54, "learning_rate": 9.756554987746777e-06, "loss": 2.2579, "step": 42 },
    { "epoch": 0.55, "learning_rate": 9.743685537743856e-06, "loss": 2.1741, "step": 43 },
    { "epoch": 0.57, "learning_rate": 9.730493562858954e-06, "loss": 2.2772, "step": 44 },
    { "epoch": 0.58, "learning_rate": 9.716979960019173e-06, "loss": 2.2809, "step": 45 },
    { "epoch": 0.59, "learning_rate": 9.70314564801922e-06, "loss": 2.3821, "step": 46 },
    { "epoch": 0.6, "learning_rate": 9.688991567458934e-06, "loss": 2.5027, "step": 47 },
    { "epoch": 0.62, "learning_rate": 9.67451868067933e-06, "loss": 2.3994, "step": 48 },
    { "epoch": 0.63, "learning_rate": 9.659727971697173e-06, "loss": 2.3403, "step": 49 },
    { "epoch": 0.64, "learning_rate": 9.644620446138078e-06, "loss": 2.2843, "step": 50 },
    { "epoch": 0.65, "learning_rate": 9.629197131168125e-06, "loss": 1.9102, "step": 51 },
    { "epoch": 0.67, "learning_rate": 9.613459075424033e-06, "loss": 2.0563, "step": 52 },
    { "epoch": 0.68, "learning_rate": 9.597407348941865e-06, "loss": 2.5812, "step": 53 },
    { "epoch": 0.69, "learning_rate": 9.58104304308426e-06, "loss": 2.0869, "step": 54 },
    { "epoch": 0.71, "learning_rate": 9.564367270466247e-06, "loss": 1.9037, "step": 55 },
    { "epoch": 0.72, "learning_rate": 9.54738116487959e-06, "loss": 2.3678, "step": 56 },
    { "epoch": 0.73, "learning_rate": 9.530085881215705e-06, "loss": 2.2848, "step": 57 },
    { "epoch": 0.74, "learning_rate": 9.512482595387131e-06, "loss": 2.0607, "step": 58 },
    { "epoch": 0.76, "learning_rate": 9.494572504247593e-06, "loss": 1.956, "step": 59 },
    { "epoch": 0.77, "learning_rate": 9.476356825510613e-06, "loss": 2.2119, "step": 60 },
    { "epoch": 0.78, "learning_rate": 9.457836797666722e-06, "loss": 2.2242, "step": 61 },
    { "epoch": 0.8, "learning_rate": 9.439013679899263e-06, "loss": 2.259, "step": 62 },
    { "epoch": 0.81, "learning_rate": 9.419888751998768e-06, "loss": 2.2482, "step": 63 },
    { "epoch": 0.82, "learning_rate": 9.400463314275942e-06, "loss": 2.0579, "step": 64 },
    { "epoch": 0.83, "learning_rate": 9.380738687473274e-06, "loss": 1.9384, "step": 65 },
    { "epoch": 0.85, "learning_rate": 9.360716212675213e-06, "loss": 2.1505, "step": 66 },
    { "epoch": 0.86, "learning_rate": 9.340397251217009e-06, "loss": 2.0986, "step": 67 },
    { "epoch": 0.87, "learning_rate": 9.319783184592142e-06, "loss": 2.3174, "step": 68 },
    { "epoch": 0.89, "learning_rate": 9.298875414358399e-06, "loss": 2.1534, "step": 69 },
    { "epoch": 0.9, "learning_rate": 9.27767536204258e-06, "loss": 2.1114, "step": 70 },
    { "epoch": 0.91, "learning_rate": 9.256184469043852e-06, "loss": 1.9863, "step": 71 },
    { "epoch": 0.92, "learning_rate": 9.23440419653574e-06, "loss": 2.137, "step": 72 },
    { "epoch": 0.94, "learning_rate": 9.212336025366789e-06, "loss": 2.225, "step": 73 },
    { "epoch": 0.95, "learning_rate": 9.189981455959873e-06, "loss": 2.0334, "step": 74 },
    { "epoch": 0.96, "learning_rate": 9.167342008210191e-06, "loss": 1.9056, "step": 75 },
    { "epoch": 0.98, "learning_rate": 9.144419221381919e-06, "loss": 2.144, "step": 76 },
    { "epoch": 0.99, "learning_rate": 9.121214654003561e-06, "loss": 1.8992, "step": 77 },
    { "epoch": 1.0, "learning_rate": 9.097729883761977e-06, "loss": 1.8587, "step": 78 },
    { "epoch": 1.01, "learning_rate": 9.073966507395123e-06, "loss": 2.5454, "step": 79 },
    { "epoch": 1.03, "learning_rate": 9.049926140583487e-06, "loss": 2.0381, "step": 80 },
    { "epoch": 1.04, "learning_rate": 9.025610417840238e-06, "loss": 2.0646, "step": 81 },
    { "epoch": 1.05, "learning_rate": 9.001020992400086e-06, "loss": 1.6412, "step": 82 },
    { "epoch": 1.07, "learning_rate": 8.976159536106895e-06, "loss": 1.9907, "step": 83 },
    { "epoch": 1.08, "learning_rate": 8.951027739299996e-06, "loss": 1.8317, "step": 84 },
    { "epoch": 1.09, "learning_rate": 8.925627310699275e-06, "loss": 2.1631, "step": 85 },
    { "epoch": 1.1, "learning_rate": 8.899959977288988e-06, "loss": 2.2149, "step": 86 },
    { "epoch": 1.12, "learning_rate": 8.874027484200342e-06, "loss": 2.2374, "step": 87 },
    { "epoch": 1.13, "learning_rate": 8.847831594592851e-06, "loss": 2.124, "step": 88 },
    { "epoch": 1.14, "learning_rate": 8.821374089534446e-06, "loss": 2.0088, "step": 89 },
    { "epoch": 1.16, "learning_rate": 8.794656767880394e-06, "loss": 2.1843, "step": 90 },
    { "epoch": 1.17, "learning_rate": 8.767681446150977e-06, "loss": 2.552, "step": 91 },
    { "epoch": 1.18, "learning_rate": 8.740449958408006e-06, "loss": 1.7471, "step": 92 },
    { "epoch": 1.19, "learning_rate": 8.7129641561301e-06, "loss": 1.9054, "step": 93 },
    { "epoch": 1.21, "learning_rate": 8.68522590808682e-06, "loss": 2.1348, "step": 94 },
    { "epoch": 1.22, "learning_rate": 8.657237100211604e-06, "loss": 2.0139, "step": 95 },
    { "epoch": 1.23, "learning_rate": 8.628999635473547e-06, "loss": 1.9181, "step": 96 },
    { "epoch": 1.25, "learning_rate": 8.600515433748003e-06, "loss": 2.2683, "step": 97 },
    { "epoch": 1.26, "learning_rate": 8.571786431686074e-06, "loss": 1.7892, "step": 98 },
    { "epoch": 1.27, "learning_rate": 8.542814582582917e-06, "loss": 1.9259, "step": 99 },
    { "epoch": 1.28, "learning_rate": 8.513601856244951e-06, "loss": 1.9594, "step": 100 },
    { "epoch": 1.3, "learning_rate": 8.484150238855921e-06, "loss": 2.1425, "step": 101 },
    { "epoch": 1.31, "learning_rate": 8.454461732841864e-06, "loss": 2.1687, "step": 102 },
    { "epoch": 1.32, "learning_rate": 8.424538356734957e-06, "loss": 2.128, "step": 103 },
    { "epoch": 1.34, "learning_rate": 8.394382145036277e-06, "loss": 2.3052, "step": 104 },
    { "epoch": 1.35, "learning_rate": 8.363995148077481e-06, "loss": 2.1029, "step": 105 },
    { "epoch": 1.36, "learning_rate": 8.333379431881398e-06, "loss": 1.9932, "step": 106 },
    { "epoch": 1.37, "learning_rate": 8.302537078021555e-06, "loss": 1.9616, "step": 107 },
    { "epoch": 1.39, "learning_rate": 8.271470183480664e-06, "loss": 2.0529, "step": 108 },
    { "epoch": 1.4, "learning_rate": 8.240180860508027e-06, "loss": 1.8921, "step": 109 },
    { "epoch": 1.41, "learning_rate": 8.208671236475945e-06, "loss": 1.7787, "step": 110 },
    { "epoch": 1.43, "learning_rate": 8.176943453735062e-06, "loss": 2.0703, "step": 111 },
    { "epoch": 1.44, "learning_rate": 8.144999669468714e-06, "loss": 1.9988, "step": 112 },
    { "epoch": 1.45, "learning_rate": 8.112842055546254e-06, "loss": 1.8816, "step": 113 },
    { "epoch": 1.46, "learning_rate": 8.080472798375392e-06, "loss": 1.953, "step": 114 },
    { "epoch": 1.48, "learning_rate": 8.04789409875354e-06, "loss": 2.0868, "step": 115 },
    { "epoch": 1.49, "learning_rate": 8.015108171718177e-06, "loss": 2.2089, "step": 116 },
    { "epoch": 1.5, "learning_rate": 7.982117246396246e-06, "loss": 1.9521, "step": 117 },
    { "epoch": 1.52, "learning_rate": 7.948923565852597e-06, "loss": 2.2335, "step": 118 },
    { "epoch": 1.53, "learning_rate": 7.915529386937486e-06, "loss": 2.2536, "step": 119 },
    { "epoch": 1.54, "learning_rate": 7.881936980133118e-06, "loss": 1.6541, "step": 120 },
    { "epoch": 1.55, "learning_rate": 7.848148629399287e-06, "loss": 1.9498, "step": 121 },
    { "epoch": 1.57, "learning_rate": 7.814166632018083e-06, "loss": 2.3166, "step": 122 },
    { "epoch": 1.58, "learning_rate": 7.779993298437704e-06, "loss": 2.1169, "step": 123 },
    { "epoch": 1.59, "learning_rate": 7.745630952115365e-06, "loss": 2.1505, "step": 124 },
    { "epoch": 1.61, "learning_rate": 7.711081929359316e-06, "loss": 1.9007, "step": 125 },
    { "epoch": 1.62, "learning_rate": 7.67634857917002e-06, "loss": 2.1772, "step": 126 },
    { "epoch": 1.63, "learning_rate": 7.641433263080418e-06, "loss": 2.4341, "step": 127 },
    { "epoch": 1.64, "learning_rate": 7.606338354995381e-06, "loss": 1.7081, "step": 128 },
    { "epoch": 1.66, "learning_rate": 7.571066241030302e-06, "loss": 2.0402, "step": 129 },
    { "epoch": 1.67, "learning_rate": 7.5356193193488655e-06, "loss": 2.0681, "step": 130 },
    { "epoch": 1.68, "learning_rate": 7.500000000000001e-06, "loss": 1.5562, "step": 131 },
    { "epoch": 1.7, "learning_rate": 7.464210704754009e-06, "loss": 1.7387, "step": 132 },
    { "epoch": 1.71, "learning_rate": 7.4282538669379186e-06, "loss": 2.1956, "step": 133 },
    { "epoch": 1.72, "learning_rate": 7.3921319312700365e-06, "loss": 2.0449, "step": 134 },
    { "epoch": 1.73, "learning_rate": 7.355847353693729e-06, "loss": 1.8579, "step": 135 },
    { "epoch": 1.75, "learning_rate": 7.319402601210448e-06, "loss": 2.0661, "step": 136 },
    { "epoch": 1.76, "learning_rate": 7.282800151711991e-06, "loss": 2.1408, "step": 137 },
    { "epoch": 1.77, "learning_rate": 7.246042493812036e-06, "loss": 1.729, "step": 138 },
    { "epoch": 1.78, "learning_rate": 7.209132126676934e-06, "loss": 2.3547, "step": 139 },
    { "epoch": 1.8, "learning_rate": 7.172071559855792e-06, "loss": 2.2959, "step": 140 },
    { "epoch": 1.81, "learning_rate": 7.134863313109847e-06, "loss": 2.3131, "step": 141 },
    { "epoch": 1.82, "learning_rate": 7.097509916241145e-06, "loss": 1.7705, "step": 142 },
    { "epoch": 1.84, "learning_rate": 7.060013908920549e-06, "loss": 1.6909, "step": 143 },
    { "epoch": 1.85, "learning_rate": 7.022377840515047e-06, "loss": 1.84, "step": 144 },
    { "epoch": 1.86, "learning_rate": 6.984604269914437e-06, "loss": 1.977, "step": 145 },
    { "epoch": 1.87, "learning_rate": 6.94669576535734e-06, "loss": 2.177, "step": 146 },
    { "epoch": 1.89, "learning_rate": 6.908654904256584e-06, "loss": 1.9606, "step": 147 },
    { "epoch": 1.9, "learning_rate": 6.870484273023967e-06, "loss": 2.0072, "step": 148 },
    { "epoch": 1.91, "learning_rate": 6.832186466894402e-06, "loss": 1.7583, "step": 149 },
    { "epoch": 1.93, "learning_rate": 6.793764089749473e-06, "loss": 1.8085, "step": 150 },
    { "epoch": 1.94, "learning_rate": 6.755219753940389e-06, "loss": 1.7839, "step": 151 },
    { "epoch": 1.95, "learning_rate": 6.716556080110374e-06, "loss": 1.9219, "step": 152 },
    { "epoch": 1.96, "learning_rate": 6.677775697016484e-06, "loss": 2.1836, "step": 153 },
    { "epoch": 1.98, "learning_rate": 6.638881241350884e-06, "loss": 1.964, "step": 154 },
    { "epoch": 1.99, "learning_rate": 6.599875357561572e-06, "loss": 1.8509, "step": 155 },
    { "epoch": 2.0, "learning_rate": 6.560760697672583e-06, "loss": 2.2632, "step": 156 },
    { "epoch": 2.02, "learning_rate": 6.5215399211036815e-06, "loss": 1.9777, "step": 157 },
    { "epoch": 2.03, "learning_rate": 6.4822156944895375e-06, "loss": 1.9851, "step": 158 },
    { "epoch": 2.04, "learning_rate": 6.442790691498433e-06, "loss": 2.204, "step": 159 },
    { "epoch": 2.05, "learning_rate": 6.403267592650466e-06, "loss": 1.7941, "step": 160 },
    { "epoch": 2.07, "learning_rate": 6.363649085135311e-06, "loss": 1.8559, "step": 161 },
    { "epoch": 2.08, "learning_rate": 6.323937862629513e-06, "loss": 1.9673, "step": 162 },
    { "epoch": 2.09, "learning_rate": 6.2841366251133405e-06, "loss": 1.883, "step": 163 },
    { "epoch": 2.11, "learning_rate": 6.244248078687213e-06, "loss": 1.9343, "step": 164 },
    { "epoch": 2.12, "learning_rate": 6.204274935387716e-06, "loss": 1.8082, "step": 165 },
    { "epoch": 2.13, "learning_rate": 6.164219913003208e-06, "loss": 1.9898, "step": 166 },
    { "epoch": 2.14, "learning_rate": 6.124085734889034e-06, "loss": 1.6458, "step": 167 },
    { "epoch": 2.16, "learning_rate": 6.083875129782366e-06, "loss": 2.0815, "step": 168 },
    { "epoch": 2.17, "learning_rate": 6.043590831616677e-06, "loss": 2.1997, "step": 169 },
    { "epoch": 2.18, "learning_rate": 6.003235579335851e-06, "loss": 2.1733, "step": 170 },
    { "epoch": 2.2, "learning_rate": 5.962812116707977e-06, "loss": 1.8058, "step": 171 },
    { "epoch": 2.21, "learning_rate": 5.92232319213878e-06, "loss": 2.1662, "step": 172 },
    { "epoch": 2.22, "learning_rate": 5.8817715584847744e-06, "loss": 1.8951, "step": 173 },
    { "epoch": 2.23, "learning_rate": 5.841159972866085e-06, "loss": 1.9074, "step": 174 },
    { "epoch": 2.25, "learning_rate": 5.800491196478989e-06, "loss": 1.9271, "step": 175 },
    { "epoch": 2.26, "learning_rate": 5.759767994408188e-06, "loss": 1.9044, "step": 176 },
    { "epoch": 2.27, "learning_rate": 5.718993135438803e-06, "loss": 1.9798, "step": 177 },
    { "epoch": 2.29, "learning_rate": 5.678169391868128e-06, "loss": 1.9495, "step": 178 },
    { "epoch": 2.3, "learning_rate": 5.637299539317141e-06, "loss": 2.1539, "step": 179 },
    { "epoch": 2.31, "learning_rate": 5.596386356541779e-06, "loss": 2.0293, "step": 180 },
    { "epoch": 2.32, "learning_rate": 5.555432625244024e-06, "loss": 1.8224, "step": 181 },
    { "epoch": 2.34, "learning_rate": 5.51444112988276e-06, "loss": 1.6832, "step": 182 },
    { "epoch": 2.35, "learning_rate": 5.473414657484468e-06, "loss": 2.0847, "step": 183 },
    { "epoch": 2.36, "learning_rate": 5.432355997453729e-06, "loss": 2.0309, "step": 184 },
    { "epoch": 2.38, "learning_rate": 5.391267941383572e-06, "loss": 2.0406, "step": 185 },
    { "epoch": 2.39, "learning_rate": 5.350153282865674e-06, "loss": 1.7257, "step": 186 },
    { "epoch": 2.4, "learning_rate": 5.309014817300422e-06, "loss": 1.8888, "step": 187 },
    { "epoch": 2.41, "learning_rate": 5.26785534170685e-06, "loss": 1.8149, "step": 188 },
    { "epoch": 2.43, "learning_rate": 5.226677654532476e-06, "loss": 2.0397, "step": 189 },
    { "epoch": 2.44, "learning_rate": 5.185484555463026e-06, "loss": 2.1468, "step": 190 },
    { "epoch": 2.45, "learning_rate": 5.1442788452320915e-06, "loss": 2.1732, "step": 191 },
    { "epoch": 2.47, "learning_rate": 5.1030633254306935e-06, "loss": 2.2631, "step": 192 },
    { "epoch": 2.48, "learning_rate": 5.061840798316815e-06, "loss": 1.9009, "step": 193 },
    { "epoch": 2.49, "learning_rate": 5.020614066624868e-06, "loss": 2.2187, "step": 194 },
    { "epoch": 2.5, "learning_rate": 4.979385933375133e-06, "loss": 1.6122, "step": 195 },
    { "epoch": 2.52, "learning_rate": 4.9381592016831856e-06, "loss": 1.5505, "step": 196 },
    { "epoch": 2.53, "learning_rate": 4.896936674569309e-06, "loss": 1.843, "step": 197 },
    { "epoch": 2.54, "learning_rate": 4.85572115476791e-06, "loss": 1.7565, "step": 198 },
    { "epoch": 2.56, "learning_rate": 4.814515444536975e-06, "loss": 1.8699, "step": 199 },
    { "epoch": 2.57, "learning_rate": 4.773322345467525e-06, "loss": 1.9872, "step": 200 },
    { "epoch": 2.58, "learning_rate": 4.732144658293151e-06, "loss": 1.9223, "step": 201 },
    { "epoch": 2.59, "learning_rate": 4.690985182699581e-06, "loss": 1.9225, "step": 202 },
    { "epoch": 2.61, "learning_rate": 4.649846717134327e-06, "loss": 1.8284, "step": 203 },
    { "epoch": 2.62, "learning_rate": 4.6087320586164296e-06, "loss": 1.8652, "step": 204 },
    { "epoch": 2.63, "learning_rate": 4.567644002546273e-06, "loss": 1.735, "step": 205 },
    { "epoch": 2.65, "learning_rate": 4.526585342515533e-06, "loss": 2.0562, "step": 206 },
    { "epoch": 2.66, "learning_rate": 4.485558870117241e-06, "loss": 2.0004, "step": 207 },
    { "epoch": 2.67, "learning_rate": 4.444567374755978e-06, "loss": 2.2094, "step": 208 },
    { "epoch": 2.68, "learning_rate": 4.403613643458222e-06, "loss": 1.9606, "step": 209 },
    { "epoch": 2.7, "learning_rate": 4.362700460682861e-06, "loss": 1.984, "step": 210 },
    { "epoch": 2.71, "learning_rate": 4.321830608131872e-06, "loss": 1.7363, "step": 211 },
    { "epoch": 2.72, "learning_rate": 4.281006864561199e-06, "loss": 1.9783, "step": 212 },
    { "epoch": 2.74, "learning_rate": 4.240232005591816e-06, "loss": 1.6583, "step": 213 },
    { "epoch": 2.75, "learning_rate": 4.1995088035210126e-06, "loss": 2.3748, "step": 214 },
    { "epoch": 2.76, "learning_rate": 4.158840027133917e-06, "loss": 2.0012, "step": 215 },
    { "epoch": 2.77, "learning_rate": 4.1182284415152255e-06, "loss": 1.6849, "step": 216 },
    { "epoch": 2.79, "learning_rate": 4.077676807861221e-06, "loss": 1.729, "step": 217 },
    { "epoch": 2.8, "learning_rate": 4.037187883292027e-06, "loss": 2.2028, "step": 218 },
    { "epoch": 2.81, "learning_rate": 3.996764420664149e-06, "loss": 1.9073, "step": 219 },
    { "epoch": 2.83, "learning_rate": 3.956409168383325e-06, "loss": 1.6662, "step": 220 },
    { "epoch": 2.84, "learning_rate": 3.916124870217635e-06, "loss": 1.9193, "step": 221 },
    { "epoch": 2.85, "learning_rate": 3.875914265110967e-06, "loss": 1.8654, "step": 222 },
    { "epoch": 2.86, "learning_rate": 3.835780086996794e-06, "loss": 1.9015, "step": 223 },
    { "epoch": 2.88, "learning_rate": 3.7957250646122843e-06, "loss": 1.9406, "step": 224 },
    { "epoch": 2.89, "learning_rate": 3.755751921312788e-06, "loss": 1.8809, "step": 225 },
    { "epoch": 2.9, "learning_rate": 3.715863374886661e-06, "loss": 1.8371, "step": 226 },
    { "epoch": 2.91, "learning_rate": 3.6760621373704867e-06, "loss": 2.0746, "step": 227 },
    { "epoch": 2.93, "learning_rate": 3.636350914864689e-06, "loss": 1.822, "step": 228 },
    { "epoch": 2.94, "learning_rate": 3.5967324073495363e-06, "loss": 1.7246, "step": 229 },
    { "epoch": 2.95, "learning_rate": 3.5572093085015683e-06, "loss": 1.6679, "step": 230 },
    { "epoch": 2.97, "learning_rate": 3.5177843055104633e-06, "loss": 1.8069, "step": 231 },
    { "epoch": 2.98, "learning_rate": 3.4784600788963197e-06, "loss": 1.9182, "step": 232 },
    { "epoch": 2.99, "learning_rate": 3.4392393023274173e-06, "loss": 1.6175, "step": 233 },
    { "epoch": 3.0, "learning_rate": 3.4001246424384294e-06, "loss": 1.8909, "step": 234 },
    { "epoch": 3.02, "learning_rate": 3.361118758649116e-06, "loss": 2.0512, "step": 235 },
    { "epoch": 3.03, "learning_rate": 3.322224302983517e-06, "loss": 1.8031, "step": 236 },
    { "epoch": 3.04, "learning_rate": 3.2834439198896285e-06, "loss": 1.8325, "step": 237 },
    { "epoch": 3.06, "learning_rate": 3.2447802460596124e-06, "loss": 1.9987, "step": 238 },
    { "epoch": 3.07, "learning_rate": 3.206235910250529e-06, "loss": 1.694, "step": 239 },
    { "epoch": 3.08, "learning_rate": 3.167813533105598e-06, "loss": 2.1121, "step": 240 },
    { "epoch": 3.09, "learning_rate": 3.1295157269760347e-06, "loss": 2.1676, "step": 241 },
    { "epoch": 3.11, "learning_rate": 3.0913450957434177e-06, "loss": 1.9139, "step": 242 },
    { "epoch": 3.12, "learning_rate": 3.0533042346426612e-06, "loss": 1.8614, "step": 243 },
    { "epoch": 3.13, "learning_rate": 3.015395730085565e-06, "loss": 1.9445, "step": 244 },
    { "epoch": 3.15, "learning_rate": 2.9776221594849565e-06, "loss": 2.1084, "step": 245 },
    { "epoch": 3.16, "learning_rate": 2.9399860910794532e-06, "loss": 1.9299, "step": 246 },
    { "epoch": 3.17, "learning_rate": 2.902490083758856e-06, "loss": 1.7569, "step": 247 },
    { "epoch": 3.18, "learning_rate": 2.8651366868901543e-06, "loss": 1.5915, "step": 248 },
    { "epoch": 3.2, "learning_rate": 2.8279284401442085e-06, "loss": 1.4711, "step": 249 },
    { "epoch": 3.21, "learning_rate": 2.790867873323067e-06, "loss": 1.7658, "step": 250 },
    { "epoch": 3.22, "learning_rate": 2.753957506187964e-06, "loss": 2.2383, "step": 251 },
    { "epoch": 3.24, "learning_rate": 2.7171998482880093e-06, "loss": 2.1103, "step": 252 },
    { "epoch": 3.25, "learning_rate": 2.680597398789554e-06, "loss": 1.9208, "step": 253 },
    { "epoch": 3.26, "learning_rate": 2.6441526463062727e-06, "loss": 1.9773, "step": 254 },
    { "epoch": 3.27, "learning_rate": 2.607868068729966e-06, "loss": 2.0155, "step": 255 },
    { "epoch": 3.29, "learning_rate": 2.571746133062082e-06, "loss": 1.8626, "step": 256 },
    { "epoch": 3.3, "learning_rate": 2.5357892952459917e-06, "loss": 1.764, "step": 257 },
    { "epoch": 3.31, "learning_rate": 2.5000000000000015e-06, "loss": 2.0152, "step": 258 },
    { "epoch": 3.33, "learning_rate": 2.4643806806511344e-06, "loss": 1.8742, "step": 259 },
    { "epoch": 3.34, "learning_rate": 2.4289337589697e-06, "loss": 2.0988, "step": 260 },
    { "epoch": 3.35, "learning_rate": 2.3936616450046207e-06, "loss": 1.9274, "step": 261 },
    { "epoch": 3.36, "learning_rate": 2.3585667369195815e-06, "loss": 1.5094, "step": 262 },
    { "epoch": 3.38, "learning_rate": 2.32365142082998e-06, "loss": 2.0823, "step": 263 },
    { "epoch": 3.39, "learning_rate": 2.288918070640684e-06, "loss": 1.6446, "step": 264 },
    { "epoch": 3.4, "learning_rate": 2.254369047884639e-06, "loss": 1.9535, "step": 265 },
    { "epoch": 3.42, "learning_rate": 2.2200067015622986e-06, "loss": 1.5839, "step": 266 },
    { "epoch": 3.43, "learning_rate": 2.185833367981918e-06, "loss": 1.7815, "step": 267 },
    { "epoch": 3.44, "learning_rate": 2.1518513706007154e-06, "loss": 1.4904, "step": 268 },
    { "epoch": 3.45, "learning_rate": 2.118063019866884e-06, "loss": 2.2307, "step": 269 },
    { "epoch": 3.47, "learning_rate": 2.0844706130625146e-06, "loss": 1.947, "step": 270 },
    { "epoch": 3.48, "learning_rate": 2.0510764341474032e-06, "loss": 2.2949, "step": 271 },
    { "epoch": 3.49, "learning_rate": 2.0178827536037547e-06, "loss": 1.7868, "step": 272 },
    { "epoch": 3.51, "learning_rate": 1.9848918282818242e-06, "loss": 1.663, "step": 273 },
    { "epoch": 3.52, "learning_rate": 1.952105901246461e-06, "loss": 1.7499, "step": 274 },
    { "epoch": 3.53, "learning_rate": 1.9195272016246105e-06, "loss": 1.938, "step": 275 },
    { "epoch": 3.54, "learning_rate": 1.887157944453749e-06, "loss": 2.0385, "step": 276 },
    { "epoch": 3.56, "learning_rate": 1.855000330531289e-06, "loss": 2.0374, "step": 277 },
    { "epoch": 3.57, "learning_rate": 1.823056546264939e-06, "loss": 1.7599, "step": 278 },
    { "epoch": 3.58, "learning_rate": 1.7913287635240573e-06, "loss": 2.1512, "step": 279 },
    { "epoch": 3.6, "learning_rate": 1.7598191394919738e-06, "loss": 1.8319, "step": 280 },
    { "epoch": 3.61, "learning_rate": 1.7285298165193388e-06, "loss": 1.7238, "step": 281 },
    { "epoch": 3.62, "learning_rate": 1.697462921978446e-06, "loss": 1.8568, "step": 282 },
    { "epoch": 3.63, "learning_rate": 1.6666205681186032e-06, "loss": 2.1115, "step": 283 },
    { "epoch": 3.65, "learning_rate": 1.6360048519225197e-06, "loss": 1.8881, "step": 284 },
    { "epoch": 3.66, "learning_rate": 1.6056178549637248e-06, "loss": 1.8789, "step": 285 },
    { "epoch": 3.67, "learning_rate": 1.5754616432650443e-06, "loss": 2.168, "step": 286 },
    { "epoch": 3.69, "learning_rate": 1.5455382671581365e-06, "loss": 1.9499, "step": 287 },
    { "epoch": 3.7, "learning_rate": 1.5158497611440792e-06, "loss": 2.0706, "step": 288 },
    { "epoch": 3.71, "learning_rate": 1.48639814375505e-06, "loss": 1.9817, "step": 289 },
    { "epoch": 3.72, "learning_rate": 1.4571854174170847e-06, "loss": 1.8163, "step": 290 },
    { "epoch": 3.74, "learning_rate": 1.428213568313927e-06, "loss": 1.785, "step": 291 },
    { "epoch": 3.75, "learning_rate": 1.3994845662519985e-06, "loss": 2.0604, "step": 292 },
    { "epoch": 3.76, "learning_rate": 1.3710003645264559e-06, "loss": 1.7002, "step": 293 },
    { "epoch": 3.78, "learning_rate": 1.3427628997883957e-06, "loss": 1.788, "step": 294 },
    { "epoch": 3.79, "learning_rate": 1.3147740919131814e-06, "loss": 1.5869, "step": 295 },
    { "epoch": 3.8, "learning_rate": 1.2870358438699005e-06, "loss": 1.6432, "step": 296 },
    { "epoch": 3.81, "learning_rate": 1.2595500415919948e-06, "loss": 1.9052, "step": 297 },
    { "epoch": 3.83, "learning_rate": 1.232318553849023e-06, "loss": 1.8955, "step": 298 },
    { "epoch": 3.84, "learning_rate": 1.2053432321196085e-06, "loss": 1.904, "step": 299 },
    { "epoch": 3.85, "learning_rate": 1.1786259104655562e-06, "loss": 2.0135, "step": 300 },
    { "epoch": 3.87, "learning_rate": 1.1521684054071524e-06, "loss": 1.8603, "step": 301 },
    { "epoch": 3.88, "learning_rate": 1.1259725157996593e-06, "loss": 1.7627, "step": 302 },
    { "epoch": 3.89, "learning_rate": 1.1000400227110142e-06, "loss": 1.6693, "step": 303 },
    { "epoch": 3.9, "learning_rate": 1.0743726893007257e-06, "loss": 1.779, "step": 304 },
    { "epoch": 3.92, "learning_rate": 1.0489722607000052e-06, "loss": 2.0575, "step": 305 },
    { "epoch": 3.93, "learning_rate": 1.0238404638931077e-06, "loss": 1.9037, "step": 306 },
    { "epoch": 3.94, "learning_rate": 9.989790075999145e-07, "loss": 1.9991, "step": 307 },
    { "epoch": 3.96, "learning_rate": 9.743895821597638e-07, "loss": 2.0821, "step": 308 },
    { "epoch": 3.97, "learning_rate": 9.500738594165132e-07, "loss": 1.9613, "step": 309 },
    { "epoch": 3.98, "learning_rate": 9.260334926048787e-07, "loss": 1.939, "step": 310 },
    { "epoch": 3.99, "learning_rate": 9.022701162380259e-07, "loss": 1.7692, "step": 311 },
    { "epoch": 4.01, "learning_rate": 8.787853459964407e-07, "loss": 2.0053, "step": 312 },
    { "epoch": 4.02, "learning_rate": 8.555807786180814e-07, "loss": 1.8909, "step": 313 },
    { "epoch": 4.03, "learning_rate": 8.326579917898098e-07, "loss": 1.8692, "step": 314 },
    { "epoch": 4.04, "learning_rate": 8.100185440401276e-07, "loss": 2.1948, "step": 315 },
    { "epoch": 4.06, "learning_rate": 7.876639746332132e-07, "loss": 2.2319, "step": 316 },
    { "epoch": 4.07, "learning_rate": 7.655958034642619e-07, "loss": 1.8725, "step": 317 },
    { "epoch": 4.08, "learning_rate": 7.43815530956149e-07, "loss": 1.5717, "step": 318 },
    { "epoch": 4.1, "learning_rate": 7.223246379574206e-07, "loss": 1.7549, "step": 319 },
    { "epoch": 4.11, "learning_rate": 7.011245856416016e-07, "loss": 2.0717, "step": 320 },
    { "epoch": 4.12, "learning_rate": 6.802168154078586e-07, "loss": 1.9039, "step": 321 },
    { "epoch": 4.13, "learning_rate": 6.596027487829915e-07, "loss": 2.0704, "step": 322 },
    { "epoch": 4.15, "learning_rate": 6.392837873247876e-07, "loss": 1.8727, "step": 323 },
    { "epoch": 4.16, "learning_rate": 6.192613125267283e-07, "loss": 1.8494, "step": 324 },
    { "epoch": 4.17, "learning_rate": 5.995366857240592e-07, "loss": 1.9835, "step": 325 },
    { "epoch": 4.19, "learning_rate": 5.801112480012344e-07, "loss": 2.0905, "step": 326 },
    { "epoch": 4.2, "learning_rate": 5.609863201007382e-07, "loss": 1.8491, "step": 327 },
    { "epoch": 4.21, "learning_rate": 5.421632023332779e-07, "loss": 1.6643, "step": 328 },
    { "epoch": 4.22, "learning_rate": 5.236431744893883e-07, "loss": 1.5409, "step": 329 },
    { "epoch": 4.24, "learning_rate": 5.054274957524075e-07, "loss": 1.6956, "step": 330 },
    { "epoch": 4.25, "learning_rate": 4.875174046128684e-07, "loss": 1.9333, "step": 331 },
    { "epoch": 4.26, "learning_rate": 4.6991411878429593e-07, "loss": 1.7577, "step": 332 },
    { "epoch": 4.28, "learning_rate": 4.526188351204103e-07, "loss": 2.0727, "step": 333 },
    { "epoch": 4.29, "learning_rate": 4.3563272953375426e-07, "loss": 1.8069, "step": 334 },
    { "epoch": 4.3, "learning_rate": 4.1895695691574146e-07, "loss": 2.0435, "step": 335 },
    { "epoch": 4.31, "learning_rate": 4.025926510581357e-07, "loss": 2.0467, "step": 336 },
    { "epoch": 4.33, "learning_rate": 3.8654092457596714e-07, "loss": 1.6247, "step": 337 },
    { "epoch": 4.34, "learning_rate": 3.7080286883187713e-07, "loss": 1.9954, "step": 338 },
    { "epoch": 4.35, "learning_rate": 3.553795538619237e-07, "loss": 2.255, "step": 339 },
    { "epoch": 4.37, "learning_rate": 3.402720283028277e-07, "loss": 1.9724, "step": 340 },
    { "epoch": 4.38, "learning_rate": 3.2548131932067184e-07, "loss": 1.9318, "step": 341 },
    { "epoch": 4.39, "learning_rate": 3.110084325410667e-07, "loss": 1.7369, "step": 342 },
    { "epoch": 4.4, "learning_rate": 2.9685435198078095e-07, "loss": 1.7993, "step": 343 },
    { "epoch": 4.42, "learning_rate": 2.830200399808286e-07, "loss": 1.9586, "step": 344 },
    { "epoch": 4.43, "learning_rate": 2.6950643714104774e-07, "loss": 1.6359, "step": 345 },
    { "epoch": 4.44, "learning_rate": 2.563144622561453e-07, "loss": 1.8904, "step": 346 },
    { "epoch": 4.46, "learning_rate": 2.4344501225322557e-07, "loss": 1.7661, "step": 347 },
    { "epoch": 4.47, "learning_rate": 2.3089896213081553e-07, "loss": 1.8997, "step": 348 },
    { "epoch": 4.48, "learning_rate": 2.1867716489936297e-07, "loss": 1.759, "step": 349 },
    { "epoch": 4.49, "learning_rate": 2.0678045152324798e-07, "loss": 1.7521, "step": 350 },
    { "epoch": 4.51, "learning_rate": 1.9520963086428258e-07, "loss": 1.5358, "step": 351 },
    { "epoch": 4.52, "learning_rate": 1.8396548962671456e-07, "loss": 2.0727, "step": 352 },
    { "epoch": 4.53, "learning_rate": 1.7304879230374328e-07, "loss": 2.0473, "step": 353 },
    { "epoch": 4.55, "learning_rate": 1.6246028112553603e-07, "loss": 1.9068, "step": 354 },
    { "epoch": 4.56, "learning_rate": 1.5220067600876686e-07, "loss": 2.1523, "step": 355 },
    { "epoch": 4.57, "learning_rate": 1.422706745076713e-07, "loss": 1.9869, "step": 356 },
    { "epoch": 4.58, "learning_rate": 1.3267095176661304e-07, "loss": 2.0737, "step": 357 },
    { "epoch": 4.6, "learning_rate": 1.2340216047418697e-07, "loss": 1.9659, "step": 358 },
    { "epoch": 4.61, "learning_rate": 1.1446493081883891e-07, "loss": 1.4132, "step": 359 },
    { "epoch": 4.62, "learning_rate": 1.0585987044602009e-07, "loss": 1.7613, "step": 360 },
    { "epoch": 4.64, "learning_rate": 9.758756441687333e-08, "loss": 2.0321, "step": 361 },
    { "epoch": 4.65, "learning_rate": 8.964857516845449e-08, "loss": 2.1659, "step": 362 },
    { "epoch": 4.66, "learning_rate": 8.204344247549067e-08, "loss": 1.7783, "step": 363 },
    { "epoch": 4.67, "learning_rate": 7.47726834136836e-08, "loss": 1.8563, "step": 364 },
    { "epoch": 4.69, "learning_rate": 6.783679232455043e-08, "loss": 1.986, "step": 365 },
    { "epoch": 4.7, "learning_rate": 6.123624078181512e-08, "loss": 1.8735, "step": 366 },
    { "epoch": 4.71, "learning_rate": 5.4971477559346286e-08, "loss": 1.5537, "step": 367 },
    { "epoch": 4.73, "learning_rate": 4.90429286006433e-08, "loss": 1.857, "step": 368 },
    { "epoch": 4.74, "learning_rate": 4.34509969898772e-08, "loss": 1.673, "step": 369 },
    { "epoch": 4.75, "learning_rate": 3.819606292448541e-08, "loss": 1.9837, "step": 370 },
    { "epoch": 4.76, "learning_rate": 3.327848368931907e-08, "loss": 1.6531, "step": 371 },
    { "epoch": 4.78, "learning_rate": 2.8698593632357496e-08, "loss": 1.9188, "step": 372 },
    { "epoch": 4.79, "learning_rate": 2.4456704141967437e-08, "loss": 1.9565, "step": 373 },
    { "epoch": 4.8, "learning_rate": 2.0553103625737813e-08, "loss": 1.6245, "step": 374 },
    { "epoch": 4.82, "learning_rate": 1.6988057490868736e-08, "loss": 1.7511, "step": 375 },
    { "epoch": 4.83, "learning_rate": 1.3761808126126486e-08, "loss": 1.8122, "step": 376 },
    { "epoch": 4.84, "learning_rate": 1.0874574885362809e-08, "loss": 1.6269, "step": 377 },
    { "epoch": 4.85, "learning_rate": 8.32655407260241e-09, "loss": 2.0738, "step": 378 },
    { "epoch": 4.87, "learning_rate": 6.117918928693623e-09, "loss": 2.0557, "step": 379 },
    { "epoch": 4.88, "learning_rate": 4.248819619533384e-09, "loss": 1.8619, "step": 380 },
    { "epoch": 4.89, "learning_rate": 2.7193832258537447e-09, "loss": 2.2485, "step": 381 },
    { "epoch": 4.91, "learning_rate": 1.5297137345843261e-09, "loss": 1.9175, "step": 382 },
    { "epoch": 4.92, "learning_rate": 6.798920317807601e-10, "loss": 2.099, "step": 383 },
    { "epoch": 4.93, "learning_rate": 1.6997589712575145e-10, "loss": 2.1961, "step": 384 },
    { "epoch": 4.94, "learning_rate": 0.0, "loss": 2.2319, "step": 385 },
    { "epoch": 4.94, "step": 385, "total_flos": 3.4617941689368576e+17, "train_loss": 2.0085288332654283, "train_runtime": 6983.5696, "train_samples_per_second": 0.446, "train_steps_per_second": 0.055 }
  ],
  "logging_steps": 1.0,
  "max_steps": 385,
  "num_train_epochs": 5,
  "save_steps": 1000,
  "total_flos": 3.4617941689368576e+17,
  "trial_name": null,
  "trial_params": null
}