{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9653679653679652,
  "eval_steps": 500,
  "global_step": 230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 2.0642,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 2e-05,
      "loss": 2.0704,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 3e-05,
      "loss": 2.0758,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 4e-05,
      "loss": 2.0031,
      "step": 4
    },
    {
      "epoch": 0.04,
      "learning_rate": 5e-05,
      "loss": 2.0466,
      "step": 5
    },
    {
      "epoch": 0.05,
      "learning_rate": 6e-05,
      "loss": 2.027,
      "step": 6
    },
    {
      "epoch": 0.06,
      "learning_rate": 7e-05,
      "loss": 2.0737,
      "step": 7
    },
    {
      "epoch": 0.07,
      "learning_rate": 8e-05,
      "loss": 2.1039,
      "step": 8
    },
    {
      "epoch": 0.08,
      "learning_rate": 9e-05,
      "loss": 2.0212,
      "step": 9
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001,
      "loss": 2.047,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.999780139628657e-05,
      "loss": 2.0435,
      "step": 11
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.99912057785006e-05,
      "loss": 2.0305,
      "step": 12
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.998021372668808e-05,
      "loss": 1.9602,
      "step": 13
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.996482620753565e-05,
      "loss": 2.0138,
      "step": 14
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.994504457428558e-05,
      "loss": 2.065,
      "step": 15
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.992087056661677e-05,
      "loss": 2.0051,
      "step": 16
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.989230631049171e-05,
      "loss": 2.008,
      "step": 17
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.985935431796962e-05,
      "loss": 1.9578,
      "step": 18
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.982201748698542e-05,
      "loss": 1.9591,
      "step": 19
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.978029910109491e-05,
      "loss": 1.9495,
      "step": 20
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.973420282918601e-05,
      "loss": 1.9121,
      "step": 21
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.968373272515612e-05,
      "loss": 1.9639,
      "step": 22
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.962889322755555e-05,
      "loss": 1.9431,
      "step": 23
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.956968915919725e-05,
      "loss": 1.9283,
      "step": 24
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.950612572673255e-05,
      "loss": 1.9112,
      "step": 25
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.943820852019344e-05,
      "loss": 1.9942,
      "step": 26
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.936594351250082e-05,
      "loss": 1.9536,
      "step": 27
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.928933705893924e-05,
      "loss": 1.9364,
      "step": 28
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.920839589659803e-05,
      "loss": 1.9485,
      "step": 29
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.91231271437788e-05,
      "loss": 1.9743,
      "step": 30
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.903353829936943e-05,
      "loss": 1.9492,
      "step": 31
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.893963724218455e-05,
      "loss": 1.9612,
      "step": 32
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.884143223027266e-05,
      "loss": 1.8852,
      "step": 33
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.873893190018995e-05,
      "loss": 1.9104,
      "step": 34
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.863214526624065e-05,
      "loss": 1.9762,
      "step": 35
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.852108171968436e-05,
      "loss": 1.9602,
      "step": 36
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.840575102791013e-05,
      "loss": 1.9283,
      "step": 37
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.828616333357743e-05,
      "loss": 1.8863,
      "step": 38
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.816232915372423e-05,
      "loss": 1.9776,
      "step": 39
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.8034259378842e-05,
      "loss": 1.8874,
      "step": 40
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.790196527191811e-05,
      "loss": 1.8779,
      "step": 41
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.776545846744509e-05,
      "loss": 1.9399,
      "step": 42
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.762475097039767e-05,
      "loss": 1.8758,
      "step": 43
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.747985515517683e-05,
      "loss": 1.903,
      "step": 44
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.733078376452171e-05,
      "loss": 1.9438,
      "step": 45
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.717754990838881e-05,
      "loss": 1.885,
      "step": 46
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.702016706279913e-05,
      "loss": 1.9278,
      "step": 47
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.685864906865303e-05,
      "loss": 1.9612,
      "step": 48
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.669301013051297e-05,
      "loss": 1.9352,
      "step": 49
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.652326481535435e-05,
      "loss": 1.9198,
      "step": 50
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.634942805128433e-05,
      "loss": 1.9552,
      "step": 51
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.617151512622917e-05,
      "loss": 1.9767,
      "step": 52
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.598954168658955e-05,
      "loss": 1.9933,
      "step": 53
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.580352373586467e-05,
      "loss": 1.9751,
      "step": 54
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.561347763324484e-05,
      "loss": 1.8376,
      "step": 55
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.541942009217273e-05,
      "loss": 1.9485,
      "step": 56
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.522136817887353e-05,
      "loss": 1.885,
      "step": 57
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.501933931085416e-05,
      "loss": 1.9241,
      "step": 58
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.481335125537138e-05,
      "loss": 1.8931,
      "step": 59
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.460342212786932e-05,
      "loss": 1.9544,
      "step": 60
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.43895703903864e-05,
      "loss": 1.8637,
      "step": 61
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.417181484993154e-05,
      "loss": 1.964,
      "step": 62
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.395017465683036e-05,
      "loss": 1.8605,
      "step": 63
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.372466930304091e-05,
      "loss": 1.9388,
      "step": 64
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.349531862043952e-05,
      "loss": 1.9195,
      "step": 65
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.32621427790767e-05,
      "loss": 1.9328,
      "step": 66
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.302516228540327e-05,
      "loss": 1.8901,
      "step": 67
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.278439798046697e-05,
      "loss": 1.9296,
      "step": 68
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.253987103807958e-05,
      "loss": 1.9282,
      "step": 69
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.229160296295488e-05,
      "loss": 1.9448,
      "step": 70
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.203961558881731e-05,
      "loss": 1.9211,
      "step": 71
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.178393107648193e-05,
      "loss": 1.8658,
      "step": 72
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.15245719119055e-05,
      "loss": 1.9358,
      "step": 73
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.126156090420888e-05,
      "loss": 1.9446,
      "step": 74
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.099492118367123e-05,
      "loss": 1.8403,
      "step": 75
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.072467619969572e-05,
      "loss": 1.9857,
      "step": 76
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.878,
      "step": 77
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.017346582226289e-05,
      "loss": 1.9484,
      "step": 78
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.98925489045329e-05,
      "loss": 1.8543,
      "step": 79
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.960812367055646e-05,
      "loss": 1.8613,
      "step": 80
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.93202151338687e-05,
      "loss": 1.887,
      "step": 81
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.902884861434065e-05,
      "loss": 1.8897,
      "step": 82
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.873404973595285e-05,
      "loss": 1.93,
      "step": 83
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.843584442454158e-05,
      "loss": 1.951,
      "step": 84
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.81342589055191e-05,
      "loss": 1.9591,
      "step": 85
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.782931970156707e-05,
      "loss": 1.8649,
      "step": 86
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.752105363030414e-05,
      "loss": 1.9752,
      "step": 87
    },
    {
      "epoch": 0.76,
      "learning_rate": 8.720948780192746e-05,
      "loss": 1.9189,
      "step": 88
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.689464961682852e-05,
      "loss": 1.9188,
      "step": 89
    },
    {
      "epoch": 0.78,
      "learning_rate": 8.657656676318346e-05,
      "loss": 1.9376,
      "step": 90
    },
    {
      "epoch": 0.79,
      "learning_rate": 8.625526721451798e-05,
      "loss": 1.8662,
      "step": 91
    },
    {
      "epoch": 0.8,
      "learning_rate": 8.593077922724733e-05,
      "loss": 1.8113,
      "step": 92
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.560313133819125e-05,
      "loss": 1.8575,
      "step": 93
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.527235236206436e-05,
      "loss": 1.8991,
      "step": 94
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.493847138894209e-05,
      "loss": 1.8475,
      "step": 95
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.46015177817023e-05,
      "loss": 1.8795,
      "step": 96
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.426152117344313e-05,
      "loss": 1.8596,
      "step": 97
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.391851146487675e-05,
      "loss": 1.9481,
      "step": 98
    },
    {
      "epoch": 0.86,
      "learning_rate": 8.357251882169994e-05,
      "loss": 1.9213,
      "step": 99
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.322357367194109e-05,
      "loss": 1.9243,
      "step": 100
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.28717067032843e-05,
      "loss": 1.8764,
      "step": 101
    },
    {
      "epoch": 0.88,
      "learning_rate": 8.251694886037052e-05,
      "loss": 1.9728,
      "step": 102
    },
    {
      "epoch": 0.89,
      "learning_rate": 8.215933134207618e-05,
      "loss": 1.884,
      "step": 103
    },
    {
      "epoch": 0.9,
      "learning_rate": 8.179888559876943e-05,
      "loss": 1.9589,
      "step": 104
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.143564332954425e-05,
      "loss": 2.017,
      "step": 105
    },
    {
      "epoch": 0.92,
      "learning_rate": 8.106963647943274e-05,
      "loss": 1.9672,
      "step": 106
    },
    {
      "epoch": 0.93,
      "learning_rate": 8.070089723659566e-05,
      "loss": 1.8711,
      "step": 107
    },
    {
      "epoch": 0.94,
      "learning_rate": 8.032945802949179e-05,
      "loss": 1.9211,
      "step": 108
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.995535152402591e-05,
      "loss": 1.9019,
      "step": 109
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.957861062067614e-05,
      "loss": 1.8887,
      "step": 110
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.919926845160037e-05,
      "loss": 1.9923,
      "step": 111
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.881735837772274e-05,
      "loss": 1.8621,
      "step": 112
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.843291398579946e-05,
      "loss": 1.9584,
      "step": 113
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.804596908546529e-05,
      "loss": 1.943,
      "step": 114
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.765655770625997e-05,
      "loss": 1.8953,
      "step": 115
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.726471409463572e-05,
      "loss": 1.9394,
      "step": 116
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.687047271094527e-05,
      "loss": 1.91,
      "step": 117
    },
    {
      "epoch": 1.02,
      "learning_rate": 7.64738682264115e-05,
      "loss": 1.9356,
      "step": 118
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.607493552007805e-05,
      "loss": 1.9421,
      "step": 119
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.56737096757421e-05,
      "loss": 1.8658,
      "step": 120
    },
    {
      "epoch": 1.02,
      "learning_rate": 7.527022597886895e-05,
      "loss": 1.9158,
      "step": 121
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.486451991348872e-05,
      "loss": 1.9376,
      "step": 122
    },
    {
      "epoch": 1.04,
      "learning_rate": 7.445662715907591e-05,
      "loss": 1.9365,
      "step": 123
    },
    {
      "epoch": 1.05,
      "learning_rate": 7.40465835874115e-05,
      "loss": 1.8633,
      "step": 124
    },
    {
      "epoch": 1.06,
      "learning_rate": 7.363442525942826e-05,
      "loss": 1.8888,
      "step": 125
    },
    {
      "epoch": 1.06,
      "learning_rate": 7.322018842203941e-05,
      "loss": 1.8944,
      "step": 126
    },
    {
      "epoch": 1.07,
      "learning_rate": 7.280390950495093e-05,
      "loss": 1.8644,
      "step": 127
    },
    {
      "epoch": 1.08,
      "learning_rate": 7.238562511745768e-05,
      "loss": 1.9133,
      "step": 128
    },
    {
      "epoch": 1.09,
      "learning_rate": 7.196537204522401e-05,
      "loss": 1.8339,
      "step": 129
    },
    {
      "epoch": 1.1,
      "learning_rate": 7.154318724704853e-05,
      "loss": 1.8625,
      "step": 130
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.111910785161381e-05,
      "loss": 1.8877,
      "step": 131
    },
    {
      "epoch": 1.12,
      "learning_rate": 7.06931711542212e-05,
      "loss": 1.9571,
      "step": 132
    },
    {
      "epoch": 1.13,
      "learning_rate": 7.026541461351092e-05,
      "loss": 1.8926,
      "step": 133
    },
    {
      "epoch": 1.13,
      "learning_rate": 6.983587584816769e-05,
      "loss": 1.8999,
      "step": 134
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.940459263361249e-05,
      "loss": 1.8957,
      "step": 135
    },
    {
      "epoch": 1.15,
      "learning_rate": 6.897160289868042e-05,
      "loss": 1.9096,
      "step": 136
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.853694472228503e-05,
      "loss": 1.8695,
      "step": 137
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.810065633006956e-05,
      "loss": 1.8959,
      "step": 138
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.766277609104518e-05,
      "loss": 1.8242,
      "step": 139
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.722334251421665e-05,
      "loss": 1.8903,
      "step": 140
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.678239424519575e-05,
      "loss": 1.8978,
      "step": 141
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.633997006280252e-05,
      "loss": 1.8537,
      "step": 142
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.589610887565503e-05,
      "loss": 1.842,
      "step": 143
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.9226,
      "step": 144
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.500423175001705e-05,
      "loss": 1.8413,
      "step": 145
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.4556294246901e-05,
      "loss": 1.8876,
      "step": 146
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.410707660288155e-05,
      "loss": 1.902,
      "step": 147
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.36566183240219e-05,
      "loss": 1.9084,
      "step": 148
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.320495902549183e-05,
      "loss": 1.8314,
      "step": 149
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.275213842808383e-05,
      "loss": 1.9189,
      "step": 150
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.229819635471972e-05,
      "loss": 1.8738,
      "step": 151
    },
    {
      "epoch": 1.29,
      "learning_rate": 6.184317272694867e-05,
      "loss": 1.8302,
      "step": 152
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.138710756143613e-05,
      "loss": 1.91,
      "step": 153
    },
    {
      "epoch": 1.31,
      "learning_rate": 6.093004096644481e-05,
      "loss": 1.8405,
      "step": 154
    },
    {
      "epoch": 1.32,
      "learning_rate": 6.0472013138307235e-05,
      "loss": 1.8767,
      "step": 155
    },
    {
      "epoch": 1.32,
      "learning_rate": 6.001306435789072e-05,
      "loss": 1.8827,
      "step": 156
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.9553234987055006e-05,
      "loss": 1.938,
      "step": 157
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.909256546510257e-05,
      "loss": 1.8674,
      "step": 158
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.86310963052223e-05,
      "loss": 1.9161,
      "step": 159
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.816886809092651e-05,
      "loss": 1.8738,
      "step": 160
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.770592147248196e-05,
      "loss": 1.855,
      "step": 161
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.7242297163334804e-05,
      "loss": 1.895,
      "step": 162
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.6778035936530184e-05,
      "loss": 1.8702,
      "step": 163
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.631317862112636e-05,
      "loss": 1.9038,
      "step": 164
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.584776609860414e-05,
      "loss": 1.8444,
      "step": 165
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.538183929927152e-05,
      "loss": 1.9575,
      "step": 166
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.4915439198664164e-05,
      "loss": 1.9068,
      "step": 167
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.4448606813941805e-05,
      "loss": 1.8192,
      "step": 168
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.3981383200281e-05,
      "loss": 1.8179,
      "step": 169
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.351380944726465e-05,
      "loss": 1.9271,
      "step": 170
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.3045926675268344e-05,
      "loss": 1.8794,
      "step": 171
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.257777603184407e-05,
      "loss": 1.9613,
      "step": 172
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.210939868810156e-05,
      "loss": 1.9117,
      "step": 173
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.16408358350875e-05,
      "loss": 1.8796,
      "step": 174
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.117212868016303e-05,
      "loss": 1.881,
      "step": 175
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.07033184433798e-05,
      "loss": 1.9096,
      "step": 176
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.023444635385493e-05,
      "loss": 1.9094,
      "step": 177
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.9765553646145086e-05,
      "loss": 1.9166,
      "step": 178
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.9296681556620207e-05,
      "loss": 1.8408,
      "step": 179
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.882787131983698e-05,
      "loss": 1.9153,
      "step": 180
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.835916416491251e-05,
      "loss": 1.8751,
      "step": 181
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.789060131189845e-05,
      "loss": 1.9234,
      "step": 182
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.742222396815593e-05,
      "loss": 1.937,
      "step": 183
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.6954073324731654e-05,
      "loss": 1.9361,
      "step": 184
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.648619055273537e-05,
      "loss": 1.8353,
      "step": 185
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.601861679971901e-05,
      "loss": 1.9344,
      "step": 186
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.5551393186058213e-05,
      "loss": 1.788,
      "step": 187
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.508456080133584e-05,
      "loss": 1.8955,
      "step": 188
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.461816070072851e-05,
      "loss": 1.9064,
      "step": 189
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.415223390139588e-05,
      "loss": 1.858,
      "step": 190
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.368682137887365e-05,
      "loss": 1.8601,
      "step": 191
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.322196406346984e-05,
      "loss": 1.9734,
      "step": 192
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.27577028366652e-05,
      "loss": 1.8557,
      "step": 193
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.229407852751806e-05,
      "loss": 1.9431,
      "step": 194
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.183113190907349e-05,
      "loss": 1.9365,
      "step": 195
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.136890369477772e-05,
      "loss": 1.831,
      "step": 196
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.090743453489744e-05,
      "loss": 1.8685,
      "step": 197
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.0446765012945006e-05,
      "loss": 1.8634,
      "step": 198
    },
    {
      "epoch": 1.7,
      "learning_rate": 3.998693564210929e-05,
      "loss": 1.8493,
      "step": 199
    },
    {
      "epoch": 1.71,
      "learning_rate": 3.952798686169279e-05,
      "loss": 1.8875,
      "step": 200
    },
    {
      "epoch": 1.71,
      "learning_rate": 3.9069959033555195e-05,
      "loss": 1.9417,
      "step": 201
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.861289243856388e-05,
      "loss": 1.8595,
      "step": 202
    },
    {
      "epoch": 1.73,
      "learning_rate": 3.8156827273051365e-05,
      "loss": 1.8732,
      "step": 203
    },
    {
      "epoch": 1.74,
      "learning_rate": 3.770180364528029e-05,
      "loss": 1.8268,
      "step": 204
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.7247861571916185e-05,
      "loss": 1.869,
      "step": 205
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.679504097450816e-05,
      "loss": 1.8882,
      "step": 206
    },
    {
      "epoch": 1.77,
      "learning_rate": 3.6343381675978116e-05,
      "loss": 1.849,
      "step": 207
    },
    {
      "epoch": 1.77,
      "learning_rate": 3.5892923397118474e-05,
      "loss": 1.88,
      "step": 208
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.5443705753099014e-05,
      "loss": 1.884,
      "step": 209
    },
    {
      "epoch": 1.79,
      "learning_rate": 3.499576824998298e-05,
      "loss": 1.8501,
      "step": 210
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.8654,
      "step": 211
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.410389112434499e-05,
      "loss": 1.8698,
      "step": 212
    },
    {
      "epoch": 1.82,
      "learning_rate": 3.366002993719747e-05,
      "loss": 1.8402,
      "step": 213
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.321760575480427e-05,
      "loss": 1.8361,
      "step": 214
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.277665748578336e-05,
      "loss": 1.8392,
      "step": 215
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.233722390895483e-05,
      "loss": 1.9025,
      "step": 216
    },
    {
      "epoch": 1.85,
      "learning_rate": 3.1899343669930446e-05,
      "loss": 1.8904,
      "step": 217
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.146305527771499e-05,
      "loss": 1.9215,
      "step": 218
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.102839710131958e-05,
      "loss": 1.8426,
      "step": 219
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.0595407366387504e-05,
      "loss": 1.9003,
      "step": 220
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.016412415183233e-05,
      "loss": 1.8339,
      "step": 221
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.9734585386489093e-05,
      "loss": 1.8936,
      "step": 222
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.93068288457788e-05,
      "loss": 1.8554,
      "step": 223
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.8880892148386198e-05,
      "loss": 1.9069,
      "step": 224
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.8456812752951485e-05,
      "loss": 1.9439,
      "step": 225
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.8034627954775994e-05,
      "loss": 1.888,
      "step": 226
    },
    {
      "epoch": 1.94,
      "learning_rate": 2.7614374882542317e-05,
      "loss": 1.9194,
      "step": 227
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.719609049504911e-05,
      "loss": 1.895,
      "step": 228
    },
    {
      "epoch": 1.96,
      "learning_rate": 2.677981157796059e-05,
      "loss": 1.8856,
      "step": 229
    },
    {
      "epoch": 1.97,
      "learning_rate": 2.636557474057173e-05,
      "loss": 1.8132,
      "step": 230
    }
  ],
  "logging_steps": 1,
  "max_steps": 345,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 115,
  "total_flos": 5.211426518110568e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}