{
  "best_metric": 0.8985657508208054,
  "best_model_checkpoint": "CHECKPOINTS/checkpoint-3307",
  "epoch": 11.083056143533918,
  "eval_steps": 3307,
  "global_step": 19842,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02519907267412559,
      "grad_norm": 0.7393302321434021,
      "learning_rate": 2.4899193548387098e-06,
      "loss": 0.3059,
      "step": 500
    },
    {
      "epoch": 0.05039814534825118,
      "grad_norm": 0.6863130331039429,
      "learning_rate": 5.010080645161291e-06,
      "loss": 0.1492,
      "step": 1000
    },
    {
      "epoch": 0.07559721802237677,
      "grad_norm": 0.703405499458313,
      "learning_rate": 7.5302419354838715e-06,
      "loss": 0.1428,
      "step": 1500
    },
    {
      "epoch": 1.017437758290495,
      "grad_norm": 0.6589027643203735,
      "learning_rate": 9.994400268787099e-06,
      "loss": 0.1261,
      "step": 2000
    },
    {
      "epoch": 1.0426368309646206,
      "grad_norm": 0.6031370759010315,
      "learning_rate": 9.71441370814201e-06,
      "loss": 0.1202,
      "step": 2500
    },
    {
      "epoch": 1.0678359036387461,
      "grad_norm": 0.6696850657463074,
      "learning_rate": 9.434427147496921e-06,
      "loss": 0.1171,
      "step": 3000
    },
    {
      "epoch": 1.0833081342606592,
      "eval_loss": 0.017358383163809776,
      "eval_runtime": 504.91,
      "eval_samples_per_second": 0.824,
      "eval_steps_per_second": 0.008,
      "eval_wer": 0.8985657508208054,
      "step": 3307
    },
    {
      "epoch": 2.0096764439068644,
      "grad_norm": 0.591613233089447,
      "learning_rate": 9.154440586851832e-06,
      "loss": 0.1042,
      "step": 3500
    },
    {
      "epoch": 2.03487551658099,
      "grad_norm": 0.5527406930923462,
      "learning_rate": 8.874454026206742e-06,
      "loss": 0.0861,
      "step": 4000
    },
    {
      "epoch": 2.0600745892551156,
      "grad_norm": 0.5651601552963257,
      "learning_rate": 8.594467465561653e-06,
      "loss": 0.0878,
      "step": 4500
    },
    {
      "epoch": 3.0019151295232334,
      "grad_norm": 0.4751633107662201,
      "learning_rate": 8.314480904916565e-06,
      "loss": 0.0865,
      "step": 5000
    },
    {
      "epoch": 3.027114202197359,
      "grad_norm": 0.5415444374084473,
      "learning_rate": 8.034494344271475e-06,
      "loss": 0.0625,
      "step": 5500
    },
    {
      "epoch": 3.0523132748714845,
      "grad_norm": 0.5294741988182068,
      "learning_rate": 7.754507783626388e-06,
      "loss": 0.0636,
      "step": 6000
    },
    {
      "epoch": 3.0775123475456105,
      "grad_norm": 0.538415789604187,
      "learning_rate": 7.474521222981298e-06,
      "loss": 0.0652,
      "step": 6500
    },
    {
      "epoch": 3.083257736115311,
      "eval_loss": 0.01661744900047779,
      "eval_runtime": 517.2779,
      "eval_samples_per_second": 0.804,
      "eval_steps_per_second": 0.008,
      "eval_wer": 0.9369659965823781,
      "step": 6614
    },
    {
      "epoch": 4.019352887813729,
      "grad_norm": 0.6536675691604614,
      "learning_rate": 7.194534662336209e-06,
      "loss": 0.0474,
      "step": 7000
    },
    {
      "epoch": 4.044551960487854,
      "grad_norm": 0.5821442604064941,
      "learning_rate": 6.91454810169112e-06,
      "loss": 0.0441,
      "step": 7500
    },
    {
      "epoch": 4.06975103316198,
      "grad_norm": 0.5527841448783875,
      "learning_rate": 6.6345615410460304e-06,
      "loss": 0.0449,
      "step": 8000
    },
    {
      "epoch": 5.011591573430098,
      "grad_norm": 0.453218549489975,
      "learning_rate": 6.354574980400942e-06,
      "loss": 0.0369,
      "step": 8500
    },
    {
      "epoch": 5.036790646104223,
      "grad_norm": 0.5013980865478516,
      "learning_rate": 6.074588419755852e-06,
      "loss": 0.0273,
      "step": 9000
    },
    {
      "epoch": 5.061989718778349,
      "grad_norm": 0.5885359644889832,
      "learning_rate": 5.7946018591107636e-06,
      "loss": 0.0288,
      "step": 9500
    },
    {
      "epoch": 5.083207337969963,
      "eval_loss": 0.017346344888210297,
      "eval_runtime": 530.4397,
      "eval_samples_per_second": 0.784,
      "eval_steps_per_second": 0.008,
      "eval_wer": 1.0060864389532094,
      "step": 9921
    },
    {
      "epoch": 6.003830259046467,
      "grad_norm": 0.4446285665035248,
      "learning_rate": 5.514615298465674e-06,
      "loss": 0.0276,
      "step": 10000
    },
    {
      "epoch": 6.029029331720593,
      "grad_norm": 0.46283265948295593,
      "learning_rate": 5.234628737820585e-06,
      "loss": 0.016,
      "step": 10500
    },
    {
      "epoch": 6.054228404394718,
      "grad_norm": 0.42813611030578613,
      "learning_rate": 4.954642177175496e-06,
      "loss": 0.0173,
      "step": 11000
    },
    {
      "epoch": 6.079427477068844,
      "grad_norm": 0.48960232734680176,
      "learning_rate": 4.674655616530407e-06,
      "loss": 0.018,
      "step": 11500
    },
    {
      "epoch": 7.021268017336962,
      "grad_norm": 0.3988407254219055,
      "learning_rate": 4.394669055885318e-06,
      "loss": 0.0109,
      "step": 12000
    },
    {
      "epoch": 7.046467090011087,
      "grad_norm": 0.3698909282684326,
      "learning_rate": 4.114682495240229e-06,
      "loss": 0.0101,
      "step": 12500
    },
    {
      "epoch": 7.071666162685213,
      "grad_norm": 0.4073663055896759,
      "learning_rate": 3.8346959345951395e-06,
      "loss": 0.0109,
      "step": 13000
    },
    {
      "epoch": 7.083156939824614,
      "eval_loss": 0.0192726943641901,
      "eval_runtime": 518.0983,
      "eval_samples_per_second": 0.803,
      "eval_steps_per_second": 0.008,
      "eval_wer": 0.9907263406485801,
      "step": 13228
    },
    {
      "epoch": 8.01350670295333,
      "grad_norm": 0.33067503571510315,
      "learning_rate": 3.5547093739500504e-06,
      "loss": 0.0082,
      "step": 13500
    },
    {
      "epoch": 8.038705775627458,
      "grad_norm": 0.3731881380081177,
      "learning_rate": 3.2747228133049617e-06,
      "loss": 0.0062,
      "step": 14000
    },
    {
      "epoch": 8.063904848301583,
      "grad_norm": 0.2673242688179016,
      "learning_rate": 2.9947362526598727e-06,
      "loss": 0.0066,
      "step": 14500
    },
    {
      "epoch": 9.0057453885697,
      "grad_norm": 0.18087884783744812,
      "learning_rate": 2.7147496920147836e-06,
      "loss": 0.0062,
      "step": 15000
    },
    {
      "epoch": 9.030944461243827,
      "grad_norm": 0.250787615776062,
      "learning_rate": 2.4347631313696945e-06,
      "loss": 0.0038,
      "step": 15500
    },
    {
      "epoch": 9.056143533917952,
      "grad_norm": 0.2255438268184662,
      "learning_rate": 2.1547765707246054e-06,
      "loss": 0.0041,
      "step": 16000
    },
    {
      "epoch": 9.081342606592077,
      "grad_norm": 0.29366812109947205,
      "learning_rate": 1.8747900100795163e-06,
      "loss": 0.0044,
      "step": 16500
    },
    {
      "epoch": 9.083106541679266,
      "eval_loss": 0.020848926156759262,
      "eval_runtime": 513.0393,
      "eval_samples_per_second": 0.811,
      "eval_steps_per_second": 0.008,
      "eval_wer": 0.9772862546320297,
      "step": 16535
    },
    {
      "epoch": 10.023183146860196,
      "grad_norm": 0.1837795376777649,
      "learning_rate": 1.5948034494344272e-06,
      "loss": 0.0026,
      "step": 17000
    },
    {
      "epoch": 10.048382219534322,
      "grad_norm": 0.24585728347301483,
      "learning_rate": 1.314816888789338e-06,
      "loss": 0.0026,
      "step": 17500
    },
    {
      "epoch": 10.073581292208447,
      "grad_norm": 0.15548868477344513,
      "learning_rate": 1.0348303281442492e-06,
      "loss": 0.0026,
      "step": 18000
    },
    {
      "epoch": 11.015421832476564,
      "grad_norm": 0.08757825195789337,
      "learning_rate": 7.554037406204502e-07,
      "loss": 0.002,
      "step": 18500
    },
    {
      "epoch": 11.040620905150691,
      "grad_norm": 0.1045205295085907,
      "learning_rate": 4.7541717997536123e-07,
      "loss": 0.0016,
      "step": 19000
    },
    {
      "epoch": 11.065819977824816,
      "grad_norm": 0.07768367975950241,
      "learning_rate": 1.9543061933027217e-07,
      "loss": 0.0016,
      "step": 19500
    },
    {
      "epoch": 11.083056143533918,
      "eval_loss": 0.02412882074713707,
      "eval_runtime": 518.0169,
      "eval_samples_per_second": 0.803,
      "eval_steps_per_second": 0.008,
      "eval_wer": 0.9926463529366589,
      "step": 19842
    },
    {
      "epoch": 11.083056143533918,
      "step": 19842,
      "total_flos": 8.628884758428616e+21,
      "train_loss": 0.04815149868992161,
      "train_runtime": 124240.955,
      "train_samples_per_second": 20.442,
      "train_steps_per_second": 0.16
    }
  ],
  "logging_steps": 500,
  "max_steps": 19842,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 3307,
  "total_flos": 8.628884758428616e+21,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}