bobox committed
Commit 5a586a7 · verified · 1 Parent(s): 099bd42

Training in progress, step 219, checkpoint

checkpoint-219/1_AdvancedWeightedPooling/config.json CHANGED
@@ -3,8 +3,8 @@
  "num_heads": 8,
  "dropout": 0.05,
  "bias": true,
- "use_layernorm": false,
- "use_MLP": false,
- "MLP_h_size": 1024,
+ "use_layernorm": true,
+ "use_MLP": true,
+ "MLP_h_size": 2048,
  "use_residual": false
  }
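
For context, the change above switches on the pooling head's LayerNorm and MLP projection and doubles MLP_h_size from 1024 to 2048, which is consistent with pytorch_model.bin roughly doubling in size in this commit. A minimal Python sketch for spotting such config changes, assuming hypothetical local paths to the old and new checkpoint directories (the AdvancedWeightedPooling module itself is custom to this repository, so only the raw JSON is read):

import json
from pathlib import Path

# Hypothetical paths to the previous and current pooling configs.
OLD = Path("old-checkpoint/1_AdvancedWeightedPooling/config.json")
NEW = Path("checkpoint-219/1_AdvancedWeightedPooling/config.json")

old_cfg = json.loads(OLD.read_text())
new_cfg = json.loads(NEW.read_text())

# Print every key whose value differs between the two configs.
for key in sorted(old_cfg.keys() | new_cfg.keys()):
    if old_cfg.get(key) != new_cfg.get(key):
        print(f"{key}: {old_cfg.get(key)!r} -> {new_cfg.get(key)!r}")
# Expected here: MLP_h_size 1024 -> 2048, use_MLP False -> True, use_layernorm False -> True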
checkpoint-219/1_AdvancedWeightedPooling/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f97a74790baf4f9b4bfe76c552d03d206d9abb6c2fd26018ba91a65b2ba91e3c
- size 16795776
+ oid sha256:e4a04620ba1d9784801533cf38956b610797b242832a12878c87866d64167d5d
+ size 33595276
checkpoint-219/README.md CHANGED
The diff for this file is too large to render. See raw diff
 
checkpoint-219/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aed94c686872402dc6d0044a04b74611096cffb940c42f13734ffa1c4d5ca3c0
- size 33591506
+ oid sha256:65463ea38447a4abb9a8a8ad374b85c61ef0eb7fd8597c70d67476cd027eb816
+ size 67191488
checkpoint-219/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5668a220165f42213673fa9d59210929bc3fe917df8d3c4e3fee21e6bcd0aab9
+ oid sha256:e25d70a0a83e5fa5e858dd1e9293efb8acc5162fe94a4a09c0786dc8cbe38398
  size 14244
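
pytorch_model.bin, optimizer.pt and rng_state.pth are stored through Git LFS, so the diffs above only touch the pointer files (the sha256 oid and byte size). After pulling the actual blobs, each pointer can be checked against its file with the standard library alone; a minimal sketch, assuming hypothetical local paths for the pointer text and the downloaded blob:

import hashlib
from pathlib import Path

def parse_pointer(pointer_path: Path) -> dict:
    # A Git LFS pointer is three "key value" lines: version, oid, size.
    fields = {}
    for line in pointer_path.read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify(pointer_path: Path, blob_path: Path) -> bool:
    fields = parse_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    digest = hashlib.sha256()
    with blob_path.open("rb") as f:  # hash in chunks; weight files can be large
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid and blob_path.stat().st_size == expected_size

# Hypothetical usage: the pointer text saved next to the downloaded weights.
print(verify(Path("pytorch_model.bin.lfs-pointer"), Path("pytorch_model.bin")))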
checkpoint-219/trainer_state.json CHANGED
@@ -8,262 +8,1662 @@
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
  {
12
  "epoch": 0.015089163237311385,
13
- "grad_norm": 3.8793580532073975,
14
  "learning_rate": 1.8281535648994517e-05,
15
- "loss": 0.7558,
16
  "step": 11
17
  },
18
  {
19
  "epoch": 0.03017832647462277,
20
- "grad_norm": 3.666482448577881,
21
  "learning_rate": 3.839122486288849e-05,
22
- "loss": 0.7776,
23
  "step": 22
24
  },
25
  {
26
  "epoch": 0.04526748971193416,
27
- "grad_norm": 3.493319511413574,
28
  "learning_rate": 5.850091407678245e-05,
29
- "loss": 0.379,
30
  "step": 33
31
  },
32
  {
33
  "epoch": 0.06035665294924554,
34
- "grad_norm": 2.435302972793579,
35
  "learning_rate": 7.861060329067642e-05,
36
- "loss": 0.4947,
37
  "step": 44
38
  },
39
  {
40
  "epoch": 0.07544581618655692,
41
- "grad_norm": 2.2073612213134766,
42
  "learning_rate": 9.872029250457039e-05,
43
- "loss": 0.3406,
44
  "step": 55
45
  },
46
  {
47
  "epoch": 0.09053497942386832,
48
- "grad_norm": 0.5107505917549133,
49
  "learning_rate": 0.00011882998171846434,
50
- "loss": 0.2391,
51
  "step": 66
52
  },
53
  {
54
  "epoch": 0.1056241426611797,
55
- "grad_norm": 2.4110350608825684,
56
  "learning_rate": 0.00013893967093235832,
57
- "loss": 0.2154,
58
  "step": 77
59
  },
60
  {
61
  "epoch": 0.12071330589849108,
62
- "grad_norm": 3.4400408267974854,
63
  "learning_rate": 0.00015904936014625229,
64
- "loss": 0.2662,
65
  "step": 88
66
  },
67
  {
68
  "epoch": 0.13580246913580246,
69
- "grad_norm": 3.31318998336792,
70
  "learning_rate": 0.00017915904936014626,
71
- "loss": 0.283,
72
  "step": 99
73
  },
74
  {
75
  "epoch": 0.15089163237311384,
76
- "grad_norm": 2.624321699142456,
77
  "learning_rate": 0.00019926873857404023,
78
- "loss": 0.2391,
79
  "step": 110
80
  },
81
  {
82
  "epoch": 0.15089163237311384,
83
- "eval_Qnli-dev_cosine_accuracy": 0.7353515625,
84
- "eval_Qnli-dev_cosine_accuracy_threshold": 0.641769528388977,
85
- "eval_Qnli-dev_cosine_ap": 0.7934694922676566,
86
- "eval_Qnli-dev_cosine_f1": 0.7255734919286321,
87
- "eval_Qnli-dev_cosine_f1_threshold": 0.533623456954956,
88
- "eval_Qnli-dev_cosine_precision": 0.6170520231213873,
89
- "eval_Qnli-dev_cosine_recall": 0.8804123711340206,
90
- "eval_allNLI-dev_cosine_accuracy": 0.7451171875,
91
- "eval_allNLI-dev_cosine_accuracy_threshold": 0.7058684229850769,
92
- "eval_allNLI-dev_cosine_ap": 0.6358738534384165,
93
- "eval_allNLI-dev_cosine_f1": 0.6633039092055485,
94
- "eval_allNLI-dev_cosine_f1_threshold": 0.6644865274429321,
95
- "eval_allNLI-dev_cosine_precision": 0.579295154185022,
96
- "eval_allNLI-dev_cosine_recall": 0.775811209439528,
97
- "eval_sequential_score": 0.7934694922676566,
98
- "eval_sts-test_pearson_cosine": 0.8508165029528609,
99
- "eval_sts-test_spearman_cosine": 0.8665992028008191,
100
- "eval_vitaminc-pairs_loss": 1.550615668296814,
101
- "eval_vitaminc-pairs_runtime": 24.2459,
102
- "eval_vitaminc-pairs_samples_per_second": 10.558,
103
- "eval_vitaminc-pairs_steps_per_second": 0.041,
104
  "step": 110
105
  },
106
  {
107
  "epoch": 0.15089163237311384,
108
- "eval_negation-triplets_loss": 0.8121126294136047,
109
- "eval_negation-triplets_runtime": 4.2821,
110
- "eval_negation-triplets_samples_per_second": 59.784,
111
- "eval_negation-triplets_steps_per_second": 0.234,
112
  "step": 110
113
  },
114
  {
115
  "epoch": 0.15089163237311384,
116
- "eval_scitail-pairs-pos_loss": 0.05080736428499222,
117
- "eval_scitail-pairs-pos_runtime": 3.0909,
118
- "eval_scitail-pairs-pos_samples_per_second": 82.824,
119
- "eval_scitail-pairs-pos_steps_per_second": 0.324,
120
  "step": 110
121
  },
122
  {
123
  "epoch": 0.15089163237311384,
124
- "eval_scitail-pairs-qa_loss": 0.011191274970769882,
125
- "eval_scitail-pairs-qa_runtime": 2.3574,
126
- "eval_scitail-pairs-qa_samples_per_second": 108.596,
127
- "eval_scitail-pairs-qa_steps_per_second": 0.424,
128
  "step": 110
129
  },
130
  {
131
  "epoch": 0.15089163237311384,
132
- "eval_xsum-pairs_loss": 0.2039160132408142,
133
- "eval_xsum-pairs_runtime": 12.8416,
134
- "eval_xsum-pairs_samples_per_second": 19.935,
135
- "eval_xsum-pairs_steps_per_second": 0.078,
136
  "step": 110
137
  },
138
  {
139
  "epoch": 0.15089163237311384,
140
- "eval_sciq_pairs_loss": 0.023365184664726257,
141
- "eval_sciq_pairs_runtime": 20.6659,
142
- "eval_sciq_pairs_samples_per_second": 12.388,
143
- "eval_sciq_pairs_steps_per_second": 0.048,
144
  "step": 110
145
  },
146
  {
147
  "epoch": 0.15089163237311384,
148
- "eval_qasc_pairs_loss": 0.558290421962738,
149
- "eval_qasc_pairs_runtime": 3.009,
150
- "eval_qasc_pairs_samples_per_second": 85.077,
151
- "eval_qasc_pairs_steps_per_second": 0.332,
152
  "step": 110
153
  },
154
  {
155
  "epoch": 0.15089163237311384,
156
- "eval_openbookqa_pairs_loss": 1.253723382949829,
157
- "eval_openbookqa_pairs_runtime": 2.2457,
158
- "eval_openbookqa_pairs_samples_per_second": 113.993,
159
- "eval_openbookqa_pairs_steps_per_second": 0.445,
160
  "step": 110
161
  },
162
  {
163
  "epoch": 0.15089163237311384,
164
- "eval_nq_pairs_loss": 0.10213108360767365,
165
- "eval_nq_pairs_runtime": 18.094,
166
- "eval_nq_pairs_samples_per_second": 14.148,
167
- "eval_nq_pairs_steps_per_second": 0.055,
168
  "step": 110
169
  },
170
  {
171
  "epoch": 0.15089163237311384,
172
- "eval_trivia_pairs_loss": 0.4372706711292267,
173
- "eval_trivia_pairs_runtime": 16.9326,
174
- "eval_trivia_pairs_samples_per_second": 15.119,
175
- "eval_trivia_pairs_steps_per_second": 0.059,
176
  "step": 110
177
  },
178
  {
179
  "epoch": 0.15089163237311384,
180
- "eval_gooaq_pairs_loss": 0.2727060914039612,
181
- "eval_gooaq_pairs_runtime": 3.6277,
182
- "eval_gooaq_pairs_samples_per_second": 70.568,
183
- "eval_gooaq_pairs_steps_per_second": 0.276,
184
  "step": 110
185
  },
186
  {
187
  "epoch": 0.15089163237311384,
188
- "eval_paws-pos_loss": 0.040396444499492645,
189
- "eval_paws-pos_runtime": 2.9381,
190
- "eval_paws-pos_samples_per_second": 87.132,
191
- "eval_paws-pos_steps_per_second": 0.34,
192
  "step": 110
193
  },
194
  {
195
  "epoch": 0.15089163237311384,
196
- "eval_global_dataset_loss": 0.1820984184741974,
197
- "eval_global_dataset_runtime": 125.361,
198
- "eval_global_dataset_samples_per_second": 9.764,
199
- "eval_global_dataset_steps_per_second": 0.04,
200
  "step": 110
201
  },
202
  {
203
  "epoch": 0.16598079561042525,
204
- "grad_norm": 3.345057725906372,
205
  "learning_rate": 0.00021937842778793417,
206
- "loss": 0.2456,
207
  "step": 121
208
  },
209
  {
210
  "epoch": 0.18106995884773663,
211
- "grad_norm": 2.2555935382843018,
212
  "learning_rate": 0.00023948811700182814,
213
- "loss": 0.4199,
214
  "step": 132
215
  },
216
  {
217
  "epoch": 0.19615912208504802,
218
- "grad_norm": 2.3139288425445557,
219
  "learning_rate": 0.0002595978062157221,
220
- "loss": 0.2809,
221
  "step": 143
222
  },
223
  {
224
  "epoch": 0.2112482853223594,
225
- "grad_norm": 3.585463047027588,
226
  "learning_rate": 0.0002797074954296161,
227
- "loss": 0.5773,
228
  "step": 154
229
  },
230
  {
231
  "epoch": 0.22633744855967078,
232
- "grad_norm": 2.4281251430511475,
233
  "learning_rate": 0.00029981718464351003,
234
- "loss": 0.3734,
235
  "step": 165
236
  },
237
  {
238
  "epoch": 0.24142661179698216,
239
- "grad_norm": 0.2383209466934204,
240
  "learning_rate": 0.000319926873857404,
241
- "loss": 0.2348,
242
  "step": 176
243
  },
244
  {
245
  "epoch": 0.25651577503429357,
246
- "grad_norm": 2.4634456634521484,
247
  "learning_rate": 0.00034003656307129797,
248
- "loss": 0.4421,
249
  "step": 187
250
  },
251
  {
252
  "epoch": 0.2716049382716049,
253
- "grad_norm": 3.1270384788513184,
254
  "learning_rate": 0.00036014625228519197,
255
- "loss": 0.5076,
256
  "step": 198
257
  },
258
  {
259
  "epoch": 0.28669410150891633,
260
- "grad_norm": 0.7871516346931458,
261
  "learning_rate": 0.0003802559414990859,
262
- "loss": 0.211,
263
  "step": 209
264
  }
265
  ],
266
- "logging_steps": 11,
267
  "max_steps": 2187,
268
  "num_input_tokens_seen": 0,
269
  "num_train_epochs": 3,
 
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
+ {
12
+ "epoch": 0.0013717421124828531,
13
+ "grad_norm": 13.75906753540039,
14
+ "learning_rate": 1.8281535648994516e-06,
15
+ "loss": 1.2468,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.0027434842249657062,
20
+ "grad_norm": 14.05058765411377,
21
+ "learning_rate": 3.6563071297989032e-06,
22
+ "loss": 1.4692,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.00411522633744856,
27
+ "grad_norm": 13.991771697998047,
28
+ "learning_rate": 5.484460694698355e-06,
29
+ "loss": 1.2457,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.0054869684499314125,
34
+ "grad_norm": 13.429465293884277,
35
+ "learning_rate": 7.3126142595978065e-06,
36
+ "loss": 1.1859,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.006858710562414266,
41
+ "grad_norm": 12.403002738952637,
42
+ "learning_rate": 9.140767824497258e-06,
43
+ "loss": 1.2404,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.00823045267489712,
48
+ "grad_norm": 1.5524662733078003,
49
+ "learning_rate": 1.096892138939671e-05,
50
+ "loss": 0.042,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.009602194787379973,
55
+ "grad_norm": 10.1494140625,
56
+ "learning_rate": 1.2797074954296162e-05,
57
+ "loss": 0.8856,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.010973936899862825,
62
+ "grad_norm": Infinity,
63
+ "learning_rate": 1.2797074954296162e-05,
64
+ "loss": 1.5417,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.012345679012345678,
69
+ "grad_norm": 0.4553964138031006,
70
+ "learning_rate": 1.4625228519195613e-05,
71
+ "loss": 0.023,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.013717421124828532,
76
+ "grad_norm": 10.332369804382324,
77
+ "learning_rate": 1.6453382084095062e-05,
78
+ "loss": 0.8655,
79
+ "step": 10
80
+ },
81
  {
82
  "epoch": 0.015089163237311385,
83
+ "grad_norm": 7.544310092926025,
84
  "learning_rate": 1.8281535648994517e-05,
85
+ "loss": 0.5894,
86
  "step": 11
87
  },
88
+ {
89
+ "epoch": 0.01646090534979424,
90
+ "grad_norm": 8.16427230834961,
91
+ "learning_rate": 2.0109689213893968e-05,
92
+ "loss": 0.7053,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.01783264746227709,
97
+ "grad_norm": 7.403252601623535,
98
+ "learning_rate": 2.193784277879342e-05,
99
+ "loss": 0.5857,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.019204389574759947,
104
+ "grad_norm": 8.974674224853516,
105
+ "learning_rate": 2.376599634369287e-05,
106
+ "loss": 0.8375,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.0205761316872428,
111
+ "grad_norm": 13.417745590209961,
112
+ "learning_rate": 2.5594149908592324e-05,
113
+ "loss": 0.9043,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.02194787379972565,
118
+ "grad_norm": 12.881294250488281,
119
+ "learning_rate": 2.742230347349177e-05,
120
+ "loss": 0.8756,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.023319615912208505,
125
+ "grad_norm": 7.439205169677734,
126
+ "learning_rate": 2.9250457038391226e-05,
127
+ "loss": 0.5076,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.024691358024691357,
132
+ "grad_norm": 8.46964168548584,
133
+ "learning_rate": 3.107861060329068e-05,
134
+ "loss": 0.4757,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.02606310013717421,
139
+ "grad_norm": 17.02773666381836,
140
+ "learning_rate": 3.2906764168190124e-05,
141
+ "loss": 0.9993,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.027434842249657063,
146
+ "grad_norm": 6.2668776512146,
147
+ "learning_rate": 3.473491773308958e-05,
148
+ "loss": 0.2622,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.02880658436213992,
153
+ "grad_norm": 8.273824691772461,
154
+ "learning_rate": 3.656307129798903e-05,
155
+ "loss": 0.3497,
156
+ "step": 21
157
+ },
158
  {
159
  "epoch": 0.03017832647462277,
160
+ "grad_norm": 5.5460944175720215,
161
  "learning_rate": 3.839122486288849e-05,
162
+ "loss": 0.2514,
163
  "step": 22
164
  },
165
+ {
166
+ "epoch": 0.03155006858710562,
167
+ "grad_norm": 4.283128261566162,
168
+ "learning_rate": 4.0219378427787935e-05,
169
+ "loss": 0.1673,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.03292181069958848,
174
+ "grad_norm": 4.708792209625244,
175
+ "learning_rate": 4.204753199268738e-05,
176
+ "loss": 0.203,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.03429355281207133,
181
+ "grad_norm": 14.491021156311035,
182
+ "learning_rate": 4.387568555758684e-05,
183
+ "loss": 0.698,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.03566529492455418,
188
+ "grad_norm": 7.903520584106445,
189
+ "learning_rate": 4.570383912248629e-05,
190
+ "loss": 0.3401,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.037037037037037035,
195
+ "grad_norm": 7.333080291748047,
196
+ "learning_rate": 4.753199268738574e-05,
197
+ "loss": 0.2185,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.038408779149519894,
202
+ "grad_norm": 8.625358581542969,
203
+ "learning_rate": 4.936014625228519e-05,
204
+ "loss": 0.4424,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.039780521262002745,
209
+ "grad_norm": 1.5588488578796387,
210
+ "learning_rate": 5.118829981718465e-05,
211
+ "loss": 0.0381,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.0411522633744856,
216
+ "grad_norm": 12.401138305664062,
217
+ "learning_rate": 5.3016453382084095e-05,
218
+ "loss": 0.8215,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.04252400548696845,
223
+ "grad_norm": 5.405845642089844,
224
+ "learning_rate": 5.484460694698354e-05,
225
+ "loss": 0.1542,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.0438957475994513,
230
+ "grad_norm": 8.558808326721191,
231
+ "learning_rate": 5.6672760511883e-05,
232
+ "loss": 0.6893,
233
+ "step": 32
234
+ },
235
  {
236
  "epoch": 0.04526748971193416,
237
+ "grad_norm": 7.206741809844971,
238
  "learning_rate": 5.850091407678245e-05,
239
+ "loss": 0.3773,
240
  "step": 33
241
  },
242
+ {
243
+ "epoch": 0.04663923182441701,
244
+ "grad_norm": 8.300729751586914,
245
+ "learning_rate": 6.0329067641681906e-05,
246
+ "loss": 0.538,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.04801097393689986,
251
+ "grad_norm": 0.2500181496143341,
252
+ "learning_rate": 6.215722120658135e-05,
253
+ "loss": 0.0073,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.04938271604938271,
258
+ "grad_norm": 17.457223892211914,
259
+ "learning_rate": 6.398537477148081e-05,
260
+ "loss": 2.378,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.05075445816186557,
265
+ "grad_norm": 10.884990692138672,
266
+ "learning_rate": 6.581352833638025e-05,
267
+ "loss": 0.5949,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.05212620027434842,
272
+ "grad_norm": 10.013723373413086,
273
+ "learning_rate": 6.764168190127972e-05,
274
+ "loss": 0.7071,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.053497942386831275,
279
+ "grad_norm": 4.653324604034424,
280
+ "learning_rate": 6.946983546617916e-05,
281
+ "loss": 0.1607,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.05486968449931413,
286
+ "grad_norm": 9.527400970458984,
287
+ "learning_rate": 7.129798903107861e-05,
288
+ "loss": 0.7735,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.056241426611796985,
293
+ "grad_norm": 12.477531433105469,
294
+ "learning_rate": 7.312614259597807e-05,
295
+ "loss": 0.7594,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.05761316872427984,
300
+ "grad_norm": 5.369799613952637,
301
+ "learning_rate": 7.495429616087751e-05,
302
+ "loss": 0.3569,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.05898491083676269,
307
+ "grad_norm": 5.1385908126831055,
308
+ "learning_rate": 7.678244972577697e-05,
309
+ "loss": 0.2454,
310
+ "step": 43
311
+ },
312
  {
313
  "epoch": 0.06035665294924554,
314
+ "grad_norm": 6.1807708740234375,
315
  "learning_rate": 7.861060329067642e-05,
316
+ "loss": 0.2723,
317
  "step": 44
318
  },
319
+ {
320
+ "epoch": 0.06172839506172839,
321
+ "grad_norm": 7.941879749298096,
322
+ "learning_rate": 8.043875685557587e-05,
323
+ "loss": 0.5338,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.06310013717421124,
328
+ "grad_norm": 5.015410423278809,
329
+ "learning_rate": 8.226691042047532e-05,
330
+ "loss": 0.1891,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.0644718792866941,
335
+ "grad_norm": 7.299699306488037,
336
+ "learning_rate": 8.409506398537477e-05,
337
+ "loss": 0.3647,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.06584362139917696,
342
+ "grad_norm": 8.421393394470215,
343
+ "learning_rate": 8.592321755027423e-05,
344
+ "loss": 0.383,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.06721536351165981,
349
+ "grad_norm": 5.5915937423706055,
350
+ "learning_rate": 8.775137111517367e-05,
351
+ "loss": 0.2353,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.06858710562414266,
356
+ "grad_norm": 8.187829971313477,
357
+ "learning_rate": 8.957952468007313e-05,
358
+ "loss": 0.5541,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.06995884773662552,
363
+ "grad_norm": 6.386786460876465,
364
+ "learning_rate": 9.140767824497258e-05,
365
+ "loss": 0.4908,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.07133058984910837,
370
+ "grad_norm": 8.64050006866455,
371
+ "learning_rate": 9.323583180987204e-05,
372
+ "loss": 0.586,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.07270233196159122,
377
+ "grad_norm": 5.879551410675049,
378
+ "learning_rate": 9.506398537477148e-05,
379
+ "loss": 0.2241,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.07407407407407407,
384
+ "grad_norm": 7.824138164520264,
385
+ "learning_rate": 9.689213893967093e-05,
386
+ "loss": 0.6046,
387
+ "step": 54
388
+ },
389
  {
390
  "epoch": 0.07544581618655692,
391
+ "grad_norm": 6.351109504699707,
392
  "learning_rate": 9.872029250457039e-05,
393
+ "loss": 0.231,
394
  "step": 55
395
  },
396
+ {
397
+ "epoch": 0.07681755829903979,
398
+ "grad_norm": 9.437410354614258,
399
+ "learning_rate": 0.00010054844606946984,
400
+ "loss": 0.7105,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.07818930041152264,
405
+ "grad_norm": 8.40911865234375,
406
+ "learning_rate": 0.0001023765996343693,
407
+ "loss": 0.5591,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.07956104252400549,
412
+ "grad_norm": 7.631382942199707,
413
+ "learning_rate": 0.00010420475319926874,
414
+ "loss": 0.5194,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.08093278463648834,
419
+ "grad_norm": 5.773220062255859,
420
+ "learning_rate": 0.00010603290676416819,
421
+ "loss": 0.3297,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.0823045267489712,
426
+ "grad_norm": 1.3606321811676025,
427
+ "learning_rate": 0.00010786106032906765,
428
+ "loss": 0.0299,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.08367626886145405,
433
+ "grad_norm": 7.216275215148926,
434
+ "learning_rate": 0.00010968921389396709,
435
+ "loss": 0.3514,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.0850480109739369,
440
+ "grad_norm": 4.70477294921875,
441
+ "learning_rate": 0.00011151736745886655,
442
+ "loss": 0.1932,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.08641975308641975,
447
+ "grad_norm": 6.754104137420654,
448
+ "learning_rate": 0.000113345521023766,
449
+ "loss": 0.4035,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.0877914951989026,
454
+ "grad_norm": 0.19067375361919403,
455
+ "learning_rate": 0.00011517367458866546,
456
+ "loss": 0.0094,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.08916323731138547,
461
+ "grad_norm": 1.1715893745422363,
462
+ "learning_rate": 0.0001170018281535649,
463
+ "loss": 0.0148,
464
+ "step": 65
465
+ },
466
  {
467
  "epoch": 0.09053497942386832,
468
+ "grad_norm": 1.6287739276885986,
469
  "learning_rate": 0.00011882998171846434,
470
+ "loss": 0.0231,
471
  "step": 66
472
  },
473
+ {
474
+ "epoch": 0.09190672153635117,
475
+ "grad_norm": 7.027708053588867,
476
+ "learning_rate": 0.00012065813528336381,
477
+ "loss": 0.3204,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.09327846364883402,
482
+ "grad_norm": 7.248253345489502,
483
+ "learning_rate": 0.00012248628884826325,
484
+ "loss": 0.3011,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.09465020576131687,
489
+ "grad_norm": 9.592718124389648,
490
+ "learning_rate": 0.0001243144424131627,
491
+ "loss": 0.3871,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.09602194787379972,
496
+ "grad_norm": 5.128874778747559,
497
+ "learning_rate": 0.00012614259597806216,
498
+ "loss": 0.1823,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.09739368998628258,
503
+ "grad_norm": 6.496853351593018,
504
+ "learning_rate": 0.00012797074954296162,
505
+ "loss": 0.3572,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.09876543209876543,
510
+ "grad_norm": 6.564659118652344,
511
+ "learning_rate": 0.00012979890310786104,
512
+ "loss": 0.5289,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.10013717421124829,
517
+ "grad_norm": 6.480371952056885,
518
+ "learning_rate": 0.0001316270566727605,
519
+ "loss": 0.3223,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.10150891632373114,
524
+ "grad_norm": 7.222306728363037,
525
+ "learning_rate": 0.00013345521023765998,
526
+ "loss": 0.3247,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.102880658436214,
531
+ "grad_norm": 5.406076431274414,
532
+ "learning_rate": 0.00013528336380255943,
533
+ "loss": 0.2133,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.10425240054869685,
538
+ "grad_norm": 11.029163360595703,
539
+ "learning_rate": 0.00013711151736745886,
540
+ "loss": 0.8249,
541
+ "step": 76
542
+ },
543
  {
544
  "epoch": 0.1056241426611797,
545
+ "grad_norm": 7.284115314483643,
546
  "learning_rate": 0.00013893967093235832,
547
+ "loss": 0.4341,
548
  "step": 77
549
  },
550
+ {
551
+ "epoch": 0.10699588477366255,
552
+ "grad_norm": 6.240738868713379,
553
+ "learning_rate": 0.00014076782449725777,
554
+ "loss": 0.2932,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.1083676268861454,
559
+ "grad_norm": 0.1745665967464447,
560
+ "learning_rate": 0.00014259597806215722,
561
+ "loss": 0.0099,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.10973936899862825,
566
+ "grad_norm": 5.460353851318359,
567
+ "learning_rate": 0.00014442413162705668,
568
+ "loss": 0.3348,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.1111111111111111,
573
+ "grad_norm": 8.869246482849121,
574
+ "learning_rate": 0.00014625228519195613,
575
+ "loss": 0.6405,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.11248285322359397,
580
+ "grad_norm": 4.475996971130371,
581
+ "learning_rate": 0.0001480804387568556,
582
+ "loss": 0.1536,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.11385459533607682,
587
+ "grad_norm": 2.700299024581909,
588
+ "learning_rate": 0.00014990859232175501,
589
+ "loss": 0.1299,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.11522633744855967,
594
+ "grad_norm": 7.5515618324279785,
595
+ "learning_rate": 0.00015173674588665447,
596
+ "loss": 0.5863,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.11659807956104253,
601
+ "grad_norm": 9.869407653808594,
602
+ "learning_rate": 0.00015356489945155395,
603
+ "loss": 0.7205,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.11796982167352538,
608
+ "grad_norm": 8.208423614501953,
609
+ "learning_rate": 0.00015539305301645338,
610
+ "loss": 0.4052,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.11934156378600823,
615
+ "grad_norm": 6.408420562744141,
616
+ "learning_rate": 0.00015722120658135283,
617
+ "loss": 0.3953,
618
+ "step": 87
619
+ },
620
  {
621
  "epoch": 0.12071330589849108,
622
+ "grad_norm": 7.050099849700928,
623
  "learning_rate": 0.00015904936014625229,
624
+ "loss": 0.5598,
625
  "step": 88
626
  },
627
+ {
628
+ "epoch": 0.12208504801097393,
629
+ "grad_norm": 5.326991558074951,
630
+ "learning_rate": 0.00016087751371115174,
631
+ "loss": 0.2856,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.12345679012345678,
636
+ "grad_norm": 4.510193347930908,
637
+ "learning_rate": 0.0001627056672760512,
638
+ "loss": 0.2277,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.12482853223593965,
643
+ "grad_norm": 5.769596576690674,
644
+ "learning_rate": 0.00016453382084095065,
645
+ "loss": 0.3296,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.1262002743484225,
650
+ "grad_norm": 6.066390037536621,
651
+ "learning_rate": 0.0001663619744058501,
652
+ "loss": 0.3079,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.12757201646090535,
657
+ "grad_norm": 6.80173921585083,
658
+ "learning_rate": 0.00016819012797074953,
659
+ "loss": 0.4867,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.1289437585733882,
664
+ "grad_norm": 6.219693183898926,
665
+ "learning_rate": 0.00017001828153564899,
666
+ "loss": 0.4319,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.13031550068587106,
671
+ "grad_norm": 5.316290855407715,
672
+ "learning_rate": 0.00017184643510054847,
673
+ "loss": 0.2952,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.13168724279835392,
678
+ "grad_norm": 6.86447811126709,
679
+ "learning_rate": 0.00017367458866544792,
680
+ "loss": 0.5531,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.13305898491083676,
685
+ "grad_norm": 1.2648167610168457,
686
+ "learning_rate": 0.00017550274223034735,
687
+ "loss": 0.0296,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.13443072702331962,
692
+ "grad_norm": 8.14661979675293,
693
+ "learning_rate": 0.0001773308957952468,
694
+ "loss": 0.8536,
695
+ "step": 98
696
+ },
697
  {
698
  "epoch": 0.13580246913580246,
699
+ "grad_norm": 8.927884101867676,
700
  "learning_rate": 0.00017915904936014626,
701
+ "loss": 0.4879,
702
  "step": 99
703
  },
704
+ {
705
+ "epoch": 0.13717421124828533,
706
+ "grad_norm": 9.555243492126465,
707
+ "learning_rate": 0.00018098720292504568,
708
+ "loss": 0.67,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.13854595336076816,
713
+ "grad_norm": 7.783656120300293,
714
+ "learning_rate": 0.00018281535648994517,
715
+ "loss": 0.4813,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.13991769547325103,
720
+ "grad_norm": 0.5169872641563416,
721
+ "learning_rate": 0.00018464351005484462,
722
+ "loss": 0.0488,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.1412894375857339,
727
+ "grad_norm": 6.967692852020264,
728
+ "learning_rate": 0.00018647166361974407,
729
+ "loss": 0.5388,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.14266117969821673,
734
+ "grad_norm": 6.324373245239258,
735
+ "learning_rate": 0.0001882998171846435,
736
+ "loss": 0.376,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.1440329218106996,
741
+ "grad_norm": 0.7642683982849121,
742
+ "learning_rate": 0.00019012797074954296,
743
+ "loss": 0.017,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.14540466392318244,
748
+ "grad_norm": 8.600672721862793,
749
+ "learning_rate": 0.00019195612431444244,
750
+ "loss": 0.7542,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.1467764060356653,
755
+ "grad_norm": 7.111880302429199,
756
+ "learning_rate": 0.00019378427787934186,
757
+ "loss": 0.4063,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.14814814814814814,
762
+ "grad_norm": 6.075577735900879,
763
+ "learning_rate": 0.00019561243144424132,
764
+ "loss": 0.3658,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.149519890260631,
769
+ "grad_norm": 6.12313175201416,
770
+ "learning_rate": 0.00019744058500914077,
771
+ "loss": 0.4389,
772
+ "step": 109
773
+ },
774
  {
775
  "epoch": 0.15089163237311384,
776
+ "grad_norm": 5.813235759735107,
777
  "learning_rate": 0.00019926873857404023,
778
+ "loss": 0.3803,
779
  "step": 110
780
  },
781
  {
782
  "epoch": 0.15089163237311384,
783
+ "eval_Qnli-dev_cosine_accuracy": 0.705078125,
784
+ "eval_Qnli-dev_cosine_accuracy_threshold": 0.6866907477378845,
785
+ "eval_Qnli-dev_cosine_ap": 0.7567018413685389,
786
+ "eval_Qnli-dev_cosine_f1": 0.6931818181818182,
787
+ "eval_Qnli-dev_cosine_f1_threshold": 0.6343963146209717,
788
+ "eval_Qnli-dev_cosine_precision": 0.6267123287671232,
789
+ "eval_Qnli-dev_cosine_recall": 0.7754237288135594,
790
+ "eval_allNLI-dev_cosine_accuracy": 0.76953125,
791
+ "eval_allNLI-dev_cosine_accuracy_threshold": 0.7752166986465454,
792
+ "eval_allNLI-dev_cosine_ap": 0.6627175481841632,
793
+ "eval_allNLI-dev_cosine_f1": 0.6624737945492662,
794
+ "eval_allNLI-dev_cosine_f1_threshold": 0.6564935445785522,
795
+ "eval_allNLI-dev_cosine_precision": 0.5197368421052632,
796
+ "eval_allNLI-dev_cosine_recall": 0.9132947976878613,
797
+ "eval_sequential_score": 0.7567018413685389,
798
+ "eval_sts-test_pearson_cosine": 0.9026620207137961,
799
+ "eval_sts-test_spearman_cosine": 0.913678627606199,
800
+ "eval_vitaminc-pairs_loss": 2.009296178817749,
801
+ "eval_vitaminc-pairs_runtime": 14.3224,
802
+ "eval_vitaminc-pairs_samples_per_second": 8.937,
803
+ "eval_vitaminc-pairs_steps_per_second": 0.07,
804
  "step": 110
805
  },
806
  {
807
  "epoch": 0.15089163237311384,
808
+ "eval_negation-triplets_loss": 1.59572434425354,
809
+ "eval_negation-triplets_runtime": 1.1528,
810
+ "eval_negation-triplets_samples_per_second": 111.029,
811
+ "eval_negation-triplets_steps_per_second": 0.867,
812
  "step": 110
813
  },
814
  {
815
  "epoch": 0.15089163237311384,
816
+ "eval_scitail-pairs-pos_loss": 0.061776161193847656,
817
+ "eval_scitail-pairs-pos_runtime": 1.5728,
818
+ "eval_scitail-pairs-pos_samples_per_second": 81.383,
819
+ "eval_scitail-pairs-pos_steps_per_second": 0.636,
820
  "step": 110
821
  },
822
  {
823
  "epoch": 0.15089163237311384,
824
+ "eval_scitail-pairs-qa_loss": 0.009187542833387852,
825
+ "eval_scitail-pairs-qa_runtime": 1.2102,
826
+ "eval_scitail-pairs-qa_samples_per_second": 105.771,
827
+ "eval_scitail-pairs-qa_steps_per_second": 0.826,
828
  "step": 110
829
  },
830
  {
831
  "epoch": 0.15089163237311384,
832
+ "eval_xsum-pairs_loss": 0.37210211157798767,
833
+ "eval_xsum-pairs_runtime": 6.2854,
834
+ "eval_xsum-pairs_samples_per_second": 20.365,
835
+ "eval_xsum-pairs_steps_per_second": 0.159,
836
  "step": 110
837
  },
838
  {
839
  "epoch": 0.15089163237311384,
840
+ "eval_sciq_pairs_loss": 0.04122849553823471,
841
+ "eval_sciq_pairs_runtime": 8.8116,
842
+ "eval_sciq_pairs_samples_per_second": 14.526,
843
+ "eval_sciq_pairs_steps_per_second": 0.113,
844
  "step": 110
845
  },
846
  {
847
  "epoch": 0.15089163237311384,
848
+ "eval_qasc_pairs_loss": 0.4748501479625702,
849
+ "eval_qasc_pairs_runtime": 1.3827,
850
+ "eval_qasc_pairs_samples_per_second": 92.573,
851
+ "eval_qasc_pairs_steps_per_second": 0.723,
852
  "step": 110
853
  },
854
  {
855
  "epoch": 0.15089163237311384,
856
+ "eval_openbookqa_pairs_loss": 1.1540580987930298,
857
+ "eval_openbookqa_pairs_runtime": 1.1788,
858
+ "eval_openbookqa_pairs_samples_per_second": 108.581,
859
+ "eval_openbookqa_pairs_steps_per_second": 0.848,
860
  "step": 110
861
  },
862
  {
863
  "epoch": 0.15089163237311384,
864
+ "eval_nq_pairs_loss": 0.2363465428352356,
865
+ "eval_nq_pairs_runtime": 7.8515,
866
+ "eval_nq_pairs_samples_per_second": 16.303,
867
+ "eval_nq_pairs_steps_per_second": 0.127,
868
  "step": 110
869
  },
870
  {
871
  "epoch": 0.15089163237311384,
872
+ "eval_trivia_pairs_loss": 0.6520176529884338,
873
+ "eval_trivia_pairs_runtime": 8.9067,
874
+ "eval_trivia_pairs_samples_per_second": 14.371,
875
+ "eval_trivia_pairs_steps_per_second": 0.112,
876
  "step": 110
877
  },
878
  {
879
  "epoch": 0.15089163237311384,
880
+ "eval_gooaq_pairs_loss": 0.22620199620723724,
881
+ "eval_gooaq_pairs_runtime": 2.067,
882
+ "eval_gooaq_pairs_samples_per_second": 61.924,
883
+ "eval_gooaq_pairs_steps_per_second": 0.484,
884
  "step": 110
885
  },
886
  {
887
  "epoch": 0.15089163237311384,
888
+ "eval_paws-pos_loss": 0.02822125516831875,
889
+ "eval_paws-pos_runtime": 1.5117,
890
+ "eval_paws-pos_samples_per_second": 84.672,
891
+ "eval_paws-pos_steps_per_second": 0.662,
892
  "step": 110
893
  },
894
  {
895
  "epoch": 0.15089163237311384,
896
+ "eval_global_dataset_loss": 0.30668479204177856,
897
+ "eval_global_dataset_runtime": 33.2591,
898
+ "eval_global_dataset_samples_per_second": 11.546,
899
+ "eval_global_dataset_steps_per_second": 0.06,
900
  "step": 110
901
  },
902
+ {
903
+ "epoch": 0.1522633744855967,
904
+ "grad_norm": 4.30504846572876,
905
+ "learning_rate": 0.00020109689213893968,
906
+ "loss": 0.2478,
907
+ "step": 111
908
+ },
909
+ {
910
+ "epoch": 0.15363511659807957,
911
+ "grad_norm": 6.559568881988525,
912
+ "learning_rate": 0.00020292504570383914,
913
+ "loss": 0.8402,
914
+ "step": 112
915
+ },
916
+ {
917
+ "epoch": 0.1550068587105624,
918
+ "grad_norm": 5.812280654907227,
919
+ "learning_rate": 0.0002047531992687386,
920
+ "loss": 0.6608,
921
+ "step": 113
922
+ },
923
+ {
924
+ "epoch": 0.15637860082304528,
925
+ "grad_norm": 2.0805885791778564,
926
+ "learning_rate": 0.00020658135283363802,
927
+ "loss": 0.0934,
928
+ "step": 114
929
+ },
930
+ {
931
+ "epoch": 0.15775034293552812,
932
+ "grad_norm": 5.199294090270996,
933
+ "learning_rate": 0.00020840950639853747,
934
+ "loss": 0.3907,
935
+ "step": 115
936
+ },
937
+ {
938
+ "epoch": 0.15912208504801098,
939
+ "grad_norm": 6.3685078620910645,
940
+ "learning_rate": 0.00021023765996343693,
941
+ "loss": 0.449,
942
+ "step": 116
943
+ },
944
+ {
945
+ "epoch": 0.16049382716049382,
946
+ "grad_norm": 6.4199652671813965,
947
+ "learning_rate": 0.00021206581352833638,
948
+ "loss": 0.4041,
949
+ "step": 117
950
+ },
951
+ {
952
+ "epoch": 0.16186556927297668,
953
+ "grad_norm": 6.015898704528809,
954
+ "learning_rate": 0.00021389396709323584,
955
+ "loss": 0.6749,
956
+ "step": 118
957
+ },
958
+ {
959
+ "epoch": 0.16323731138545952,
960
+ "grad_norm": 7.721911430358887,
961
+ "learning_rate": 0.0002157221206581353,
962
+ "loss": 0.4847,
963
+ "step": 119
964
+ },
965
+ {
966
+ "epoch": 0.1646090534979424,
967
+ "grad_norm": 1.8774610757827759,
968
+ "learning_rate": 0.00021755027422303474,
969
+ "loss": 0.0526,
970
+ "step": 120
971
+ },
972
  {
973
  "epoch": 0.16598079561042525,
974
+ "grad_norm": 8.094359397888184,
975
  "learning_rate": 0.00021937842778793417,
976
+ "loss": 0.6795,
977
  "step": 121
978
  },
979
+ {
980
+ "epoch": 0.1673525377229081,
981
+ "grad_norm": 0.33090323209762573,
982
+ "learning_rate": 0.00022120658135283365,
983
+ "loss": 0.0064,
984
+ "step": 122
985
+ },
986
+ {
987
+ "epoch": 0.16872427983539096,
988
+ "grad_norm": 7.3609418869018555,
989
+ "learning_rate": 0.0002230347349177331,
990
+ "loss": 0.5918,
991
+ "step": 123
992
+ },
993
+ {
994
+ "epoch": 0.1700960219478738,
995
+ "grad_norm": 6.189216613769531,
996
+ "learning_rate": 0.00022486288848263253,
997
+ "loss": 0.3544,
998
+ "step": 124
999
+ },
1000
+ {
1001
+ "epoch": 0.17146776406035666,
1002
+ "grad_norm": 5.588890075683594,
1003
+ "learning_rate": 0.000226691042047532,
1004
+ "loss": 0.3849,
1005
+ "step": 125
1006
+ },
1007
+ {
1008
+ "epoch": 0.1728395061728395,
1009
+ "grad_norm": 3.4582345485687256,
1010
+ "learning_rate": 0.00022851919561243144,
1011
+ "loss": 0.2051,
1012
+ "step": 126
1013
+ },
1014
+ {
1015
+ "epoch": 0.17421124828532236,
1016
+ "grad_norm": 4.075862407684326,
1017
+ "learning_rate": 0.00023034734917733092,
1018
+ "loss": 0.2129,
1019
+ "step": 127
1020
+ },
1021
+ {
1022
+ "epoch": 0.1755829903978052,
1023
+ "grad_norm": 15.110091209411621,
1024
+ "learning_rate": 0.00023217550274223035,
1025
+ "loss": 2.7937,
1026
+ "step": 128
1027
+ },
1028
+ {
1029
+ "epoch": 0.17695473251028807,
1030
+ "grad_norm": 0.35791516304016113,
1031
+ "learning_rate": 0.0002340036563071298,
1032
+ "loss": 0.0166,
1033
+ "step": 129
1034
+ },
1035
+ {
1036
+ "epoch": 0.17832647462277093,
1037
+ "grad_norm": 7.5200090408325195,
1038
+ "learning_rate": 0.00023583180987202926,
1039
+ "loss": 0.7856,
1040
+ "step": 130
1041
+ },
1042
+ {
1043
+ "epoch": 0.17969821673525377,
1044
+ "grad_norm": 6.566864490509033,
1045
+ "learning_rate": 0.0002376599634369287,
1046
+ "loss": 0.8368,
1047
+ "step": 131
1048
+ },
1049
  {
1050
  "epoch": 0.18106995884773663,
1051
+ "grad_norm": 4.958701133728027,
1052
  "learning_rate": 0.00023948811700182814,
1053
+ "loss": 0.3813,
1054
  "step": 132
1055
  },
1056
+ {
1057
+ "epoch": 0.18244170096021947,
1058
+ "grad_norm": 5.745133876800537,
1059
+ "learning_rate": 0.00024131627056672762,
1060
+ "loss": 0.5695,
1061
+ "step": 133
1062
+ },
1063
+ {
1064
+ "epoch": 0.18381344307270234,
1065
+ "grad_norm": 4.952736854553223,
1066
+ "learning_rate": 0.00024314442413162708,
1067
+ "loss": 0.351,
1068
+ "step": 134
1069
+ },
1070
+ {
1071
+ "epoch": 0.18518518518518517,
1072
+ "grad_norm": 5.733601093292236,
1073
+ "learning_rate": 0.0002449725776965265,
1074
+ "loss": 0.3821,
1075
+ "step": 135
1076
+ },
1077
+ {
1078
+ "epoch": 0.18655692729766804,
1079
+ "grad_norm": 5.019097328186035,
1080
+ "learning_rate": 0.00024680073126142596,
1081
+ "loss": 0.3249,
1082
+ "step": 136
1083
+ },
1084
+ {
1085
+ "epoch": 0.18792866941015088,
1086
+ "grad_norm": 5.300777912139893,
1087
+ "learning_rate": 0.0002486288848263254,
1088
+ "loss": 0.3404,
1089
+ "step": 137
1090
+ },
1091
+ {
1092
+ "epoch": 0.18930041152263374,
1093
+ "grad_norm": 4.518141269683838,
1094
+ "learning_rate": 0.00025045703839122487,
1095
+ "loss": 0.4535,
1096
+ "step": 138
1097
+ },
1098
+ {
1099
+ "epoch": 0.1906721536351166,
1100
+ "grad_norm": 1.0158088207244873,
1101
+ "learning_rate": 0.0002522851919561243,
1102
+ "loss": 0.0577,
1103
+ "step": 139
1104
+ },
1105
+ {
1106
+ "epoch": 0.19204389574759945,
1107
+ "grad_norm": 5.966796398162842,
1108
+ "learning_rate": 0.0002541133455210238,
1109
+ "loss": 0.7431,
1110
+ "step": 140
1111
+ },
1112
+ {
1113
+ "epoch": 0.1934156378600823,
1114
+ "grad_norm": 6.123642921447754,
1115
+ "learning_rate": 0.00025594149908592323,
1116
+ "loss": 0.6778,
1117
+ "step": 141
1118
+ },
1119
+ {
1120
+ "epoch": 0.19478737997256515,
1121
+ "grad_norm": 5.842874050140381,
1122
+ "learning_rate": 0.0002577696526508227,
1123
+ "loss": 0.5436,
1124
+ "step": 142
1125
+ },
1126
  {
1127
  "epoch": 0.19615912208504802,
1128
+ "grad_norm": 4.759068012237549,
1129
  "learning_rate": 0.0002595978062157221,
1130
+ "loss": 0.3582,
1131
  "step": 143
1132
  },
1133
+ {
1134
+ "epoch": 0.19753086419753085,
1135
+ "grad_norm": 4.080338478088379,
1136
+ "learning_rate": 0.00026142595978062154,
1137
+ "loss": 0.316,
1138
+ "step": 144
1139
+ },
1140
+ {
1141
+ "epoch": 0.19890260631001372,
1142
+ "grad_norm": 4.1391448974609375,
1143
+ "learning_rate": 0.000263254113345521,
1144
+ "loss": 0.4446,
1145
+ "step": 145
1146
+ },
1147
+ {
1148
+ "epoch": 0.20027434842249658,
1149
+ "grad_norm": 5.856256008148193,
1150
+ "learning_rate": 0.0002650822669104205,
1151
+ "loss": 0.7792,
1152
+ "step": 146
1153
+ },
1154
+ {
1155
+ "epoch": 0.20164609053497942,
1156
+ "grad_norm": 7.747331142425537,
1157
+ "learning_rate": 0.00026691042047531996,
1158
+ "loss": 1.1147,
1159
+ "step": 147
1160
+ },
1161
+ {
1162
+ "epoch": 0.2030178326474623,
1163
+ "grad_norm": 6.825289249420166,
1164
+ "learning_rate": 0.0002687385740402194,
1165
+ "loss": 0.8267,
1166
+ "step": 148
1167
+ },
1168
+ {
1169
+ "epoch": 0.20438957475994513,
1170
+ "grad_norm": 7.336719512939453,
1171
+ "learning_rate": 0.00027056672760511887,
1172
+ "loss": 0.8149,
1173
+ "step": 149
1174
+ },
1175
+ {
1176
+ "epoch": 0.205761316872428,
1177
+ "grad_norm": 6.731626510620117,
1178
+ "learning_rate": 0.00027239488117001827,
1179
+ "loss": 0.942,
1180
+ "step": 150
1181
+ },
1182
+ {
1183
+ "epoch": 0.20713305898491083,
1184
+ "grad_norm": 10.727692604064941,
1185
+ "learning_rate": 0.0002742230347349177,
1186
+ "loss": 2.4865,
1187
+ "step": 151
1188
+ },
1189
+ {
1190
+ "epoch": 0.2085048010973937,
1191
+ "grad_norm": 8.583380699157715,
1192
+ "learning_rate": 0.0002760511882998172,
1193
+ "loss": 1.0715,
1194
+ "step": 152
1195
+ },
1196
+ {
1197
+ "epoch": 0.20987654320987653,
1198
+ "grad_norm": 6.236877918243408,
1199
+ "learning_rate": 0.00027787934186471663,
1200
+ "loss": 0.6219,
1201
+ "step": 153
1202
+ },
1203
  {
1204
  "epoch": 0.2112482853223594,
1205
+ "grad_norm": 6.254538536071777,
1206
  "learning_rate": 0.0002797074954296161,
1207
+ "loss": 0.8705,
1208
  "step": 154
1209
  },
1210
+ {
1211
+ "epoch": 0.21262002743484226,
1212
+ "grad_norm": 3.0917959213256836,
1213
+ "learning_rate": 0.00028153564899451554,
1214
+ "loss": 0.2407,
1215
+ "step": 155
1216
+ },
1217
+ {
1218
+ "epoch": 0.2139917695473251,
1219
+ "grad_norm": 4.438024997711182,
1220
+ "learning_rate": 0.000283363802559415,
1221
+ "loss": 0.4925,
1222
+ "step": 156
1223
+ },
1224
+ {
1225
+ "epoch": 0.21536351165980797,
1226
+ "grad_norm": 0.43344631791114807,
1227
+ "learning_rate": 0.00028519195612431445,
1228
+ "loss": 0.0316,
1229
+ "step": 157
1230
+ },
1231
+ {
1232
+ "epoch": 0.2167352537722908,
1233
+ "grad_norm": 5.73934268951416,
1234
+ "learning_rate": 0.0002870201096892139,
1235
+ "loss": 0.3935,
1236
+ "step": 158
1237
+ },
1238
+ {
1239
+ "epoch": 0.21810699588477367,
1240
+ "grad_norm": 4.532804012298584,
1241
+ "learning_rate": 0.00028884826325411336,
1242
+ "loss": 0.2083,
1243
+ "step": 159
1244
+ },
1245
+ {
1246
+ "epoch": 0.2194787379972565,
1247
+ "grad_norm": 4.846848487854004,
1248
+ "learning_rate": 0.0002906764168190128,
1249
+ "loss": 0.2798,
1250
+ "step": 160
1251
+ },
1252
+ {
1253
+ "epoch": 0.22085048010973937,
1254
+ "grad_norm": 7.060863018035889,
1255
+ "learning_rate": 0.00029250457038391227,
1256
+ "loss": 0.8777,
1257
+ "step": 161
1258
+ },
1259
+ {
1260
+ "epoch": 0.2222222222222222,
1261
+ "grad_norm": 0.012754157185554504,
1262
+ "learning_rate": 0.0002943327239488117,
1263
+ "loss": 0.0002,
1264
+ "step": 162
1265
+ },
1266
+ {
1267
+ "epoch": 0.22359396433470508,
1268
+ "grad_norm": 4.094379901885986,
1269
+ "learning_rate": 0.0002961608775137112,
1270
+ "loss": 0.2736,
1271
+ "step": 163
1272
+ },
1273
+ {
1274
+ "epoch": 0.22496570644718794,
1275
+ "grad_norm": 10.741785049438477,
1276
+ "learning_rate": 0.0002979890310786106,
1277
+ "loss": 2.4185,
1278
+ "step": 164
1279
+ },
1280
  {
1281
  "epoch": 0.22633744855967078,
1282
+ "grad_norm": 4.820891380310059,
1283
  "learning_rate": 0.00029981718464351003,
1284
+ "loss": 0.7767,
1285
  "step": 165
1286
  },
1287
+ {
1288
+ "epoch": 0.22770919067215364,
1289
+ "grad_norm": 6.423076152801514,
1290
+ "learning_rate": 0.0003016453382084095,
1291
+ "loss": 0.7971,
1292
+ "step": 166
1293
+ },
1294
+ {
1295
+ "epoch": 0.22908093278463648,
1296
+ "grad_norm": 4.492727756500244,
1297
+ "learning_rate": 0.00030347349177330894,
1298
+ "loss": 0.4535,
1299
+ "step": 167
1300
+ },
1301
+ {
1302
+ "epoch": 0.23045267489711935,
1303
+ "grad_norm": 5.301379680633545,
1304
+ "learning_rate": 0.00030530164533820845,
1305
+ "loss": 0.6654,
1306
+ "step": 168
1307
+ },
1308
+ {
1309
+ "epoch": 0.23182441700960219,
1310
+ "grad_norm": 5.155853748321533,
1311
+ "learning_rate": 0.0003071297989031079,
1312
+ "loss": 0.3985,
1313
+ "step": 169
1314
+ },
1315
+ {
1316
+ "epoch": 0.23319615912208505,
1317
+ "grad_norm": 0.4378865361213684,
1318
+ "learning_rate": 0.00030895795246800735,
1319
+ "loss": 0.0338,
1320
+ "step": 170
1321
+ },
1322
+ {
1323
+ "epoch": 0.2345679012345679,
1324
+ "grad_norm": 4.022473335266113,
1325
+ "learning_rate": 0.00031078610603290675,
1326
+ "loss": 0.1834,
1327
+ "step": 171
1328
+ },
1329
+ {
1330
+ "epoch": 0.23593964334705075,
1331
+ "grad_norm": 7.863429069519043,
1332
+ "learning_rate": 0.0003126142595978062,
1333
+ "loss": 0.603,
1334
+ "step": 172
1335
+ },
1336
+ {
1337
+ "epoch": 0.23731138545953362,
1338
+ "grad_norm": 8.951998710632324,
1339
+ "learning_rate": 0.00031444241316270566,
1340
+ "loss": 0.7871,
1341
+ "step": 173
1342
+ },
1343
+ {
1344
+ "epoch": 0.23868312757201646,
1345
+ "grad_norm": 6.265102386474609,
1346
+ "learning_rate": 0.0003162705667276051,
1347
+ "loss": 0.4304,
1348
+ "step": 174
1349
+ },
1350
+ {
1351
+ "epoch": 0.24005486968449932,
1352
+ "grad_norm": 6.6486005783081055,
1353
+ "learning_rate": 0.00031809872029250457,
1354
+ "loss": 0.649,
1355
+ "step": 175
1356
+ },
1357
  {
1358
  "epoch": 0.24142661179698216,
1359
+ "grad_norm": 0.47100114822387695,
1360
  "learning_rate": 0.000319926873857404,
1361
+ "loss": 0.048,
1362
  "step": 176
1363
  },
1364
+ {
1365
+ "epoch": 0.24279835390946503,
1366
+ "grad_norm": 4.884115695953369,
1367
+ "learning_rate": 0.0003217550274223035,
1368
+ "loss": 0.4079,
1369
+ "step": 177
1370
+ },
1371
+ {
1372
+ "epoch": 0.24417009602194786,
1373
+ "grad_norm": 4.508667469024658,
1374
+ "learning_rate": 0.0003235831809872029,
1375
+ "loss": 0.4627,
1376
+ "step": 178
1377
+ },
1378
+ {
1379
+ "epoch": 0.24554183813443073,
1380
+ "grad_norm": 3.22367262840271,
1381
+ "learning_rate": 0.0003254113345521024,
1382
+ "loss": 0.3703,
1383
+ "step": 179
1384
+ },
1385
+ {
1386
+ "epoch": 0.24691358024691357,
1387
+ "grad_norm": 7.695303916931152,
1388
+ "learning_rate": 0.00032723948811700184,
1389
+ "loss": 0.8343,
1390
+ "step": 180
1391
+ },
1392
+ {
1393
+ "epoch": 0.24828532235939643,
1394
+ "grad_norm": 7.249318599700928,
1395
+ "learning_rate": 0.0003290676416819013,
1396
+ "loss": 0.692,
1397
+ "step": 181
1398
+ },
1399
+ {
1400
+ "epoch": 0.2496570644718793,
1401
+ "grad_norm": 11.686202049255371,
1402
+ "learning_rate": 0.00033089579524680075,
1403
+ "loss": 2.7071,
1404
+ "step": 182
1405
+ },
1406
+ {
1407
+ "epoch": 0.25102880658436216,
1408
+ "grad_norm": 6.061092376708984,
1409
+ "learning_rate": 0.0003327239488117002,
1410
+ "loss": 0.8451,
1411
+ "step": 183
1412
+ },
1413
+ {
1414
+ "epoch": 0.252400548696845,
1415
+ "grad_norm": 5.932607650756836,
1416
+ "learning_rate": 0.00033455210237659966,
1417
+ "loss": 0.635,
1418
+ "step": 184
1419
+ },
1420
+ {
1421
+ "epoch": 0.25377229080932784,
1422
+ "grad_norm": 3.491114616394043,
1423
+ "learning_rate": 0.00033638025594149906,
1424
+ "loss": 0.312,
1425
+ "step": 185
1426
+ },
1427
+ {
1428
+ "epoch": 0.2551440329218107,
1429
+ "grad_norm": 6.4914164543151855,
1430
+ "learning_rate": 0.0003382084095063985,
1431
+ "loss": 0.6996,
1432
+ "step": 186
1433
+ },
1434
  {
1435
  "epoch": 0.25651577503429357,
1436
+ "grad_norm": 6.15857458114624,
1437
  "learning_rate": 0.00034003656307129797,
1438
+ "loss": 0.4432,
1439
  "step": 187
1440
  },
1441
+ {
1442
+ "epoch": 0.2578875171467764,
1443
+ "grad_norm": 4.767185211181641,
1444
+ "learning_rate": 0.0003418647166361974,
1445
+ "loss": 0.375,
1446
+ "step": 188
1447
+ },
1448
+ {
1449
+ "epoch": 0.25925925925925924,
1450
+ "grad_norm": 7.944342613220215,
1451
+ "learning_rate": 0.00034369287020109693,
1452
+ "loss": 0.9366,
1453
+ "step": 189
1454
+ },
1455
+ {
1456
+ "epoch": 0.2606310013717421,
1457
+ "grad_norm": 6.573953628540039,
1458
+ "learning_rate": 0.0003455210237659964,
1459
+ "loss": 0.755,
1460
+ "step": 190
1461
+ },
1462
+ {
1463
+ "epoch": 0.262002743484225,
1464
+ "grad_norm": 4.173367023468018,
1465
+ "learning_rate": 0.00034734917733089584,
1466
+ "loss": 0.6068,
1467
+ "step": 191
1468
+ },
1469
+ {
1470
+ "epoch": 0.26337448559670784,
1471
+ "grad_norm": 5.26171875,
1472
+ "learning_rate": 0.00034917733089579524,
1473
+ "loss": 0.5336,
1474
+ "step": 192
1475
+ },
1476
+ {
1477
+ "epoch": 0.26474622770919065,
1478
+ "grad_norm": 6.669304370880127,
1479
+ "learning_rate": 0.0003510054844606947,
1480
+ "loss": 0.8783,
1481
+ "step": 193
1482
+ },
1483
+ {
1484
+ "epoch": 0.2661179698216735,
1485
+ "grad_norm": 4.4192938804626465,
1486
+ "learning_rate": 0.00035283363802559415,
1487
+ "loss": 0.3576,
1488
+ "step": 194
1489
+ },
1490
+ {
1491
+ "epoch": 0.2674897119341564,
1492
+ "grad_norm": 10.117819786071777,
1493
+ "learning_rate": 0.0003546617915904936,
1494
+ "loss": 2.1854,
1495
+ "step": 195
1496
+ },
1497
+ {
1498
+ "epoch": 0.26886145404663925,
1499
+ "grad_norm": 5.256247520446777,
1500
+ "learning_rate": 0.00035648994515539306,
1501
+ "loss": 0.7835,
1502
+ "step": 196
1503
+ },
1504
+ {
1505
+ "epoch": 0.27023319615912206,
1506
+ "grad_norm": 5.784887313842773,
1507
+ "learning_rate": 0.0003583180987202925,
1508
+ "loss": 0.5668,
1509
+ "step": 197
1510
+ },
1511
  {
1512
  "epoch": 0.2716049382716049,
1513
+ "grad_norm": 4.977567672729492,
1514
  "learning_rate": 0.00036014625228519197,
1515
+ "loss": 0.7033,
1516
  "step": 198
1517
  },
1518
+ {
1519
+ "epoch": 0.2729766803840878,
1520
+ "grad_norm": 0.011424711905419827,
1521
+ "learning_rate": 0.00036197440585009137,
1522
+ "loss": 0.0002,
1523
+ "step": 199
1524
+ },
1525
+ {
1526
+ "epoch": 0.27434842249657065,
1527
+ "grad_norm": 5.805008411407471,
1528
+ "learning_rate": 0.0003638025594149909,
1529
+ "loss": 0.5791,
1530
+ "step": 200
1531
+ },
1532
+ {
1533
+ "epoch": 0.2757201646090535,
1534
+ "grad_norm": 3.8826043605804443,
1535
+ "learning_rate": 0.00036563071297989033,
1536
+ "loss": 0.2697,
1537
+ "step": 201
1538
+ },
1539
+ {
1540
+ "epoch": 0.27709190672153633,
1541
+ "grad_norm": 6.563521385192871,
1542
+ "learning_rate": 0.0003674588665447898,
1543
+ "loss": 0.6261,
1544
+ "step": 202
1545
+ },
1546
+ {
1547
+ "epoch": 0.2784636488340192,
1548
+ "grad_norm": 4.584529399871826,
1549
+ "learning_rate": 0.00036928702010968924,
1550
+ "loss": 0.3253,
1551
+ "step": 203
1552
+ },
1553
+ {
1554
+ "epoch": 0.27983539094650206,
1555
+ "grad_norm": 6.636009216308594,
1556
+ "learning_rate": 0.0003711151736745887,
1557
+ "loss": 0.8323,
1558
+ "step": 204
1559
+ },
1560
+ {
1561
+ "epoch": 0.2812071330589849,
1562
+ "grad_norm": 5.0911359786987305,
1563
+ "learning_rate": 0.00037294332723948815,
1564
+ "loss": 0.4472,
1565
+ "step": 205
1566
+ },
1567
+ {
1568
+ "epoch": 0.2825788751714678,
1569
+ "grad_norm": 3.9219255447387695,
1570
+ "learning_rate": 0.00037477148080438755,
1571
+ "loss": 0.3342,
1572
+ "step": 206
1573
+ },
1574
+ {
1575
+ "epoch": 0.2839506172839506,
1576
+ "grad_norm": 5.114777565002441,
1577
+ "learning_rate": 0.000376599634369287,
1578
+ "loss": 0.6313,
1579
+ "step": 207
1580
+ },
1581
+ {
1582
+ "epoch": 0.28532235939643347,
1583
+ "grad_norm": 0.3298715353012085,
1584
+ "learning_rate": 0.00037842778793418646,
1585
+ "loss": 0.059,
1586
+ "step": 208
1587
+ },
1588
  {
1589
  "epoch": 0.28669410150891633,
1590
+ "grad_norm": 1.5965046882629395,
1591
  "learning_rate": 0.0003802559414990859,
1592
+ "loss": 0.1195,
1593
  "step": 209
1594
+ },
1595
+ {
1596
+ "epoch": 0.2880658436213992,
1597
+ "grad_norm": 0.39121323823928833,
1598
+ "learning_rate": 0.00038208409506398537,
1599
+ "loss": 0.0296,
1600
+ "step": 210
1601
+ },
1602
+ {
1603
+ "epoch": 0.289437585733882,
1604
+ "grad_norm": 4.317224025726318,
1605
+ "learning_rate": 0.0003839122486288849,
1606
+ "loss": 0.5316,
1607
+ "step": 211
1608
+ },
1609
+ {
1610
+ "epoch": 0.2908093278463649,
1611
+ "grad_norm": 4.000308036804199,
1612
+ "learning_rate": 0.00038574040219378433,
1613
+ "loss": 0.5201,
1614
+ "step": 212
1615
+ },
1616
+ {
1617
+ "epoch": 0.29218106995884774,
1618
+ "grad_norm": 6.2192301750183105,
1619
+ "learning_rate": 0.00038756855575868373,
1620
+ "loss": 0.6602,
1621
+ "step": 213
1622
+ },
1623
+ {
1624
+ "epoch": 0.2935528120713306,
1625
+ "grad_norm": 6.702320098876953,
1626
+ "learning_rate": 0.0003893967093235832,
1627
+ "loss": 0.9578,
1628
+ "step": 214
1629
+ },
1630
+ {
1631
+ "epoch": 0.29492455418381347,
1632
+ "grad_norm": 3.9136242866516113,
1633
+ "learning_rate": 0.00039122486288848264,
1634
+ "loss": 0.2089,
1635
+ "step": 215
1636
+ },
1637
+ {
1638
+ "epoch": 0.2962962962962963,
1639
+ "grad_norm": 6.901303768157959,
1640
+ "learning_rate": 0.0003930530164533821,
1641
+ "loss": 1.2112,
1642
+ "step": 216
1643
+ },
1644
+ {
1645
+ "epoch": 0.29766803840877915,
1646
+ "grad_norm": 4.04884672164917,
1647
+ "learning_rate": 0.00039488117001828155,
1648
+ "loss": 0.3294,
1649
+ "step": 217
1650
+ },
1651
+ {
1652
+ "epoch": 0.299039780521262,
1653
+ "grad_norm": 5.46201753616333,
1654
+ "learning_rate": 0.000396709323583181,
1655
+ "loss": 0.867,
1656
+ "step": 218
1657
+ },
1658
+ {
1659
+ "epoch": 0.3004115226337449,
1660
+ "grad_norm": 5.559458255767822,
1661
+ "learning_rate": 0.00039853747714808046,
1662
+ "loss": 1.1745,
1663
+ "step": 219
1664
  }
1665
  ],
1666
+ "logging_steps": 1,
1667
  "max_steps": 2187,
1668
  "num_input_tokens_seen": 0,
1669
  "num_train_epochs": 3,
checkpoint-219/training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cc4e2f1b0b6ec0b954c31a7a2d7392e2f2f817d60f3db0f6f0a80bdf431c25bc
+ oid sha256:845321cafd8553e30026f9c6fea66eb5820d6f58e9ea4a0042d4e1d71fd924e8
  size 5880