snoopsy committed
Commit a412681 · verified · 1 parent: 5f6d426

Run 3. Outer Step 0. Inner Step 0.

Files changed (3):
  1. config.json +25 -19
  2. inner_optimizer.pt +2 -2
  3. model.safetensors +1 -1
config.json CHANGED
@@ -3,7 +3,7 @@
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "NON_PARTICIPATING",
-    "1": "SUCCESS",
+    "1": "NON_PARTICIPATING",
     "10": "NON_PARTICIPATING",
     "100": "NON_PARTICIPATING",
     "101": "SUCCESS",
@@ -14,7 +14,7 @@
     "106": "NON_PARTICIPATING",
     "107": "NON_PARTICIPATING",
     "108": "NON_PARTICIPATING",
-    "109": "SUCCESS",
+    "109": "NON_PARTICIPATING",
     "11": "NON_PARTICIPATING",
     "110": "NON_PARTICIPATING",
     "111": "NON_PARTICIPATING",
@@ -80,7 +80,7 @@
     "166": "NON_PARTICIPATING",
     "167": "NON_PARTICIPATING",
     "168": "NON_PARTICIPATING",
-    "169": "SUCCESS",
+    "169": "NON_PARTICIPATING",
     "17": "NON_PARTICIPATING",
     "170": "NON_PARTICIPATING",
     "171": "NON_PARTICIPATING",
@@ -115,8 +115,8 @@
     "198": "NON_PARTICIPATING",
     "199": "NON_PARTICIPATING",
     "2": "NON_PARTICIPATING",
-    "20": "SUCCESS",
-    "200": "SUCCESS",
+    "20": "NON_PARTICIPATING",
+    "200": "NON_PARTICIPATING",
     "201": "NON_PARTICIPATING",
     "202": "NON_PARTICIPATING",
     "203": "NON_PARTICIPATING",
@@ -141,10 +141,10 @@
     "220": "NON_PARTICIPATING",
     "221": "SUCCESS",
     "222": "NON_PARTICIPATING",
-    "223": "NON_PARTICIPATING",
+    "223": "FAIL",
     "224": "NON_PARTICIPATING",
-    "225": "SUCCESS",
-    "226": "NON_PARTICIPATING",
+    "225": "NON_PARTICIPATING",
+    "226": "SUCCESS",
     "227": "NON_PARTICIPATING",
     "228": "NON_PARTICIPATING",
     "229": "NON_PARTICIPATING",
@@ -154,7 +154,7 @@
     "232": "NON_PARTICIPATING",
     "233": "NON_PARTICIPATING",
     "234": "NON_PARTICIPATING",
-    "235": "NON_PARTICIPATING",
+    "235": "SUCCESS",
     "236": "NON_PARTICIPATING",
     "237": "NON_PARTICIPATING",
     "238": "NON_PARTICIPATING",
@@ -171,7 +171,7 @@
     "248": "NON_PARTICIPATING",
     "249": "SUCCESS",
     "25": "SUCCESS",
-    "250": "NON_PARTICIPATING",
+    "250": "SUCCESS",
     "251": "NON_PARTICIPATING",
     "252": "NON_PARTICIPATING",
     "253": "NON_PARTICIPATING",
@@ -184,11 +184,11 @@
     "3": "NON_PARTICIPATING",
     "30": "NON_PARTICIPATING",
     "31": "NON_PARTICIPATING",
-    "32": "SUCCESS",
+    "32": "NON_PARTICIPATING",
     "33": "NON_PARTICIPATING",
     "34": "NON_PARTICIPATING",
-    "35": "SUCCESS",
-    "36": "SUCCESS",
+    "35": "NON_PARTICIPATING",
+    "36": "NON_PARTICIPATING",
     "37": "NON_PARTICIPATING",
     "38": "NON_PARTICIPATING",
     "39": "NON_PARTICIPATING",
@@ -217,12 +217,12 @@
     "6": "NON_PARTICIPATING",
     "60": "NON_PARTICIPATING",
     "61": "NON_PARTICIPATING",
-    "62": "SUCCESS",
+    "62": "NON_PARTICIPATING",
     "63": "NON_PARTICIPATING",
     "64": "NON_PARTICIPATING",
     "65": "NON_PARTICIPATING",
     "66": "NON_PARTICIPATING",
-    "67": "SUCCESS",
+    "67": "NON_PARTICIPATING",
     "68": "NON_PARTICIPATING",
     "69": "NON_PARTICIPATING",
     "7": "NON_PARTICIPATING",
@@ -237,8 +237,8 @@
     "78": "NON_PARTICIPATING",
     "79": "NON_PARTICIPATING",
     "8": "NON_PARTICIPATING",
-    "80": "NON_PARTICIPATING",
-    "81": "SUCCESS",
+    "80": "SUCCESS",
+    "81": "NON_PARTICIPATING",
     "82": "NON_PARTICIPATING",
     "83": "NON_PARTICIPATING",
     "84": "NON_PARTICIPATING",
@@ -267,7 +267,13 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [],
+  "block_list": [
+    5365626,
+    5365627,
+    5365628,
+    5365629,
+    5365630
+  ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
@@ -275,7 +281,7 @@
   "initializer_range": 0.02,
   "inner_step": 0,
   "inner_steps": 0,
-  "last_allreduce_block": 5351170,
+  "last_allreduce_block": 5363566,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57a7560b7d6dcff2f9b986cdf1519151986f63582928104d82fbbcf373d695a7
-size 8081782026
+oid sha256:50852309c11bba1c68b1fc599de303950b01245d3f8e70c6ec061ebf1229b0c3
+size 2944
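The inner optimizer pointer now references a ~3 KB object instead of ~8 GB, which suggests a reset inner optimizer state at inner step 0. A minimal sketch for inspecting the downloaded file, assuming it is an ordinary torch.save() payload (the key layout is not confirmed by this repo):

# Minimal sketch: inspect the downloaded inner_optimizer.pt checkpoint.
# Assumes the file was fetched via git-lfs and is a regular torch.save() payload.
import torch

# weights_only=False is needed on newer PyTorch when the checkpoint
# contains non-tensor Python objects (e.g. optimizer metadata).
state = torch.load("inner_optimizer.pt", map_location="cpu", weights_only=False)

if isinstance(state, dict):
    # Print top-level keys and a rough idea of what each holds.
    for key, value in state.items():
        print(key, type(value).__name__)
else:
    print(type(state).__name__)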
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:30a3c2a1f583992135596ada96883cba467a485b6ef53f21ba48c2355f414587
+oid sha256:90b3e428e98fa30e20e42438a9da3f27f84d804d2b9741a9c840c0dcdc0d9ae6
 size 4040701744
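Only the sha256 oid of model.safetensors changes; the size stays the same, which suggests the tensor layout is unchanged and only the weight values were updated. A minimal sketch for checking a locally downloaded copy against the new LFS pointer (the local path is an assumption; the verification script is not part of the repo):

# Minimal sketch: verify a downloaded LFS object against its pointer from this diff.
# Expected oid/size are copied from the new model.safetensors pointer above.
import hashlib
import os

EXPECTED_OID = "90b3e428e98fa30e20e42438a9da3f27f84d804d2b9741a9c840c0dcdc0d9ae6"
EXPECTED_SIZE = 4040701744
PATH = "model.safetensors"  # assumed local path after `git lfs pull`

digest = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")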