kmfoda committed
Commit 6497710 · verified · 1 Parent(s): f82e3c6

Epoch 4. Batch Size 0. Peers 13.

Files changed (4):
  1. config.json +12 -6
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
  4. outer_optimizer.pt +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "distributed/optimized-gpt2-1b",
+  "_name_or_path": "kinggeezero/minor1",
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "NON_PARTICIPATING",
@@ -106,7 +106,7 @@
     "19": "NON_PARTICIPATING",
     "190": "NON_PARTICIPATING",
     "191": "NON_PARTICIPATING",
-    "192": "NON_PARTICIPATING",
+    "192": "SUCCESS",
     "193": "NON_PARTICIPATING",
     "194": "NON_PARTICIPATING",
     "195": "NON_PARTICIPATING",
@@ -170,7 +170,7 @@
     "247": "NON_PARTICIPATING",
     "248": "NON_PARTICIPATING",
     "249": "SUCCESS",
-    "25": "NON_PARTICIPATING",
+    "25": "SUCCESS",
     "250": "NON_PARTICIPATING",
     "251": "NON_PARTICIPATING",
     "252": "NON_PARTICIPATING",
@@ -187,7 +187,7 @@
     "32": "NON_PARTICIPATING",
     "33": "NON_PARTICIPATING",
     "34": "NON_PARTICIPATING",
-    "35": "SUCCESS",
+    "35": "NON_PARTICIPATING",
     "36": "NON_PARTICIPATING",
     "37": "NON_PARTICIPATING",
     "38": "NON_PARTICIPATING",
@@ -267,7 +267,13 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [],
+  "block_list": [
+    5076106,
+    5076110,
+    5076114,
+    5076118,
+    5076123
+  ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
@@ -275,7 +281,7 @@
   "initializer_range": 0.02,
   "inner_step": 0,
   "inner_steps": 0,
-  "last_allreduce_block": 5226437,
+  "last_allreduce_block": 5227728,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ee18aa6cae78518ec5b8b190f239543754c73c6909694a918abbb83ff2b806ba
+oid sha256:9d5077c2bbd1e33d316afb58f8af5121760ac319a981491a493b8bc9f7bfed3a
 size 2752
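
inner_optimizer.pt is a Git LFS pointer to a small (2,752-byte) torch checkpoint. A minimal sketch of peeking at the downloaded file, assuming it was written with torch.save (the path and the exact contents are assumptions):

```python
import torch

# Load the downloaded checkpoint on CPU. weights_only=False permits arbitrary
# pickled objects (e.g. an optimizer state_dict); only use it on trusted files.
state = torch.load("inner_optimizer.pt", map_location="cpu", weights_only=False)
print(type(state), list(state.keys()) if isinstance(state, dict) else state)
```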
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cc7f741b2f9e33696ea74a0a2eaad55870886a6237a7f51fd1fc22da40fb4811
+oid sha256:dbacd9c5aa5c72199f4b0f44e26f8da971b38e44f9497137ecd97afcb379f0fd
 size 4040701744
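
model.safetensors is likewise an LFS pointer; the ~4.04 GB object it references holds the model weights. A minimal sketch of loading them, assuming the safetensors package and a local download (path illustrative):

```python
from safetensors.torch import load_file

# Maps tensor name -> torch.Tensor for every weight stored in the file.
state_dict = load_file("model.safetensors")
total = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total:,} parameters")
```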
outer_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:53ec782ffd234c9ae814621926fd417b234055d9fa1348216bfd1774a92f967f
+oid sha256:d7095de2399910580c63c24a8c652e295322a9d84d085ed85dbd41a610e2bc10
 size 4040805354
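
All three binary files are tracked with Git LFS, so the diff only changes the pointer (oid and size). A minimal sketch, assuming the real objects have been downloaded, of checking a file against the new oid and size shown above:

```python
import hashlib
import os

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a downloaded file against the sha256 oid and size from its LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

# Values taken from the outer_optimizer.pt pointer after this commit.
print(verify_lfs_object(
    "outer_optimizer.pt",
    "d7095de2399910580c63c24a8c652e295322a9d84d085ed85dbd41a610e2bc10",
    4040805354,
))
```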