kmfoda committed (verified) · Commit d69913a · 1 parent: 9e2648d

Run 2. Outer Step 7. Inner Step 0. Peers 10.

Files changed (4):
  1. config.json         +4 -4
  2. inner_optimizer.pt  +1 -1
  3. model.safetensors   +1 -1
  4. outer_optimizer.pt  +1 -1
config.json CHANGED
@@ -14,7 +14,7 @@
  "106": "NON_PARTICIPATING",
  "107": "NON_PARTICIPATING",
  "108": "NON_PARTICIPATING",
- "109": "NON_PARTICIPATING",
+ "109": "SUCCESS",
  "11": "NON_PARTICIPATING",
  "110": "NON_PARTICIPATING",
  "111": "NON_PARTICIPATING",
@@ -81,7 +81,7 @@
  "167": "NON_PARTICIPATING",
  "168": "NON_PARTICIPATING",
  "169": "NON_PARTICIPATING",
- "17": "SUCCESS",
+ "17": "NON_PARTICIPATING",
  "170": "NON_PARTICIPATING",
  "171": "NON_PARTICIPATING",
  "172": "NON_PARTICIPATING",
@@ -188,7 +188,7 @@
  "33": "NON_PARTICIPATING",
  "34": "NON_PARTICIPATING",
  "35": "NON_PARTICIPATING",
- "36": "SUCCESS",
+ "36": "NON_PARTICIPATING",
  "37": "NON_PARTICIPATING",
  "38": "NON_PARTICIPATING",
  "39": "SUCCESS",
@@ -275,7 +275,7 @@
  "initializer_range": 0.02,
  "inner_step": 0,
  "inner_steps": 0,
- "last_allreduce_block": 5338124,
+ "last_allreduce_block": 5339896,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt_optimized",
  "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f1ac42b91ee3e45fe290bd12a6852ac5e3428187d4f26506c3077a5309a675dd
+ oid sha256:d61f079674c40aa7d0ce48e5b0a717a7e695330c7ddaa68f85aa51a2c6ca8f61
  size 8081781770
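
The three binary files in this commit are tracked with Git LFS, so the diff only replaces a sha256 OID in each pointer while the byte size stays the same. A minimal sketch of checking a downloaded blob against its pointer, assuming the pointer format shown above (version / oid sha256:<hex> / size <bytes>); the same check applies to model.safetensors and outer_optimizer.pt below:

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str, chunk_size: int = 1 << 20) -> bool:
    """Return True if the blob's sha256 and size match the LFS pointer file."""
    # Parse "key value" lines from the pointer (version, oid, size).
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].removeprefix("sha256:").strip()
    expected_size = int(fields["size"])

    # Hash the blob in chunks so large optimizer files don't need to fit in memory.
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)

    return (digest.hexdigest() == expected_oid
            and Path(blob_path).stat().st_size == expected_size)
```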
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8075c0eb8eb3fa7fd23d17ce0490d23e8bca6659552869544e9e62d8bd7bd5d4
+ oid sha256:bcdfb45c1c7b670e719f10e78091cec2c601ac898763796339f16712e03c08c1
  size 4040701744
outer_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0962a19d74842d654b48bd685717c1c95ffd61fda69950f2847a952fb78c733c
+ oid sha256:9b025c21df5808fa76d7738634f7775d4fd9a7584f3a73fd376959e13bb1679d
  size 4040805354
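
Since only the OIDs changed and every file keeps its previous size, the commit overwrites the weights and optimizer states in place. A minimal sketch of inspecting the updated checkpoint, assuming the real LFS objects have been pulled (e.g. with `git lfs pull`) and that the two `.pt` files are ordinary `torch.save` dumps; their internal layout is not shown in this diff:

```python
import torch
from safetensors.torch import load_file

# Hedged sketch: the structure of the optimizer dumps is an assumption,
# the diff only shows their LFS pointers.
weights = load_file("model.safetensors")  # dict: tensor name -> torch.Tensor
# On newer PyTorch, torch.load may need weights_only=False if the dump
# contains non-tensor objects.
inner_opt = torch.load("inner_optimizer.pt", map_location="cpu")
outer_opt = torch.load("outer_optimizer.pt", map_location="cpu")

print(f"model.safetensors: {len(weights)} tensors, "
      f"{sum(t.numel() for t in weights.values()):,} parameters")
print("inner_optimizer.pt:", list(inner_opt)[:5] if isinstance(inner_opt, dict) else type(inner_opt))
print("outer_optimizer.pt:", list(outer_opt)[:5] if isinstance(outer_opt, dict) else type(outer_opt))
```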