Run 2. Outer Step 9. Inner Step 0.
Files changed:
- config.json +2 -8
- inner_optimizer.pt +1 -1
- model.safetensors +1 -1
config.json CHANGED
@@ -267,19 +267,13 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [
-    5355175,
-    5355176,
-    5355178,
-    5355179,
-    5355181
-  ],
+  "block_list": [],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 0,
   "inner_steps": 0,
   "last_allreduce_block": 5351170,
   "layer_norm_epsilon": 1e-05,
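The config hunk carries the substance of this commit: consistent with the commit title, the per-step "block_list" is emptied and "inner_step" is reset to 0, while "last_allreduce_block" stays at 5351170. As a minimal sketch (not the training code itself), the updated config can be inspected with transformers; the repo id distributed/optimized-gpt2-500m is inferred from the auto_map entries above, and trust_remote_code=True is needed because GPTOptimConfig and GPTOptim are custom classes shipped inside the repository:

from transformers import AutoConfig

# Sketch only: repo id inferred from the auto_map entries above, not stated
# elsewhere in this commit. trust_remote_code=True is required because the
# config/model classes (GPTOptimConfig, GPTOptim) live inside the repository.
config = AutoConfig.from_pretrained(
    "distributed/optimized-gpt2-500m",
    trust_remote_code=True,
)

# Extra keys in config.json are exposed as attributes on the config object.
print(config.block_list)            # [] after this commit
print(config.inner_step)            # 0 (reset for Outer Step 9, Inner Step 0)
print(config.last_allreduce_block)  # 5351170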
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:57a7560b7d6dcff2f9b986cdf1519151986f63582928104d82fbbcf373d695a7
 size 8081782026
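inner_optimizer.pt is stored through Git LFS, so the diff only touches the pointer file: the spec version, the SHA-256 of the blob, and its size in bytes (8081782026, about 8.1 GB, unchanged). A small sketch of verifying a downloaded copy against the new pointer, assuming the file is already present locally:

import hashlib
import os

def matches_lfs_pointer(path, expected_oid, expected_size):
    """Check a local file against the oid/size recorded in a Git LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# oid/size copied from the new pointer above; the local path is hypothetical.
print(matches_lfs_pointer(
    "inner_optimizer.pt",
    "57a7560b7d6dcff2f9b986cdf1519151986f63582928104d82fbbcf373d695a7",
    8081782026,
))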
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:30a3c2a1f583992135596ada96883cba467a485b6ef53f21ba48c2355f414587
 size 4040701744
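model.safetensors follows the same pattern: a new SHA-256 for the updated weights, with the size fixed at 4040701744 bytes. A sketch of fetching and opening the weights at this state of the repo, assuming the standard huggingface_hub and safetensors APIs; the repo id is again inferred from the auto_map entries in config.json:

from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

# Sketch only: repo id inferred from auto_map; pass revision=<commit sha> to
# pin to exactly this commit instead of the current head of the repo.
path = hf_hub_download("distributed/optimized-gpt2-500m", "model.safetensors")
state_dict = load_file(path)  # dict of tensor name -> torch.Tensor

# Basic sanity checks on the checkpoint contents.
num_params = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {num_params:,} parameters")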