Epoch 261. Batch Size 352. Peers 2.
Files changed:
- config.json +2 -0
- model.safetensors +1 -1
- optimizer.pt +3 -0 (new file)
config.json
CHANGED
@@ -9,11 +9,13 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
+  "block_list": [],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
+  "last_allreduce_block": 3923465,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
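The two new keys store distributed-training bookkeeping directly in the model config. A minimal sketch of loading the checkpoint and reading them, assuming the Hub repo id is distributed/optimized-gpt2-500m (inferred from the auto_map entries above); trust_remote_code=True is required because the config points at the repo's custom GPTOptimConfig/GPTOptim classes:

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "distributed/optimized-gpt2-500m"  # assumed from the auto_map above

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# Fields added in this commit; their exact semantics are an assumption here:
# last_allreduce_block reads like the chain block of the most recent
# all-reduce, and block_list like blocks pending or used for aggregation.
print(config.last_allreduce_block)  # 3923465
print(config.block_list)            # []

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)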
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:de4163002b953b968b97bd91f30da2aaccb27f6c13c1827021bad7db9748b220
 size 2151635344
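The large files live in Git LFS, so the diff only shows pointer files: a version line, the blob's SHA-256 oid, and its byte size. A short sketch (plain hashlib, no LFS tooling assumed) that checks a downloaded blob against the pointer above:

import hashlib
import os

expected_oid = "de4163002b953b968b97bd91f30da2aaccb27f6c13c1827021bad7db9748b220"
expected_size = 2151635344
path = "model.safetensors"

# Stream the file in 1 MiB chunks so the ~2 GB blob never sits in memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")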
optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b26b7c4f7d11b64dc5925fbf2b783049327f9c95a2b1a5542e1775284621894
+size 2151685882
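This commit also checks in the optimizer state alongside the weights, presumably so a peer can resume training mid-run. A sketch of restoring it, assuming optimizer.pt holds a standard torch.optim state_dict (the file's format is not documented in this commit):

import torch

# Load on CPU; map_location avoids requiring the GPU the state was saved from.
state = torch.load("optimizer.pt", map_location="cpu")

# Typical resume path: rebuild the optimizer over the model's parameters and
# restore its state. AdamW is a placeholder choice, not confirmed by the repo.
# optimizer = torch.optim.AdamW(model.parameters())
# optimizer.load_state_dict(state)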