modelwizard committed on
Commit
4219a8f
1 Parent(s): 12b27de

Upload LlamaForCausalLM

Browse files
config.json CHANGED
@@ -6,7 +6,7 @@
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 1,
9
- "eos_token_id": 32000,
10
  "hidden_act": "silu",
11
  "hidden_size": 4096,
12
  "initializer_range": 0.02,
@@ -20,10 +20,9 @@
20
  "rms_norm_eps": 1e-05,
21
  "rope_scaling": null,
22
  "rope_theta": 10000.0,
23
- "sliding_window": 4096,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.36.2",
27
  "use_cache": false,
28
- "vocab_size": 32002
29
  }
 
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 1,
9
+ "eos_token_id": 2,
10
  "hidden_act": "silu",
11
  "hidden_size": 4096,
12
  "initializer_range": 0.02,
 
20
  "rms_norm_eps": 1e-05,
21
  "rope_scaling": null,
22
  "rope_theta": 10000.0,
 
23
  "tie_word_embeddings": false,
24
  "torch_dtype": "bfloat16",
25
  "transformers_version": "4.36.2",
26
  "use_cache": false,
27
+ "vocab_size": 32000
28
  }
generation_config.json CHANGED
@@ -1,6 +1,7 @@
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
- "eos_token_id": 32000,
5
- "transformers_version": "4.36.2"
 
6
  }
 
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
+ "transformers_version": "4.36.2",
6
+ "use_cache": false
7
  }
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:df7b6464c9ed2cce8f40a634dbd1543c6b09fea40a9a76f777ea2020d2a17bff
3
- size 4943178720
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ff8d0652a4606f1322eede612836e29dc929b19b39df2872476ff861f376d90a
3
+ size 4943162336
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c7a4fae83e100aaf8c105aa2a1545f88317813fc09e197b91b4d5ed6e3adfdaf
3
  size 4999819336
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:162e31cc8d5f4669a8e65d4117dec60e3cbf5b0fd747154cd184531046a2f8d4
3
  size 4999819336
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3bcee426eacaa064deb49e15d2c6e7d3e3fd79a27a52aad3f56c67f3c418a483
3
- size 4540532728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c58ec971cacbf9b6925e69471f0f36e2572a6889d4e1aa7faff0d6ade376fd2
3
+ size 4540516344
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 14483496960
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 14483464192
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",