aaaacash committed on
Commit
45cd7ef
·
1 Parent(s): c12ee10

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. config.json +5 -5
  2. pytorch_model.bin +2 -2
  3. tokenizer.model +2 -2
  4. training.log +0 -0
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "meta-llama/Llama-2-7b-hf",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
@@ -11,7 +11,7 @@
11
  "hidden_size": 4096,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 11008,
14
- "max_position_embeddings": 4096,
15
  "model_type": "llama",
16
  "num_attention_heads": 32,
17
  "num_hidden_layers": 32,
@@ -20,10 +20,10 @@
20
  "pretraining_tp": 1,
21
  "rms_norm_eps": 1e-05,
22
  "rope_scaling": null,
23
- "rope_theta": 10000.0,
24
  "tie_word_embeddings": false,
25
- "torch_dtype": "float16",
26
  "transformers_version": "4.34.1",
27
  "use_cache": true,
28
- "vocab_size": 32008
29
  }
 
1
  {
2
+ "_name_or_path": "codellama/CodeLlama-7b-hf",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
11
  "hidden_size": 4096,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 11008,
14
+ "max_position_embeddings": 16384,
15
  "model_type": "llama",
16
  "num_attention_heads": 32,
17
  "num_hidden_layers": 32,
 
20
  "pretraining_tp": 1,
21
  "rms_norm_eps": 1e-05,
22
  "rope_scaling": null,
23
+ "rope_theta": 1000000,
24
  "tie_word_embeddings": false,
25
+ "torch_dtype": "bfloat16",
26
  "transformers_version": "4.34.1",
27
  "use_cache": true,
28
+ "vocab_size": 32024
29
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cf544eb8d443929e4962e9d28818eff3ae66a972fac9e2a8b772fa8881ed7662
3
- size 13477059118
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d74301f2cae6ab30b1f242db93a876012b14df30e6cfa4b4c18d300d5b3b9d3f
3
+ size 13477321262
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
3
- size 499723
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:45ccb9c8b6b561889acea59191d66986d314e7cbd6a78abc6e49b139ca91c1e6
3
+ size 500058
training.log CHANGED
The diff for this file is too large to render. See raw diff