WilliamGazeley committed
Commit b587cfc
1 parent: ad8f797

Upload LlamaForCausalLM

Files changed (3):
  1. config.json +4 -2
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "models/irai/tiny-chat-cpt-v2/sft/merged",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -11,6 +12,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 5632,
   "max_position_embeddings": 4096,
+  "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
@@ -21,7 +23,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.39.3",
+  "transformers_version": "4.41.1",
   "use_cache": false,
-  "vocab_size": 32000
+  "vocab_size": 32002
 }
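The functional change in this file is the vocabulary growing from 32000 to 32002 entries; the rest is the serialized transformers version moving from 4.39.3 to 4.41.1, plus the `mlp_bias` field that newer transformers releases write out for Llama configs. A minimal sketch for confirming the checkpoint and tokenizer agree after such a resize (the repo path below is a placeholder, not the actual model id):

```python
from transformers import AutoConfig, AutoTokenizer

repo = "path/to/this-repo"  # placeholder; substitute the actual model id
config = AutoConfig.from_pretrained(repo)
tokenizer = AutoTokenizer.from_pretrained(repo)

print(config.vocab_size)  # 32002 after this commit
print(len(tokenizer))     # should also report 32002 if the added tokens ship with the tokenizer
```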
generation_config.json CHANGED
@@ -4,5 +4,5 @@
   "eos_token_id": 2,
   "max_length": 2048,
   "pad_token_id": 0,
-  "transformers_version": "4.39.3"
+  "transformers_version": "4.41.1"
 }
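Only the recorded transformers version changes here; the generation settings themselves (eos_token_id=2, max_length=2048, pad_token_id=0) are untouched. For reference, a minimal sketch for inspecting them (placeholder repo path):

```python
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/this-repo")  # placeholder; use the actual model id
print(gen_config.eos_token_id, gen_config.max_length, gen_config.pad_token_id)  # 2 2048 0
```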
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6a5e0a13e3302318e44ce34263025c2da9db4eb4f8d646adaa85ace70e3deb45
-size 4400216536
+oid sha256:6bc615f880d52a1b8c4d7c96048c0363437cce6d32e9627e08948aea09db526f
+size 4400249304
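The Git LFS pointer records a new SHA-256 and a 32,768-byte size increase, which is consistent with the two extra vocabulary rows: assuming a hidden size of 2048 (typical for this 22-layer TinyLlama-style config, and not shown in the diff), each added token contributes one float32 row to both the input embeddings and the untied lm_head. A quick back-of-the-envelope check:

```python
# Assumption: hidden_size = 2048 (not part of this diff).
added_tokens = 32002 - 32000   # vocab_size change from config.json
hidden_size = 2048             # assumed
bytes_per_param = 4            # torch_dtype is float32
matrices = 2                   # embed_tokens + lm_head (tie_word_embeddings is false)

print(added_tokens * hidden_size * bytes_per_param * matrices)  # 32768
print(4400249304 - 4400216536)                                  # 32768
```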