Nevil9 committed on
Commit 98a1e6e
1 Parent(s): 1550374

add updated config.json

psyLlama_v1_llama_8b_instruct/config.json CHANGED
@@ -1,8 +1,9 @@
  {
  "model_type": "llama",
- "hidden_size": 2048,
- "num_hidden_layers": 12,
- "num_attention_heads": 16,
+ "architectures": ["LLaMAForCausalLM"],
+ "hidden_size": 4096,
+ "num_hidden_layers": 32,
+ "num_attention_heads": 32,
  "intermediate_size": 8192,
  "max_position_embeddings": 512,
  "vocab_size": 50265,