add updated config.json
psyLlama_v1_llama_8b_instruct/config.json CHANGED
@@ -1,8 +1,9 @@
 {
   "model_type": "llama",
-  "
-  "
-  "
+  "architectures": ["LLaMAForCausalLM"],
+  "hidden_size": 4096,
+  "num_hidden_layers": 32,
+  "num_attention_heads": 32,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 512,
+  "vocab_size": 50265,
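For a quick sanity check, the updated config can be loaded with the Hugging Face transformers library. A minimal sketch, assuming transformers is installed and the psyLlama_v1_llama_8b_instruct directory from the file header above is available locally; the asserted values come straight from the diff:

    from transformers import AutoConfig

    # Load config.json from the local repo directory shown in the file header.
    # model_type "llama" resolves this to a LlamaConfig instance.
    config = AutoConfig.from_pretrained("psyLlama_v1_llama_8b_instruct")

    # The fields added in this commit should round-trip through the loader.
    assert config.model_type == "llama"
    assert config.hidden_size == 4096
    assert config.num_hidden_layers == 32
    assert config.num_attention_heads == 32
    assert config.intermediate_size == 8192
    assert config.max_position_embeddings == 512
    assert config.vocab_size == 50265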