Upload folder using huggingface_hub
Files changed: README.md (+1 -0), config.json (+1 -1)
README.md CHANGED
@@ -82,6 +82,7 @@ config_json['conv_dim_out'] = 64
 config_json['hidden_size'] = 64
 config_json['intermediate_size'] = 128
 config_json['num_attention_heads'] = 2
+config_json['num_heads'] = 2
 config_json['num_hidden_layers'] = 2
 config_json['num_key_value_heads'] = 1
 config_json['tie_word_embeddings'] = True
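For context, here is a minimal runnable sketch of how the patched README snippet fits together. Only the key assignments above come from the README; the load/save plumbing and the `config.json` path are assumptions for illustration.

```python
import json

# Load the model config shipped with the repo (path assumed for illustration).
with open("config.json") as f:
    config_json = json.load(f)

# Shrink the config to a tiny test-sized model, as in the README snippet.
config_json['hidden_size'] = 64
config_json['intermediate_size'] = 128
config_json['num_attention_heads'] = 2
config_json['num_heads'] = 2  # added by this commit; mirrors num_attention_heads
config_json['num_hidden_layers'] = 2
config_json['num_key_value_heads'] = 1
config_json['tie_word_embeddings'] = True

# Write the patched config back out.
with open("config.json", "w") as f:
    json.dump(config_json, f, indent=2)
```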
config.json CHANGED
@@ -30,7 +30,7 @@
   "model_type": "lfm2",
   "norm_eps": 1e-05,
   "num_attention_heads": 2,
-  "num_heads":
+  "num_heads": 2,
   "num_hidden_layers": 2,
   "num_key_value_heads": 1,
   "pad_token_id": 0,
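A quick sanity check (an assumption for illustration, not part of the commit) that the patched config.json parses and that the new key agrees with the existing attention-head count:

```python
import json

# Parse the patched config and confirm num_heads now carries a value
# matching num_attention_heads.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["model_type"] == "lfm2"
assert cfg["num_heads"] == cfg["num_attention_heads"] == 2
print("config OK:", cfg["num_hidden_layers"], "hidden layers")
```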