Upload folder using huggingface_hub (#2)
- f586138748e6e5c2e89bde0ddaa50a043c4251868f14b66f89fe335a01b6e97f (4b166ac5b0d8c1ea93f3ef87bed02dd7bc5fed31)
- 914e0fa64d34d01dcee78159e7536c6f39e48d9de28ef24b752ecb85f4211e53 (4036968d53d22dcb02d9d83afc24b5e4986932f6)
- config.json +2 -1
- generation_config.json +1 -1
- model.safetensors +1 -1
config.json
CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -35,7 +36,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
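The config.json change adds the "attention_dropout" field (introduced for Llama configs around transformers 4.36) and records transformers_version 4.36.2. A minimal sketch for checking the updated fields after download, assuming a placeholder repo id ("your-org/your-llama-model" is not the actual repository):

    from transformers import AutoConfig

    # Placeholder repo id; substitute the real model repository.
    config = AutoConfig.from_pretrained("your-org/your-llama-model")
    print(config.attention_dropout)     # 0.0, the field added in this commit
    print(config.transformers_version)  # "4.36.2"
    print(config.torch_dtype)           # float16, unchanged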
generation_config.json
CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.36.2"
 }
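generation_config.json only updates the recorded transformers_version; the bos/eos/pad token ids are unchanged. A quick inspection sketch, using the same placeholder repo id:

    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained("your-org/your-llama-model")
    print(gen_config.bos_token_id, gen_config.eos_token_id, gen_config.pad_token_id)  # 1 2 0
    print(gen_config.transformers_version)  # "4.36.2"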
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c1cd020bfca185a9bd451725e7e39203e8f7f5daae0c2ae5521a3317f715d9e6
 size 3767120584
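model.safetensors is tracked with Git LFS, so the diff only replaces the sha256 oid in the pointer file; the size (3767120584 bytes, about 3.8 GB) is unchanged. A minimal sketch for verifying a locally downloaded copy against the new pointer ("model.safetensors" is assumed to be the local path):

    import hashlib, os

    path = "model.safetensors"  # assumed local download path
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash the file in chunks to avoid loading ~3.8 GB into memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    print(sha.hexdigest())        # expect c1cd020bfca185a9bd451725e7e39203e8f7f5daae0c2ae5521a3317f715d9e6
    print(os.path.getsize(path))  # expect 3767120584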