Update config.json
config.json (CHANGED, +4 -5)
@@ -1,4 +1,5 @@
 {
+{
   "_name_or_path": "unsloth/Meta-Llama-3.1-8B-bnb-4bit",
   "architectures": [
     "LlamaForCausalLM"
@@ -23,10 +24,7 @@
   "rope_scaling": {
     "type": "linear",
     "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
+    "rope_type": "linear"
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
@@ -34,5 +32,6 @@
   "transformers_version": "4.43.3",
   "unsloth_version": "2024.8",
   "use_cache": true,
-  "vocab_size": 128256
+  "vocab_size": 128256,
+  "attn_implementation": null
 }
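The removed lines are the Llama-3.1-style rope_scaling fields; transformers releases before 4.43 validate rope_scaling as a two-field dictionary ("type" and "factor") and refuse to load configs that carry the extra llama3 keys, which is presumably what this edit works around. Below is a minimal sanity-check sketch, not part of the commit; the local path "config.json" is an assumption.

# Sketch: verify the edited config still parses and inspect the rope_scaling block.
# Assumes the updated file has been saved locally as "config.json" (hypothetical path).
import json

with open("config.json") as f:
    cfg = json.load(f)  # raises json.JSONDecodeError if the edit left the file malformed

# After this change, rope_scaling should expose only the linear-scaling fields
# that pre-4.43 transformers versions accept.
print(cfg["rope_scaling"])
print(cfg.get("attn_implementation"))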