Commit
•
b35fd71
1
Parent(s):
cb08879
Add missing rope scaling
Browse files — config.json (+4 −1)
config.json
CHANGED
@@ -36,7 +36,10 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
-  "
+  "rope_scaling" : {
+    "factor": 2.5,
+    "type": "linear"
+  },
   "max_position_embeddings": 4096,
   "model_type": "llama",
   "pad_token_id": 0,