Commit b35fd71
Parent(s): cb08879
Author: RaushanTurganbay (HF staff)

missing rope scaling

Files changed (1):
  config.json +4 -1
config.json CHANGED
@@ -36,7 +36,10 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
-  "factor": 2.0,
+  "rope_scaling" : {
+    "factor": 2.5,
+    "type": "linear"
+  },
   "max_position_embeddings": 4096,
   "model_type": "llama",
   "pad_token_id": 0,