allstax committed
Commit 995bb53 · 1 Parent(s): 41df484

Pushing fine-tuned model

Files changed (2)
  1. adapter_config.json +2 -2
  2. generation_config.json +0 -1
adapter_config.json CHANGED
@@ -19,10 +19,10 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "k_proj",
  "v_proj",
  "o_proj",
- "q_proj"
+ "q_proj",
+ "k_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_rslora": false
generation_config.json CHANGED
@@ -2,6 +2,5 @@
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
- "pad_token_id": 0,
  "transformers_version": "4.37.0.dev0"
  }
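For reference, the post-commit generation_config.json corresponds to a transformers GenerationConfig with only BOS/EOS set; a minimal sketch follows. The behavior note in the comment reflects standard transformers defaults, not anything stated in this commit.

from transformers import GenerationConfig

# Sketch of the post-commit generation config: pad_token_id is no longer pinned to 0.
# When pad_token_id is unset, generate() typically falls back to eos_token_id for padding.
generation_config = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
)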