mamkkl committed
Commit 2556263 · verified · 1 Parent(s): f405f85

Update adapter_config.json

Files changed (1):
  adapter_config.json +2 -20
adapter_config.json CHANGED
@@ -1,26 +1,8 @@
 {
+  "peft_type": "LORA",
   "auto_mapping": null,
   "base_model_name_or_path": "Neko-Institute-of-Science/LLaMA-65B-HF",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 16,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
   "revision": null,
-  "target_modules": [
-    "q_proj",
-    "k_proj",
-    "v_proj",
-    "o_proj",
-    "gate_proj",
-    "up_proj",
-    "down_proj"
-  ],
   "task_type": "CAUSAL_LM"
+  "inference_mode": true
 }
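
The commit trims the adapter config down to the fields PEFT needs for dispatch and base-model lookup ("peft_type", "base_model_name_or_path", "task_type", "inference_mode"), dropping the LoRA hyperparameters ("r", "lora_alpha", "lora_dropout", "target_modules", among others). A minimal sketch of how such a file is typically consumed, assuming the peft library and a hypothetical repo id; keys absent from the JSON would presumably fall back to LoraConfig's dataclass defaults rather than the values the adapter was trained with:

from peft import PeftConfig

# "peft_type": "LORA" makes from_pretrained dispatch to LoraConfig.
# The repo id below is hypothetical; substitute the actual adapter
# repository or a local directory containing adapter_config.json.
config = PeftConfig.from_pretrained("mamkkl/adapter-repo")

print(config.base_model_name_or_path)  # Neko-Institute-of-Science/LLaMA-65B-HF
print(config.task_type)                # CAUSAL_LM
# Keys removed by this commit (e.g. "r", "target_modules") are no longer
# read from the file, so config.r and similar fields take LoraConfig's
# defaults, not necessarily the values used during training.
print(config.r)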