File size: 350 Bytes
726fa4d
{
  "num_train_epochs": 10,
  "learning_rate": 0.0003,
  "cutoff_len": 512,
  "lora_r": 16,
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "lora_target_modules": [
    "q_proj",
    "v_proj",
    "k_proj",
    "o_proj"
  ],
  "train_on_inputs": true,
  "group_by_length": false,
  "save_steps": 2000,
  "save_total_limit": 10,
  "logging_steps": 10
}
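
These hyperparameters read like a LoRA fine-tuning configuration for a causal language model (rank-16 adapters on the q/k/v/o attention projections, 10 epochs at a 3e-4 learning rate). As a minimal sketch only, here is how such a file might be consumed with Hugging Face peft and transformers; the file name "training_config.json", the output directory "lora-out", and the field-to-argument mapping are assumptions for illustration, since the training script itself is not part of this file.

import json

from peft import LoraConfig
from transformers import TrainingArguments

# Load the hyperparameter file (path is a placeholder for illustration).
with open("training_config.json") as f:
    cfg = json.load(f)

# LoRA adapter settings: rank-16 low-rank updates on the four attention
# projections, scaled by lora_alpha / lora_r and regularised with 5% dropout.
lora_config = LoraConfig(
    r=cfg["lora_r"],
    lora_alpha=cfg["lora_alpha"],
    lora_dropout=cfg["lora_dropout"],
    target_modules=cfg["lora_target_modules"],
    task_type="CAUSAL_LM",
)

# Optimisation and checkpointing settings. Note that "cutoff_len" and
# "train_on_inputs" are not Trainer arguments; in LoRA training scripts they
# are usually applied at tokenisation time (maximum sequence length, and
# whether prompt tokens contribute to the loss).
training_args = TrainingArguments(
    output_dir="lora-out",  # hypothetical output directory
    num_train_epochs=cfg["num_train_epochs"],
    learning_rate=cfg["learning_rate"],
    group_by_length=cfg["group_by_length"],
    save_steps=cfg["save_steps"],
    save_total_limit=cfg["save_total_limit"],
    logging_steps=cfg["logging_steps"],
)

With save_steps at 2000 and save_total_limit at 10, only the ten most recent checkpoints are kept on disk, while logging_steps=10 keeps the training loss curve reasonably fine-grained.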