egemenNB committed on
Commit
88c9215
1 Parent(s): 77f900c

Update adapter_config.json

Browse files
Files changed (1) hide show
  1. adapter_config.json +6 -6
adapter_config.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
- "base_model_name_or_path": "bigscience/bloom-1b7", // The base model you are adapting
3
- "r": 8, // Low-rank dimension, typically a small number like 8 or 16
4
- "lora_alpha": 32, // Scaling factor for the LoRA layers
5
- "lora_dropout": 0.1, // Dropout probability in the LoRA layers
6
- "bias": "none", // Whether to use bias in LoRA layers ('none', 'all', or 'lora_only')
7
- "task_type": "CAUSAL_LM" // The type of task (e.g., 'CAUSAL_LM', 'SEQ_2_SEQ_LM', etc.)
8
  }
 
1
  {
2
+ "base_model_name_or_path": "bigscience/bloom-1b7",
3
+ "r": 8,
4
+ "lora_alpha": 32,
5
+ "lora_dropout": 0.1,
6
+ "bias": "none",
7
+ "task_type": "CAUSAL_LM"
8
  }