egemenNB committed on
Commit 77f900c
1 Parent(s): ae4fbe2

Create adapter_config.json


Add adapter_config.json for LoRA configuration

Files changed (1)
  1. adapter_config.json +8 -0
adapter_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "base_model_name_or_path": "bigscience/bloom-1b7",
+   "r": 8,
+   "lora_alpha": 32,
+   "lora_dropout": 0.1,
+   "bias": "none",
+   "task_type": "CAUSAL_LM"
+ }
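
For reference, the same configuration can be built programmatically with PEFT's `LoraConfig` and serialized with `save_pretrained`, which writes `adapter_config.json`. A minimal sketch, assuming the `peft` library is installed; `target_modules` and the output directory are illustrative additions and do not appear in the committed file:

```python
from peft import LoraConfig

# Field meanings:
#   r            - low-rank dimension, typically a small number like 8 or 16
#   lora_alpha   - scaling factor for the LoRA layers
#   lora_dropout - dropout probability in the LoRA layers
#   bias         - which bias parameters to train: "none", "all", or "lora_only"
#   task_type    - the type of task, e.g. "CAUSAL_LM" or "SEQ_2_SEQ_LM"
config = LoraConfig(
    base_model_name_or_path="bigscience/bloom-1b7",
    r=8,
    lora_alpha=32,
    lora_dropout=0.1,
    bias="none",
    task_type="CAUSAL_LM",
    # Assumption: BLOOM's fused attention projection; not in the committed file.
    target_modules=["query_key_value"],
)

# Writes <dir>/adapter_config.json, filling in PEFT defaults such as
# "peft_type": "LORA". "adapter_out" is a hypothetical output directory.
config.save_pretrained("adapter_out")
```

If `target_modules` is omitted, recent PEFT versions attempt to infer it for known architectures such as BLOOM, but setting it explicitly keeps the adapter configuration unambiguous.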