{
    "adapter_path": "adapters",
    "lora_layers": 8,
    "lora_parameters": {
        "rank": 16,
        "alpha": 16,
        "dropout": 0.0,
        "scale": 1.0
    }
}