File size: 636 Bytes
{
"config": {
"alpha": 64,
"architecture": "lora",
"attn_matrices": [
"q",
"v"
],
"composition_mode": "add",
"dropout": 0.1,
"init_weights": "lora",
"intermediate_lora": false,
"leave_out": [
12,
13,
14,
15
],
"output_lora": false,
"r": 32,
"selfattn_lora": true,
"use_gating": false
},
"config_id": "97ce7e3198bb142b",
"hidden_size": 2048,
"model_class": "LlamaForCausalLM",
"model_name": "meta-llama/Llama-3.2-1B-Instruct",
"model_type": "llama",
"name": "llama23-1b-instruct-lora32-minus4-is",
"version": "adapters.1.0.0"
}
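The values above are the serialized LoRA settings for this adapter: rank 32, alpha 64, LoRA applied to the self-attention query and value projections, and layers 12-15 left out. As a rough illustration, the following sketch shows how an equivalent adapter could be created with the AdapterHub `adapters` library (v1.x, matching the `version` field). The loading flow and variable names are assumptions; the hyperparameters are copied from the config above.

```python
# Minimal sketch, assuming the AdapterHub `adapters` library and `transformers`.
import adapters
from adapters import LoRAConfig
from transformers import AutoModelForCausalLM

# Load the base model named in the config and add adapter support to it.
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
adapters.init(model)

# Recreate the LoRA configuration shown in the JSON above.
config = LoRAConfig(
    r=32,
    alpha=64,
    dropout=0.1,
    attn_matrices=["q", "v"],    # LoRA on the query and value projections only
    selfattn_lora=True,
    intermediate_lora=False,
    output_lora=False,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
    leave_out=[12, 13, 14, 15],  # skip the last four of the model's 16 decoder layers
)

# Attach the adapter under the name from the config and make it trainable.
model.add_adapter("llama23-1b-instruct-lora32-minus4-is", config=config)
model.train_adapter("llama23-1b-instruct-lora32-minus4-is")
```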