{ "config": { "alpha": 64, "architecture": "lora", "attn_matrices": [ "q", "v" ], "composition_mode": "add", "dropout": 0.1, "init_weights": "lora", "intermediate_lora": false, "leave_out": [ 12, 13, 14, 15 ], "output_lora": false, "r": 32, "selfattn_lora": true, "use_gating": false }, "config_id": "97ce7e3198bb142b", "hidden_size": 2048, "model_class": "LlamaForCausalLM", "model_name": "meta-llama/Llama-3.2-1B-Instruct", "model_type": "llama", "name": "llama23-1b-instruct-lora32-minus4-is", "version": "adapters.1.0.0" }