{
  "config": {
    "alpha": 16,
    "architecture": "lora",
    "attn_matrices": [
      "q",
      "v"
    ],
    "composition_mode": "add",
    "dropout": 0.1,
    "init_weights": "lora",
    "intermediate_lora": false,
    "leave_out": [],
    "output_lora": false,
    "r": 8,
    "selfattn_lora": true,
    "use_gating": false
  },
  "config_id": "3fbd25b704780a8b",
  "hidden_size": 768,
  "model_class": "DebertaV2ForMaskedLM",
  "model_name": "microsoft/mdeberta-v3-base",
  "model_type": "deberta-v2",
  "name": "cc100",
  "version": "adapters.1.0.0"
}