german_lora32/adapter_config.json
{
  "config": {
    "alpha": 64,
    "architecture": "lora",
    "attn_matrices": [
      "q",
      "v"
    ],
    "composition_mode": "add",
    "dropout": 0.1,
    "init_weights": "lora",
    "intermediate_lora": false,
    "leave_out": [],
    "output_lora": false,
    "r": 32,
    "selfattn_lora": true,
    "use_gating": false
  },
  "config_id": "6b97e74b95889514",
  "hidden_size": 768,
  "model_class": "DebertaV2ForMaskedLM",
  "model_name": "microsoft/mdeberta-v3-base",
  "model_type": "deberta-v2",
  "name": "cc100",
  "version": "adapters.1.0.0"
}
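
A minimal sketch (not the original training script) of how an equivalent LoRA setup could be recreated with the `adapters` library: the rank, alpha, dropout, attention matrices, and adapter name "cc100" are taken from the config above; the base model is microsoft/mdeberta-v3-base as recorded in "model_name".

# Sketch: rebuild the LoRA configuration described in this adapter_config.json
# using the adapters library (version "adapters.1.0.0" per the config).
from transformers import AutoModelForMaskedLM
import adapters
from adapters import LoRAConfig

model = AutoModelForMaskedLM.from_pretrained("microsoft/mdeberta-v3-base")
adapters.init(model)  # make the plain transformers model adapter-capable

lora_config = LoRAConfig(
    r=32,                       # LoRA rank
    alpha=64,                   # scaling factor
    dropout=0.1,
    attn_matrices=["q", "v"],   # inject LoRA into query and value projections
    selfattn_lora=True,
    intermediate_lora=False,
    output_lora=False,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
)

model.add_adapter("cc100", config=lora_config)
model.train_adapter("cc100")  # freeze the base model, train only the LoRA weights

Alternatively, the uploaded folder itself can be attached to an initialized model with model.load_adapter(...), pointing at this repository or a local copy of the german_lora32 folder.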