adapter_t5-3b_mnli / config.json
{
  "backbone_checkpoint_name": "t5-3b",
  "backbone_class": "T5ForConditionalGeneration",
  "backbone_hash": "7c1a2586dbc6502f9a137cd79f50b3a4",
  "bottleneck_dim": 24,
  "common_structure": true,
  "delta_type": "adapter",
  "modified_modules": [
    "attn",
    "ff"
  ],
  "non_linearity": "gelu_new",
  "opendelta_version": "0.0.1",
  "sequential": true,
  "transformers_version": "4.10.0",
  "unfrozen_modules": [
    "deltas",
    "layer_norm",
    "final_layer_norm"
  ]
}
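
For reference, a minimal sketch of how a delta configuration like the one above could be instantiated with OpenDelta on a T5-3B backbone. This is not the repository's official loading script; the AdapterModel constructor arguments and freeze_module call follow the OpenDelta documentation, and their exact signatures in version 0.0.1 should be treated as assumptions.

# Sketch only: build sequential bottleneck adapters matching config.json.
from transformers import T5ForConditionalGeneration
from opendelta import AdapterModel

# Backbone named in "backbone_checkpoint_name".
backbone = T5ForConditionalGeneration.from_pretrained("t5-3b")

# Mirror the config: adapters of bottleneck dimension 24 with gelu_new,
# injected at the attention ("attn") and feed-forward ("ff") sub-layers.
delta_model = AdapterModel(
    backbone_model=backbone,
    bottleneck_dim=24,
    non_linearity="gelu_new",
    modified_modules=["attn", "ff"],
)

# Keep only the modules listed in "unfrozen_modules" trainable.
delta_model.freeze_module(exclude=["deltas", "layer_norm", "final_layer_norm"])
delta_model.log()  # print which parameters remain trainable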