adapter_t5-3b_qqp / config.json
{
  "backbone_checkpoint_name": "t5-3b",
  "backbone_class": "T5ForConditionalGeneration",
  "backbone_hash": "9f688d26560cafe96c021bbfc020b6d7",
  "bottleneck_dim": 24,
  "common_structure": true,
  "delta_type": "adapter",
  "modified_modules": [
    "attn",
    "ff"
  ],
  "non_linearity": "gelu_new",
  "opendelta_version": "0.0.1",
  "sequential": true,
  "transformers_version": "4.10.0",
  "unfrozen_modules": [
    "deltas",
    "layer_norm",
    "final_layer_norm"
  ]
}
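
Below is a minimal sketch of how a configuration like this might be applied with OpenDelta, assuming the `AdapterModel` class and `freeze_module` method behave as in current OpenDelta releases (the exact API of the recorded version 0.0.1 may differ). The checkpoint name, adapter hyperparameters, and module lists are taken from the config above; everything else is illustrative, not the repository's own training script.

```python
# Sketch: attach a bottleneck adapter to t5-3b following the fields in config.json.
# Assumes the OpenDelta AdapterModel API; argument names may vary across versions.
from transformers import T5ForConditionalGeneration
from opendelta import AdapterModel

# Backbone named in "backbone_checkpoint_name" / "backbone_class".
backbone = T5ForConditionalGeneration.from_pretrained("t5-3b")

# Mirror the delta settings: a 24-dimensional bottleneck with gelu_new
# non-linearity, inserted at the attention and feed-forward sub-layers
# ("modified_modules"). The config also records "sequential": true, i.e.
# the adapter is applied sequentially after each modified sub-layer.
delta_model = AdapterModel(
    backbone_model=backbone,
    bottleneck_dim=24,
    non_linearity="gelu_new",
    modified_modules=["attn", "ff"],
)

# Freeze everything except the modules listed in "unfrozen_modules".
delta_model.freeze_module(exclude=["deltas", "layer_norm", "final_layer_norm"])
delta_model.log()  # report which parameters remain trainable
```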