prefix32/adapter_config.json
{
  "config": {
    "architecture": "prefix_tuning",
    "bottleneck_size": 512,
    "cross_prefix": true,
    "dropout": 0.0,
    "encoder_prefix": true,
    "flat": false,
    "leave_out": [],
    "non_linearity": "tanh",
    "prefix_length": 30,
    "shared_gating": true,
    "use_gating": false
  },
  "config_id": "648bf22f5afeaaa6",
  "hidden_size": 2048,
  "model_class": "LlamaForCausalLM",
  "model_name": "meta-llama/Llama-3.2-1B-Instruct",
  "model_type": "llama",
  "name": "llama23-1b-prefix-is",
  "version": "adapters.1.0.0"
}
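For context, the sketch below shows how a prefix-tuning adapter with these settings could be recreated with the `adapters` library (version 1.x, as indicated by the "version" field) on top of the base model named in "model_name". This is a minimal, illustrative setup, not the exact training script behind this checkpoint; the adapter name is taken from the "name" field above, and it assumes `adapters` and `transformers` are installed and the gated Llama base model is accessible.

# Minimal sketch: rebuilding this prefix-tuning configuration with the `adapters` library.
# Assumes `pip install adapters transformers` and access to meta-llama/Llama-3.2-1B-Instruct.
from transformers import AutoModelForCausalLM
import adapters
from adapters import PrefixTuningConfig

model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
adapters.init(model)  # add adapter support to the plain transformers model

# Mirror the fields from the "config" block in adapter_config.json above.
prefix_config = PrefixTuningConfig(
    prefix_length=30,
    bottleneck_size=512,
    flat=False,            # use a bottleneck MLP to produce the prefix, not flat parameters
    non_linearity="tanh",
    dropout=0.0,
    leave_out=[],           # apply the prefix to all layers
    use_gating=False,
)

# Register and activate the adapter under the name recorded in the config.
model.add_adapter("llama23-1b-prefix-is", config=prefix_config)
model.set_active_adapters("llama23-1b-prefix-is")
model.train_adapter("llama23-1b-prefix-is")  # freeze the base model, train only the prefix

Note that fields such as "hidden_size", "model_class", and "config_id" are metadata written by the library when the adapter is saved; they are not passed to PrefixTuningConfig. The "encoder_prefix" and "cross_prefix" flags shown in the JSON only affect encoder-decoder models and are left at their defaults here for a decoder-only Llama model.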