falcon-7b-sharded / adapter_config.json
{
"auto_mapping": null,
"base_model_name_or_path": "cosmin/falcon-7b-sharded-bf16",
"encoder_dropout": 0.0,
"encoder_hidden_size": 128,
"encoder_num_layers": 2,
"encoder_reparameterization_type": "MLP",
"inference_mode": true,
"num_attention_heads": 71,
"num_layers": 32,
"num_transformer_submodules": 1,
"num_virtual_tokens": 20,
"peft_type": "P_TUNING",
"revision": null,
"task_type": "QUESTION_ANS",
"token_dim": 4544
}
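For context, a minimal sketch of how a P-tuning adapter config like this is typically consumed with the PEFT library. The adapter repo id "hung200504/falcon-7b-sharded" is inferred from the page header above and is an assumption; the base model comes from "base_model_name_or_path" in the config, and the trust_remote_code flag is a precaution for Falcon checkpoints, not something this file confirms.

# Minimal sketch, assuming the adapter weights live alongside this config
# in the "hung200504/falcon-7b-sharded" repo; adjust the id if they don't.
from transformers import AutoModelForQuestionAnswering
from peft import PeftConfig, PeftModel

adapter_id = "hung200504/falcon-7b-sharded"  # assumed repo id

# Reads the adapter_config.json shown above; peft_config.peft_type is
# P_TUNING and peft_config.num_virtual_tokens is 20.
peft_config = PeftConfig.from_pretrained(adapter_id)

base = AutoModelForQuestionAnswering.from_pretrained(
    peft_config.base_model_name_or_path,  # "cosmin/falcon-7b-sharded-bf16"
    trust_remote_code=True,  # Falcon checkpoints may ship custom modeling code
)

# Wraps the base model and prepends the 20 learned virtual tokens on each forward pass.
model = PeftModel.from_pretrained(base, adapter_id)
model.eval()  # matches "inference_mode": true

Of the fields above, the encoder_* entries describe the reparameterization network that produces the virtual-token embeddings (a 2-layer MLP with hidden size 128 and no dropout), while num_attention_heads (71), num_layers (32), and token_dim (4544) simply mirror the Falcon-7B base architecture.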