clmmasking-waldomodel / adapter_config.json
{
"base_model_name_or_path": "distilgpt2",
"inference_mode": true,
"num_attention_heads": 12,
"num_layers": 12,
"num_transformer_submodules": 1,
"num_virtual_tokens": 8,
"peft_type": "PROMPT_TUNING",
"prompt_tuning_init": "TEXT",
"prompt_tuning_init_text": "Write a complete resume point for the given job.",
"task_type": "CAUSAL_LM",
"token_dim": 768,
"tokenizer_name_or_path": "distilgpt2"
}
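
A minimal sketch of loading this prompt-tuning adapter for inference with the PEFT library. The repo id "isashap/clmmasking-waldomodel" is assumed from the page header; substitute the correct hub id or a local path to the directory containing this adapter_config.json.

from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftConfig, PeftModel

peft_model_id = "isashap/clmmasking-waldomodel"  # assumed repo id; adjust if different

# Read the adapter config to locate the base model and tokenizer (distilgpt2).
config = PeftConfig.from_pretrained(peft_model_id)

# Load the frozen base causal LM and wrap it with the prompt-tuning adapter,
# which prepends the 8 learned virtual tokens (num_virtual_tokens) to each input.
base_model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(base_model, peft_model_id)
tokenizer = AutoTokenizer.from_pretrained(config.tokenizer_name_or_path)

# Prompt in the style of prompt_tuning_init_text and generate a resume point.
inputs = tokenizer("Write a complete resume point for the given job. Job: Data Analyst", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=40)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))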