{
  "adapter_layers": 30,
  "adapter_len": 10,
  "auto_mapping": null,
  "base_model_name_or_path": ".cache/huggingface/hub/models--meta-llama--Meta-Llama-3-8B/snapshots/62bd457b6fe961a42a631306577e622c83876cb6/",
  "inference_mode": true,
  "peft_type": "ADAPTION_PROMPT",
  "revision": null,
  "target_modules": "self_attn",
  "task_type": "CAUSAL_LM"
}
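
The same configuration can be built programmatically. The following is a minimal sketch, assuming the `peft` and `transformers` libraries are installed and the base model is pulled by its Hub id rather than the cached snapshot path above; it recreates the adaption-prompt settings from this file and attaches them to the base model.

```python
# Minimal sketch: recreate the adapter_config.json above with peft's
# AdaptionPromptConfig and wrap the base Llama-3-8B model with it.
from transformers import AutoModelForCausalLM
from peft import AdaptionPromptConfig, get_peft_model

config = AdaptionPromptConfig(
    adapter_layers=30,           # "adapter_layers": layers that receive adaption prompts
    adapter_len=10,              # "adapter_len": learnable prompt tokens per layer
    target_modules="self_attn",  # "target_modules": attention module the prompts hook into
    task_type="CAUSAL_LM",       # "task_type"
)

# Assumption: loading the base model by Hub id instead of the local snapshot path.
base_model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")
model = get_peft_model(base_model, config)
model.print_trainable_parameters()
```

To load already-trained adapter weights saved alongside this config (the directory name below is a placeholder), `PeftModel.from_pretrained(base_model, "path/to/adapter_dir")` can be used instead; since `inference_mode` is `true`, the adapter is loaded frozen for inference by default.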