{
    "base_model_name_or_path": "meta-llama/Llama-3.2-3B-Instruct",
    "peft_type": "LORA",
    "r": 8,
    "lora_alpha": 16,
    "lora_dropout": 0.0,
    "task_type": "CAUSAL_LM"
}