{ "base_model_name_or_path": "bigscience/bloom-1b7", // The base model you are adapting "r": 8, // Low-rank dimension, typically a small number like 8 or 16 "lora_alpha": 32, // Scaling factor for the LoRA layers "lora_dropout": 0.1, // Dropout probability in the LoRA layers "bias": "none", // Whether to use bias in LoRA layers ('none', 'all', or 'lora_only') "task_type": "CAUSAL_LM" // The type of task (e.g., 'CAUSAL_LM', 'SEQ_2_SEQ_LM', etc.) }