|
cutoff_len: 512
dataset: physics
dataset_dir: data
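# Note (assumption: this is a LLaMA-Factory config): the name "physics" is
# resolved against the dataset_info.json registry inside dataset_dir.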
|
ddp_timeout: 180000000
do_train: true
finetuning_type: lora
flash_attn: auto
fp16: true
gradient_accumulation_steps: 8
include_num_input_tokens_seen: true
learning_rate: 3.0e-05
logging_steps: 5
lora_alpha: 16
lora_dropout: 0
lora_rank: 8
lora_target: all
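# Note: the LoRA scaling factor is lora_alpha / lora_rank = 16 / 8 = 2;
# lora_target: all attaches the rank-8 adapters to every linear layer
# (assuming the LLaMA-Factory convention for "all").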
|
lr_scheduler_type: cosine
max_grad_norm: 1.0
max_samples: 10000
model_name_or_path: mistralai/Mistral-7B-Instruct-v0.1
num_train_epochs: 1.0
optim: adamw_torch
output_dir: saves/Mistral-7B-v0.1-Chat/lora/mistral_physs
packing: false
per_device_train_batch_size: 8
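# Note: effective batch size = per_device_train_batch_size (8)
# x gradient_accumulation_steps (8) = 64 samples per optimizer step
# on each device; multiply by the GPU count for the global batch.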
|
plot_loss: true
preprocessing_num_workers: 16
report_to: none
save_steps: 100
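# Note: a LoRA checkpoint is written to output_dir every 100 optimizer steps.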
|
stage: sft
template: mistral
warmup_steps: 0
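# Note: with warmup_steps: 0 the cosine schedule starts at the full
# 3.0e-05 learning rate and decays toward zero over the single epoch.
#
# Usage sketch (assumption: LLaMA-Factory CLI; the file name is hypothetical):
#   llamafactory-cli train mistral_physics_sft.yaml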
|
|