gpt4-alpaca-lora-7b-llm_tuner / llm-tuner-config.json
Example configuration for llm-tuner; replace the placeholder paths before use.
{
  "data_path": "path/to/your/data/file",
  "num_epochs": 3,
  "cutoff_len": 512,
  "group_by_length": true,
  "output_dir": "path/to/your/output/directory",
  "batch_size": 4,
  "micro_batch_size": 1,
  "lr": 5e-5,
  "warmup_steps": 100,
  "weight_decay": 0.01,
  "adam_epsilon": 1e-8,
  "max_grad_norm": 1.0,
  "logging_steps": 50,
  "save_steps": 500,
  "eval_steps": 500,
  "overwrite_output_dir": true,
  "do_train": true,
  "do_eval": true,
  "do_predict": true
}
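
Since JSON does not allow comments, here is a minimal Python sketch of how a training script might consume this file. The file path and the gradient-accumulation arithmetic (batch_size divided by micro_batch_size) are assumptions for illustration, not something llm-tuner documents in this config itself.

import json

# Load the config shown above (the path is an assumption for this sketch).
with open("llm-tuner-config.json") as f:
    cfg = json.load(f)

# With batch_size=4 and micro_batch_size=1, a common interpretation is that
# the effective batch is reached by accumulating gradients over
# batch_size / micro_batch_size micro-batches per optimizer step.
assert cfg["batch_size"] % cfg["micro_batch_size"] == 0
grad_accum_steps = cfg["batch_size"] // cfg["micro_batch_size"]

print(f"epochs={cfg['num_epochs']}, lr={cfg['lr']}, "
      f"cutoff_len={cfg['cutoff_len']}, grad_accum_steps={grad_accum_steps}")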