{
"local_rank": 0,
"model_path": "imone/Mistral_7B_with_EOT_token",
"data_prefix": "dataset_openchat3.5/tokenized/openchat_mistral_1017",
"save_path": "/ML-A100/home/csj/trained_models/openchat_mistral/1017",
"save_every": 1,
"batch_size_per_gpu": 10,
"epochs": 5,
"lr": 0.000012507232220003032,
"lr_min_ratio": 0.1,
"lr_warmup_ratio": 0.05,
"weight_decay": 0.1,
"beta1": 0.9,
"beta2": 0.95,
"eps": 0.00001,
"deepspeed": true,
"deepspeed_config": "ochat/training_deepspeed/deepspeed_config.json",
"deepscale": false,
"deepscale_config": null,
"deepspeed_mpi": false,
"model_type": "openchat_v3.2_mistral",
"batch_max_len": 81920,
"device": "<non-serializable>",
"epoch": 2
}
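
The `"device": "<non-serializable>"` entry suggests this file was produced by dumping the parsed training arguments to JSON and substituting a placeholder for any value the encoder cannot handle (for example, a `torch.device`). A minimal sketch of such a dump, assuming an `argparse.Namespace` and a hypothetical `dump_args` helper (not necessarily how the training script actually writes this file):

```python
import argparse
import json


def dump_args(args: argparse.Namespace, path: str) -> None:
    """Write training arguments to JSON, replacing values that the
    JSON encoder cannot handle with a "<non-serializable>" placeholder."""
    serializable = {}
    for key, value in vars(args).items():
        try:
            json.dumps(value)  # probe: can this value be JSON-encoded?
            serializable[key] = value
        except TypeError:
            serializable[key] = "<non-serializable>"

    with open(path, "w") as f:
        json.dump(serializable, f, indent=4)
```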