# distil-mistral-1.5B-v0.1 / config_mistral_100k.yaml
# Model arguments
model_name_or_path: sanchit-gandhi/Mistral-7B-v0.1-6-layer
teacher_model_name_or_path: mistralai/Mistral-7B-v0.1
dtype: bfloat16
load_teacher_in_4bit: true
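# Student/teacher pair for distillation: the student is a 6-layer slice of the
# 32-layer Mistral-7B teacher. Loading the frozen teacher in 4-bit (presumably
# via bitsandbytes quantization) roughly quarters its memory footprint
# compared with keeping it in bfloat16.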
# Data arguments
train_dataset_name: HuggingFaceTB/cosmopedia-100k
train_dataset_config_name: default
train_split_name: train[:-1000]
eval_split_name: train[-1000:]
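# Hugging Face Datasets split-slicing syntax: all but the last 1,000 examples
# of the train split are used for training; those final 1,000 are held out
# for evaluation.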
# Training arguments
num_train_epochs: 10
do_train: true
do_eval: true
per_device_eval_batch_size: 8
per_device_train_batch_size: 8
learning_rate: 0.0003
warmup_steps: 500
gradient_checkpointing: true
dataloader_num_workers: 4
preprocessing_num_workers: 32
ddp_timeout: 7200
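# ddp_timeout is in seconds: 7200 s = 2 h of headroom, so slow one-off dataset
# preprocessing does not trip the distributed process-group timeout.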
save_strategy: epoch
evaluation_strategy: epoch
logging_steps: 25
overwrite_output_dir: true
output_router_logits: true
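# output_router_logits only applies to MoE models such as Mixtral, where the
# router logits feed the load-balancing loss; presumably a no-op for this
# dense Mistral student.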
report_to: wandb
output_dir: ./
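# Example launch command (the run_distillation.py entry point is an assumption
# based on the distil-* training recipes, not something this file specifies):
#   accelerate launch run_distillation.py config_mistral_100k.yaml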