#!/usr/bin/env bash
# gpt2-medium-indonesian / run_pretraining.sh
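# Use the current checkout as the model directory; the GPT-2 config and
# tokenizer files are expected to already be present here.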
export MODEL_DIR=$(pwd)
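# Weights & Biases logging: set the entity/project for the run and upload
# checkpoints as W&B artifacts via WANDB_LOG_MODEL.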
export WANDB_ENTITY="wandb"
export WANDB_PROJECT="hf-flax-gpt2-indonesian"
export WANDB_LOG_MODEL="true"
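# Launch causal-LM pretraining with the Flax example script on the OSCAR
# deduplicated Indonesian split, holding out 2% of the data for validation.
# Logging, evaluation, and checkpointing all run every 5000 steps.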
./run_clm_flax.py \
--output_dir="${MODEL_DIR}" \
--model_type="gpt2" \
--config_name="${MODEL_DIR}" \
--tokenizer_name="${MODEL_DIR}" \
--dataset_name="oscar" \
--dataset_config_name="unshuffled_deduplicated_id" \
--do_train --do_eval \
--block_size="512" \
--per_device_train_batch_size="24" \
--per_device_eval_batch_size="24" \
--learning_rate="0.0024" --warmup_steps="1000" \
--adam_beta1="0.9" --adam_beta2="0.98" --weight_decay="0.01" \
--overwrite_output_dir \
--num_train_epochs="20" \
--dataloader_num_workers="64" \
--preprocessing_num_workers="64" \
--logging_steps="5000" \
--save_steps="5000" \
--eval_steps="5000" \
--validation_split_percentage="2" \
--push_to_hub
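
# Usage sketch (assumes run_clm_flax.py from the transformers Flax examples is
# executable in this directory, e.g. on a TPU VM):
#   bash run_pretraining.sh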