#!/usr/bin/env bash
# deberta-v3-large-irony / run_train.sh
# Author: Elron
# Commit: "Pushing deberta-v3-large-irony to hub" (27e031e)
# Submit a fine-tuning job for microsoft/deberta-v3-large on tweet_eval/irony
# via jbsub (LSF job-submission wrapper): 6h x86 queue, 4 CPU cores + 1 GPU
# (A100 required), 30 GB RAM. The escaped \; separators are literal arguments
# passed through to jbsub so the *remote* job runs the commands sequentially:
# train, delete TensorBoard logs, delete checkpoints, then source run_test.sh
# from the run directory to evaluate.

# Single source of truth for the run directory (previously repeated five
# times inline — one stale copy would silently log/clean the wrong dir).
readonly OUT_DIR="outputs/train/tweet_eval2/irony/deberta-v3-large-irony-lr8e-6-gas2-ls0.1"
readonly PYTHON="/dccstor/tslm/envs/anaconda3/envs/tslm-gen/bin/python"

jbsub -queue x86_6h -cores 4+1 -mem 30g -require a100 \
  -o "${OUT_DIR}/train.log" \
  "${PYTHON}" train_clf.py \
  --model_name_or_path microsoft/deberta-v3-large \
  --train_file data/tweet_eval/irony/train.csv \
  --validation_file data/tweet_eval/irony/validation.csv \
  --do_train --do_eval \
  --per_device_train_batch_size 16 \
  --per_device_eval_batch_size 16 \
  --max_seq_length 256 \
  --learning_rate 8e-6 \
  --output_dir "${OUT_DIR}" \
  --evaluation_strategy steps \
  --save_strategy no \
  --warmup_steps 50 \
  --num_train_epochs 10 \
  --overwrite_output_dir \
  --logging_steps 100 \
  --gradient_accumulation_steps 2 \
  --label_smoothing_factor 0.1 \
  --report_to clearml \
  --metric_for_best_model accuracy \
  --logging_dir "${OUT_DIR}/tb" \
  \; rm -rf "${OUT_DIR}/tb" \
  \; rm -rf "${OUT_DIR}"/checkpoint-* \
  \; . "${OUT_DIR}/run_test.sh"
# NOTE(review): checkpoint-* is deliberately left unquoted so glob expansion
# behavior matches the original command exactly.