#!/usr/bin/env bash
# train_distilbart_cnn.sh
# Fine-tune the sshleifer/student_cnn_12_6 DistilBART student on CNN/DailyMail
# with finetune_trainer.py, keeping the encoder and embeddings frozen.
export WANDB_PROJECT=distilbart-trainer  # Weights & Biases project for run logging
export BS=32                             # per-device train/eval batch size
export m=sshleifer/student_cnn_12_6      # student checkpoint (12 encoder / 6 decoder layers)
export tok=facebook/bart-large           # tokenizer shared with the teacher model
export MAX_TGT_LEN=142                   # max target length for val/test generation

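# Note: cnn_dm/ is assumed to contain {train,val,test}.{source,target} text files
# (one example per line), the layout the seq2seq example scripts expect.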
python finetune_trainer.py \
    --model_name_or_path $m --tokenizer_name $tok \
    --data_dir cnn_dm \
    --output_dir distilbart-cnn-12-6 --overwrite_output_dir \
    --learning_rate=3e-5 \
    --warmup_steps 500 --sortish_sampler \
    --fp16 \
    --n_val 500 \
    --gradient_accumulation_steps=1 \
    --per_device_train_batch_size=$BS --per_device_eval_batch_size=$BS \
    --freeze_encoder --freeze_embeds \
    --num_train_epochs=2 \
    --save_steps 3000 --eval_steps 3000 \
    --logging_first_step \
    --max_target_length 56 --val_max_target_length $MAX_TGT_LEN --test_max_target_length $MAX_TGT_LEN \
    --do_train --do_eval --do_predict \
    --evaluation_strategy steps \
    --predict_with_generate \
    "$@"