#!/usr/bin/env bash

TOTAL_NUM_UPDATES=1000000000000000  # Set far beyond reach, so the polynomial scheduler barely decays the LR after warmup; actual duration is capped by --max-epoch below.
WARMUP_UPDATES=216085               # Linearly warm up the LR over this many updates.
LR=1e-05                            # Peak LR for the polynomial LR scheduler.
HEAD_NAME=hesaid                    # Custom name for the classification head.
NUM_CLASSES=2                       # Number of classes for the classification task.
MAX_SENTENCES=35                    # Batch size (sentences per device).
ROBERTA_PATH="roberta_base_fairseq/model.pt"

fairseq-train data-bin/ \
    --restore-file "$ROBERTA_PATH" \
    --max-positions 512 \
    --max-sentences $MAX_SENTENCES \
    --max-tokens 8192 \
    --task sentence_prediction \
    --reset-optimizer --reset-dataloader --reset-meters \
    --required-batch-size-multiple 2 \
    --init-token 0 --separator-token 2 \
    --arch roberta_base \
    --criterion sentence_prediction \
    --classification-head-name $HEAD_NAME \
    --num-classes $NUM_CLASSES \
    --dropout 0.1 --attention-dropout 0.1 \
    --weight-decay 0.1 --optimizer adam --adam-betas "(0.9, 0.98)" --adam-eps 1e-06 \
    --clip-norm 0.0 \
    --lr-scheduler polynomial_decay --lr $LR --total-num-update $TOTAL_NUM_UPDATES --warmup-updates $WARMUP_UPDATES \
    --max-epoch 5 \
    --log-format tqdm --log-interval 1 \
    --save-interval-updates 15000 --keep-interval-updates 5 \
    --skip-invalid-size-inputs-valid-test \
    --best-checkpoint-metric accuracy --maximize-best-checkpoint-metric \
    --find-unused-parameters \
    --update-freq 1
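
# ---------------------------------------------------------------------------
# Optional sanity check after training (not part of the original script): a
# minimal sketch that loads the best checkpoint and runs one prediction
# through the custom head. It assumes fairseq's default --save-dir
# ("checkpoints/") and the same data-bin/ directory passed to fairseq-train
# above; the example sentence is hypothetical.
python - <<'PY'
from fairseq.models.roberta import RobertaModel

# Load the fine-tuned model; the weights of the custom classification
# head ("hesaid") are restored from the checkpoint itself.
roberta = RobertaModel.from_pretrained(
    "checkpoints",                        # assumed default save dir
    checkpoint_file="checkpoint_best.pt",
    data_name_or_path="data-bin",         # same data dir as fairseq-train
)
roberta.eval()  # disable dropout for deterministic predictions

tokens = roberta.encode("An example input sentence.")  # hypothetical input
logprobs = roberta.predict("hesaid", tokens)           # log-probs over classes
print("predicted class:", logprobs.argmax().item())
PY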