temporal-t5/4_train_model.sh

#!/bin/bash
# Pretraining script for temporal-t5: trains a T5 model with the
# span-corruption (MLM) objective via the Hugging Face Flax example script.

# Redirect the Transformers cache to a large data disk.
export TRANSFORMERS_CACHE=/mnt/gpu_data1/kubapok/cache
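
# The training command below expects a T5 config and tokenizer to already
# exist in ./temporal-t5-base (earlier steps in this pipeline presumably
# created them). A minimal sketch of the config half of that setup, assuming
# the tokenizer is already in place; the google/t5-v1_1-base starting point
# is an assumption, not from the source:
if [ ! -f ./temporal-t5-base/config.json ]; then
python - <<'EOF'
from transformers import T5Config

# Start from a standard T5 config; its vocab_size must match the tokenizer
# already saved in ./temporal-t5-base.
config = T5Config.from_pretrained("google/t5-v1_1-base")
config.save_pretrained("./temporal-t5-base")
EOF
fi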

# Launch the training run; checkpoints and logs land in ./temporal-t5-base.
python run_t5_mlm_flax.py \
--output_dir="./temporal-t5-base" \
--model_type="t5" \
--config_name="./temporal-t5-base" \
--tokenizer_name="./temporal-t5-base" \
--train_file="./train-splitted-shuf.txt" \
--validation_file="./dev-splitted-shuf.txt" \
--max_seq_length="512" \
--per_device_train_batch_size="32" \
--per_device_eval_batch_size="32" \
--adafactor \
--learning_rate="0.005" \
--weight_decay="0.001" \
--warmup_steps="2000" \
--overwrite_output_dir \
--logging_steps="500" \
--save_steps="10000" \
--eval_steps="2500"
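
# Optional smoke test once training finishes: load the final Flax checkpoint
# and fill a corrupted span. A minimal sketch; the example sentence is an
# assumption, not taken from the source.
python - <<'EOF'
from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("./temporal-t5-base")
model = FlaxT5ForConditionalGeneration.from_pretrained("./temporal-t5-base")

# T5's MLM objective marks corrupted spans with sentinel tokens (<extra_id_0>, ...).
inputs = tokenizer("The treaty was signed in <extra_id_0>.", return_tensors="np")
outputs = model.generate(inputs.input_ids, max_length=20)
print(tokenizer.batch_decode(outputs.sequences, skip_special_tokens=False))
EOF
# The same weights also load in PyTorch via
# T5ForConditionalGeneration.from_pretrained("./temporal-t5-base", from_flax=True).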