#!/usr/bin/env bash
# Fine-tune BERT (bert-base-cased) on a suite of GLUE tasks, one run per task.
# Requires: python with HuggingFace transformers' run_glue.py in the CWD, GPU 0.
set -euo pipefail

# GLUE tasks to train on, in run order.
readonly TASKS=("cola" "sst2" "mrpc" "qqp" "mnli" "qnli" "rte" "wnli")

# Root directory for per-task output checkpoints and eval results.
readonly LOG_DIR="./baseline"
mkdir -p "$LOG_DIR"
# Train and evaluate each GLUE task sequentially on GPU 0.
for TASK_NAME in "${TASKS[@]}"; do
  # MRPC and WNLI are the smallest GLUE datasets, so they get extra epochs;
  # all other tasks use the standard 3-epoch fine-tuning schedule.
  if [[ "$TASK_NAME" == "mrpc" || "$TASK_NAME" == "wnli" ]]; then
    NUM_EPOCHS=5
  else
    NUM_EPOCHS=3
  fi

  echo "Running training for task: $TASK_NAME with $NUM_EPOCHS epochs..."

  # All expansions quoted (SC2086); output goes to a per-task subdirectory.
  CUDA_VISIBLE_DEVICES=0 python run_glue.py \
    --model_name_or_path google-bert/bert-base-cased \
    --task_name "$TASK_NAME" \
    --do_train \
    --do_eval \
    --max_seq_length 128 \
    --per_device_train_batch_size 32 \
    --learning_rate 2e-5 \
    --num_train_epochs "$NUM_EPOCHS" \
    --output_dir "$LOG_DIR/$TASK_NAME/" \
    --overwrite_output_dir

  echo "Finished training for task: $TASK_NAME"
done