#!/usr/bin/env bash
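# Pretrain a RoBERTa-type masked language model from scratch on the Thai split
# of OSCAR (unshuffled_deduplicated_th) using the Hugging Face Flax MLM example
# script; the model config and tokenizer are expected in the current directory.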
python3 run_mlm_flax.py \
    --output_dir="./" \
    --model_type="roberta" \
    --config_name="./" \
    --tokenizer_name="./" \
    --dataset_name="oscar" \
    --dataset_config_name="unshuffled_deduplicated_th" \
    --max_seq_length="128" \
    --weight_decay="0.01" \
    --preprocessing_num_workers="64" \
    --per_device_train_batch_size="512" \
    --per_device_eval_batch_size="512" \
    --learning_rate="3e-5" \
    --warmup_steps="312" \
    --overwrite_output_dir \
    --seed="19" \
    --num_train_epochs="8" \
    --adam_beta1="0.9" \
    --adam_beta2="0.98" \
    --logging_steps="31" \
    --save_steps="312" \
    --eval_steps="1250" \
    --dtype="bfloat16" \
    --push_to_hub
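Once training finishes, --push_to_hub uploads the final checkpoint to the Hub. A quick way to sanity-check the uploaded model is a manual fill-mask query against the Flax weights. Below is a minimal sketch; the repo id your-username/roberta-base-thai is a placeholder (substitute whatever name push_to_hub actually created for your run):

import jax.numpy as jnp
from transformers import AutoTokenizer, FlaxAutoModelForMaskedLM

repo_id = "your-username/roberta-base-thai"  # hypothetical -- use your real repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = FlaxAutoModelForMaskedLM.from_pretrained(repo_id)

# RoBERTa-style tokenizers expose the mask token as tokenizer.mask_token.
text = f"ฉันชอบกิน{tokenizer.mask_token}"
inputs = tokenizer(text, return_tensors="np")
logits = model(**inputs).logits

# Locate the masked position and print the five most likely fillers.
mask_pos = int((inputs["input_ids"][0] == tokenizer.mask_token_id).argmax())
top5 = jnp.argsort(logits[0, mask_pos])[::-1][:5]
print([tokenizer.decode([int(i)]) for i in top5])

If the predictions look like plausible Thai completions, the checkpoint loaded and tokenized correctly end to end.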