#!/bin/bash
# Generate one SBATCH job script per training run under BASE_DIR, each
# resuming pera_train from that run's best checkpoint.

BASE_DIR="./lightning_logs_round_1"

# Run subdirectories in sorted order; glob expansion sorts lexicographically
# and, unlike parsing ls, it handles paths containing whitespace.
subdirs=("$BASE_DIR"/*/)
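# Optional guard (not in the original script): fail fast if fewer than ten
# run directories are present, since the loop below indexes 0-9.
if (( ${#subdirs[@]} < 10 )); then
    echo "Error: expected >= 10 run directories in $BASE_DIR, found ${#subdirs[@]}" >&2
    exit 1
fi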
# One job script for each of the first ten runs.
for i in {0..9}; do
    # Version name (last path component) of the i-th run directory.
    version_number=$(basename "${subdirs[$i]}")

    sbatch_filename="alignment_dataset_2_96_from_ESM3_${i}.sh"

    # Unquoted heredoc: $BASE_DIR, $version_number, and ${i} expand here;
    # each \\ is written into the generated script as a line continuation.
    cat <<EOL > "$sbatch_filename"
#!/bin/bash
#SBATCH -N 1
#SBATCH -G 4
#SBATCH -C gpu&hbm80g
#SBATCH -q regular
#SBATCH -t 12:00:00
#SBATCH -A m4235
#SBATCH -J alignment

source /global/u2/s/si264/mambaforge/etc/profile.d/conda.sh
conda activate proera

pera_train \\
    "train.lightning_model_args.eval_type=era" \\
    "train.lightning_model_args.beta=10.0" \\
    "train.lightning_model_args.gamma=0" \\
    "train.trainer_args.devices=4" \\
    "train.trainer_args.max_epochs=25" \\
    "train.trainer_args.log_every_n_steps=1" \\
    "train.trainer_args.enable_progress_bar=True" \\
    "train.lightning_model_args.interval=epoch" \\
    "train.lightning_model_args.monitor=val/ERALoss" \\
    "train.lightning_model_args.lr_scheduler=ReduceLROnPlateau" \\
    "++train.lightning_model_args.lr_scheduler_args.patience=5" \\
    "train.lightning_model_args.optimizer=AdamW" \\
    "train.lightning_model_args.optimizer_args.lr=1.0e-6" \\
    "++train.lightning_model_args.optimizer_args.betas=[0.9,0.99]" \\
    "++train.lightning_model_args.optimizer_args.weight_decay=0.01" \\
    "train.lightning_model_args.on_step=false" \\
    "global_args.dataset_filename=alignment_dataset_2_96_from_ESM3_${i}.hdf5" \\
    "nn.batch_size=4" \\
    "nn.load_model=$BASE_DIR/$version_number/checkpoints/best_model.ckpt" \\
    "++nn.model_args.unified_transformer_args.ida_layer_indices=[]"
EOL
    echo "Created $sbatch_filename"
done
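# The generated scripts can then be submitted individually, e.g.:
#   for f in alignment_dataset_2_96_from_ESM3_*.sh; do sbatch "$f"; done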