#!/bin/bash
#
# SAM3 LoRA fine-tuning launcher for BraTS2023 (GLI challenge training data).
# Configures paths and hyperparameters, then runs train_lora.py and mirrors
# its output to "$OUTPUT_DIR/train.log".
#
# Fail fast: abort on command errors, unset variables, and failures inside
# pipelines (so a failing `python | tee` is not silently ignored).
set -euo pipefail
|
|
|
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Paths. Each setting may be overridden from the environment, e.g.:
#   DATA_ROOT=/other/path OUTPUT_DIR=/tmp/run ./run_lora.sh
# The defaults reproduce the original hard-coded values.
# ---------------------------------------------------------------------------

# BraTS2023 glioma training data root (one folder per case).
DATA_ROOT="${DATA_ROOT:-/data/yty/brats2023/ASNR-MICCAI-BraTS2023-GLI-Challenge-TrainingData}"

# Pretrained SAM3 checkpoint to fine-tune.
SAM3_CHECKPOINT="${SAM3_CHECKPOINT:-/data/yty/sam3/sam3.pt}"

# Where checkpoints and the training log are written.
OUTPUT_DIR="${OUTPUT_DIR:-/data/yty/brats23_sam3_lora_output}"

# ---------------------------------------------------------------------------
# Data / optimization hyperparameters
# ---------------------------------------------------------------------------

# MRI modality index passed to train_lora.py (0 = first channel;
# presumably the T1/T1ce/T2/FLAIR ordering — TODO confirm in train_lora.py).
MODALITY="${MODALITY:-0}"

# Input resolution as TWO space-separated values (H W); deliberately expanded
# unquoted at the call site so it becomes two CLI arguments.
TARGET_SIZE="${TARGET_SIZE:-512 512}"

# Per-step batch size.
BATCH_SIZE="${BATCH_SIZE:-4}"

# Gradient accumulation steps (effective batch = BATCH_SIZE * GRAD_ACCUM).
GRAD_ACCUM="${GRAD_ACCUM:-4}"

# Total training epochs.
EPOCHS="${EPOCHS:-50}"

# Base learning rate.
LR="${LR:-1e-4}"

# ---------------------------------------------------------------------------
# LoRA adapter hyperparameters
# ---------------------------------------------------------------------------
LORA_RANK="${LORA_RANK:-8}"        # adapter rank r
LORA_ALPHA="${LORA_ALPHA:-16}"     # scaling alpha
LORA_DROPOUT="${LORA_DROPOUT:-0.1}"

# ---------------------------------------------------------------------------
# Misc
# ---------------------------------------------------------------------------
NUM_WORKERS="${NUM_WORKERS:-4}"    # DataLoader worker processes
VAL_FREQ="${VAL_FREQ:-5}"          # run validation every N epochs
SEED="${SEED:-42}"                 # RNG seed for reproducibility
|
|
|
|
|
echo "============================================" |
|
|
echo "SAM3 LoRA Fine-tuning for BraTS2023" |
|
|
echo "============================================" |
|
|
echo "" |
|
|
echo "Configuration:" |
|
|
echo " Data root: $DATA_ROOT" |
|
|
echo " Checkpoint: $SAM3_CHECKPOINT" |
|
|
echo " Output: $OUTPUT_DIR" |
|
|
echo " LoRA rank: $LORA_RANK" |
|
|
echo " LoRA alpha: $LORA_ALPHA" |
|
|
echo " Batch size: $BATCH_SIZE x $GRAD_ACCUM = $((BATCH_SIZE * GRAD_ACCUM))" |
|
|
echo " Learning rate: $LR" |
|
|
echo " Epochs: $EPOCHS" |
|
|
echo "" |
|
|
|
|
|
|
|
|
# Create the output directory up front so the tee'd log file has a home.
mkdir -p "$OUTPUT_DIR" || { echo "ERROR: cannot create output dir: $OUTPUT_DIR" >&2; exit 1; }

# train_lora.py is invoked by relative path, so we must be in the project
# directory; abort loudly instead of silently running from the wrong cwd.
cd /root/githubs/sam3/medsam3_brats || { echo "ERROR: cannot cd to /root/githubs/sam3/medsam3_brats" >&2; exit 1; }
|
|
|
|
|
# Launch LoRA fine-tuning; tee mirrors all stdout/stderr into the run log.
# NOTE: $TARGET_SIZE is deliberately unquoted so "512 512" expands into two
# separate CLI arguments (height and width).
python train_lora.py \
    --data_root "$DATA_ROOT" \
    --modality "$MODALITY" \
    --target_size $TARGET_SIZE \
    --dataset_type image \
    --checkpoint "$SAM3_CHECKPOINT" \
    --lora_rank "$LORA_RANK" \
    --lora_alpha "$LORA_ALPHA" \
    --lora_dropout "$LORA_DROPOUT" \
    --epochs "$EPOCHS" \
    --batch_size "$BATCH_SIZE" \
    --lr "$LR" \
    --grad_accum "$GRAD_ACCUM" \
    --num_workers "$NUM_WORKERS" \
    --output_dir "$OUTPUT_DIR" \
    --val_freq "$VAL_FREQ" \
    --seed "$SEED" \
    2>&1 | tee "$OUTPUT_DIR/train.log"

# The pipeline's exit status is tee's, which masks a training failure and
# would let the script fall through to the success banner. Propagate
# python's own status from PIPESTATUS instead.
train_status=${PIPESTATUS[0]}
if (( train_status != 0 )); then
    echo "ERROR: training exited with status $train_status (see $OUTPUT_DIR/train.log)" >&2
    exit "$train_status"
fi
|
|
|
|
|
echo "" |
|
|
echo "============================================" |
|
|
echo "Training completed!" |
|
|
echo "Output saved to: $OUTPUT_DIR" |
|
|
echo "============================================" |
|
|
|