ChipYTY's picture
Add files using upload-large-folder tool
34a4bcb verified
#!/bin/bash
# SAM3 LoRA 微调训练脚本 - BraTS2023
# 配置路径
DATA_ROOT="/data/yty/brats2023/ASNR-MICCAI-BraTS2023-GLI-Challenge-TrainingData"
SAM3_CHECKPOINT="/data/yty/sam3/sam3.pt"
OUTPUT_DIR="/data/yty/brats23_sam3_lora_output"
# 训练参数
MODALITY=0 # 0=t1c, 1=t1n, 2=t2f, 3=t2w
TARGET_SIZE="512 512"
BATCH_SIZE=4
GRAD_ACCUM=4 # 有效batch size = BATCH_SIZE * GRAD_ACCUM = 16
EPOCHS=50
LR=1e-4
# LoRA参数
LORA_RANK=8
LORA_ALPHA=16
LORA_DROPOUT=0.1
# 其他
NUM_WORKERS=4
VAL_FREQ=5
SEED=42
# Print a configuration banner so the run settings are captured at startup.
# A single here-doc replaces the run of echo statements; output is identical.
cat <<EOF
============================================
SAM3 LoRA Fine-tuning for BraTS2023
============================================

Configuration:
 Data root: $DATA_ROOT
 Checkpoint: $SAM3_CHECKPOINT
 Output: $OUTPUT_DIR
 LoRA rank: $LORA_RANK
 LoRA alpha: $LORA_ALPHA
 Batch size: $BATCH_SIZE x $GRAD_ACCUM = $((BATCH_SIZE * GRAD_ACCUM))
 Learning rate: $LR
 Epochs: $EPOCHS

EOF
# Create the output directory (the tee log destination below needs it to exist).
mkdir -p "$OUTPUT_DIR"

# Move into the training code directory; abort if it is missing so the
# python invocation below does not silently run from the wrong cwd.
cd /root/githubs/sam3/medsam3_brats || {
  echo "ERROR: cannot cd to /root/githubs/sam3/medsam3_brats" >&2
  exit 1
}
# --- Run training ---
# pipefail: without it, this pipeline reports tee's exit status (always 0),
# so a failed training run would be silently masked and the script would
# still print the success epilogue and exit 0.
set -o pipefail
# $TARGET_SIZE is intentionally unquoted: it must split into two arguments.
# shellcheck disable=SC2086
if python train_lora.py \
    --data_root "$DATA_ROOT" \
    --modality "$MODALITY" \
    --target_size $TARGET_SIZE \
    --dataset_type image \
    --checkpoint "$SAM3_CHECKPOINT" \
    --lora_rank "$LORA_RANK" \
    --lora_alpha "$LORA_ALPHA" \
    --lora_dropout "$LORA_DROPOUT" \
    --epochs "$EPOCHS" \
    --batch_size "$BATCH_SIZE" \
    --lr "$LR" \
    --grad_accum "$GRAD_ACCUM" \
    --num_workers "$NUM_WORKERS" \
    --output_dir "$OUTPUT_DIR" \
    --val_freq "$VAL_FREQ" \
    --seed "$SEED" \
    2>&1 | tee "$OUTPUT_DIR/train.log"; then
  : # training succeeded; fall through to the epilogue
else
  train_status=$?  # pipeline status under pipefail (non-zero stage)
  echo "ERROR: training failed with exit code $train_status" >&2
  exit "$train_status"
fi
# Closing banner: confirm completion and point at the output location.
printf '\n'
printf '%s\n' "============================================"
printf '%s\n' "Training completed!"
printf '%s\n' "Output saved to: $OUTPUT_DIR"
printf '%s\n' "============================================"