#!/bin/bash
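
# Debug/smoke-test launcher for VTimeLLM stage-5 (DPO) fine-tuning on ActivityNet:
# a single-GPU DeepSpeed run capped at 100 steps, with verbose logging and
# pre-flight checks for every required checkpoint and data file.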
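
# Launch settings: RANK here selects the local GPU for the DeepSpeed launcher's
# --include flag below (it is not the torch.distributed rank, which the launcher
# assigns itself); MASTER_PORT is the rendezvous port passed to --master_port.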
export RANK=1
export MASTER_PORT=29571

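# Batch geometry: effective per-device batch size is
# LOCAL_BATCH_SIZE * GRADIENT_ACCUMULATION_STEPS = 2 * 4 = 8.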
export LOCAL_BATCH_SIZE=2
export GRADIENT_ACCUMULATION_STEPS=4

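# Model, data, and run naming. TRANSFORMERS_OFFLINE=1 keeps Transformers from
# contacting the Hugging Face Hub, so every artifact must already exist locally.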
export TRANSFORMERS_OFFLINE=1
export WANDB_PROJECT=vtimellm
export MODEL_VERSION=vicuna-v1-5-7b
export OUTPUT_DIR=./outputs
export STAGE4=./checkpoints/vtimellm-vicuna-v1-5-7b-activitynet-stage4
export RUN_NAME=vtimellm-$MODEL_VERSION-activitynet-stage5

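# Runtime environment. TORCH_USE_CUDA_DSA=1 requests device-side assertions for
# easier CUDA debugging (it only takes effect if this PyTorch build was compiled
# with DSA support). NOTE: the launch below also pins the job to GPU $RANK via
# --include; combining that with CUDA_VISIBLE_DEVICES can conflict or re-index
# the visible devices depending on the DeepSpeed version.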
export PYTHONPATH="${PYTHONPATH}:$(pwd)"
export CUDA_VISIBLE_DEVICES=1
export TORCH_USE_CUDA_DSA=1

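# Verbose library logging; silence the tokenizers fork-parallelism warnings
# triggered by the dataloader workers.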
export TRANSFORMERS_VERBOSITY=info
export TOKENIZERS_PARALLELISM=false

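# Create the run-specific output directory up front.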
mkdir -p "$OUTPUT_DIR/$RUN_NAME"

| | echo "=== Debug Environment Setup ===" |
| | echo "RANK: $RANK" |
| | echo "MASTER_PORT: $MASTER_PORT" |
| | echo "CUDA_VISIBLE_DEVICES: $CUDA_VISIBLE_DEVICES" |
| | echo "PYTHONPATH: $PYTHONPATH" |
| | echo "OUTPUT_DIR: $OUTPUT_DIR/$RUN_NAME" |
| | echo "================================" |
| |
|
| | |
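# Pre-flight check: verify every checkpoint, data file, and DeepSpeed config the
# run depends on before paying the model-loading cost.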
| | echo "=== Checking Required Files ===" |
| | required_files=( |
| | "./checkpoints/vicuna-7b-v1.5" |
| | "./data/activitynet/mdpo-train.json" |
| | "./data/activitynet/videos/train" |
| | "./data/activitynet/clipvitl14-vtimellm.pth" |
| | "./checkpoints/vtimellm-vicuna-v1-5-7b-stage1/mm_projector.bin" |
| | "./checkpoints/vtimellm-vicuna-v1-5-7b-stage2" |
| | "./checkpoints/vtimellm-vicuna-v1-5-7b-stage3" |
| | "./checkpoints/vtimellm-vicuna-v1-5-7b-activitynet-stage4" |
| | "./scripts/zero2.json" |
| | ) |
| |
|
for file in "${required_files[@]}"; do
    if [ -e "$file" ]; then
        echo "✓ Found: $file"
    else
        echo "✗ Missing: $file"
    fi
done
echo "================================"

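# Report GPU models and memory so out-of-memory failures are easier to diagnose.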
| | echo "=== GPU Information ===" |
| | nvidia-smi --query-gpu=name,memory.total,memory.free --format=csv,noheader,nounits |
| | echo "================================" |
| |
|
| | |
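# Echo the launch command for the log, then run it. --include localhost:$RANK
# pins the job to a single local GPU; --master_port must be free on this host.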
| | echo "=== Starting Debug Training ===" |
| | echo "Command: deepspeed --include localhost:$RANK --master_port $MASTER_PORT vtimellm/train/train_dpo_mem.py [args...]" |
| | echo "================================" |
| |
|
| | |
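# Stage-5 DPO fine-tuning with LoRA (r=8, alpha=128) on top of the stage-1..4
# checkpoints, capped at 100 steps as a smoke test. Checkpoint saving is disabled
# (--save_strategy "no"), so the --save_steps / --save_total_limit values are inert.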
deepspeed --include localhost:$RANK --master_port $MASTER_PORT vtimellm/train/train_dpo_mem.py \
    --deepspeed ./scripts/zero2.json \
    --lora_enable True --lora_r 8 --lora_alpha 128 \
    --training_stage 3 --finetuning True \
    --model_name_or_path ./checkpoints/vicuna-7b-v1.5 \
    --version v1 \
    --data_path ./data/activitynet/mdpo-train.json \
    --data_folder ./data/activitynet/videos/train \
    --feat_folder ./data/activitynet/clipvitl14-vtimellm.pth \
    --pretrain_mm_mlp_adapter ./checkpoints/vtimellm-vicuna-v1-5-7b-stage1/mm_projector.bin \
    --stage2_path ./checkpoints/vtimellm-vicuna-v1-5-7b-stage2 \
    --stage3_path ./checkpoints/vtimellm-vicuna-v1-5-7b-stage3 \
    --stage4_path "$STAGE4" \
    --output_dir "$OUTPUT_DIR/$RUN_NAME" \
    --bf16 True \
    --max_steps 100 \
    --per_device_train_batch_size "$LOCAL_BATCH_SIZE" \
    --gradient_accumulation_steps "$GRADIENT_ACCUMULATION_STEPS" \
    --evaluation_strategy "no" \
    --save_strategy "no" \
    --save_steps 50000 \
    --save_total_limit 10 \
    --learning_rate 1e-6 \
    --freeze_mm_mlp_adapter True \
    --weight_decay 0.0 --warmup_ratio 0.1 --lr_scheduler_type "cosine" \
    --logging_steps 1 \
    --tf32 True \
    --model_max_length 2048 \
    --gradient_checkpointing True \
    --dataloader_num_workers 4 \
    --lazy_preprocess True \
    --report_to "none" \
    --run_name "$RUN_NAME" \
    --gamma 0.0 --beta 0.5 --dpo_alpha 1.0 --train4dpo

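# Assumption: train_dpo_mem.py exits non-zero on failure, so check DeepSpeed's
# exit status rather than printing success unconditionally.
status=$?
if [ "$status" -ne 0 ]; then
    echo "=== Training FAILED (exit code $status) ==="
    exit "$status"
fi
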
| | echo "=== Training Completed ===" |
| |
|