File size: 5,516 Bytes
e31e7b4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
# Trace every command for easier debugging of this launcher script.
set -x
# Install the OpenGL runtime library needed by video/vision packages (e.g. OpenCV).
sudo apt-get update && sudo apt-get install -y libgl1-mesa-glx
# Project-specific environment/data preparation step.
bash ./config/shell_scripts/cogvideo_i2v/train_wan_prepare.sh
# Record the last 5 commits so the training log captures the exact code version.
git --no-pager log --decorate=short --pretty=oneline -n5

# Omnistore checkpoint loading: non-strict key matching, errors-only logging.
export OMNISTORE_LOAD_STRICT_MODE=0
export OMNISTORE_LOGGING_LEVEL=ERROR
#################################################################
## Torch
#################################################################
# Avoid tokenizer thread contention with DataLoader worker processes.
export TOKENIZERS_PARALLELISM=false
# Verbose torch.compile/dynamo diagnostics (recompiles and graph breaks).
export TORCH_LOGS="+dynamo,recompiles,graph_breaks"
export TORCHDYNAMO_VERBOSE=1
# Enable the NCCL watchdog/monitoring thread for hang detection.
export TORCH_NCCL_ENABLE_MONITORING=1
# Reduce CUDA memory fragmentation; allocator GC kicks in above 90% usage.
export PYTORCH_CUDA_ALLOC_CONF="expandable_segments:True,garbage_collection_threshold:0.9"
#################################################################


#################################################################
## NCCL
#################################################################
# GID index 3 is commonly the RoCEv2/IPv4 GID — confirm for this fabric.
export NCCL_IB_GID_INDEX=3
# Restrict NCCL to the RDMA NIC provisioned for this Arnold worker.
export NCCL_IB_HCA=$ARNOLD_RDMA_DEVICE
# Bootstrap/control traffic goes over the primary ethernet interface.
export NCCL_SOCKET_IFNAME=eth0
export NCCL_SOCKET_TIMEOUT=3600000

export NCCL_DEBUG=WARN  # disable the verbose NCCL logs
# Keep the P2P, InfiniBand and shared-memory transports enabled.
export NCCL_P2P_DISABLE=0
export NCCL_IB_DISABLE=0  # was 1
export NCCL_SHM_DISABLE=0  # was 1
# Use GPU P2P only within an NVLink domain.
export NCCL_P2P_LEVEL=NVL

# Allow PXN routing; GDR level 2 = NIC<->GPU direct within the same PCIe switch.
export NCCL_PXN_DISABLE=0
export NCCL_NET_GDR_LEVEL=2
# IB tuning: 4 queue pairs per connection, traffic class 160, QP timeout 22.
export NCCL_IB_QPS_PER_CONNECTION=4
export NCCL_IB_TC=160
export NCCL_IB_TIMEOUT=22
#################################################################

#################################################################
## WANDB
#################################################################
# Tolerate a slow wandb service startup and force online (synced) logging.
export WANDB__SERVICE_WAIT=6000
export WANDB_MODE=online
# Run wandb in-process (legacy mode) instead of the separate service process.
export WANDB_DISABLE_SERVICE=True
#################################################################

#################################################################
## DIST
#################################################################
# Rendezvous endpoint: worker 0 hosts the master. METIS_WORKER_0_PORT may be a
# comma-separated list of ports — take the first entry.
MASTER_ADDR=$ARNOLD_WORKER_0_HOST
ports=($(echo "$METIS_WORKER_0_PORT" | tr ',' ' '))
MASTER_PORT=${ports[0]}
NNODES=$ARNOLD_WORKER_NUM
NODE_RANK=$ARNOLD_ID
GPUS_PER_NODE=$ARNOLD_WORKER_GPU
# GPUS_PER_NODE=1
# NNODES=1
# NODE_RANK=0
# Bare names inside $(( )) evaluate unset/empty vars as 0, so this cannot raise
# an arithmetic error when the Arnold env vars are missing (unlike "$VAR*$VAR").
WORLD_SIZE=$((GPUS_PER_NODE * NNODES))

DISTRIBUTED_ARGS="--nproc_per_node $GPUS_PER_NODE --nnodes $NNODES --node_rank $NODE_RANK --master_addr $MASTER_ADDR --master_port $MASTER_PORT"
# Optional elastic rendezvous (c10d backend); SHM transport is disabled there.
if [[ -n "${RDZV_BACKEND:-}" ]]; then
    DISTRIBUTED_ARGS="${DISTRIBUTED_ARGS} --rdzv_endpoint $MASTER_ADDR:$MASTER_PORT --rdzv_id 9863 --rdzv_backend c10d"
    export NCCL_SHM_DISABLE=1
fi

# Select the HDFS namespace by datacenter region: 'maliva' uses the VA cluster,
# anything else falls back to the SG cluster. [[ ]] with a quoted operand is
# safe even when RUNTIME_IDC_NAME is unset (plain [ $region == ... ] errors out).
region=$RUNTIME_IDC_NAME
if [[ "$region" == 'maliva' ]]; then
    hdfs_prefix=hdfs://harunava/home/byte_icaip_nebudata
    export ARNOLD_BASE_DIR=hdfs://harunava
else
    hdfs_prefix=hdfs://harunasg/home/byte_icaip_nebudata_sg
    export RUNTIME_IDC_NAME=my2
    export ARNOLD_BASE_DIR=hdfs://harunasg
fi

# Print the distributed setup in red; printf avoids echo -e portability issues.
printf '\033[31mDISTRIBUTED_ARGS: %s\033[0m\n' "${DISTRIBUTED_ARGS}"
printf '\033[31mPERSISTENCE_PATH: %s\033[0m\n' "${hdfs_prefix}"

#################################################################

#################################################################
## Training
#################################################################
# Optimization hyper-parameters.
learning_rate="1e-5"
lr_schedule="cosine_with_restarts"
optimizer="adamw"
steps="2000000"
version="v0.4"
DATASET_CONFIG="config/dataset_config/512_collection_config_vae1011_aligned_full_dump.yaml"

# Base Wan2.1 I2V checkpoint (patch-size-1 variant on the shared mount).
CKPT="/mnt/bn/icvg/users/yangxiao.0/Wan-AI/Wan2.1-I2V-14B-720P-patchsize1"
# CKPT="./models/Wan2.1-I2V-14B-720P"
# Checkpoints persist to HDFS; scalar/video logs go to the local shared mount.
output_dir="hdfs://harunasg/home/byte_icvg_aigc_cp/user/video/dali/dit_ckpt/i2v_wan_imageonly_lime_official_rl_1e-5_rm_with_1st_frame_round_4_2fps_rm_0812_color_VQ_MQ_MPS_0_cc_0814"
#output_dir="hdfs://harunasg/home/byte_icaip_nebudata_sg/fuwen/results/wan"
logging_dir="/mnt/bn/icvg/users/xinwei.huang/video_refl_new/log"
#logging_dir="./results/wan"
#################################################################

#TODO: prefetching
export WANDB_PROJECT=dc_ae_dit
# NOTE(review): EXP_NAME says "2e-5" but learning_rate above is 1e-5 — confirm
# which is intended before comparing runs by name.
export EXP_NAME=refl_2e-5_no_flowmatching_overall_fps6_rm_with_1st_frame_round_3_2fps_0812_RM_color_VQ_MQ_MPS_0_cc_loss
# Launch distributed training.
# NOTE(review): torch.distributed.launch is deprecated upstream in favor of
# torchrun — keep as-is unless train_wan_i2v_dc_ae.py no longer needs --local_rank.
# $DISTRIBUTED_ARGS is intentionally unquoted: it carries multiple flags that
# must word-split. All single-valued expansions below are quoted.
# shellcheck disable=SC2086
python3 -m torch.distributed.launch $DISTRIBUTED_ARGS ./training/train_wan_i2v_dc_ae.py \
  --dataset_config "$DATASET_CONFIG" \
  --frame_buckets 49 \
  --dataloader_num_workers 1 \
  --prefetch_factor 2 \
  --pin_memory \
  --seed 42 \
  --mixed_precision bf16 \
  --output_dir "$output_dir" \
  --train_batch_size 1 \
  --max_train_steps "$steps" \
  --checkpointing_steps 50 \
  --gradient_accumulation_steps 1 \
  --learning_rate "$learning_rate" \
  --lr_scheduler "$lr_schedule" \
  --lr_warmup_steps 1 \
  --lr_num_cycles 1 \
  --optimizer "$optimizer" \
  --beta1 0.9 \
  --beta2 0.95 \
  --weight_decay 0.001 \
  --max_grad_norm 1.0 \
  --allow_tf32 \
  --report_to wandb \
  --nccl_timeout 1800 \
  --resume_from_checkpoint latest \
  --wandb_project "${WANDB_PROJECT}" \
  --wandb_name "${EXP_NAME}" \
  --pretrained_model_name_or_path "$CKPT" \
  --use_robust_loss \
  --drop_first_frame_condition_threshold 0.00 \
  --drop_last_frame_condition_threshold 0.0 \
  --logging_dir "$logging_dir" \
  --video_logging_interval 1000000 \
  --scalar_logging_interval 1 \
  --tp_size 8 \
  --gradient_checkpointing \
  --ema \
  --ema_decay 0.99 \
  --ema_interval 1 \
  --sampling_steps 30 \
  --max_turn_step 29 \
  --min_turn_step 6 \
  --optimizing_objective "VQ, MQ" \
  --selected_frames 0 12 24 36 48 60 \
  --half_input \
  --use_cfg \
  --rm_model_path "/mnt/bn/icvg/users/xinwei.huang/VideoAlign/rm_output_0801_first_color" \
  --transformer_model_path "/mnt/bn/icvg/users/xinwei.huang/video_models/rm0806_round3_mps0.13000.pth/model.pt" \
  --frame_reward_loss_weight 0