#!/usr/bin/env bash
# Launch multi-node distributed training for the paradrive project.
#
# Required environment (normally injected by the cluster job scheduler):
#   MASTER_ADDR, MASTER_PORT - rendezvous endpoint for torch.distributed
#   WORLD_SIZE, RANK         - number of nodes and this node's rank
#
# Exit on error; with pipefail the `python | tee` pipeline below reports
# the trainer's exit status instead of tee's.
set -eo pipefail

echo "$PATH"

# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
# Restructured from the stock `$?` check: testing the assignment directly
# is equivalent and does not trip `set -e` when the hook is unavailable.
if __conda_setup="$('/cpfs01/user/litianyu/opt/miniconda3/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"; then
  eval "$__conda_setup"
elif [ -f "/cpfs01/user/litianyu/opt/miniconda3/etc/profile.d/conda.sh" ]; then
  . "/cpfs01/user/litianyu/opt/miniconda3/etc/profile.d/conda.sh"
else
  export PATH="/cpfs01/user/litianyu/opt/miniconda3/bin:$PATH"
fi
unset __conda_setup
# <<< conda initialize <<<

conda activate paradrive
echo "$PATH"

readonly CODE_PATH="/cpfs01/user/litianyu/projects/paradrive"
cd "$CODE_PATH" || exit 1

AUTORESUME=0
CFG=configs/paradrive/navsim_openscenes_nuplan/base_e2e_onestage_r50_v2t1_baseline.py
WORK_DIR="${CODE_PATH}/work_dirs/uniad_12nodes_lr1x/"

# mkdir -p is idempotent, so no [ -d ] guard is needed.
mkdir -p "${WORK_DIR}logs"

# Kept as three separate exports to preserve the resulting search order:
# external : CODE_PATH : <inherited PYTHONPATH> : external/toolbox
export PYTHONPATH="${CODE_PATH}:$PYTHONPATH"
export PYTHONPATH="${CODE_PATH}/external:$PYTHONPATH"
export PYTHONPATH=$PYTHONPATH:"${CODE_PATH}/external/toolbox"

# Timestamp (MMDDhhmm) used to name this run's log file.
T=$(date +%m%d%H%M)

# "${@:4}" forwards caller arguments from the 4th onward as extra trainer
# flags — presumably $1-$3 are consumed by a wrapper; TODO confirm.
# NOTE(review): --resume-from points at latest.pth, which may not exist on
# a fresh run — confirm train.py tolerates a missing checkpoint path.
python -m torch.distributed.launch \
  --nproc_per_node=8 \
  --master_addr="${MASTER_ADDR}" \
  --master_port="${MASTER_PORT}" \
  --nnodes="${WORLD_SIZE}" \
  --node_rank="${RANK}" \
  ./scripts/train.py \
  "$CFG" \
  --launcher pytorch "${@:4}" \
  --deterministic \
  --work-dir "${WORK_DIR}" \
  --autoresume "${AUTORESUME}" \
  --resume-from "${WORK_DIR}latest.pth" \
  2>&1 | tee "${WORK_DIR}logs/train.$T"