#!/bin/zsh
# Environment setup for HPC jobs: redirect HOME and all ML framework
# caches to node-local scratch (/var/tmp/szang) so large model/cache
# downloads never land on the shared, quota-limited home filesystem.

export HOME=/var/tmp/szang
export HF_HOME=/var/tmp/szang
export TORCH_HOME=/var/tmp/szang
# NOTE(review): TRANSFORMERS_CACHE is deprecated (transformers >= 4.36)
# in favor of HF_HOME, which is already set above; kept for older versions.
export TRANSFORMERS_CACHE=/var/tmp/szang
export UV_CACHE_DIR=/var/tmp/szang/.cache/uv

# Pre-create the cache directories (Triton autotune results, compiled
# Torch kernels, and the uv package cache).
mkdir -p /var/tmp/szang/.triton/autotune
mkdir -p /var/tmp/szang/.cache/torch/kernels
mkdir -p "$UV_CACHE_DIR"

# Load the GCC and CUDA toolchain through the "environment modules"
# system. `module` is normally a shell function, so probe for it with
# `typeset -f`; when it is not yet defined, try bootstrapping it from
# the system-wide modules profile before loading.
# NOTE(review): if sourcing modules.sh fails to define `module`, the
# elif branch still attempts `module load` and prints a command-not-found
# error — presumably acceptable best-effort behavior; confirm.
if typeset -f module > /dev/null 2>&1; then
    module load gcc/10.5.0 cuda/11.8.0
elif [[ -f /etc/profile.d/modules.sh ]]; then
    source /etc/profile.d/modules.sh
    module load gcc/10.5.0 cuda/11.8.0
fi

# Make the project environment's shared libraries visible to the dynamic
# linker. ${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} appends the previous
# value only when it was set and non-empty: the original unconditional
# ":$LD_LIBRARY_PATH" left a trailing ":" when the variable was unset,
# and an empty path entry means "current directory" to the loader —
# surprising and a mild security risk.
export LD_LIBRARY_PATH=/global/scratch/users/ghr/Projects/szang/envs/video_act/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}

# NCCL settings for multi-GPU collective communication.
# Disable direct GPU peer-to-peer transfers and InfiniBand transport.
export NCCL_P2P_DISABLE=1
export NCCL_IB_DISABLE=1
# Verbose NCCL logging — useful for debugging, noisy for production runs.
export NCCL_DEBUG=INFO
# NOTE(review): NCCL_NVLS_ENABLE=1 and NCCL_P2P_LEVEL=NVL request
# NVLink/NVSwitch (P2P) paths, but NCCL_P2P_DISABLE=1 above disables
# P2P entirely — these settings contradict each other; confirm which
# combination is actually intended for this cluster.
export NCCL_NVLS_ENABLE=1
export NCCL_P2P_LEVEL=NVL
# NOTE(review): NCCL_TIMEOUT is not a documented NCCL env var; PyTorch
# collective timeouts are normally set via init_process_group(timeout=...)
# — verify this variable has any effect here.
export NCCL_TIMEOUT=1200

# Cap intra-op CPU threading and silence the Hugging Face tokenizers
# fork-after-parallelism warning in dataloader worker processes.
export OMP_NUM_THREADS=4 TOKENIZERS_PARALLELISM=false

# Best-effort: turn on tmux mouse support when running inside a tmux
# session; quietly carry on when tmux is absent or no server is running.
if ! tmux set mouse on 2> /dev/null; then
    :  # not inside tmux — nothing to do
fi

# Enter the project directory and activate its virtualenv. The cd is
# checked explicitly: without the guard, a failed cd would leave us in
# an arbitrary directory and `source .venv/bin/activate` could silently
# pick up a stray, unrelated .venv relative to that directory.
cd /global/scratch/users/ghr/Projects/szang/GR00TN1.7 || {
    echo "ERROR: project directory /global/scratch/users/ghr/Projects/szang/GR00TN1.7 not found" >&2
    exit 1
}
source .venv/bin/activate

# --- Sanity checks: report the CUDA/PyTorch/GPU stack ---------------------
# The original `nvcc --version && echo "CUDA_HOME: ..."` skipped the
# CUDA_HOME report entirely whenever nvcc was missing; check for nvcc
# explicitly and always print CUDA_HOME.
if command -v nvcc > /dev/null 2>&1; then
    nvcc --version
else
    echo "WARNING: nvcc not found on PATH" >&2
fi
echo "CUDA_HOME: $CUDA_HOME"

# PyTorch / CUDA runtime details, including per-GPU name and memory.
python -c "
import torch
print('PyTorch version:', torch.__version__)
print('CUDA available:', torch.cuda.is_available())
print('CUDA version:', torch.version.cuda)
print('cuDNN version:', torch.backends.cudnn.version())
print('GPU count:', torch.cuda.device_count())
for i in range(torch.cuda.device_count()):
    print(f' GPU {i}: {torch.cuda.get_device_name(i)} ({torch.cuda.get_device_properties(i).total_memory / 1024**3:.1f} GB)')
print('Current device:', torch.cuda.current_device() if torch.cuda.is_available() else 'N/A')
"
python -c "import transformers; print('Transformers version:', transformers.__version__)"
python -c "import flash_attn; print('Flash Attention version:', flash_attn.__version__)"
