# AniGen / setup.sh
# Author: Yihua7
# Initial commit: AniGen - Animatable 3D Generation (commit 6b92ff7)
# ---------------------------------------------------------------------------
# Argument parsing: each recognized option flips one boolean flag.
# GNU getopt normalizes the argument list; its exit status is checked so that
# invalid options are reported (previously the failure was ignored and the
# `*)` ERROR arm below could never fire, because getopt strips unknown flags).
# ---------------------------------------------------------------------------
if ! TEMP=$(getopt -o h --long help,new-env,torch,basic,train,all,xformers,flash-attn,diffoctreerast,vox2seq,spconv,mipgaussian,kaolin,nvdiffrast,demo -n 'setup.sh' -- "$@") ; then
    # getopt already printed a diagnostic on stderr; fall back to an empty,
    # option-less list so the parse loop below terminates cleanly.
    TEMP=' --'
    GETOPT_FAILED=true
else
    GETOPT_FAILED=false
fi
eval set -- "$TEMP"
HELP=false
NEW_ENV=false
TORCH=false
BASIC=false
TRAIN=false
ALL=false
XFORMERS=false
FLASHATTN=false
DIFFOCTREERAST=false
VOX2SEQ=false
LINEAR_ASSIGNMENT=false  # NOTE(review): never read below — kept for compatibility
SPCONV=false
ERROR=$GETOPT_FAILED
MIPGAUSSIAN=false
KAOLIN=false
NVDIFFRAST=false
DEMO=false
# Only the "--" terminator left means no options were given: show help.
if [ "$#" -eq 1 ] ; then
    HELP=true
fi
while true ; do
    case "$1" in
        -h|--help) HELP=true ; shift ;;
        --new-env) NEW_ENV=true ; shift ;;
        --torch) TORCH=true ; shift ;;
        --basic) BASIC=true ; shift ;;
        --train) TRAIN=true ; shift ;;
        --all) ALL=true ; shift ;;
        --xformers) XFORMERS=true ; shift ;;
        --flash-attn) FLASHATTN=true ; shift ;;
        --diffoctreerast) DIFFOCTREERAST=true ; shift ;;
        --vox2seq) VOX2SEQ=true ; shift ;;
        --spconv) SPCONV=true ; shift ;;
        --mipgaussian) MIPGAUSSIAN=true ; shift ;;
        --kaolin) KAOLIN=true ; shift ;;
        --nvdiffrast) NVDIFFRAST=true ; shift ;;
        --demo) DEMO=true ; shift ;;
        --) shift ; break ;;
        *) ERROR=true ; break ;;
    esac
done
# Surface a parse error through the help text, then expand --all into its
# component flags (everything except vox2seq and demo).
if [ "$ERROR" = true ] ; then
    echo "Error: Invalid argument"
    HELP=true
fi
if [ "$ALL" = true ] ; then
    BASIC=true ; XFORMERS=true ; FLASHATTN=true ; DIFFOCTREERAST=true
    SPCONV=true ; MIPGAUSSIAN=true ; KAOLIN=true ; NVDIFFRAST=true
fi
# Print usage and stop. The script is intended to be sourced (". ./setup.sh"),
# hence `return` rather than `exit`.
if [ "$HELP" = true ] ; then
    cat <<'USAGE_EOF'
Usage: . ./setup.sh [OPTIONS]
Options:
 -h, --help Display this help message
 --new-env Create a new conda environment (if available) and install PyTorch
 --torch Install PyTorch into the current environment (via pip)
 --basic Install basic pip dependencies
 --train Install training dependencies
 --all Install all dependencies (basic, xformers, flash-attn,
 diffoctreerast, spconv, mipgaussian, kaolin, nvdiffrast)
 --xformers Install xformers
 --flash-attn Install flash-attn
 --diffoctreerast Install diffoctreerast
 --vox2seq Install vox2seq
 --spconv Install spconv
 --mipgaussian Install mip-splatting
 --kaolin Install kaolin
 --nvdiffrast Install nvdiffrast
 --demo Install all dependencies for demo
USAGE_EOF
    return
fi
# --new-env: create/activate a fresh conda env when conda exists, then always
# chain into the --torch install. --torch alone installs into the current env.
if [ "$NEW_ENV" = true ] ; then
    if ! command -v conda &>/dev/null ; then
        echo "[WARN] conda not found, skipping environment creation. Using current Python environment."
    else
        conda create -n anigen python=3.10 -y
        conda activate anigen
    fi
    TORCH=true
fi
if [ "$TORCH" = true ] ; then
    pip install torch==2.4.0 torchvision==0.19.0 --index-url https://download.pytorch.org/whl/cu118
fi
# A working PyTorch install is a prerequisite for any of these components.
# All flags hold either "true" or "false", so a substring match on the
# joined values is equivalent to scanning them one by one.
NEEDS_TORCH=false
case "$XFORMERS $FLASHATTN $DIFFOCTREERAST $VOX2SEQ $SPCONV $MIPGAUSSIAN $KAOLIN $NVDIFFRAST $TRAIN" in
    *true*) NEEDS_TORCH=true ;;
esac
# ---------------------------------------------------------------------------
# System information: working directory, PyTorch version (with the local build
# suffix such as "+cu118" stripped), and accelerator platform
# (cuda / hip / unknown / cpu). Hard-fail only when a requested component
# actually needs PyTorch.
# ---------------------------------------------------------------------------
WORKDIR=$(pwd)
if python -c "import torch" 2>/dev/null ; then
    PYTORCH_VERSION_RAW=$(python -c "import torch; print(torch.__version__)")
    # Strip "+<local build>" via parameter expansion (no unquoted echo | sed).
    PYTORCH_VERSION=${PYTORCH_VERSION_RAW%%+*}
    PLATFORM=$(python -c "import torch; print(('cuda' if torch.version.cuda else ('hip' if torch.version.hip else 'unknown')) if torch.cuda.is_available() else 'cpu')")
elif [ "$NEEDS_TORCH" = true ] ; then
    echo "[ERROR] PyTorch is not installed. Please either:"
    echo " - Use --new-env to create a new conda environment with PyTorch, or"
    echo " - Use --torch to install PyTorch into the current environment, or"
    echo " - Install PyTorch manually before running this script."
    return 1
else
    # Nothing requested needs torch; leave version/platform empty so the
    # platform case below falls through to its default arm.
    PYTORCH_VERSION=""
    PLATFORM=""
fi
# Per-platform version bookkeeping; on HIP also force the supported PyTorch.
case "$PLATFORM" in
    cuda)
        CUDA_VERSION=$(python -c "import torch; print(torch.version.cuda)")
        # Major/minor via parameter expansion, e.g. 11 and 8 from "11.8".
        CUDA_MAJOR_VERSION=${CUDA_VERSION%%.*}
        CUDA_MINOR_VERSION=${CUDA_VERSION#*.} ; CUDA_MINOR_VERSION=${CUDA_MINOR_VERSION%%.*}
        echo "[SYSTEM] PyTorch Version: $PYTORCH_VERSION, CUDA Version: $CUDA_VERSION"
        ;;
    hip)
        HIP_VERSION=$(python -c "import torch; print(torch.version.hip)")
        HIP_MAJOR_VERSION=${HIP_VERSION%%.*}
        HIP_MINOR_VERSION=${HIP_VERSION#*.} ; HIP_MINOR_VERSION=${HIP_MINOR_VERSION%%.*}
        # Force PyTorch 2.4.1+rocm6.1. Compare against the RAW version string:
        # $PYTORCH_VERSION has the "+rocm6.1" suffix stripped, so the previous
        # comparison against "2.4.1+rocm6.1" could never match and the
        # reinstall ran on every invocation.
        if [ "$PYTORCH_VERSION_RAW" != "2.4.1+rocm6.1" ] ; then
            echo "[SYSTEM] Installing PyTorch 2.4.1 for HIP ($PYTORCH_VERSION -> 2.4.1+rocm6.1)"
            pip install torch==2.4.1 torchvision==0.19.1 --index-url https://download.pytorch.org/whl/rocm6.1 --user
            mkdir -p /tmp/extensions
            sudo cp /opt/rocm/share/amd_smi /tmp/extensions/amd_smi -r
            cd /tmp/extensions/amd_smi || return 1
            sudo chmod -R 777 .
            pip install .
            cd "$WORKDIR" || return 1
        fi
        # Keep the full (suffix-included) version so the later xformers case
        # statement can match "2.4.1+rocm6.1" whether or not we reinstalled.
        PYTORCH_VERSION=$(python -c "import torch; print(torch.__version__)")
        echo "[SYSTEM] PyTorch Version: $PYTORCH_VERSION, HIP Version: $HIP_VERSION"
        ;;
    *)
        ;;
esac
# Basic Python dependencies for inference (no CUDA extension builds here
# except CUBVH/pytorch3d, which compile against the already-installed torch).
if [ "$BASIC" = true ] ; then
pip install pillow imageio imageio-ffmpeg tqdm easydict opencv-python-headless scipy ninja rembg onnxruntime trimesh rtree open3d xatlas pyvista pymeshfix igraph transformers pygltflib psutil
# utils3d pinned to an exact commit for reproducibility.
pip install git+https://github.com/EasternJournalist/utils3d.git@9a4eb15e4021b67b12c460c7057d642626897ec8
# --no-build-isolation so the source build can see the installed torch.
pip install "git+https://github.com/facebookresearch/pytorch3d.git" --no-build-isolation
pip install geffnet
# Local extension shipped with the repo; assumes we run from the repo root.
pip install extensions/CUBVH --no-build-isolation
pip install gradio==4.44.1 gradio_litmodel3d==0.0.1
fi
# Training-only dependencies.
if [ "$TRAIN" = true ] ; then
    pip install tensorboard pandas lpips
    # Install the libjpeg headers BEFORE swapping pillow for pillow-simd:
    # pillow-simd builds from source and needs them, and the original order
    # (uninstall pillow first) could leave the env with no pillow at all if
    # the apt step failed.
    sudo apt install -y libjpeg-dev
    pip uninstall -y pillow
    pip install pillow-simd   # faster drop-in replacement for pillow
fi
if [ "$XFORMERS" = true ] ; then
    # Install the xformers wheel matching the detected torch/CUDA (or HIP)
    # combination; unsupported combinations are reported, not fatal.
    if [ "$PLATFORM" = "cuda" ] ; then
        if [ "$CUDA_VERSION" = "11.8" ] ; then
            case $PYTORCH_VERSION in
                2.0.1) pip install https://files.pythonhosted.org/packages/52/ca/82aeee5dcc24a3429ff5de65cc58ae9695f90f49fbba71755e7fab69a706/xformers-0.0.22-cp310-cp310-manylinux2014_x86_64.whl ;;
                2.1.0) pip install xformers==0.0.22.post7 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.1.1) pip install xformers==0.0.23 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.1.2) pip install xformers==0.0.23.post1 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.2.0) pip install xformers==0.0.24 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.2.1) pip install xformers==0.0.25 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.2.2) pip install xformers==0.0.25.post1 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.3.0) pip install xformers==0.0.26.post1 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.4.0) pip install xformers==0.0.27.post2 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.4.1) pip install xformers==0.0.28 --index-url https://download.pytorch.org/whl/cu118 ;;
                2.5.0) pip install xformers==0.0.28.post2 --index-url https://download.pytorch.org/whl/cu118 ;;
                *) echo "[XFORMERS] Unsupported PyTorch & CUDA version: $PYTORCH_VERSION & $CUDA_VERSION" ;;
            esac
        elif [ "$CUDA_VERSION" = "12.1" ] ; then
            case $PYTORCH_VERSION in
                2.1.0) pip install xformers==0.0.22.post7 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.1.1) pip install xformers==0.0.23 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.1.2) pip install xformers==0.0.23.post1 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.2.0) pip install xformers==0.0.24 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.2.1) pip install xformers==0.0.25 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.2.2) pip install xformers==0.0.25.post1 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.3.0) pip install xformers==0.0.26.post1 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.4.0) pip install xformers==0.0.27.post2 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.4.1) pip install xformers==0.0.28 --index-url https://download.pytorch.org/whl/cu121 ;;
                2.5.0) pip install xformers==0.0.28.post2 --index-url https://download.pytorch.org/whl/cu121 ;;
                *) echo "[XFORMERS] Unsupported PyTorch & CUDA version: $PYTORCH_VERSION & $CUDA_VERSION" ;;
            esac
        elif [ "$CUDA_VERSION" = "12.4" ] ; then
            case $PYTORCH_VERSION in
                2.5.0) pip install xformers==0.0.28.post2 --index-url https://download.pytorch.org/whl/cu124 ;;
                *) echo "[XFORMERS] Unsupported PyTorch & CUDA version: $PYTORCH_VERSION & $CUDA_VERSION" ;;
            esac
        else
            # Fix: report the full version that was actually tested above
            # (previously only the major component was printed).
            echo "[XFORMERS] Unsupported CUDA version: $CUDA_VERSION"
        fi
    elif [ "$PLATFORM" = "hip" ] ; then
        case $PYTORCH_VERSION in
            2.4.1\+rocm6.1) pip install xformers==0.0.28 --index-url https://download.pytorch.org/whl/rocm6.1 ;;
            *) echo "[XFORMERS] Unsupported PyTorch version: $PYTORCH_VERSION" ;;
        esac
    else
        echo "[XFORMERS] Unsupported platform: $PLATFORM"
    fi
fi
if [ "$FLASHATTN" = true ] ; then
    # CUDA: prebuilt wheel. HIP: build ROCm's fork from source.
    if [ "$PLATFORM" = "cuda" ] ; then
        pip install flash-attn --no-build-isolation
    elif [ "$PLATFORM" = "hip" ] ; then
        echo "[FLASHATTN] Prebuilt binaries not found. Building from source..."
        mkdir -p /tmp/extensions
        if [ ! -d /tmp/extensions/flash-attention ] ; then
            git clone --recursive https://github.com/ROCm/flash-attention.git /tmp/extensions/flash-attention
        fi
        # Guard the cd: if the clone above failed we must not run setup.py
        # from whatever directory we happen to be in.
        cd /tmp/extensions/flash-attention || return 1
        git checkout tags/v2.6.3-cktile
        GPU_ARCHS=gfx942 python setup.py install #MI300 series
        cd "$WORKDIR" || return 1
    else
        echo "[FLASHATTN] Unsupported platform: $PLATFORM"
    fi
fi
if [ "$KAOLIN" = true ] ; then
    # kaolin publishes wheels per torch/CUDA combination; the cu118 URLs only
    # differ by torch version, so those arms are collapsed into one.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[KAOLIN] Unsupported platform: $PLATFORM"
    else
        case "$PYTORCH_VERSION" in
            2.0.1|2.1.0|2.1.1|2.2.0|2.2.1|2.2.2)
                pip install kaolin -f "https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-${PYTORCH_VERSION}_cu118.html" ;;
            2.4.0)
                pip install kaolin -f "https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-2.4.0_cu121.html" ;;
            *) echo "[KAOLIN] Unsupported PyTorch version: $PYTORCH_VERSION" ;;
        esac
    fi
fi
if [ "$NVDIFFRAST" = true ] ; then
    # CUDA-only: clone once into /tmp and pip-install from the checkout.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[NVDIFFRAST] Unsupported platform: $PLATFORM"
    else
        mkdir -p /tmp/extensions
        [ -d /tmp/extensions/nvdiffrast ] || git clone https://github.com/NVlabs/nvdiffrast.git /tmp/extensions/nvdiffrast
        pip install /tmp/extensions/nvdiffrast --no-build-isolation
    fi
fi
if [ "$DIFFOCTREERAST" = true ] ; then
    # CUDA-only: clone (with submodules) once into /tmp and install from there.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[DIFFOCTREERAST] Unsupported platform: $PLATFORM"
    else
        mkdir -p /tmp/extensions
        [ -d /tmp/extensions/diffoctreerast ] || git clone --recurse-submodules https://github.com/JeffreyXiang/diffoctreerast.git /tmp/extensions/diffoctreerast
        pip install /tmp/extensions/diffoctreerast --no-build-isolation
    fi
fi
if [ "$MIPGAUSSIAN" = true ] ; then
    # CUDA-only: only the diff-gaussian-rasterization submodule is installed.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[MIPGAUSSIAN] Unsupported platform: $PLATFORM"
    else
        mkdir -p /tmp/extensions
        [ -d /tmp/extensions/mip-splatting ] || git clone https://github.com/autonomousvision/mip-splatting.git /tmp/extensions/mip-splatting
        pip install /tmp/extensions/mip-splatting/submodules/diff-gaussian-rasterization/ --no-build-isolation
    fi
fi
if [ "$VOX2SEQ" = true ] ; then
    # CUDA-only: copy the bundled extension out of the repo (source tree is
    # relative to the current directory) and install the copy.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[VOX2SEQ] Unsupported platform: $PLATFORM"
    else
        mkdir -p /tmp/extensions
        [ -d /tmp/extensions/vox2seq ] || cp -r extensions/vox2seq /tmp/extensions/vox2seq
        pip install /tmp/extensions/vox2seq --no-build-isolation
    fi
fi
if [ "$SPCONV" = true ] ; then
    # spconv ships one wheel per CUDA major release.
    if [ "$PLATFORM" != "cuda" ] ; then
        echo "[SPCONV] Unsupported platform: $PLATFORM"
    else
        case "$CUDA_MAJOR_VERSION" in
            11) pip install spconv-cu118 ;;
            12) pip install spconv-cu120 ;;
            *) echo "[SPCONV] Unsupported PyTorch CUDA version: $CUDA_MAJOR_VERSION" ;;
        esac
    fi
fi
# Demo-only dependencies.
# NOTE(review): huggingface_hub<0.25 is presumably pinned for gradio 4.44
# compatibility — confirm before relaxing.
if [ "$DEMO" = true ] ; then
    pip install gradio==4.44.1 gradio_litmodel3d==0.0.1 "huggingface_hub<0.25"
    # Patch gradio_client bug: get_type() crashes on boolean schemas
    # (github.com/gradio-app/gradio/issues/11084). The inline Python edits the
    # installed gradio_client/utils.py in place, inserting an
    # isinstance(schema, bool) early-return at the top of get_type(); the
    # substring check makes the patch idempotent across reruns.
    # Fix: the Python body had lost its indentation (syntax error as pasted);
    # restored here with all literals unchanged.
    python << 'PATCH_EOF'
import re
from pathlib import Path
import gradio_client.utils as m
p = Path(m.__file__)
t = p.read_text()
if 'isinstance(schema, bool)' not in t:
    t = re.sub(
        r'(def get_type\(schema[^)]*\):)\n(\s+)(if "const" in schema:)',
        r'\1\n\2if isinstance(schema, bool):\n\2 return "boolean"\n\2\3',
        t, count=1
    )
    p.write_text(t)
    print('[DEMO] Patched gradio_client for bool schema compatibility')
else:
    print('[DEMO] gradio_client already patched')
PATCH_EOF
fi