Muhammad Taqi Raza committed on
Commit
82dd80b
·
1 Parent(s): f38bdba

adding Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +59 -0
Dockerfile CHANGED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# syntax=docker/dockerfile:1
# Base image: CUDA 12.1.1 + cuDNN 8 devel toolchain on Ubuntu 22.04.
# The "devel" variant is required because apex and mamba compile CUDA extensions.
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04

# Build-time only: suppress interactive apt prompts. ARG (not ENV) so the
# setting does not leak into the runtime environment of the final image.
ARG DEBIAN_FRONTEND=noninteractive

# Fail piped commands early instead of masking upstream errors (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# --- 1. SYSTEM DEPS ---
# update+install in one layer (stale-cache bug otherwise); list sorted for diffs.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        ffmpeg \
        git \
        libgl1-mesa-glx \
        libglib2.0-0 \
        libsm6 \
        libxext6 \
        libxrender-dev \
        wget \
    && rm -rf /var/lib/apt/lists/*

# --- 2. INSTALL CONDA ---
ENV CONDA_DIR=/opt/conda
RUN curl -fsSLo /tmp/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    bash /tmp/miniconda.sh -b -p "$CONDA_DIR" && \
    rm /tmp/miniconda.sh && \
    "$CONDA_DIR/bin/conda" clean -afy
ENV PATH=$CONDA_DIR/bin:$PATH

# --- 3. CREATE CONDA ENV ---
# Copy only the env spec first so source-code edits don't invalidate this layer.
WORKDIR /workspace
COPY lyra.yaml ./
RUN conda env create -f lyra.yaml && conda clean -afy
# conda is never "activated" inside RUN layers, so CONDA_PREFIX would otherwise
# be empty — export it explicitly; build steps below use it as CUDA_HOME.
ENV CONDA_PREFIX=$CONDA_DIR/envs/lyra
ENV PATH=$CONDA_PREFIX/bin:$PATH
# Convenience for interactive `docker exec ... bash` sessions.
RUN echo "conda activate lyra" >> ~/.bashrc

# --- 4. INSTALL PIP DEPENDENCIES ---
# Requirements files copied before the full source tree for cache reuse;
# --no-cache-dir keeps the pip download cache out of the image (DL3042).
COPY requirements_gen3c.txt requirements_lyra.txt ./
RUN pip install --no-cache-dir -r requirements_gen3c.txt && \
    pip install --no-cache-dir -r requirements_lyra.txt && \
    pip install --no-cache-dir "transformer-engine[pytorch]==1.12.0"

# --- 5. INSTALL APEX ---
# Shallow clone into /tmp and remove it in the same layer so the checkout
# does not persist in the image. NOTE(review): apex HEAD is unpinned —
# consider pinning a commit SHA for reproducible builds.
RUN git clone --depth 1 https://github.com/NVIDIA/apex /tmp/apex && \
    CUDA_HOME=$CONDA_PREFIX pip install -v --disable-pip-version-check --no-cache-dir \
        --no-build-isolation --config-settings "--build-option=--cpp_ext" \
        --config-settings "--build-option=--cuda_ext" /tmp/apex && \
    rm -rf /tmp/apex

# --- 6. INSTALL MOGE + MAMBA ---
# mamba is pinned to tag v2.2.4; MoGe HEAD is unpinned (upstream choice kept).
RUN pip install --no-cache-dir git+https://github.com/microsoft/MoGe.git && \
    pip install --no-cache-dir --no-build-isolation "git+https://github.com/state-spaces/mamba@v2.2.4"

# --- 7. PATCH HEADERS ---
# Symlink the pip-installed NVIDIA headers where the CUDA-extension builds
# expect them. Assumes the lyra env is Python 3.10 — confirm against lyra.yaml.
RUN ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/ && \
    ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10

# --- 8. COPY SOURCE ---
# Full source last: it changes most often. Keep a .dockerignore (.git, checkpoints, .env).
COPY . /workspace

# --- 9. DOWNLOAD CHECKPOINTS ---
# The HF token is supplied as a BuildKit secret so it never lands in a layer
# or in `docker history`:
#   docker build --secret id=hf_token,src=./hf_token.txt .
RUN --mount=type=secret,id=hf_token \
    huggingface-cli login --token "$(cat /run/secrets/hf_token)" && \
    python3 scripts/download_tokenizer_checkpoints.py --checkpoint_dir checkpoints/cosmos_predict1 --tokenizer_types CV8x8x8-720p && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_gen3c_checkpoints.py --checkpoint_dir checkpoints && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_lyra_checkpoints.py --checkpoint_dir checkpoints

# --- 10. PORT FOR GRADIO ---
# Documentation only; publish at run time with `docker run -p 7860:7860`.
EXPOSE 7860

# --- 11. RUN GRADIO ---
# Exec form: python is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "main_gradio.py"]