File size: 2,517 Bytes
82dd80b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7550512
 
 
 
82dd80b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
# syntax=docker/dockerfile:1
# Base image with CUDA 12.1 toolchain (cuDNN 8, Ubuntu 22.04). The -devel
# variant is required: apex and mamba compile CUDA extensions at build time.
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04

# Build-time only: keep apt non-interactive without leaking the setting into
# the runtime environment (ARG, not ENV).
ARG DEBIAN_FRONTEND=noninteractive

# --- 1. SYSTEM DEPS ---
# --no-install-recommends keeps the layer small; apt lists are removed in the
# same layer so they never persist in the image. Packages sorted for diffability.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        ffmpeg \
        git \
        libgl1-mesa-glx \
        libglib2.0-0 \
        libsm6 \
        libxext6 \
        libxrender-dev \
        wget \
    && rm -rf /var/lib/apt/lists/*

# --- 2. INSTALL CONDA ---
ENV CONDA_DIR=/opt/conda
# -f makes curl fail on HTTP errors instead of saving an error page as the installer.
RUN curl -fsSLo /tmp/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    bash /tmp/miniconda.sh -b -p "$CONDA_DIR" && \
    rm /tmp/miniconda.sh && \
    "$CONDA_DIR/bin/conda" clean -afy
ENV PATH=$CONDA_DIR/bin:$PATH
# pipefail so a failure upstream of a pipe is not masked (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# WORKDIR creates /workspace if missing; no mkdir needed.
WORKDIR /workspace

# Accept the Anaconda channel terms of service before any env is created.
RUN conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/main && \
    conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/r

# --- 3. CREATE CONDA ENV ---
# Copy only the env spec so this expensive layer stays cached until lyra.yaml
# itself changes; editing application source no longer rebuilds the env.
COPY lyra.yaml ./
RUN conda env create -f lyra.yaml && \
    conda clean -afy
ENV PATH=/opt/conda/envs/lyra/bin:$PATH
RUN echo "conda activate lyra" >> ~/.bashrc

# --- 4. INSTALL PIP DEPENDENCIES ---
# Manifests copied separately for the same cache reason as lyra.yaml above.
COPY requirements_gen3c.txt requirements_lyra.txt ./
RUN pip install --no-cache-dir -r requirements_gen3c.txt && \
    pip install --no-cache-dir -r requirements_lyra.txt && \
    pip install --no-cache-dir transformer-engine[pytorch]==1.12.0

# --- 5. INSTALL APEX ---
# Clone is removed in the same layer so the source tree does not bloat the image.
# NOTE(review): clone is unpinned (tracks apex master) — consider pinning a commit.
RUN git clone https://github.com/NVIDIA/apex && \
    CUDA_HOME=$CONDA_PREFIX pip install -v --disable-pip-version-check --no-cache-dir \
        --no-build-isolation --config-settings "--build-option=--cpp_ext" \
        --config-settings "--build-option=--cuda_ext" ./apex && \
    rm -rf apex

# --- 6. INSTALL MOGE + MAMBA ---
RUN pip install --no-cache-dir git+https://github.com/microsoft/MoGe.git && \
    pip install --no-cache-dir --no-build-isolation "git+https://github.com/state-spaces/mamba@v2.2.4"

# --- 7. COPY FILES ---
# Full source copied last: edits here invalidate only the layers below.
COPY . /workspace

# --- 8. PATCH HEADERS ---
# Symlink the NVIDIA pip wheels' headers into the conda include dirs so
# from-source CUDA extension builds can resolve them.
RUN ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/ && \
    ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10

# --- 9. DOWNLOAD CHECKPOINTS ---
# SECURITY: the Hugging Face token is supplied as a BuildKit secret so it never
# lands in an image layer or `docker history`. Build with:
#   docker build --secret id=hf_token,src=/path/to/token .
RUN --mount=type=secret,id=hf_token \
    huggingface-cli login --token "$(cat /run/secrets/hf_token)" && \
    python3 scripts/download_tokenizer_checkpoints.py --checkpoint_dir checkpoints/cosmos_predict1 --tokenizer_types CV8x8x8-720p && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_gen3c_checkpoints.py --checkpoint_dir checkpoints && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_lyra_checkpoints.py --checkpoint_dir checkpoints

# --- 10. PORT FOR GRADIO ---
# Documentation only (does not publish the port): the Gradio app listens on 7860.
EXPOSE 7860

# --- 11. RUN GRADIO ---
# Exec form: python is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "main_gradio.py"]