# syntax=docker/dockerfile:1
# Hugging Face Space Dockerfile (Space status at capture time: Paused)
# Python 3.10 slim: small footprint while remaining compatible with
# the ML libraries installed from requirements.txt
FROM python:3.10-slim

# All subsequent relative paths resolve under /app
WORKDIR /app
# Install system dependencies (git/wget are needed by the HF/litgpt tooling).
# --no-install-recommends skips optional packages (smaller image, DL3015);
# apt lists are removed in the SAME layer so the cleanup actually shrinks it.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        git \
        wget \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# Pre-create writable cache/config directories in one layer.
# NOTE(review): chmod 777 looks intentional — Hugging Face Spaces runs the
# container under an arbitrary non-root UID, so these paths must be
# world-writable; do not tighten without verifying the runtime UID.
RUN mkdir -p /app/.cache/huggingface /app/.git && \
    chmod 777 /app/.cache/huggingface /app/.git

# Point Hugging Face tooling and git at the writable locations above.
# HF_HOME is the current canonical variable; TRANSFORMERS_CACHE is kept
# only for older transformers releases that still read it (deprecated).
ENV HF_HOME=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface/hub \
    GIT_CONFIG_GLOBAL=/app/.git/config
# Copy only the dependency manifest first: the pip layer below stays cached
# until requirements.txt itself changes, even when application code changes.
COPY requirements.txt .

# --no-cache-dir keeps pip's download cache out of the image layer
RUN pip install --no-cache-dir -r requirements.txt
# Writable destination for the model download below.
# 777 because Hugging Face Spaces runs the container as an arbitrary UID.
RUN mkdir -p /app/main/checkpoints && \
    chmod 777 /app/main/checkpoints
# Download the model at build time so container start-up is fast.
# The token comes in via a BuildKit secret mount: it is never baked into a
# layer and does not appear in `docker history`. Build with:
#   docker build --secret id=HF_TOKEN,src=<token-file> .
# Quoting the command substitution and the variable avoids word-splitting
# if the token file ever contains unexpected whitespace (SC2046/SC2086).
RUN --mount=type=secret,id=HF_TOKEN,mode=0444,required=true \
    export HF_TOKEN="$(cat /run/secrets/HF_TOKEN)" && \
    echo "Starting model download..." && \
    litgpt download mistralai/Mistral-7B-Instruct-v0.3 \
        --access_token "${HF_TOKEN}" \
        --checkpoint_dir /app/main/checkpoints \
    || { echo "Download failed with status $?"; exit 1; }
# Application source goes in last: code edits do not invalidate the
# expensive dependency-install and model-download layers above.
COPY . .

# Runtime configuration for the serving process, grouped in one instruction.
# 0.0.0.0 so the server is reachable from outside the container.
ENV LLM_ENGINE_HOST=0.0.0.0 \
    LLM_ENGINE_PORT=7860 \
    MODEL_PATH=/app/main/checkpoints/mistralai/Mistral-7B-Instruct-v0.3

# Hugging Face Spaces routes external traffic to port 7860 (documentation
# only — EXPOSE does not publish the port by itself)
EXPOSE 7860

# Exec-form CMD: python runs as PID 1 and receives SIGTERM on stop
CMD ["python", "main/main.py"]