# NOTE(review): the three lines below are Hugging Face Spaces UI residue
# ("Spaces: / Running / Running") captured with the file, not Dockerfile
# instructions — kept here as comments so the file parses.
# Spaces:
# Running
# Running
# ── Base ──────────────────────────────────────────────────────────
# Python 3.10 is pinned deliberately — it sidesteps every known 3.13 problem:
#   * audioop was removed in 3.13 (breaks gradio 4.x)
#   * torch cp313 wheels only exist for >=2.5.0 (much larger download)
#   * tokenizers has cp313 ABI issues
# 3.10 is stable, well-tested, and every dependency ships prebuilt wheels.
FROM python:3.10-slim

# OS tooling: list alphabetized one-per-line for diffability, recommends
# skipped, and the apt lists removed in the SAME layer so the package cache
# never persists into the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
    && rm -rf /var/lib/apt/lists/*
# ── Non-root user (required by HF Spaces) ────────────────────────
# Spaces runs containers as UID 1000; create that user, switch to it, and
# make sure user-level pip installs (~/.local/bin) are on PATH.
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH
WORKDIR /home/user/app
# ── Install deps ──────────────────────────────────────────────────
# Copy ONLY the dependency manifest first: the pip layers below stay cached
# until requirements.txt itself changes. (Previously app.py was copied here
# too, so every source edit re-ran all three installs, torch included.)
COPY --chown=user requirements.txt .

RUN pip install --no-cache-dir --upgrade pip

# torch (CPU wheels) in its own layer: largest dependency, changes least
# often — caching it separately pays off the most.
RUN pip install --no-cache-dir torch==2.3.1 --index-url https://download.pytorch.org/whl/cpu

# Everything else — no version conflicts on Python 3.10.
RUN pip install --no-cache-dir -r requirements.txt

# Application source LAST: editing app.py now invalidates only this layer.
COPY --chown=user app.py .
# ── Pre-download BioGPT weights into image (no cold-start delay) ──
# Baking the weights into the image trades image size for instant startup;
# the download lands in $HOME/.cache/huggingface (the default HF cache).
RUN python -c "\
from transformers import BioGptTokenizer, BioGptForCausalLM; \
print('Pre-downloading microsoft/biogpt ...'); \
BioGptTokenizer.from_pretrained('microsoft/biogpt'); \
BioGptForCausalLM.from_pretrained('microsoft/biogpt'); \
print('Done.')"
# ── Runtime ───────────────────────────────────────────────────────
# HF_HOME is the current cache variable; TRANSFORMERS_CACHE is kept pointing
# at the same path for back-compat, but it is deprecated in transformers
# >=4.36 and slated for removal in v5.
# OPENAI_API_KEY is a deliberate dummy, NOT a leaked secret — presumably a
# library here (crewai, given the telemetry opt-out) insists the variable
# exists even though inference is local BioGPT only; confirm against app.py.
ENV GRADIO_SERVER_NAME=0.0.0.0 \
    GRADIO_SERVER_PORT=7860 \
    PYTHONUNBUFFERED=1 \
    HF_HOME=/home/user/.cache/huggingface \
    TRANSFORMERS_CACHE=/home/user/.cache/huggingface \
    CREWAI_TELEMETRY_OPT_OUT=1 \
    OTEL_SDK_DISABLED=true \
    OPENAI_API_KEY=sk-no-key-needed

# Documentation only (Spaces routes 7860 regardless), but states the contract.
EXPOSE 7860

# Cheap liveness probe against the Gradio HTTP endpoint (curl is installed in
# the base layer). Generous start-period: loading BioGPT on CPU is slow.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
    CMD curl -fsS http://localhost:7860/ || exit 1

# Exec form: python is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "app.py"]