# syntax=docker/dockerfile:1
# PNA-Assistant / Dockerfile
# Author: Lincoln Gombedza
# Force CPU-only PyTorch to avoid 8GB CUDA libs on t3.small
FROM python:3.11-slim

# System deps (one layer: update + install + list cleanup, packages sorted):
#   curl     — used by the HEALTHCHECK probe below
#   gcc, g++ — compile any sdist wheels pulled in by requirements.txt
# NOTE(review): versions are unpinned (hadolint DL3008); acceptable for a
# slim app image, but pin if reproducible builds are required.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        g++ \
        gcc \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /app

# Install CPU-only PyTorch FIRST so that sentence-transformers (installed
# later via requirements.txt) finds torch already satisfied and does not
# pull the massive CUDA-enabled build (~8GB -> ~800MB).
# --index-url (the official PyTorch CPU install form, rather than
# --extra-index-url) resolves torch and its dependencies exclusively from
# the CPU wheel index, so a CUDA wheel can never be selected from PyPI.
RUN pip install --no-cache-dir \
        --index-url https://download.pytorch.org/whl/cpu \
        torch==2.3.1+cpu torchvision==0.18.1+cpu
# Install remaining Python deps. requirements.txt is copied on its own first
# so this layer stays cached until the requirements change — editing app
# source does not trigger a reinstall.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application source last (the most frequently changing layer).
# NOTE(review): broad `COPY . .` — confirm a .dockerignore excludes .git,
# .env, caches, and build output so they are not baked into the image.
COPY . .
# Streamlit config. COPY creates missing parent directories itself, so the
# former `RUN mkdir -p /app/.streamlit` layer is redundant and dropped.
COPY .streamlit/config.toml /app/.streamlit/config.toml

# Documentation only (does not publish the port); Streamlit's default port.
EXPOSE 8501

# NOTE(review): the container runs as root — consider adding a non-root
# USER once the app's write paths under /app are confirmed.
# Probe Streamlit's built-in health endpoint. -fsS: fail on HTTP errors,
# silent progress, but still print real errors. --start-period gives the
# app time to boot before failures count against --retries.
HEALTHCHECK --interval=30s --timeout=5s --start-period=30s --retries=3 \
  CMD curl -fsS http://localhost:8501/_stcore/health || exit 1
# Exec-form CMD: streamlit runs as PID 1 and receives SIGTERM directly on
# `docker stop`. headless + gatherUsageStats=false suit a container runtime.
CMD ["streamlit", "run", "streamlit_app.py", \
"--server.port=8501", \
"--server.address=0.0.0.0", \
"--server.headless=true", \
"--browser.gatherUsageStats=false"]