# CheckInAPI / Dockerfile
# Author: ethnmcl — "Create Dockerfile" (commit 77b1831, verified)
# syntax=docker/dockerfile:1
FROM python:3.11-slim

# Unbuffered stdout/stderr for live container logs; disable the pip wheel cache;
# keep the Hugging Face cache under /app so it is owned by the app user below.
ENV PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    HF_HOME=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface/transformers

# git lets pip resolve VCS requirements; update+install in one layer and
# purge the apt lists in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy only the manifest first so the (slow) dependency layer stays cached
# until requirements.txt itself changes — source edits won't reinstall deps.
COPY requirements.txt /app/requirements.txt

# Install CPU-only torch from the PyTorch index first (avoids pulling the
# multi-GB CUDA wheel from PyPI), then the rest of the requirements.
RUN pip install --no-cache-dir --index-url https://download.pytorch.org/whl/cpu torch==2.3.1+cpu \
    && pip install --no-cache-dir -r /app/requirements.txt

COPY main.py /app/main.py

# Run as a non-root user. UID 1000 matches the Hugging Face Spaces convention.
# The user must own /app so the HF_HOME cache above is writable at runtime
# (model download on first request would otherwise fail with EACCES).
RUN useradd --create-home --uid 1000 appuser \
    && mkdir -p /app/.cache/huggingface/transformers \
    && chown -R appuser:appuser /app
USER appuser

# Documentation only (doesn't publish the port); 7860 is the Spaces default.
EXPOSE 7860

ENV MODEL_ID="ethnmcl/checkin-gpt2"
# If the model is private, set HF_TOKEN as a Space secret — never bake it
# into the image via ENV/ARG (both are visible in `docker history`).

# Exec form so uvicorn is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]