# CheckInAPI / Dockerfile
# Author: ethnmcl — "Update Dockerfile" (commit 9fccefa, verified, 866 bytes)
# syntax=docker/dockerfile:1
FROM python:3.11-slim

# Unbuffered logs for live container output, no pip cache baked into layers,
# and a Hugging Face cache rooted in /data (persistent storage in Spaces).
ENV PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    HF_HOME=/data/huggingface

# Dedicated non-root runtime user. UID 1000 matches the user Hugging Face
# Spaces runs Docker containers as.
RUN useradd --create-home --uid 1000 user

# Writable, persistent cache directory for model downloads, owned by the
# runtime user instead of world-writable (replaces chmod -R 777).
RUN mkdir -p /data/huggingface && chown -R user:user /data

# git lets pip resolve VCS requirements, if requirements.txt has any.
RUN apt-get update \
    && apt-get install -y --no-install-recommends git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy only the manifest first so the dependency layer stays cached when
# just main.py changes.
COPY requirements.txt /app/requirements.txt

# Install CPU-only torch first (much smaller than the CUDA wheel), then
# the remaining requirements.
RUN pip install --no-cache-dir --index-url https://download.pytorch.org/whl/cpu torch==2.3.1+cpu \
    && pip install --no-cache-dir -r /app/requirements.txt

COPY main.py /app/main.py

# Documentation only — Spaces routes traffic to 7860 regardless.
EXPOSE 7860

# Default model repo; override at runtime if needed. If the model repo is
# private, set a Space Secret named HF_TOKEN (Settings → Secrets) — never
# bake the token into the image.
ENV MODEL_ID="ethnmcl/checkin-gpt2"

# Drop root privileges for the running service.
USER user

CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]