# Source: Hugging Face Space by MightyOctopus — commit 66ef536 ("Create Dockerfile")
# ===============================
# Base image: includes Python 3.9, PyTorch (CPU), and Transformers
# ===============================
# Pin the tag — `latest` is mutable and makes builds non-reproducible.
# NOTE(review): 4.18.0 is the newest published tag of this image (what
# `latest` currently resolves to) — confirm, or pin by digest
# (huggingface/transformers-pytorch-cpu@sha256:...) for full reproducibility.
FROM huggingface/transformers-pytorch-cpu:4.18.0

# Set working directory (created automatically if it does not exist)
WORKDIR /app
# ===============================
# Pre-download and cache model weights
# ===============================
# Baked into its own image layer *before* requirements.txt is copied, so
# editing the dependency list does not invalidate this large download layer.
# cache_dir='/app/model' matches the cache env vars declared further down,
# so the app can load the weights from the image instead of the network.
# NOTE(review): this runs with the Transformers version shipped in the base
# image, which may differ from whatever requirements.txt pins — confirm the
# cached snapshot is compatible with the runtime version.
RUN python -c "from transformers import AutoTokenizer, AutoModelForSequenceClassification; \
model='fakespot-ai/roberta-base-ai-text-detection-v1'; \
AutoTokenizer.from_pretrained(model, cache_dir='/app/model'); \
AutoModelForSequenceClassification.from_pretrained(model, cache_dir='/app/model')"
# ===============================
# Copy dependency list and install packages
# ===============================
# Manifest is copied separately from the app source so the pip layer stays
# cached until requirements.txt itself changes.
COPY requirements.txt .
# --no-cache-dir keeps pip's download cache out of the image layer
RUN pip install --no-cache-dir -r requirements.txt
# ===============================
# Copy application files
# ===============================
# NOTE(review): COPY . . pulls in the entire build context — add a
# .dockerignore (.git, __pycache__, .env, local checkpoints, ...) to keep
# the image small and avoid leaking local files into it.
COPY . .
# ===============================
# Environment configuration
# ===============================
# The weights above were cached with cache_dir='/app/model'. Transformers
# resolves its cache as TRANSFORMERS_CACHE -> HF_HUB_CACHE -> $HF_HOME/hub,
# and TRANSFORMERS_CACHE is deprecated. Set HF_HUB_CACHE explicitly so the
# runtime lookup hits /app/model: with HF_HOME alone the default cache would
# be /app/model/hub, missing the pre-downloaded snapshot and re-downloading
# the model at container startup. TRANSFORMERS_CACHE is kept for older
# Transformers versions that still read it.
# PYTHONUNBUFFERED=1 flushes stdout/stderr immediately so `docker logs`
# shows output in real time.
ENV HF_HOME=/app/model \
    HF_HUB_CACHE=/app/model \
    TRANSFORMERS_CACHE=/app/model \
    PYTHONUNBUFFERED=1
# ===============================
# Expose Gradio default port and start the app
# ===============================
# Gradio binds to 127.0.0.1 by default, which is unreachable from outside
# the container; GRADIO_SERVER_NAME makes it listen on all interfaces.
# An explicit server_name passed inside app.py, if any, still takes
# precedence over this env var.
ENV GRADIO_SERVER_NAME=0.0.0.0
# EXPOSE is documentation only — publish with `docker run -p 7860:7860`
EXPOSE 7860
# NOTE(review): the container runs as root; consider creating a non-root
# USER (with ownership of /app) before this point.
# Exec-form CMD: python runs as PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "app.py"]