Update Dockerfile
Browse files — Dockerfile: +17 −7
Dockerfile
CHANGED
|
@@ -1,34 +1,44 @@
|
|
| 1 |
FROM python:3.10-slim
|
| 2 |
|
|
|
|
| 3 |
ENV PYTHONDONTWRITEBYTECODE=1
|
| 4 |
ENV PYTHONUNBUFFERED=1
|
| 5 |
-
ENV
|
|
|
|
| 6 |
ENV NLTK_DATA=/app/nltk_data
|
| 7 |
ENV PORT=8080
|
| 8 |
ENV GUNICORN_CMD_ARGS="--workers=1 --threads=1 --timeout=180 --bind=0.0.0.0:8080"
|
| 9 |
|
|
|
|
| 10 |
WORKDIR /app
|
| 11 |
|
|
|
|
| 12 |
RUN apt-get update && apt-get install -y \
|
| 13 |
gcc \
|
| 14 |
python3-dev \
|
| 15 |
curl \
|
| 16 |
&& rm -rf /var/lib/apt/lists/*
|
| 17 |
|
| 18 |
-
|
| 19 |
-
|
|
|
|
| 20 |
|
|
|
|
| 21 |
COPY requirements.txt .
|
| 22 |
RUN pip install --no-cache-dir -r requirements.txt && \
|
| 23 |
python -c "import nltk; nltk.download('punkt', download_dir='/app/nltk_data')" && \
|
| 24 |
python -c "from transformers import AutoTokenizer, AutoModelForSequenceClassification; \
|
| 25 |
-
AutoTokenizer.from_pretrained('priyabrat/AI.or.Human.text.classification'); \
|
| 26 |
-
AutoModelForSequenceClassification.from_pretrained('priyabrat/AI.or.Human.text.classification')"
|
| 27 |
|
|
|
|
| 28 |
COPY . .
|
| 29 |
|
| 30 |
-
|
| 31 |
-
|
|
|
|
|
|
|
| 32 |
|
|
|
|
| 33 |
EXPOSE 8080
|
| 34 |
CMD ["gunicorn", "app:app"]
|
|
|
|
# syntax=docker/dockerfile:1
FROM python:3.10-slim

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Model/tokenizer cache baked into the image at build time so the container
# starts without network access. TRANSFORMERS_CACHE kept alongside HF_HOME
# for compatibility with older transformers releases that still read it.
ENV HF_HOME=/app/hf_cache
ENV TRANSFORMERS_CACHE=/app/hf_cache
ENV NLTK_DATA=/app/nltk_data
ENV PORT=8080
ENV GUNICORN_CMD_ARGS="--workers=1 --threads=1 --timeout=180 --bind=0.0.0.0:8080"

# Set working directory (created automatically; no mkdir needed)
WORKDIR /app

# System deps: gcc/python3-dev to build any wheels without prebuilt binaries,
# curl for ad-hoc probing. --no-install-recommends keeps the layer minimal;
# the apt list cleanup is in the same layer so it never reaches the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    gcc \
    python3-dev \
    && rm -rf /var/lib/apt/lists/*

# Create the non-root runtime user and the cache directories up front, owned
# by that user — least privilege instead of the previous world-writable
# `chmod -R 777`, and no duplicate-layer `chown -R /app` after COPY.
RUN adduser --disabled-password --gecos '' appuser && \
    mkdir -p /app/hf_cache /app/nltk_data && \
    chown -R appuser:appuser /app

# Copy only the requirements manifest first so the expensive install layer is
# cached until requirements.txt changes; pre-download the NLTK tokenizer data
# and the HF classification model at build time, then hand the downloaded
# files to the runtime user in the same layer.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt && \
    python -c "import nltk; nltk.download('punkt', download_dir='/app/nltk_data')" && \
    python -c "from transformers import AutoTokenizer, AutoModelForSequenceClassification; \
    AutoTokenizer.from_pretrained('priyabrat/AI.or.Human.text.classification', cache_dir='/app/hf_cache'); \
    AutoModelForSequenceClassification.from_pretrained('priyabrat/AI.or.Human.text.classification', cache_dir='/app/hf_cache')" && \
    chown -R appuser:appuser /app/hf_cache /app/nltk_data

# Copy application code already owned by the runtime user (COPY --chown avoids
# a second chown layer that would duplicate every file).
# NOTE(review): ensure a .dockerignore excludes .git, caches, and .env files.
COPY --chown=appuser:appuser . .

# Drop root before the process starts
USER appuser

# Expose port (documentation only) and define default command; gunicorn reads
# its bind/worker settings from GUNICORN_CMD_ARGS above.
EXPOSE 8080
CMD ["gunicorn", "app:app"]