# --- Hugging Face Spaces page residue (not Dockerfile instructions) ---
# Spaces: Sleeping / Sleeping
# File size: 2,661 Bytes
# Blame gutter commits: 9ccc31e 4a532ec ed6d136 2a2d987 76d3cad 5beeb8e d9c92a0 80f45eb bcbdfe4
# (line-number gutter 1–99 omitted)
# ============================================================
# 🍽️ Trainera Food Recognition API
# Production-Ready Multilingual AI Food Recognition
# ============================================================

# Slim Debian-based Python: small footprint, glibc-compatible wheels.
FROM python:3.11-slim

# Image metadata — one LABEL instruction, identical key/value pairs.
LABEL maintainer="Trainera Team" \
      description="AI Food Recognition API with OpenAI translations (101+ food categories)" \
      version="2.0.0"
# Non-root user (HF Spaces requirement); UID 1000 is the platform standard.
RUN useradd --create-home --uid 1000 user

# WORKDIR creates /app if missing; all relative paths below resolve here.
WORKDIR /app
# System libraries for ML and image processing (OpenCV/Pillow runtime libs,
# compilers for packages that build from source). update + install share one
# layer so the package index can never go stale, and the apt cache is purged
# in the SAME layer so it never persists in the image. Packages are sorted
# alphabetically for diffability.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        g++ \
        gcc \
        libgl1-mesa-dev \
        libglib2.0-0 \
        libglib2.0-dev \
        libgomp1 \
        libsm6 \
        libxext6 \
        libxrender-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# requirements.txt is copied on its own so the dependency layers below stay
# cached until the file itself changes (Docker layer-caching optimization).
COPY --chown=user:user requirements.txt .

# Phase 1: pin NumPy to the 1.x line BEFORE anything else — transformers is
# not compatible with NumPy 2. --no-cache-dir keeps pip's cache out of the layer.
RUN python -m pip install --no-cache-dir --upgrade pip \
    && python -m pip install --no-cache-dir "numpy>=1.24.0,<2.0.0"

# Phase 2: everything else resolves against the already-installed NumPy 1.x.
RUN python -m pip install --no-cache-dir -r requirements.txt
# Application code last — code edits never invalidate the dependency layers.
COPY --chown=user:user app.py .

# Pre-create HF/Torch cache directories and hand them to the runtime user
# (created while still root; chown in the same layer avoids an extra one).
RUN mkdir -p /home/user/.cache /tmp/transformers /tmp/huggingface /tmp/torch \
    && chown -R user:user /home/user/.cache /tmp/transformers /tmp/huggingface /tmp/torch

# Everything from here on — including the HEALTHCHECK and CMD — runs unprivileged.
USER user
# ============================================================
# Environment Variables
# ============================================================
# Python: unbuffered stdout/stderr so logs stream live; no .pyc files.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1

# 7860 is the Hugging Face Spaces standard port; HOME matches the useradd above.
ENV PORT=7860 \
    HOME=/home/user

# Hugging Face / Torch cache locations — must point at the directories
# chown'd to `user` earlier in the build.
# NOTE(review): TRANSFORMERS_CACHE is deprecated in newer transformers
# releases in favor of HF_HOME; kept here for older versions — confirm
# against the pinned transformers version in requirements.txt.
ENV HF_HOME=/tmp/huggingface \
    TRANSFORMERS_CACHE=/tmp/transformers \
    XDG_CACHE_HOME=/tmp \
    TORCH_HOME=/tmp/torch \
    HF_HUB_DISABLE_TELEMETRY=1 \
    HF_HUB_ENABLE_HF_TRANSFER=0

# OpenAI API key: the previous `${OPENAI_API_KEY:-}` had no matching ARG,
# so Docker ALWAYS expanded it to an empty string at build time — the
# effective value is written explicitly here (same runtime environment).
# Supply the real key at runtime (HF Spaces secret or `docker run -e ...`);
# never bake secrets into an image via ENV/ARG — they end up in the layers.
ENV OPENAI_API_KEY=""

# USDA API key: likewise `${USDA_API_KEY:-DEMO_KEY}` always resolved to
# DEMO_KEY at build time; override at runtime for a real key.
ENV USDA_API_KEY=DEMO_KEY

# Thread limits sized for small shared-CPU Spaces instances; tokenizer
# parallelism off to avoid fork warnings/deadlocks under the web server.
ENV TOKENIZERS_PARALLELISM=false \
    OMP_NUM_THREADS=2 \
    MKL_NUM_THREADS=2
# Document the service port (EXPOSE alone does not publish anything).
EXPOSE 7860

# Probe /health every 30s; the 90s start period leaves room for model loading
# before failed probes count against the container.
HEALTHCHECK --interval=30s --timeout=10s --start-period=90s --retries=3 \
    CMD curl -f http://localhost:7860/health || exit 1

# Exec-form CMD: python is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "app.py"]