# syntax=docker/dockerfile:1

# Use an official Python runtime as a parent image
FROM python:3.12-slim

# Point the Hugging Face cache at a writable app-local directory.
# HF_HOME supersedes the deprecated TRANSFORMERS_CACHE; keep both so
# older transformers releases still honor the setting.
ENV HF_HOME=/app/.cache \
    TRANSFORMERS_CACHE=/app/.cache

# Set the working directory in the container (created automatically)
WORKDIR /app

# Install dependencies before copying the source tree so this layer is
# cached until requirements.txt itself changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt google-generativeai

# Copy the application source into the image last to maximize cache reuse
COPY . .

# Run as a dedicated non-root user; give it ownership of /app (including
# the HF cache dir) instead of a world-writable chmod 777.
RUN useradd --system --uid 10001 --home /app appuser && \
    mkdir -p /app/.cache && \
    chown -R appuser:appuser /app
USER appuser

# Document the service port (EXPOSE does not publish it)
EXPOSE 7860

# Start the uvicorn server on the fixed port 7860 (exec form: the server
# runs as PID 1 and receives SIGTERM directly from `docker stop`)
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]