|
|
|
|
|
|
|
|
|
|
|
# Base image ships CUDA-enabled PyTorch plus transformers preinstalled.
# Pinned by digest for fully reproducible builds; once a digest is present,
# the `latest` tag is informational only and the digest wins.
FROM huggingface/transformers-pytorch-gpu:latest@sha256:4c7317881a534b22e18add49c925096fa902651fb0571c69f3cad58af3ea2c0f

# All subsequent paths (caches, config, app code) live under /app.
# WORKDIR creates the directory if missing.
WORKDIR /app
|
|
|
|
|
|
|
|
# System tools (alphabetical):
#   build-essential - compilers/headers for pip packages with native extensions
#   curl            - used by the HEALTHCHECK probe below
#   git             - allows pip installs from git URLs in requirements.txt
# --no-install-recommends keeps the layer minimal (hadolint DL3015);
# the apt list cleanup runs in the same layer so the cache never
# persists into the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
 && rm -rf /var/lib/apt/lists/*
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sanity check: print the interpreter version shipped by the base image;
# the build fails loudly here if python3 is missing or broken.
RUN python3 -V
|
|
|
|
|
|
|
|
# Copy only the dependency manifest first so the (slow) pip layer below
# stays cached until requirements.txt itself changes.
COPY requirements.txt ./

# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip3 install --no-cache-dir -r requirements.txt
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Point every Hugging Face cache location at one app-local directory so the
# models downloaded at build time are baked into the image and found at
# runtime. Grouped into a single ENV instruction (one layer, one concern).
# TRANSFORMERS_CACHE is deprecated in favor of HF_HOME in recent transformers
# releases but is kept here for compatibility with older library versions.
ENV HF_HOME=/app/.cache/huggingface \
    HF_HUB_CACHE=/app/.cache/huggingface \
    HF_DATASETS_CACHE=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface \
    SENTENCE_TRANSFORMERS_HOME=/app/.cache/huggingface
|
|
|
|
|
# Pre-download models at build time so container startup does not hit the
# network. Only the downloader script and its config are copied here, so this
# large, slow layer stays cached unless one of these two files changes.
COPY download_models.py ./
COPY src/config/settings.yaml ./src/config/settings.yaml

# The recursive chmod on /app/.cache already covers /app/.cache/huggingface,
# so the duplicate chmod of the subdirectory was removed.
RUN mkdir -p /app/.cache/huggingface && \
    chmod -R 755 /app/.cache && \
    python3 download_models.py
|
|
|
|
|
|
|
|
# Copy the remaining application source last so code edits do not invalidate
# the dependency/model layers above. NOTE(review): verify a .dockerignore
# exists that excludes .git, local caches, and .env files — otherwise they
# are baked into this layer.
COPY . .
|
|
|
|
|
# Create all writable runtime directories in one layer, write a minimal
# Streamlit server config, and open permissions so the container still works
# when the runtime assigns an arbitrary non-root UID (e.g. Hugging Face
# Spaces / OpenShift-style platforms) — which appears to be why no USER
# directive is set; confirm against the deployment target.
RUN mkdir -p \
        /app/.streamlit \
        /app/.cache/streamlit \
        /app/.cache/huggingface \
        /app/conversations \
        /app/feedback && \
    printf '[server]\nport = 8501\nheadless = true\nenableCORS = false\nenableXsrfProtection = false\n\n[browser]\ngatherUsageStats = false\n' > /app/.streamlit/config.toml && \
    chmod -R 777 \
        /app/.streamlit \
        /app/.cache/streamlit \
        /app/.cache/huggingface \
        /app/conversations \
        /app/feedback
|
|
|
|
|
|
|
|
# Streamlit runtime configuration: point config and user-data paths at the
# app-local writable locations created above, and opt out of usage-stats
# collection. Grouped into a single ENV instruction.
ENV STREAMLIT_CONFIG_HOME=/app/.streamlit \
    STREAMLIT_BROWSER_GATHER_USAGE_STATS=false \
    STREAMLIT_USER_BASE_PATH=/app/.cache/streamlit
|
|
|
|
|
|
|
|
# Documentation only (EXPOSE does not publish the port): Streamlit serves
# on 8501, matching the server config and the ENTRYPOINT flags.
EXPOSE 8501
|
|
|
|
|
|
|
|
# Probe Streamlit's built-in health endpoint. -f fails on HTTP errors,
# -s suppresses curl's progress meter (which otherwise pollutes the health
# log visible via `docker inspect`), -S still surfaces real transport errors.
# The 40s start-period allows time for model loading before probes count.
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
  CMD curl -fsS http://localhost:8501/_stcore/health || exit 1
|
|
|
|
|
|
|
|
# Build-time GPU smoke test: logs whether CUDA is visible to torch.
# `|| true` because build hosts typically have no GPU attached, so this
# check must never fail the build.
RUN python3 -c "import torch; print(torch.cuda.is_available(), torch.cuda.get_device_name(0))" || true
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Exec-form ENTRYPOINT so Streamlit runs as PID 1 and receives SIGTERM
# directly on `docker stop`. The flags mirror /app/.streamlit/config.toml;
# binding 0.0.0.0 makes the server reachable from outside the container.
ENTRYPOINT ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.headless", "true"]
|
|
|