|
|
|
|
|
|
|
|
|
|
|
|
|
|
FROM python:3.10-slim AS model-builder |
|
|
|
|
|
|
|
|
# Builder-stage tooling. --no-install-recommends avoids pulling optional
# packages (smaller layer, faster build); the apt list cache is removed in the
# same layer so it never persists in the image.
# NOTE(review): none of wget/curl/git is referenced by the steps below (all
# downloads go through huggingface_hub) — confirm they are needed before
# keeping them.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        git \
        wget \
    && rm -rf /var/lib/apt/lists/*
|
|
|
|
|
|
|
|
RUN pip install huggingface_hub |
|
|
|
|
|
|
|
|
RUN mkdir -p /models |
|
|
|
|
|
|
|
|
# Write the model-download helper script.
# printf is used instead of `echo` because POSIX leaves echo's backslash-escape
# handling implementation-defined (dash's echo expands \n, bash's builtin does
# not), whereas printf's format-string escapes are standardized.
# Leading spaces on the continued lines are significant: they become the Python
# indentation of the generated script.
# Model weights are downloaded unguarded (a failure aborts the build — intended),
# sample images best-effort inside try/except.
# NOTE(review): local_dir_use_symlinks is deprecated in newer huggingface_hub
# releases (ignored with a warning) — confirm against the pinned hub version.
RUN printf 'from huggingface_hub import hf_hub_download\n\
\n\
print("Downloading model files...")\n\
hf_hub_download("seamo-ai/marina-species-v1", "marina-benthic-33k.pt", local_dir="/models", local_dir_use_symlinks=False)\n\
hf_hub_download("seamo-ai/marina-species-v1", "marina-benthic-33k.names", local_dir="/models", local_dir_use_symlinks=False)\n\
\n\
print("Downloading sample images...")\n\
sample_images = [\n\
    "crab.png", "fish.png", "fish_2.png", "fish_3.png", "fish_4.png", "fish_5.png",\n\
    "flat_fish.png", "flat_red_fish.png", "jelly.png", "jelly_2.png", "jelly_3.png",\n\
    "puff.png", "red_fish.png", "red_fish_2.png", "scene.png", "scene_2.png",\n\
    "scene_3.png", "scene_4.png", "scene_5.png", "scene_6.png", "soft_coral.png",\n\
    "starfish.png", "starfish_2.png"\n\
]\n\
\n\
for img in sample_images:\n\
    try:\n\
        hf_hub_download("seamo-ai/marina-species-v1", f"images/{img}", local_dir="/models", local_dir_use_symlinks=False)\n\
        print(f"Downloaded {img}")\n\
    except Exception as e:\n\
        print(f"Failed to download {img}: {e}")\n\
\n\
print("Download complete!")\n' > /tmp/download_models.py
|
|
|
|
|
|
|
|
RUN python3 /tmp/download_models.py |
|
|
|
|
|
|
|
|
FROM python:3.10-slim |
|
|
|
|
|
|
|
|
# Runtime shared libraries (ffmpeg for video, X11/GLib libs commonly required
# by OpenCV, libgomp1 for OpenMP). --no-install-recommends keeps the image
# small; apt list cache removed in the same layer.
# NOTE(review): libxrender-dev (compile-time headers) replaced with the runtime
# package libxrender1 — nothing is compiled in this stage.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ffmpeg \
        libglib2.0-0 \
        libgomp1 \
        libsm6 \
        libxext6 \
        libxrender1 \
    && rm -rf /var/lib/apt/lists/*
|
|
|
|
|
|
|
|
# Create an unprivileged user (fixed UID 1000, with a home directory) so the
# app does not run as root.
RUN useradd -m -u 1000 user

# Every following instruction — and the container at runtime — runs as this user.
USER user
|
|
|
|
|
|
|
|
# ~/.local/bin is where `pip install --user` places console scripts
# (e.g. uvicorn), so it must be on PATH for the CMD below to resolve.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# App root inside the non-root user's home (WORKDIR creates it if missing).
WORKDIR $HOME/app
|
|
|
|
|
|
|
|
# Runtime configuration, grouped in a single ENV instruction:
#   HF_HUB_OFFLINE=1          — models are baked in at build time; never hit the Hub.
#   TRANSFORMERS_NO_ADVISORY_WARNINGS=1 — silence transformers advisory warnings.
#   PYTHONPATH                — make the app package importable as `app.*`.
#   TORCH_HOME / HF_HOME      — keep framework caches under the non-root home.
ENV HF_HUB_OFFLINE=1 \
    TRANSFORMERS_NO_ADVISORY_WARNINGS=1 \
    PYTHONPATH=$HOME/app \
    TORCH_HOME=$HOME/.cache/torch \
    HF_HOME=$HOME/.cache/huggingface
|
|
|
|
|
|
|
|
# Upgrade pip first: this layer is independent of requirements.txt, so it stays
# cached when requirements change (in the original order the COPY invalidated
# it needlessly). --user is explicit because we run as a non-root user and the
# system site-packages is not writable.
RUN pip install --no-cache-dir --user --upgrade pip

# Copy only the dependency manifest before the app source so the (slow)
# install layer is rebuilt only when requirements.txt changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --user -r requirements.txt
|
|
|
|
|
|
|
|
COPY --chown=user --from=model-builder /models $HOME/app/models |
|
|
|
|
|
|
|
|
# Application code and assets, owned by the runtime user. Copied after the
# dependency install so source edits don't invalidate the pip layer.
COPY --chown=user ./app app

COPY --chown=user ./templates templates
COPY --chown=user ./static static
|
|
|
|
|
|
|
|
# Stage the bundled sample images into the static tree. One logical step, so
# one layer. Best-effort by design: a missing images/ directory or an empty
# glob is tolerated (`|| true`), matching the builder's best-effort downloads.
RUN mkdir -p static/images/samples \
    && if [ -d "$HOME/app/models/images" ]; then \
         cp $HOME/app/models/images/*.png static/images/samples/ 2>/dev/null || true; \
       fi
|
|
|
|
|
|
|
|
RUN mkdir -p $HOME/.cache/huggingface $HOME/.cache/torch |
|
|
|
|
|
|
|
|
EXPOSE 7860 |
|
|
|
|
|
|
|
|
# Probe the app's /health endpoint. python:3.10-slim ships no curl, so the
# original curl-based probe always failed (exit 1 every run → container
# permanently "unhealthy"); the Python stdlib probe needs nothing extra.
# urlopen raises on connection errors and HTTP >= 400, which maps to exit 1.
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 \
    CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:7860/health', timeout=25)" || exit 1
|
|
|
|
|
|
|
|
|
|
|
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860"] |
|
|
|