Spaces:
Update Dockerfile
Dockerfile  CHANGED  (+21 -68)

@@ -1,90 +1,43 @@
-# 1. Set the Python version to 3.11
 FROM python:3.11-slim

-#
-RUN pip install --no-cache-dir hf_transfer>=0.1.6
-RUN apt-get update && apt-get install -y \
-    curl \
-    git \
-    git-lfs \
-    && rm -rf /var/lib/apt/lists/*

 # Install Ollama
 RUN curl -fsSL https://ollama.com/install.sh | sh

-#
-ENV OLLAMA_MODEL=gemma2:9b
-ENV OLLAMA_HOME=/app/.ollama
-ENV OLLAMA_MODELS=/app/.ollama
-
-# Create the full Ollama directory structure and set permissions
-RUN mkdir -p /app/.ollama/models \
-    && mkdir -p /app/.ollama/manifests \
-    && mkdir -p /app/.ollama/manifests/registry.ollama.ai/library \
-    && mkdir -p /app/.ollama/blobs \
-    && chmod -R 777 /app/.ollama
-
-# During the build, start the server briefly to pre-pull just the model, then shut it down
-RUN set -eux; \
-    ollama serve & pid=$!; \
-    i=0; \
-    until curl -fsS http://127.0.0.1:11434/api/tags >/dev/null 2>&1; do \
-        i=$((i+1)); \
-        [ "$i" -lt 60 ] || { echo "Ollama not ready"; kill "$pid"; exit 1; }; \
-        sleep 1; \
-    done; \
-    ollama pull gemma2:9b; \
-    kill "$pid" || true
-
-# 3. Set the working directory
 WORKDIR /app
-
-# 6. Install the Python libraries
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
-
-# 4. Copy the source code
 COPY . /app
-
-# 5. Set up Git LFS and download the large files
 RUN git lfs install && git lfs pull || true

-#
 ENV HOME=/app
-ENV STREAMLIT_HOME=/app/.streamlit
 RUN mkdir -p /app/.streamlit && chmod -R 777 /app/.streamlit

-
-    XDG_CACHE_HOME=/tmp/xdg-cache
-
-RUN mkdir -p /tmp/hf-home /tmp/hf-cache /tmp/torch-cache /tmp/xdg-cache

 ENV OLLAMA_HOST=http://127.0.0.1:11434
 EXPOSE 8501
-EXPOSE 11434

-#
-USER root

-# Final run command
 CMD bash -lc '\
-
     ollama serve & \
-    for i in {1..120}; do \
-        if curl -fsS http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then \
-            break; \
-        fi; \
-        echo "Waiting for Ollama... ($i/120)"; \
-        sleep 1; \
-    done; \
-    echo "Ensuring model is available: $OLLAMA_MODEL"; \
-    ollama pull "$OLLAMA_MODEL" || { echo "Failed to pull model"; exit 1; }; \
-    echo "Starting Streamlit app"; \
-    streamlit run app.py --server.address=0.0.0.0 --server.port=${PORT:-8501}'
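
A side effect of the pre-pull block removed above, for anyone rebuilding the old image: with OLLAMA_MODELS=/app/.ollama, the build-time ollama pull gemma2:9b writes several gigabytes of model blobs into an image layer. The oversized layer is visible with docker history on a local build; the tag old-space is hypothetical, not part of this commit:

docker history old-space --format '{{.Size}}\t{{.CreatedBy}}' | head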
 FROM python:3.11-slim

+# base deps
+RUN pip install --no-cache-dir hf_transfer>=0.1.6 && \
+    apt-get update && apt-get install -y build-essential curl git git-lfs && \
+    rm -rf /var/lib/apt/lists/*

 # Install Ollama
 RUN curl -fsSL https://ollama.com/install.sh | sh

+# Install the app
 WORKDIR /app
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 COPY . /app
 RUN git lfs install && git lfs pull || true

+# Streamlit folder permissions
 ENV HOME=/app
 RUN mkdir -p /app/.streamlit && chmod -R 777 /app/.streamlit

+# ✅ Pin the Ollama store to /tmp (safe on Spaces)
+ENV OLLAMA_HOME=/tmp/ollama
+ENV OLLAMA_MODELS=/tmp/ollama
+RUN mkdir -p /tmp/ollama && chmod -R 777 /tmp/ollama

+# ✅ Runtime settings (pin the model + a generous timeout)
 ENV OLLAMA_HOST=http://127.0.0.1:11434
+ENV OLLAMA_MODEL=gemma2:9b
+ENV OLLAMA_TIMEOUT=300
+
 EXPOSE 8501

+# ✅ Run: serve → health check → gemma2:9b pull → Streamlit

 CMD bash -lc '\
+    set -e; \
+    export OLLAMA_HOME=/tmp/ollama OLLAMA_MODELS=/tmp/ollama; \
+    echo "Starting Ollama with model: ${OLLAMA_MODEL}"; \
     ollama serve & \
+    for i in {1..240}; do curl -sf http://127.0.0.1:11434/api/version >/dev/null && break || sleep 1; done; \
+    ollama pull "${OLLAMA_MODEL}" || { echo "[ERR] ollama pull failed"; exit 1; }; \
+    exec streamlit run "app (15).py" --server.address=0.0.0.0 --server.port=${PORT:-8501}'
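
To exercise the new startup chain end to end outside Spaces, a local build and run is enough; the image tag ollama-streamlit-space and container name space-test below are hypothetical, not part of the commit:

docker build -t ollama-streamlit-space .
docker run --rm --name space-test -p 8501:8501 ollama-streamlit-space
# in a second shell, once the logs show the model pull finishing:
docker exec space-test curl -sf http://127.0.0.1:11434/api/version
# then open http://localhost:8501 for the Streamlit UI

Two caveats: OLLAMA_HOST keeps Ollama bound to 127.0.0.1 inside the container, so it is reachable via docker exec but not via -p port publishing; and the unquoted hf_transfer>=0.1.6 in the base-deps RUN is parsed by the shell as an output redirection, so pip installs the latest hf_transfer rather than enforcing the version bound (quoting 'hf_transfer>=0.1.6' would restore it).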