# Dockerfile — Streamlit app with an embedded Ollama server (Hugging Face Space)
# 1. Base image: Python 3.11 (slim)
FROM python:3.11-slim

# hf_transfer speeds up Hugging Face Hub downloads.
# BUGFIX: the version spec MUST be quoted — an unquoted `hf_transfer>=0.1.6`
# is parsed by the RUN shell as `pip install hf_transfer > =0.1.6`, i.e. it
# installs an unpinned package and redirects stdout to a file named '=0.1.6'.
RUN pip install --no-cache-dir "hf_transfer>=0.1.6"

# 2. System packages and git-lfs
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
    git-lfs \
    && rm -rf /var/lib/apt/lists/*
# Previous command (returned HTTP 404):
# RUN curl -L --fail https://ollama.com/download/ollama-linux-amd64 -o /usr/local/bin/ollama \
#     && chmod +x /usr/local/bin/ollama

# Fixed: install Ollama via the official install script.
RUN curl -fsSL https://ollama.com/install.sh | sh
# Unify Ollama's home/model storage under /app/.ollama.
# OLLAMA_MODELS is the documented model-directory override.
# NOTE(review): OLLAMA_HOME is presumably honored as well — confirm against
# the Ollama environment-variable docs.
ENV OLLAMA_HOME=/app/.ollama
ENV OLLAMA_MODELS=/app/.ollama
# World-writable because Spaces may run the container as a non-root user.
RUN mkdir -p /app/.ollama && chmod -R 777 /app/.ollama
# Pre-pull the model at build time: start a temporary Ollama server, wait for
# its API to answer, pull the model into the image layer, then stop the server.
# The model tag is parameterized (build with --build-arg OLLAMA_MODEL=... to
# override); the default matches the original hard-coded gemma2:9b.
ARG OLLAMA_MODEL=gemma2:9b
ENV OLLAMA_MODEL=${OLLAMA_MODEL}
RUN set -eux; \
    ollama serve & pid=$!; \
    i=0; \
    # Poll the tags endpoint for up to 60 s before giving up.
    until curl -fsS http://127.0.0.1:11434/api/tags >/dev/null 2>&1; do \
      i=$((i+1)); \
      [ "$i" -lt 60 ] || { echo "Ollama not ready" >&2; kill "$pid"; exit 1; }; \
      sleep 1; \
    done; \
    ollama pull "${OLLAMA_MODEL}"; \
    # Stop the temporary server and reap it so the layer finishes cleanly.
    kill "$pid" || true; \
    wait "$pid" || true
# 3. Working directory
WORKDIR /app

# 4. Python dependencies — copied before the sources so this layer is cached
#    unless requirements.txt itself changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# 5. Application sources
COPY . /app

# 6. Git LFS: best-effort fetch of large files. NOTE(review): `COPY .`
#    usually does not bring a usable .git directory into the image, so
#    `git lfs pull` is expected to fail — the `|| true` keeps the build going.
RUN git lfs install && git lfs pull || true
# 7. Pre-create Streamlit's config dir with open permissions so a non-root
#    runtime user does not hit PermissionError on first write.
ENV HOME=/app
ENV STREAMLIT_HOME=/app/.streamlit
RUN mkdir -p /app/.streamlit && chmod -R 777 /app/.streamlit

# Pin all ML/cache directories to /tmp (writable regardless of runtime user).
ENV HF_HOME=/tmp/hf-home \
    TRANSFORMERS_CACHE=/tmp/hf-cache \
    HUGGINGFACE_HUB_CACHE=/tmp/hf-cache \
    TORCH_HOME=/tmp/torch-cache \
    XDG_CACHE_HOME=/tmp/xdg-cache
RUN mkdir -p /tmp/hf-home /tmp/hf-cache /tmp/torch-cache /tmp/xdg-cache
# Endpoint the app uses to reach the in-container Ollama server.
ENV OLLAMA_HOST=http://127.0.0.1:11434

EXPOSE 8501
# (Exposing Ollama externally is optional; harmless either way.)
EXPOSE 11434

# Final command: start ollama, wait up to 120 s for readiness, ensure the
# model is present, then run Streamlit.
# BUGFIX: the original pulled ${OLLAMA_MODEL} with no fallback; if that
# variable is unset the pull runs with an empty argument, fails, and the
# container exits — default to gemma2:9b (the model baked in at build time).
# `exec` replaces the shell so Streamlit receives SIGTERM directly.
CMD bash -lc '\
  ollama serve & \
  for i in {1..120}; do curl -fsS http://127.0.0.1:11434/api/version >/dev/null && break || sleep 1; done; \
  ollama pull "${OLLAMA_MODEL:-gemma2:9b}" || exit 1; \
  exec streamlit run app.py --server.address=0.0.0.0 --server.port="${PORT:-8501}"'