# Use the official Python minimal image
FROM python:3.10-slim

# 1. Install system tools
#  - ca-certificates: required for the HTTPS calls below once recommends are off
#  - curl:            fetches the Ollama installer and probes its health endpoint
#  - git + git-lfs:   clone the HF dataset at runtime, resolving large LFS files
#  - build-essential: compiles pip packages that ship no prebuilt wheel
#  - zstd:            decompression tool (presumably for dataset/model archives — TODO confirm)
# --no-install-recommends keeps the layer minimal (DL3015); the apt list cache
# is removed in the SAME layer so it never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    git \
    git-lfs \
    zstd \
    && rm -rf /var/lib/apt/lists/* \
    && git lfs install # <--- Initialize LFS hooks (runs as root, system-wide)
# 2. Install Ollama (Root)
# NOTE(review): piping an unpinned remote script to `sh` is not reproducible
# and trusts ollama.com at build time — consider pinning OLLAMA_VERSION or
# verifying a checksum of the installer before executing it.
RUN curl -fsSL https://ollama.com/install.sh | sh
# 3. Setup User
# Create a non-root user with UID 1000 (the conventional HF Spaces UID) and
# switch to it — all later layers and the runtime process run unprivileged.
RUN useradd -m -u 1000 user
USER user
# Put the user's pip-installed console scripts (~/.local/bin) on PATH.
ENV HOME=/home/user \
	PATH=/home/user/.local/bin:$PATH
# 4. Workdir
# Expands to /home/user/app (absolute); created automatically if missing.
WORKDIR $HOME/app
# 5. Requirements
# Copy only the manifest first so the pip layer is cached until
# requirements.txt itself changes (source edits won't trigger a reinstall).
COPY --chown=user requirements.txt $HOME/app/
RUN pip install --no-cache-dir -r requirements.txt
# 6. Copy Code
# Copied after deps for cache-friendliness; --chown keeps files owned by the
# non-root user. NOTE(review): ensure a .dockerignore excludes .git/.env etc.
COPY --chown=user . $HOME/app
# 7. Startup
# Runtime bootstrap: clone the dataset (git-lfs resolves the big files),
# start the Ollama server, pull the models, then launch Streamlit.
#
# Fixes over the previous version:
#  - The single `&` used to background `ollama serve` binds LOOSER than `&&`,
#    so it backgrounded the ENTIRE clone/mv/serve list: a failed clone was
#    silently ignored and the `until` health-poll spun forever. Wrapping the
#    server in a subshell `( ... & )` backgrounds only the server, and any
#    earlier failure now aborts the chain (container exits with an error).
#  - `exec` hands PID 1 to streamlit so it receives SIGTERM on `docker stop`.
#
# NOTE(review): downloading the dataset and pulling multi-GB models on every
# container start is slow and network-dependent — consider baking them into
# the image or a persistent volume.
EXPOSE 7860
CMD rm -rf data_download && \
    git clone https://huggingface.co/datasets/teofizzy/mshauri-data data_download && \
    mv data_download/mshauri_fedha_v6.db . && \
    mv data_download/mshauri_fedha_chroma_db . && \
    rm -rf data_download && \
    echo "Starting Ollama..." && \
    ( ollama serve & ) && \
    until curl -sf http://localhost:11434/api/tags > /dev/null; do sleep 2; done && \
    echo "Pulling Fallback Model (7B)..." && \
    ollama pull qwen2.5:7b && \
    ollama pull nomic-embed-text && \
    echo "Models Ready. Launching App..." && \
    exec streamlit run src/app.py --server.port 7860 --server.address 0.0.0.0