Spaces:
Sleeping
Sleeping
Commit
·
b3c94ce
1
Parent(s):
15394bf
Initial Commit
Browse files
- .gitignore +1 -0
- Dockerfile +45 -0
- app.py +7 -0
- entrypoint.sh +15 -0
- requirements.txt +9 -0
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
/python
|
Dockerfile
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM ollama/ollama:latest

# Run everything out of a custom app directory so the non-root user
# has a writable HOME for Ollama/Streamlit state.
WORKDIR /app
ENV HOME=/app

# Add ollama user and ensure home + ollama directories exist and are owned.
# NOTE(review): the ollama/ollama base image may already define this
# user/group, in which case groupadd/useradd would fail — confirm.
RUN groupadd -r ollama && useradd -r -g ollama ollama \
    && mkdir -p /app/.ollama /app/.streamlit \
    && chown -R ollama:ollama /app

# Install OS packages (netcat-openbsd is used by entrypoint.sh to poll
# the Ollama port); clean the apt lists to keep the layer small.
RUN apt-get update && apt-get install -y \
    build-essential \
    curl \
    software-properties-common \
    git \
    python3 \
    python3-pip \
    python3-venv \
    netcat-openbsd \
    && rm -rf /var/lib/apt/lists/*

COPY requirements.txt ./
COPY src/ ./src/
COPY entrypoint.sh /usr/local/bin/entrypoint.sh

# Isolate Python dependencies in a virtualenv and put it first on PATH.
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

RUN pip install --no-cache-dir -r requirements.txt

RUN chmod +x /usr/local/bin/entrypoint.sh

# World-writable so the runtime user can persist model/app state.
RUN chmod -R 777 /app/.streamlit /app/.ollama

USER ollama

# Make the Ollama API listen on all interfaces inside the container.
ENV OLLAMA_HOST=0.0.0.0:11434

EXPOSE 11434 7860

# BUG FIX: the healthcheck previously probed Streamlit's default
# port/endpoint (http://localhost:8501/_stcore/health), but this image
# serves uvicorn/FastAPI on port 7860 (see entrypoint.sh), so the
# container could never report healthy. Probe the FastAPI root instead.
HEALTHCHECK CMD curl --fail http://localhost:7860/ || exit 1

ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
app.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI

# Minimal FastAPI application: a single JSON greeting at the root path.
app = FastAPI()


@app.get("/")
def greet_json():
    """Return a static JSON greeting for GET /."""
    return {"Hello": "World!"}
|
entrypoint.sh
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Container entrypoint: start the Ollama server in the background, wait
# until its API port is reachable, pre-pull models, then serve the
# FastAPI app with uvicorn on port 7860.
#
# FIX: fail fast on errors (the author marked only the large-model pull
# as best-effort via `|| true`; a failed required pull should stop the
# container rather than be silently ignored).
set -e

# Start the Ollama daemon in the background.
ollama serve &

# Poll until Ollama's API port (11434, see OLLAMA_HOST) accepts TCP
# connections; nc comes from netcat-openbsd installed in the Dockerfile.
while ! nc -z localhost 11434; do
  echo "⏳ Waiting for Ollama to start..."
  sleep 1
done

# Required embedding model — failure aborts via `set -e`.
ollama pull nomic-embed-text:latest

# Large chat model is best-effort; keep going if the pull fails.
ollama pull gpt-oss:20b || true

# NOTE(review): this tunnels port 7860 (the FastAPI app), not Ollama's
# 11434, despite the "Public Ollama URL" label — confirm intent.
python3 -c "from pyngrok import ngrok; print('🌐 Public Ollama URL:', ngrok.connect(7860, 'http'))" &

# FIX: exec so uvicorn replaces this shell as the container's main
# process and receives stop/term signals directly.
exec uvicorn app:app --host 0.0.0.0 --port 7860
|
requirements.txt
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fastapi
|
| 2 |
+
uvicorn[standard]
|
| 3 |
+
requests
|
| 4 |
+
pytz
|
| 5 |
+
langchain
|
| 6 |
+
langchain-ollama
|
| 7 |
+
langchain-community
|
| 8 |
+
pyngrok
|
| 9 |
+
ollama
|