# NOTE(review): the four lines below are page-scrape residue (repo header,
# author, commit message) — not shell. Commented out so the file parses; since
# the shebang is no longer line 1, run this script as `bash start.sh`.
# vps1122 / start.sh
# proti0070's picture
# Create start.sh
# af84cb4 verified
#!/bin/bash
# ─── dbus ────────────────────────────────────────────────────────────────────
# The system bus socket directory must exist before dbus-daemon can start.
mkdir -p /run/dbus
# Best-effort: failure is tolerated (bus already running, or dbus not installed).
dbus-daemon --system --fork 2>/dev/null || true
# Quote the expansion so an existing PATH containing spaces survives intact.
export PATH="$PATH:/usr/local/bin:/root/.local/bin"
# ─── Verify all dependencies are present ─────────────────────────────────────
# Check each runtime dependency; install the ones that can be fetched at
# startup, and hard-fail only on nginx (no sane way to install it here).
echo "=== Verifying dependencies ==="
# Explicit if/else instead of `check && ok || { install; }`: with the && ||
# form the install branch would also run if the success echo itself failed.
if python3 -c "import fastapi, uvicorn, httpx" 2>/dev/null; then
  echo "βœ… Python deps OK"
else
  echo "❌ Python deps missing β€” installing..."
  pip3 install --break-system-packages fastapi "uvicorn[standard]" httpx
fi
if command -v ollama >/dev/null 2>&1; then
  echo "βœ… ollama OK"
else
  echo "❌ ollama missing β€” installing..."
  # NOTE(review): piping a remote script into sh is a trust decision — the
  # installer runs with this process's privileges.
  curl -fsSL https://ollama.com/install.sh | sh
fi
if command -v ttyd >/dev/null 2>&1; then
  echo "βœ… ttyd OK"
else
  echo "❌ ttyd missing β€” installing..."
  # Pinned release binary (x86_64 only — TODO confirm target arch).
  curl -fsSL https://github.com/tsl0922/ttyd/releases/download/1.7.4/ttyd.x86_64 \
    -o /usr/local/bin/ttyd && chmod +x /usr/local/bin/ttyd
fi
if command -v nginx >/dev/null 2>&1; then
  echo "βœ… nginx OK"
else
  echo "❌ nginx missing"
  exit 1
fi
echo ""
# ─── Start Ollama ─────────────────────────────────────────────────────────────
echo "=== Starting Ollama ==="
ollama serve &
OLLAMA_PID=$!
# Poll the local API until it answers (at most 30 s). Brace expansion replaces
# the external `seq` call.
ollama_ready=0
for i in {1..30}; do
  if curl -sf http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
    echo "βœ… Ollama is up"
    ollama_ready=1
    break
  fi
  echo " Waiting for Ollama... ($i/30)"
  sleep 1
done
# Don't abort on timeout — the watchdog below keeps retrying — but say so
# instead of silently falling through.
if [ "$ollama_ready" -ne 1 ]; then
  echo "[warn] Ollama not ready after 30s β€” continuing; watchdog will retry" >&2
fi
# Optional: pull a default model if none exist
# MODELS=$(curl -s http://127.0.0.1:11434/api/tags | python3 -c "import sys,json; print(len(json.load(sys.stdin).get('models',[])))")
# if [ "$MODELS" = "0" ]; then
# echo "=== Pulling default model (llama3.2:1b) ==="
# ollama pull llama3.2:1b
# fi
# ─── Start FastAPI app ────────────────────────────────────────────────────────
echo "=== Starting app.py (port 8000) ==="
# Fail fast if the app directory is missing (SC2164) — otherwise app.py would
# be launched from whatever the current directory happens to be.
cd /workspace || { echo "❌ /workspace missing β€” cannot start app.py" >&2; exit 1; }
python3 app.py &
APP_PID=$!
# Give the server a moment to bind its port before dependents start.
sleep 3
# ─── Start ttyd terminal ──────────────────────────────────────────────────────
# Web terminal bound to loopback only; nginx proxies it to the outside.
echo "=== Starting ttyd (port 8080) ==="
ttyd --port 8080 --interface 127.0.0.1 --writable bash &
TTYD_PID=$!
# Brief pause so ttyd is listening before nginx starts proxying.
sleep 2
# ─── Start nginx ──────────────────────────────────────────────────────────────
echo "=== Starting nginx ==="
# Foreground mode inside a background job, so $! gives a watchable PID.
nginx -g "daemon off;" &
NGINX_PID=$!
# Startup banner; single-quoted delimiter so nothing inside is expanded.
cat <<'BANNER'

βœ… All services running:
 🌐 Main UI β†’ port 7860 (nginx)
 πŸ“Š Monitor β†’ /monitor
 πŸ€– Ollama β†’ /api/...
 πŸ’» Terminal β†’ / (root)

BANNER
# ─── Watchdog ─────────────────────────────────────────────────────────────────
# Poll every 10 s and restart any service whose process has exited.
# `kill -0` only probes that the PID exists — it delivers no signal.
while true; do
  if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
    echo "[watchdog] Restarting Ollama..."
    ollama serve &
    OLLAMA_PID=$!
  fi
  if ! kill -0 "$APP_PID" 2>/dev/null; then
    echo "[watchdog] Restarting app.py..."
    # Explicit subshell keeps the cd from leaking; `exec` makes the subshell
    # *become* python3, so $! is the PID we actually want to watch (the bare
    # `cd … && python3 … &` form yields the wrapper subshell's PID instead).
    ( cd /workspace && exec python3 app.py ) &
    APP_PID=$!
  fi
  if ! kill -0 "$TTYD_PID" 2>/dev/null; then
    echo "[watchdog] Restarting ttyd..."
    ttyd --port 8080 --interface 127.0.0.1 --writable bash &
    TTYD_PID=$!
  fi
  if ! kill -0 "$NGINX_PID" 2>/dev/null; then
    echo "[watchdog] Restarting nginx..."
    nginx -g "daemon off;" &
    NGINX_PID=$!
  fi
  sleep 10
done