#!/bin/bash
# Fail fast: exit on error, on use of unset variables, and on pipeline failures.
set -euo pipefail

# Bind Ollama to localhost only and keep models under the user's home dir.
# Values may be overridden from the environment; these are the defaults.
export OLLAMA_HOST="${OLLAMA_HOST:-127.0.0.1:11434}"
export OLLAMA_MODELS="${OLLAMA_MODELS:-/home/user/.ollama/models}"
echo "==> Starting Ollama..."
# Run the server in the background and keep its PID so it can be
# waited on or terminated later (the original discarded $!).
ollama serve &
OLLAMA_PID=$!
# Wait until the Ollama HTTP API answers, or give up after MAX_RETRIES polls.
echo "==> Waiting for Ollama to be ready..."
MAX_RETRIES="${MAX_RETRIES:-30}"   # total poll attempts before aborting
RETRY_DELAY="${RETRY_DELAY:-2}"    # seconds to sleep between attempts
COUNT=0
# Poll the same host:port the server was told to bind to; the original
# hard-coded 127.0.0.1:11434 here, which could drift from OLLAMA_HOST.
until curl -s "http://${OLLAMA_HOST}/api/version" > /dev/null 2>&1; do
  COUNT=$((COUNT + 1))
  if [ "$COUNT" -ge "$MAX_RETRIES" ]; then
    # Diagnostics belong on stderr.
    echo "ERROR: Ollama did not start." >&2
    exit 1
  fi
  echo " ... attempt $COUNT/$MAX_RETRIES"
  sleep "$RETRY_DELAY"
done
echo "==> Ollama ready!"
# Pull the models this deployment serves. The list is a space-separated
# string so it can be overridden from the environment without editing
# the script; intentional word-splitting below (model tags contain no spaces).
MODELS="${MODELS:-deepseek-r1:latest qwen3-vl:latest}"
for model in $MODELS; do
  echo "==> Pulling ${model}..."
  ollama pull "$model"
done
echo "==> Models loaded:"
ollama list
echo "==> Starting proxy on :7860..."
# exec replaces this shell with uvicorn so signals (SIGTERM from the
# container runtime) reach the server directly instead of the wrapper.
exec uvicorn proxy:app --host 0.0.0.0 --port 7860