#!/bin/bash
# Container entrypoint:
#   1. Start an internal Ollama server bound to loopback (11434).
#   2. Poll its /api/version endpoint until healthy (with retry budget).
#   3. Ensure the granite4:350m model is available locally.
#   4. exec the FastAPI proxy on the public port (7860) as the
#      foreground process so it receives container signals directly.
#
# -e: abort on unhandled errors; -u: unset vars are errors;
# -o pipefail: a pipeline fails if any stage fails.
set -euo pipefail

echo "==> Starting Ollama (internal on 11434)..."
# Loopback-only bind: Ollama is reachable solely through the proxy.
export OLLAMA_HOST=127.0.0.1:11434
export OLLAMA_MODELS=/home/user/.ollama/models

# Run Ollama in the background and remember its PID so we can detect
# an early crash instead of blindly polling for the full retry budget.
ollama serve &
OLLAMA_PID=$!

echo "==> Waiting for Ollama..."
MAX_RETRIES=30
COUNT=0
until curl -s http://127.0.0.1:11434/api/version > /dev/null 2>&1; do
  # Fail fast if the server process already exited — otherwise we would
  # waste MAX_RETRIES * 2s before reporting a generic startup failure.
  if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
    echo "ERROR: Ollama process exited during startup." >&2
    exit 1
  fi
  COUNT=$((COUNT + 1))
  if [ "$COUNT" -ge "$MAX_RETRIES" ]; then
    echo "ERROR: Ollama did not start." >&2
    exit 1
  fi
  echo " ... attempt $COUNT/$MAX_RETRIES"
  sleep 2
done

echo "==> Ollama ready!"

# Pull the model only when it is not already cached under OLLAMA_MODELS
# (the image may ship with it pre-baked).
if ! ollama list | grep -q "granite4"; then
  echo "==> Pulling granite4:350m..."
  ollama pull granite4:350m
fi

echo "==> Models:"
ollama list

# NOTE(review): the API key is hardcoded in this log line; prefer
# reading it from an env var/secret if it is meant to be private.
echo "==> Starting FastAPI proxy on 0.0.0.0:7860 (API key: connectkey)..."
# exec replaces this shell with uvicorn so signals (SIGTERM on
# container stop) reach the server for a clean shutdown.
exec uvicorn proxy:app --host 0.0.0.0 --port 7860