File size: 841 Bytes
1f2477d
 
 
8b2bb87
 
1f2477d
 
 
 
8b2bb87
 
1f2477d
 
8b2bb87
1f2477d
 
8b2bb87
1f2477d
 
 
 
 
 
8b2bb87
1f2477d
8b2bb87
1f2477d
 
 
 
 
8b2bb87
1f2477d
 
8b2bb87
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
#!/bin/bash
# Entrypoint: start a private Ollama server on localhost, wait for it to
# become ready, make sure the granite4 model is cached, then replace this
# shell with the FastAPI proxy (uvicorn) as the foreground process.
#
# Required on PATH: ollama, curl, uvicorn. Expects proxy.py next to CWD.
set -euo pipefail

echo "==> Starting Ollama (internal on 11434)..."
export OLLAMA_HOST=127.0.0.1:11434
export OLLAMA_MODELS=/home/user/.ollama/models

ollama serve &
OLLAMA_PID=$!
# If we exit before the final exec (startup failure, pull failure), do not
# leave the background server orphaned.
trap 'kill "$OLLAMA_PID" 2>/dev/null || true' EXIT

# Wait for ollama ready
echo "==> Waiting for Ollama..."
readonly MAX_RETRIES=30
COUNT=0
until curl -s http://127.0.0.1:11434/api/version > /dev/null 2>&1; do
    COUNT=$((COUNT + 1))
    if [ "$COUNT" -ge "$MAX_RETRIES" ]; then
        echo "ERROR: Ollama did not start." >&2
        exit 1
    fi
    # Fail fast if the server process already died instead of burning
    # through all retries against a dead endpoint.
    if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
        echo "ERROR: ollama serve exited unexpectedly." >&2
        exit 1
    fi
    echo "  ... attempt $COUNT/$MAX_RETRIES"
    sleep 2
done

echo "==> Ollama ready!"

# Pull model if not cached
if ! ollama list | grep -q "granite4"; then
    echo "==> Pulling granite4:350m..."
    ollama pull granite4:350m
fi

echo "==> Models:"
ollama list

# Clear the cleanup trap: from here on Ollama must outlive this shell,
# which is about to be replaced by uvicorn.
trap - EXIT
echo "==> Starting FastAPI proxy on 0.0.0.0:7860 (API key: connectkey)..."
exec uvicorn proxy:app --host 0.0.0.0 --port 7860