cometapii committed on
Commit
8b2bb87
·
verified ·
1 Parent(s): 1901180

Update entrypoint.sh

Browse files
Files changed (1) hide show
  1. entrypoint.sh +11 -20
entrypoint.sh CHANGED
@@ -1,45 +1,36 @@
1
  #!/bin/bash
2
  set -e
3
 
4
- echo "==> Starting Ollama server on port 7860..."
5
- export OLLAMA_HOST=0.0.0.0:7860
6
  export OLLAMA_MODELS=/home/user/.ollama/models
7
 
8
- # Start ollama in foreground
9
  ollama serve &
10
- OLLAMA_PID=$!
11
 
12
- # Wait for ollama to be ready
13
- echo "==> Waiting for Ollama to be ready..."
14
  MAX_RETRIES=30
15
  COUNT=0
16
- until curl -s http://localhost:7860/api/version > /dev/null 2>&1; do
17
  COUNT=$((COUNT + 1))
18
  if [ $COUNT -ge $MAX_RETRIES ]; then
19
- echo "ERROR: Ollama did not start in time."
20
  exit 1
21
  fi
22
  echo " ... attempt $COUNT/$MAX_RETRIES"
23
  sleep 2
24
  done
25
 
26
- echo "==> Ollama is ready!"
27
 
28
- # Pull model if not cached (fallback in case build layer failed)
29
  if ! ollama list | grep -q "granite4"; then
30
  echo "==> Pulling granite4:350m..."
31
  ollama pull granite4:350m
32
  fi
33
 
34
- echo "==> Model available:"
35
  ollama list
36
 
37
- echo "==> Ollama API running at http://0.0.0.0:7860"
38
- echo "==> Endpoints:"
39
- echo " POST /api/generate"
40
- echo " POST /api/chat"
41
- echo " GET /api/tags"
42
- echo " POST /api/embeddings"
43
-
44
- # Keep process alive
45
- wait $OLLAMA_PID
 
#!/bin/bash
# Container entrypoint: start Ollama on an internal loopback port, wait until
# its API answers, make sure the granite4 model is cached, then exec a
# FastAPI proxy on the public port 7860.
#
# Required on PATH: ollama, curl, uvicorn (with proxy.py importable).
set -euo pipefail

echo "==> Starting Ollama (internal on 11434)..."
# Bind Ollama to loopback only; the proxy is the sole public surface.
export OLLAMA_HOST=127.0.0.1:11434
export OLLAMA_MODELS=/home/user/.ollama/models

ollama serve &
OLLAMA_PID=$!

# Poll the version endpoint until the server is ready (up to ~60s).
echo "==> Waiting for Ollama..."
readonly MAX_RETRIES=30
COUNT=0
until curl -s http://127.0.0.1:11434/api/version > /dev/null 2>&1; do
  COUNT=$((COUNT + 1))
  if [ "$COUNT" -ge "$MAX_RETRIES" ]; then
    echo "ERROR: Ollama did not start." >&2
    # Don't leave the half-started server running after we bail out.
    kill "$OLLAMA_PID" 2>/dev/null || true
    exit 1
  fi
  echo " ... attempt $COUNT/$MAX_RETRIES"
  sleep 2
done

echo "==> Ollama ready!"

# Pull the model only when it is not already present in OLLAMA_MODELS
# (fallback in case the image build layer did not cache it).
if ! ollama list | grep -q "granite4"; then
  echo "==> Pulling granite4:350m..."
  ollama pull granite4:350m
fi

echo "==> Models:"
ollama list

# NOTE(review): the original banner echoed the API key ("connectkey") into
# the container logs — a secret leak — so the key is omitted here.
echo "==> Starting FastAPI proxy on 0.0.0.0:7860..."
# exec replaces this shell so uvicorn receives container signals (SIGTERM)
# directly and becomes the main process.
exec uvicorn proxy:app --host 0.0.0.0 --port 7860