Spaces:
Runtime error
Runtime error
tomo14151 committed on
Commit ·
6624945
1
Parent(s): 9d79680
fix(start.sh): robust wait for Ollama, retry model creation, don't hard-exit if Modelfile missing
Browse files
start.sh
CHANGED
|
@@ -1,39 +1,73 @@
|
|
| 1 |
#!/usr/bin/env bash
|
| 2 |
-
set -
|
| 3 |
|
| 4 |
-
|
|
|
|
|
|
|
| 5 |
APP_DIR=/app
|
| 6 |
MODEL_NAME=aj-mini
|
| 7 |
|
| 8 |
-
cd $APP_DIR
|
| 9 |
|
| 10 |
-
#
|
| 11 |
if ! command -v ollama >/dev/null 2>&1; then
|
| 12 |
-
echo "
|
|
|
|
| 13 |
fi
|
| 14 |
|
| 15 |
-
#
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
fi
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
echo "Ollama PID: $OLLAMA_PID"
|
| 33 |
else
|
| 34 |
-
echo "
|
| 35 |
fi
|
| 36 |
|
| 37 |
# Start Gunicorn for Flask app
|
| 38 |
echo "Starting gunicorn for API..."
|
| 39 |
-
|
|
|
|
|
|
#!/usr/bin/env bash
# start.sh - robust startup flow: start Ollama, wait for readiness,
# create the local model (if a Modelfile is provided), then run gunicorn.
set -euo pipefail

echo "===== Application Startup at $(date) ====="

# Constants: application root and the Ollama model this container serves.
readonly APP_DIR=/app
readonly MODEL_NAME=aj-mini

cd "$APP_DIR"

# Check if ollama is installed; nothing below can work without it.
if ! command -v ollama >/dev/null 2>&1; then
  # Diagnostics go to stderr, consistent with the warnings later in this script.
  echo "Error: ollama binary not found in container. Make sure Ollama was installed." >&2
  exit 1
fi
# Start ollama serve in background, with all of its output in /tmp/ollama.log.
echo "Starting ollama serve..."
ollama serve > /tmp/ollama.log 2>&1 &
# PID is captured for log visibility only; nothing later waits on or kills it.
OLLAMA_PID=$!
echo "Ollama PID: $OLLAMA_PID"

# Tail Ollama logs in background for visibility
tail -n +1 -f /tmp/ollama.log &

# Wait for ollama to be ready (up to 60 tries, 2s apart ~= 2 minutes).
echo "Waiting for Ollama to be ready (http://localhost:11434)..."
ollama_ready=0
for i in {1..60}; do
  # /api/tags answers once the server is accepting requests.
  if curl -s http://127.0.0.1:11434/api/tags > /dev/null 2>&1; then
    echo "Ollama is ready (after $i attempts)!"
    ollama_ready=1
    break
  fi
  echo "Waiting for Ollama to start... ($i/60)"
  # Skip the sleep on the final attempt - nothing re-checks after it.
  if [ "$i" -lt 60 ]; then
    sleep 2
  fi
done
# Warn once after the loop instead of testing "$i -eq 60" on every iteration.
# Startup continues deliberately: gunicorn can come up and report errors itself.
if [ "$ollama_ready" -ne 1 ]; then
  echo "Warning: Ollama did not become ready after retries. Check /tmp/ollama.log for details but continuing startup." >&2
fi
#######################################
# Create the Ollama model, retrying on failure.
# Globals:   MODEL_NAME (read)
# Outputs:   progress messages to stdout; final failure notice to stderr;
#            ollama's own output is appended to /tmp/ollama.log
# Returns:   0 if the model already exists or was created, 1 after all retries
#######################################
create_model() {
  local tries=0
  local max=5
  until [ "$tries" -ge "$max" ]; do
    # Skip creation when the model is already registered. Match "name",
    # "name:tag", or "name<space>" explicitly: the previous pattern used \b,
    # a GNU grep extension (non-POSIX), and it also matched unrelated models
    # with a hyphenated prefix, e.g. "aj-mini-v2" when MODEL_NAME=aj-mini.
    if ollama list | grep -Eq "^${MODEL_NAME}([[:space:]:]|\$)"; then
      echo "Model $MODEL_NAME already exists."
      return 0
    fi
    echo "Attempting to create model $MODEL_NAME (try $((tries+1))/$max)..."
    if ollama create "$MODEL_NAME" -f Modelfile-aj-mini-v2 >> /tmp/ollama.log 2>&1; then
      echo "Model $MODEL_NAME created successfully."
      return 0
    fi
    tries=$((tries+1))
    sleep 3
  done
  echo "Failed to create model $MODEL_NAME after $max attempts. Continuing; API may return errors until model is available." >&2
  return 1
}
# Register the local model only when its Modelfile shipped with the image;
# a missing Modelfile is reported but does not block the API from starting.
if [ ! -f "$APP_DIR/Modelfile-aj-mini-v2" ]; then
  echo "Modelfile not found at $APP_DIR/Modelfile-aj-mini-v2 - skipping model creation. If you expect a local model, add the Modelfile and push again." >&2
else
  echo "Found Modelfile at $APP_DIR/Modelfile-aj-mini-v2"
  # Best-effort: a failed create must not abort startup under set -e.
  create_model || true
fi

# Start Gunicorn for Flask app
echo "Starting gunicorn for API..."
echo "===== API Server Ready ====="
# exec replaces this shell so gunicorn becomes PID-visible to the container
# runtime and receives signals directly.
exec gunicorn api_server:app --bind 0.0.0.0:5000 --workers 2 --timeout 120
|