# ollama2 / entrypoint.sh
# Provenance (Hugging Face upload metadata): cometapii, "Upload 4 files",
# commit d34092d (verified), 879 bytes. Original page chrome: raw / history /
# blame / contribute / delete.
#!/bin/bash
# Entrypoint: start a local Ollama server, wait until its HTTP API answers,
# pre-pull the models the proxy depends on, then hand PID 1 over to uvicorn.
set -euo pipefail

export OLLAMA_HOST=127.0.0.1:11434
export OLLAMA_MODELS=/home/user/.ollama/models
export OLLAMA_NUM_PARALLEL=2
export OLLAMA_MAX_LOADED_MODELS=2
export OLLAMA_KEEP_ALIVE=-1   # -1: keep loaded models resident indefinitely

echo "==> Starting Ollama..."
ollama serve &
OLLAMA_PID=$!

echo "==> Waiting for Ollama to be ready..."
readonly MAX_RETRIES=30
count=0
# -f: treat HTTP error responses as failure, not readiness.
until curl -fs http://127.0.0.1:11434/api/version > /dev/null 2>&1; do
  # Stop polling immediately if the server process already died.
  if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
    echo "ERROR: Ollama process exited unexpectedly." >&2
    exit 1
  fi
  count=$((count + 1))
  if [ "$count" -ge "$MAX_RETRIES" ]; then
    echo "ERROR: Ollama did not start." >&2
    exit 1
  fi
  echo " ... attempt $count/$MAX_RETRIES"
  sleep 2
done
echo "==> Ollama ready!"

# Pre-pull every model the proxy serves; set -e aborts on a failed pull.
for model in lfm2.5-thinking:latest qwen2.5vl:7b; do
  echo "==> Pulling $model..."
  ollama pull "$model"
done

echo "==> Models loaded:"
ollama list

echo "==> Starting proxy on :7860..."
# exec replaces this shell so uvicorn becomes PID 1 and receives signals
# (SIGTERM on container stop) directly.
exec uvicorn proxy:app --host 0.0.0.0 --port 7860 --workers 4