Hugging Face Spaces startup script — fixes the runtime error seen when the Space launches.
#!/usr/bin/env bash
# Hugging Face Spaces entrypoint: start an Ollama server in the background,
# pull the LLaVA vision model, then run the FastAPI app on port 7860
# (the port HF Spaces routes external traffic to).
set -euo pipefail

# Ollama configuration: listen on all interfaces, allow any origin, and keep
# all state under /tmp — presumably the only writable location in this
# container (TODO confirm against the Space's Dockerfile).
export OLLAMA_HOST=0.0.0.0:11434
export OLLAMA_ORIGINS="*"
export OLLAMA_DATA_DIR=/tmp/.ollama
export OLLAMA_MODELS=/tmp/.ollama/models

# Create the data directories with world-readable permissions.
mkdir -p /tmp/.ollama/models
chmod 755 /tmp/.ollama /tmp/.ollama/models

# Start the Ollama server in the background; the exported variables above
# already point it at /tmp/.ollama, so no per-command prefix is needed.
echo "Starting Ollama server..."
ollama serve &
OLLAMA_PID=$!

# Poll the version endpoint until the server answers (up to ~60 s),
# recording success in a flag instead of re-probing after the loop.
echo "Waiting for Ollama server to start..."
ollama_up=0
for i in {1..30}; do
  if curl -s http://localhost:11434/api/version > /dev/null 2>&1; then
    echo "Ollama server started successfully!"
    ollama_up=1
    break
  fi
  echo "Attempt $i/30: Waiting for Ollama..."
  sleep 2
done

if [[ "$ollama_up" -ne 1 ]]; then
  echo "Error: Ollama server failed to start" >&2
  kill "$OLLAMA_PID" 2>/dev/null || true
  exit 1
fi

# Pull the LLaVA model for vision analysis. A failure here is deliberately
# non-fatal (kept from the original script — the app may pull lazily).
echo "Pulling LLaVA model for vision analysis..."
if ollama pull llava:7b; then
  echo "Model pulled successfully!"
else
  echo "Warning: Failed to pull model, but continuing..." >&2
fi

# Hand the process over to uvicorn with exec so the Spaces runtime's
# SIGTERM reaches the server directly instead of the wrapper shell.
echo "Starting FastAPI server on port 7860..."
cd /app || { echo "Error: /app not found" >&2; exit 1; }
exec python3 -m uvicorn fast:app --host 0.0.0.0 --port 7860