# (page-scrape residue removed from executable path: "Spaces: / Sleeping / Sleeping")
#!/usr/bin/env bash
# Local testing script with Ollama.
#
# Exports the environment variables app.py reads to talk to a local
# Ollama server, warns (with an interactive escape hatch) if the server
# is not reachable, then launches the app.
#
# Requires: curl, python, and ideally a running `ollama serve`.
set -euo pipefail

echo "Setting up Ollama for local testing..."

# Set environment variables for Ollama (consumed by app.py).
# ${VAR:-default} lets callers override the model/URL from their shell
# while keeping the original defaults.
export LLM_PROVIDER=ollama
export OLLAMA_MODEL="${OLLAMA_MODEL:-mistral}"
export OLLAMA_BASE_URL="${OLLAMA_BASE_URL:-http://localhost:11434/v1}"

# Check if Ollama is running: /api/tags answers cheaply on a live server.
# The `!` condition is exempt from `set -e`, so a down server does not abort here.
if ! curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
  # Diagnostics go to stderr so stdout stays clean.
  echo "⚠️ Warning: Ollama service doesn't seem to be running!" >&2
  echo "   Start it with: ollama serve" >&2
  echo ""
  # -n 1: accept a single keypress; -r: don't mangle backslashes.
  read -p "Continue anyway? (y/n) " -n 1 -r
  echo
  if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    exit 1
  fi
fi

echo "✅ Using Ollama with model: $OLLAMA_MODEL"
echo ""

# Run the app. `exec` replaces this shell so signals (Ctrl-C, SIGTERM)
# reach python directly instead of being absorbed by the wrapper.
echo "Starting app..."
exec python app.py