# visique-worker / start_local.sh
# Author: Happy People
# Commit: 11d88a8 — "Deploying CPU-optimized Dolphin Worker from Visique"
# NOTE(review): these header lines precede the shebang; consider deleting them
# so that #!/bin/bash is the first line of the file and takes effect.
#!/bin/bash
# start_local_ai.sh
# -----------------
# Starts the Dolphin AI Worker locally for the demo.
#
# Usage:
# 1. Run this script: ./start_local_ai.sh
# 2. In a separate terminal, expose port 7860 using ngrok:
# ngrok http 7860
# 3. Copy the ngrok URL (e.g., https://xyz.ngrok-free.app)
# 4. Set DOLPHIN_API_URL on Render to that URL.
echo "πŸš€ Starting Visique Local AI Worker..."
echo "----------------------------------------"
# Ensure we are in the project root (relative to script location).
# Abort if the cd fails — otherwise every later relative path (requirements
# file, model dir, --app-dir) would silently resolve against the wrong cwd.
cd "$(dirname "$0")/../../" || { echo "❌ Could not cd to project root" >&2; exit 1; }
# Check for required python dependencies; install them on the fly if missing.
# Use `if ! cmd` instead of inspecting $? after the fact (ShellCheck SC2181).
if ! python3 -c "import uvicorn, fastapi, torch" 2>/dev/null; then
  echo "⚠️ Missing Python dependencies. Installing..."
  if ! pip3 install -r visique/ai-worker/requirements.txt; then
    echo "❌ Failed to install dependencies."
    echo "   Try running: pip3 install -r visique/ai-worker/requirements.txt"
    exit 1
  fi
fi
# Check if model files exist.
# (DolphinClient handles lazy loading, but let's warn if missing)
[[ -d "visique/backend/models/dolphin-v2" ]] || {
  echo "⚠️ Model directory not found at visique/backend/models/dolphin-v2"
  echo " The application will attempt to download it on first run."
}
# Check for ngrok — needed only for the public-tunnel step, so a missing
# binary is a warning, not a hard failure.
if ! command -v ngrok &> /dev/null; then
  echo "⚠️ 'ngrok' command not found!"
  echo " You need it to create the public tunnel."
  echo " πŸ‘‰ Install command: brew install ngrok/ngrok/ngrok"
  echo " (Or download from https://ngrok.com/download)"
  echo ""
  # -r: keep read from interpreting backslashes in the (discarded) input
  # (ShellCheck SC2162).
  read -r -p " Press Enter to continue anyway (if you installed it elsewhere)..."
fi
# Generate a random API key if not set, OR use a fixed default for convenience.
# Using a fixed key means you don't have to update Render every time you restart.
# Export the demo default only when the caller provided nothing (unset/empty).
[[ -n "${DOLPHIN_API_KEY:-}" ]] || export DOLPHIN_API_KEY="visique-local-demo-key"
# Set dummy env vars to satisfy backend Pydantic validation
# (The worker doesn't use the DB, but imports trigger the check)
export SECRET_KEY="dummy-secret-for-worker"
export DATABASE_URL="postgresql://dummy:dummy@localhost/dummy"
# Set python path to include backend so the worker can import shared modules.
# Quoted so paths containing spaces survive word-splitting (ShellCheck SC2086).
export PYTHONPATH="${PYTHONPATH}:$(pwd)/visique/backend"
echo "πŸ”’ Security: API Key enabled"
echo "πŸ”‘ Key: $DOLPHIN_API_KEY"
echo ""
# Run the worker app
echo "πŸ”₯ Launching worker on http://0.0.0.0:7860"
echo " (This requires ~16GB RAM for the model)"
# Use python -m to run uvicorn, pointing to the renamed worker.py.
# --app-dir sets the root, avoiding package issues with the hyphenated dir name.
# exec replaces this shell with uvicorn so Ctrl-C / SIGTERM reach the server
# directly instead of the wrapper script.
exec python3 -m uvicorn worker:app --app-dir visique/ai-worker --host 0.0.0.0 --port 7860 --reload