# API configuration
API_HOST=0.0.0.0
API_PORT=8000
# To switch from the hosted backend to a local one, comment out
# the hosted lines below and uncomment the local ones
# Hosted LLM backend configuration
LLM_BACKEND=openrouter
LLM_HOST=https://openrouter.ai/api/v1
LLM_MODEL={choose_a_free_one_from_openrouter}
LLM_API_KEY={your_api_key}
# Local LLM backend configuration
# LLM_BACKEND=ollama
# LLM_HOST=http://localhost:11434
# LLM_MODEL=llama3.2:latest
# Request timeout in seconds
LLM_TIMEOUT=120
LLM_MAX_RETRIES=3
# Delay between retries in seconds
LLM_RETRY_DELAY=1.0
# Attribution headers: optional in general, but required when LLM_BACKEND=openrouter
LLM_SITE_URL=http://localhost:7860
LLM_APP_NAME=AI_Survey_Simulator
# Frontend configuration
FRONTEND_BACKEND_BASE_URL=http://localhost:8000
FRONTEND_WEBSOCKET_URL=ws://localhost:8000/ws/conversation
# Persistence (SQLite)
# Local default:
DB_PATH=.localdata/converta.db
# Recommended for Hugging Face Spaces:
# DB_PATH=/data/converta/converta.db
# Logging (one of: DEBUG, INFO, WARNING, ERROR, CRITICAL)
LOG_LEVEL=INFO