# API keys for different providers
OPENAI_API_KEY=
AZURE_OPENAI_API_KEY=
DEEPSEEK_API_KEY=
ANTHROPIC_API_KEY=
GOOGLE_API_KEY=
GROQ_API_KEY=
OPENROUTER_API_KEY=
USE_AWS_BEDROCK=false
# Vertex AI
GOOGLE_APPLICATION_CREDENTIALS=
# Amazon Bedrock Knowledge Base ID
AWS_KB_ID="<knowledge-base-id>"
# Use a fake model for testing
USE_FAKE_MODEL=false
# Set a default model
DEFAULT_MODEL=
# If DEFAULT_MODEL is set to "openai-compatible", set the following
# This is just a flexible solution. If you need multiple model options, you still need to add it to models.py
COMPATIBLE_MODEL=
COMPATIBLE_API_KEY=
COMPATIBLE_BASE_URL=
# Web server configuration
HOST=0.0.0.0
PORT=7860
# Authentication secret, HTTP bearer token header is required if set
AUTH_SECRET=
CORS_ORIGINS=http://localhost:3000,http://localhost:8081,http://localhost:5173
# Langsmith configuration
# LANGSMITH_TRACING=true
# LANGSMITH_API_KEY=
# LANGSMITH_PROJECT=default
# LANGSMITH_ENDPOINT=https://api.smith.langchain.com
# Application mode. If the value is "dev", it will enable uvicorn reload
MODE=
# Database type.
# If the value is "postgres", then it will require Postgresql related environment variables.
# If the value is "sqlite", then you can configure optional file path via SQLITE_DB_PATH
DATABASE_TYPE=
# If DATABASE_TYPE=sqlite (Optional)
SQLITE_DB_PATH=
# If DATABASE_TYPE=postgres
# Docker Compose default values (will work with docker-compose setup)
POSTGRES_USER=
POSTGRES_PASSWORD=
POSTGRES_HOST=
POSTGRES_PORT=
POSTGRES_DB=
# you will be able to identify AST connections in Postgres Connection Manager under this Application Name
# POSTGRES_APPLICATION_NAME="agent-service-toolkit"
# set these values to customize the number of connections in the pool. Saver and store have independent connection pools
# POSTGRES_MIN_CONNECTIONS_PER_POOL=1
# POSTGRES_MAX_CONNECTIONS_PER_POOL=3
# OpenWeatherMap API key
OPENWEATHERMAP_API_KEY=
# Add for running ollama
# OLLAMA_MODEL=llama3.2
# Note: set OLLAMA_BASE_URL if running service in docker and ollama on bare metal
# OLLAMA_BASE_URL=http://host.docker.internal:11434
# Add for running Azure OpenAI
# AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com
# AZURE_OPENAI_API_VERSION=2024-10-21
# AZURE_OPENAI_DEPLOYMENT_MAP={"gpt-4o": "gpt-4o-deployment", "gpt-4o-mini": "gpt-4o-mini-deployment"}
# Agent URL: used in Streamlit app - if not set, defaults to http://{HOST}:{PORT}
# AGENT_URL=http://localhost:7860
# LANGFUSE Configuration
# LANGFUSE_TRACING=true
# LANGFUSE_PUBLIC_KEY=pk-...
# LANGFUSE_SECRET_KEY=sk-lf-....
# LANGFUSE_HOST=http://localhost:3000
# GitHub MCP Agent Configuration
# GitHub Personal Access Token (required for GitHub MCP server)
# If not set, the GitHub MCP agent will have no tools
GITHUB_PAT=
# Voice Features (Optional)
# NOTE: Voice features are configured on the client (Streamlit app) side, not the server (API).
# Requires OPENAI_API_KEY to be set (see above).
# Set provider name to enable voice input/output. Leave empty to disable.
# Speech-to-text provider (only 'openai' supported currently)
VOICE_STT_PROVIDER=
# Text-to-speech provider (only 'openai' supported currently)
VOICE_TTS_PROVIDER=