# NOTE(review): the three lines below were UI header residue ("Spaces: / Running /
# Running") from an HF Spaces page extraction, not Python — commented out so the
# module parses.
| import os | |
| from pathlib import Path | |
| from dotenv import load_dotenv | |
def load_environment_variables():
    """Load environment variables, preferring the system environment.

    Priority:
      1. System environment variables (production / HF Spaces / other hosts).
      2. A local ``.env`` file, used only as a development fallback and never
         allowed to override values already present in the environment.
    """
    # Presence of any of these variables indicates a managed/production host,
    # where configuration must come from the platform, not from a .env file.
    production_markers = (
        "SPACE_ID",                    # Hugging Face Spaces
        "RENDER",                      # Render.com
        "RAILWAY_ENVIRONMENT",         # Railway
        "VERCEL",                      # Vercel
        "KUBERNETES_SERVICE_HOST",     # Kubernetes
        "AWS_LAMBDA_FUNCTION_NAME",    # AWS Lambda
    )
    in_production = any(os.getenv(marker) for marker in production_markers)
    dotenv_file = Path('.') / '.env'
    if in_production:
        print("π Production environment detected, using system environment variables")
    elif dotenv_file.exists():
        print(f"π§ Loading environment variables from {dotenv_file}")
        # override=False keeps already-set environment variables intact.
        load_dotenv(dotenv_path=dotenv_file, override=False)
    else:
        print("β οΈ No .env file found and not in production environment")
# Load environment variables using the unified method
# NOTE: runs at import time — merely importing this module triggers env loading
# (and the prints inside load_environment_variables).
load_environment_variables()
# OpenAI Configuration
# All values are snapshotted from the environment at import time; changes to
# the environment afterwards are not picked up without a module reload.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
OPENAI_MODEL_NAME = os.getenv("OPENAI_MODEL_NAME", "gpt-5-mini")
AZURE_API_KEY = os.getenv("AZURE_API_KEY")
AZURE_API_BASE = os.getenv("AZURE_API_BASE")
AZURE_API_VERSION = os.getenv("AZURE_API_VERSION")
# Langfuse Configuration
LANGFUSE_PUBLIC_KEY = os.getenv("LANGFUSE_PUBLIC_KEY")
LANGFUSE_SECRET_KEY = os.getenv("LANGFUSE_SECRET_KEY")
LANGFUSE_HOST = os.getenv("LANGFUSE_HOST", "https://cloud.langfuse.com")
# Pre-computed HTTP Basic auth token for Langfuse: base64("public:secret").
# Left empty when either key is missing so callers can truth-test it.
LANGFUSE_AUTH = ""
if LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY:
    import base64
    LANGFUSE_AUTH = base64.b64encode(f"{LANGFUSE_PUBLIC_KEY}:{LANGFUSE_SECRET_KEY}".encode()).decode()
# Other API Keys
ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
| # Database Configuration | |
| # For HF Spaces, use /data persistent storage directory if available | |
| # Fall back to /tmp if /data is not accessible (Persistent Storage not enabled) | |
| # For local development, use datasets/db directory | |
| def _get_default_db_uri(): | |
| """Get default database URI based on environment.""" | |
| if os.getenv("SPACE_ID"): # HF Spaces | |
| # Try to use HF Persistent Storage at /data first | |
| data_dir = Path("/data") | |
| try: | |
| # Check if /data exists and is writable | |
| data_dir.mkdir(parents=True, exist_ok=True) | |
| test_file = data_dir / ".write_test" | |
| test_file.touch() | |
| test_file.unlink() | |
| print("β Using HF Persistent Storage at /data") | |
| return "sqlite:////data/agent_monitoring.db" | |
| except (OSError, PermissionError) as e: | |
| # Fall back to /tmp if /data is not available | |
| print(f"β οΈ /data not available ({e}), using /tmp for ephemeral storage") | |
| tmp_dir = Path("/tmp/agentgraph") | |
| tmp_dir.mkdir(parents=True, exist_ok=True) | |
| return f"sqlite:///{tmp_dir}/agent_monitoring.db" | |
| else: | |
| # Local development - use datasets/db relative to project root | |
| project_root = Path(__file__).parent.parent.resolve() | |
| db_dir = project_root / "datasets" / "db" | |
| os.makedirs(db_dir, exist_ok=True) | |
| return f"sqlite:///{db_dir}/agent_monitoring.db" | |
| DB_URI = os.getenv("DB_URI", _get_default_db_uri()) | |
| # Function to validate configuration | |
def validate_config():
    """Check that every required environment variable has a value.

    Returns:
        bool: True when all required values are set, False otherwise
        (after printing which ones are missing).
    """
    required = {
        "OPENAI_API_KEY": OPENAI_API_KEY,
    }
    missing = [name for name, value in required.items() if not value]
    if not missing:
        return True
    print(f"β Missing required environment variables: {', '.join(missing)}")
    print("π Please set them in the .env file or as environment variables")
    return False
def debug_config():
    """Print a masked snapshot of the current configuration for troubleshooting."""
    print("π AgentGraph Configuration Debug:")
    print("=" * 50)
    # Mirror the production detection used at load time.
    production_markers = (
        "SPACE_ID",
        "RENDER",
        "RAILWAY_ENVIRONMENT",
        "VERCEL",
        "KUBERNETES_SERVICE_HOST",
        "AWS_LAMBDA_FUNCTION_NAME",
    )
    in_production = any(os.getenv(marker) for marker in production_markers)
    dotenv_file = Path('.') / '.env'
    print(f"ποΈ Environment: {'Production' if in_production else 'Development'}")
    print(f"π .env file exists: {dotenv_file.exists()}")
    print(f"π Working directory: {Path.cwd()}")
    print()
    # Key configuration values; secrets are masked before printing.
    entries = (
        ("OPENAI_API_KEY", OPENAI_API_KEY),
        ("OPENAI_MODEL_NAME", OPENAI_MODEL_NAME),
        ("LANGFUSE_PUBLIC_KEY", LANGFUSE_PUBLIC_KEY),
        ("LANGFUSE_SECRET_KEY", LANGFUSE_SECRET_KEY),
        ("LANGFUSE_HOST", LANGFUSE_HOST),
        ("DB_URI", DB_URI),
    )
    for name, value in entries:
        if not value:
            print(f"β {name}: Not set")
            continue
        if "KEY" in name or "SECRET" in name:
            shown = f"{value[:8]}..." if len(value) > 8 else "***"
            print(f"β {name}: {shown}")
        else:
            print(f"β {name}: {value}")
    print("=" * 50)