Spaces:
Sleeping
Sleeping
| """ | |
| ============================================ | |
| Application Configuration | |
| All settings loaded from environment variables | |
| ============================================ | |
| """ | |
import os
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """
    Application settings.

    Values are loaded from environment variables or a ``.env`` file;
    the defaults below apply when a variable is not set.
    """

    # --- Database ---
    DATABASE_URL: str = Field(
        default="postgresql+asyncpg://user:pass@localhost:5432/novels",
        description="Async PostgreSQL connection string",
    )

    # --- Scraper Concurrency ---
    MAX_CONCURRENT_BROWSERS: int = Field(
        default=3,
        description="Maximum browser contexts running simultaneously",
    )

    # --- Human-like Delay Range (seconds) ---
    # Random delay bounds between scraper actions; MIN should stay <= MAX.
    MIN_DELAY_SECONDS: float = Field(default=3.0)
    MAX_DELAY_SECONDS: float = Field(default=8.0)

    # --- Playwright Timeouts ---
    PAGE_TIMEOUT_SECONDS: int = Field(
        default=30,
        description="Max seconds to wait for page load",
    )
    CAPTCHA_CHECK_TIMEOUT: int = Field(
        default=10,
        description="Seconds to wait before assuming captcha",
    )

    # --- App Meta ---
    APP_TITLE: str = Field(default="Novel Scraper Pro")
    DEBUG: bool = Field(default=False)

    # --- Screenshots Path ---
    SCREENSHOTS_DIR: str = Field(default="app/static/screenshots")

    # pydantic-settings v2 configuration. The nested `class Config` form is
    # the deprecated pydantic-v1 style; `model_config` with
    # SettingsConfigDict is the supported v2 equivalent (same semantics:
    # read ".env" as UTF-8, env var names are case sensitive).
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=True,
    )
# --- Singleton instance ---
# Instantiated once at import time; other modules import this object
# rather than constructing their own Settings().
settings = Settings()
def get_database_url(url: Optional[str] = None) -> str:
    """
    Return a connection URL that uses the async (asyncpg) driver.

    Hosting providers such as Render/Heroku hand out URLs of the form
    ``postgres://...`` or ``postgresql://...``, but SQLAlchemy's async
    engine requires ``postgresql+asyncpg://...``.

    Args:
        url: Connection string to normalize. When omitted (the original
            zero-argument call), ``settings.DATABASE_URL`` is used.

    Returns:
        The URL rewritten to the ``postgresql+asyncpg`` scheme. URLs that
        already name a driver, or use another scheme entirely, are
        returned unchanged.
    """
    if url is None:
        url = settings.DATABASE_URL
    # Legacy "postgres://" scheme (still emitted by Render/Heroku).
    if url.startswith("postgres://"):
        url = url.replace("postgres://", "postgresql+asyncpg://", 1)
    # Bare "postgresql://" would pick the default sync driver; switch it
    # to asyncpg unless a driver is already specified.
    elif url.startswith("postgresql://") and "+asyncpg" not in url:
        url = url.replace("postgresql://", "postgresql+asyncpg://", 1)
    return url