# NOTE(review): the three lines below this module originally began with web-scrape
# residue from the hosting page ("Spaces:" / "Runtime error" / "Runtime error"),
# which is not valid Python; preserved here as a comment so the module parses.
| """ | |
| Configurações para para.AI v3.0 - VERSÃO TESTE INTERNO (SQLite) | |
| Otimizado para desenvolvimento, testes e deploys leves sem PostgreSQL | |
| """ | |
import os
from functools import lru_cache
from pathlib import Path
from typing import Any, List, Optional

from pydantic import Field
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """
    Settings for the INTERNAL TEST environment backed by SQLite.

    Characteristics:
    - SQLite instead of PostgreSQL (no database container)
    - Simplified configuration
    - Optimized for development and CI/CD
    - Fast build (~2min vs 8min)

    NOTE(review): the helpers that the rest of this module reads as plain
    attributes (``is_sqlite``, ``is_development``, ``is_production``,
    ``database_url_masked``, ``sqlite_db_file_path``) are declared as
    ``@property``. They were previously plain methods, so accesses such as
    ``self.sqlite_db_file_path.parent`` in ``validate_paths`` raised
    AttributeError at startup and ``if self.is_sqlite`` was always truthy
    (bound method), masking the PostgreSQL branch.
    """

    # ========================================================================
    # API SETTINGS
    # ========================================================================
    APP_NAME: str = "para.AI API - SQLite Test"
    APP_VERSION: str = "3.0.0-sqlite"
    APP_DESCRIPTION: str = "API de teste para análise de acórdãos (SQLite)"
    APP_ENV: str = Field(default="development", env="APP_ENV")
    DEBUG: bool = Field(default=True, env="DEBUG")

    # ========================================================================
    # SERVER SETTINGS
    # ========================================================================
    HOST: str = Field(default="0.0.0.0", env="HOST")
    PORT: int = Field(default=7860, env="PORT")
    WORKERS: int = Field(default=1, env="WORKERS")  # single worker for SQLite
    RELOAD: bool = Field(default=True, env="RELOAD")

    # ========================================================================
    # DATABASE SETTINGS - SQLITE
    # ========================================================================
    DATABASE_TYPE: str = Field(default="sqlite", env="DATABASE_TYPE")
    SQLITE_DB_PATH: str = Field(
        default="./data/para_ai.db",
        env="SQLITE_DB_PATH",
        description="Caminho do arquivo SQLite"
    )
    DATABASE_URL: str = Field(
        default="sqlite:///./data/para_ai.db",
        env="DATABASE_URL",
        description="SQLite connection string"
    )
    SQLITE_TIMEOUT: int = Field(default=30, env="SQLITE_TIMEOUT")
    SQLITE_CHECK_SAME_THREAD: bool = Field(
        default=False,
        env="SQLITE_CHECK_SAME_THREAD"
    )
    # Pool settings (reduced for the SQLite test profile)
    DB_POOL_SIZE: int = Field(default=5, env="DB_POOL_SIZE")
    DB_MAX_OVERFLOW: int = Field(default=10, env="DB_MAX_OVERFLOW")
    DB_POOL_TIMEOUT: int = Field(default=30, env="DB_POOL_TIMEOUT")
    DB_POOL_RECYCLE: int = Field(default=3600, env="DB_POOL_RECYCLE")
    SQL_ECHO: bool = Field(default=False, env="SQL_ECHO")

    # ========================================================================
    # FILES & STORAGE
    # ========================================================================
    FILES_BASE_PATH: str = Field(default="./data/files", env="FILES_BASE_PATH")
    UPLOAD_PATH: str = Field(default="./data/uploads", env="UPLOAD_PATH")
    OUTPUT_PATH: str = Field(default="./data/outputs", env="OUTPUT_PATH")
    TEMP_PATH: str = Field(default="./data/temp", env="TEMP_PATH")
    BACKUP_PATH: str = Field(default="./data/backups", env="BACKUP_PATH")
    MAX_UPLOAD_SIZE_MB: int = Field(default=100, env="MAX_UPLOAD_SIZE_MB")

    # ========================================================================
    # LLM PROVIDERS
    # ========================================================================
    GROQ_API_KEY: Optional[str] = Field(default=None, env="GROQ_API_KEY")
    OPENAI_API_KEY: Optional[str] = Field(default=None, env="OPENAI_API_KEY")
    ANTHROPIC_API_KEY: Optional[str] = Field(default=None, env="ANTHROPIC_API_KEY")
    DEFAULT_LLM_PROVIDER: str = Field(default="groq", env="DEFAULT_LLM_PROVIDER")
    DEFAULT_MODEL_TYPE: str = Field(default="openai/gpt-oss-120b", env="DEFAULT_MODEL_TYPE")
    LLM_DEFAULT_TEMPERATURE: float = Field(default=1.4, env="LLM_DEFAULT_TEMPERATURE")
    LLM_DEFAULT_MAX_TOKENS: int = Field(default=10024, env="LLM_DEFAULT_MAX_TOKENS")
    LLM_TIMEOUT_SECONDS: int = Field(default=3000, env="LLM_TIMEOUT_SECONDS")

    # ========================================================================
    # PROCESSING
    # ========================================================================
    MAX_CONCURRENT_PROCESSES: int = Field(default=4, env="MAX_CONCURRENT_PROCESSES")
    PROCESS_TIMEOUT_SECONDS: int = Field(default=3000, env="PROCESS_TIMEOUT_SECONDS")
    # NOTE(review): env name intentionally differs from the field name here.
    ENABLE_PARALLEL_PROCESSING: bool = Field(default=True, env="ENABLE_PARALLEL")
    DEFAULT_MAX_WORKERS: int = Field(default=1, env="DEFAULT_MAX_WORKERS")
    BATCH_SIZE: int = Field(default=10, env="BATCH_SIZE")
    BATCH_DELAY_MS: int = Field(default=200, env="BATCH_DELAY_MS")

    # ========================================================================
    # LOGGING
    # ========================================================================
    LOG_LEVEL: str = Field(default="DEBUG", env="LOG_LEVEL")
    LOG_FORMAT: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    LOG_FILE_ENABLED: bool = Field(default=True, env="LOG_FILE_ENABLED")
    LOG_FILE_PATH: str = Field(default="./logs", env="LOG_FILE_PATH")
    LOG_FILE_MAX_BYTES: int = Field(default=5242880, env="LOG_FILE_MAX_BYTES")
    LOG_FILE_BACKUP_COUNT: int = Field(default=3, env="LOG_FILE_BACKUP_COUNT")

    # ========================================================================
    # SECURITY
    # ========================================================================
    CORS_ORIGINS: str = Field(default="*", env="CORS_ORIGINS")
    CORS_ALLOW_CREDENTIALS: bool = True
    API_KEY_HEADER: str = Field(default="X-API-Key", env="API_KEY_HEADER")
    REQUIRE_API_KEY: bool = Field(default=False, env="REQUIRE_API_KEY")
    VALID_API_KEYS: str = Field(default="test-key-123,dev-key-456", env="VALID_API_KEYS")
    RATE_LIMIT_ENABLED: bool = Field(default=False, env="RATE_LIMIT_ENABLED")
    RATE_LIMIT_PER_MINUTE: int = Field(default=1000, env="RATE_LIMIT_PER_MINUTE")

    # ========================================================================
    # CACHE
    # ========================================================================
    ENABLE_CACHE: bool = Field(default=True, env="ENABLE_CACHE")
    CACHE_TTL_SECONDS: int = Field(default=300, env="CACHE_TTL_SECONDS")
    CACHE_BACKEND: str = Field(default="memory", env="CACHE_BACKEND")
    REDIS_URL: Optional[str] = Field(default=None, env="REDIS_URL")

    # ========================================================================
    # METRICS
    # ========================================================================
    ENABLE_METRICS: bool = Field(default=True, env="ENABLE_METRICS")
    METRICS_EXPORT_INTERVAL: int = Field(default=60, env="METRICS_EXPORT_INTERVAL")
    PROMETHEUS_ENABLED: bool = Field(default=False, env="PROMETHEUS_ENABLED")
    PROMETHEUS_PORT: int = Field(default=9090, env="PROMETHEUS_PORT")

    # ========================================================================
    # PROCESSORS
    # ========================================================================
    MIN_CONFIDENCE_THRESHOLD: float = Field(default=0.5, env="MIN_CONFIDENCE_THRESHOLD")

    # ========================================================================
    # DEVELOPMENT
    # ========================================================================
    DEV_MODE: bool = Field(default=True, env="DEV_MODE")
    MOCK_LLM_RESPONSES: bool = Field(default=False, env="MOCK_LLM_RESPONSES")
    SAVE_DEBUG_FILES: bool = Field(default=True, env="SAVE_DEBUG_FILES")

    # ========================================================================
    # ADVANCED
    # ========================================================================
    ENABLE_REQUEST_ID: bool = Field(default=True, env="ENABLE_REQUEST_ID")
    REQUEST_ID_HEADER: str = "X-Request-ID"
    ENABLE_GZIP: bool = Field(default=True, env="ENABLE_GZIP")
    GZIP_MIN_SIZE: int = Field(default=1000, env="GZIP_MIN_SIZE")
    HTTP_TIMEOUT_SECONDS: int = Field(default=60, env="HTTP_TIMEOUT_SECONDS")
    TASK_RETENTION_HOURS: int = Field(default=6, env="TASK_RETENTION_HOURS")
    AUTO_CLEANUP_ENABLED: bool = Field(default=True, env="AUTO_CLEANUP_ENABLED")

    # ========================================================================
    # SQLITE-SPECIFIC
    # ========================================================================
    SQLITE_JOURNAL_MODE: str = Field(default="WAL", env="SQLITE_JOURNAL_MODE")
    SQLITE_SYNCHRONOUS: str = Field(default="NORMAL", env="SQLITE_SYNCHRONOUS")
    SQLITE_CACHE_SIZE: int = Field(default=10000, env="SQLITE_CACHE_SIZE")
    SQLITE_AUTO_VACUUM: bool = Field(default=True, env="SQLITE_AUTO_VACUUM")

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"
        case_sensitive = True
        extra = "ignore"

    def cors_origins_list(self) -> List[str]:
        """Return CORS_ORIGINS as a list; "*" means allow every origin."""
        if isinstance(self.CORS_ORIGINS, str):
            if self.CORS_ORIGINS == "*":
                return ["*"]
            return [origin.strip() for origin in self.CORS_ORIGINS.split(",") if origin.strip()]
        return [self.CORS_ORIGINS]

    def valid_api_keys_list(self) -> List[str]:
        """Return VALID_API_KEYS split on commas, dropping empty entries."""
        if not self.VALID_API_KEYS:
            return []
        if isinstance(self.VALID_API_KEYS, str):
            return [key.strip() for key in self.VALID_API_KEYS.split(",") if key.strip()]
        return []

    @property
    def is_production(self) -> bool:
        """True when APP_ENV is "production" (case-insensitive)."""
        return self.APP_ENV.lower() == "production"

    @property
    def is_development(self) -> bool:
        """True when APP_ENV names a development/test environment."""
        return self.APP_ENV.lower() in ["development", "dev", "test"]

    @property
    def is_sqlite(self) -> bool:
        """True when the configured backend is SQLite (by type or URL scheme)."""
        return self.DATABASE_TYPE.lower() == "sqlite" or self.DATABASE_URL.startswith("sqlite")

    @property
    def database_url_masked(self) -> str:
        """DATABASE_URL with credentials hidden (SQLite URLs carry none)."""
        if self.is_sqlite:
            return self.DATABASE_URL
        if "@" in self.DATABASE_URL:
            parts = self.DATABASE_URL.split("@")
            return f"***@{parts[1]}"
        return self.DATABASE_URL

    @property
    def sqlite_db_file_path(self) -> Path:
        """Filesystem path of the SQLite database file, derived from the URL."""
        if self.DATABASE_URL.startswith("sqlite:///"):
            path_str = self.DATABASE_URL.replace("sqlite:///", "")
            return Path(path_str)
        return Path(self.SQLITE_DB_PATH)

    def get_llm_providers_status(self) -> dict:
        """Map each LLM provider name to whether its API key is configured."""
        return {
            "groq": bool(self.GROQ_API_KEY),
            "openai": bool(self.OPENAI_API_KEY),
            "anthropic": bool(self.ANTHROPIC_API_KEY)
        }

    def get_sqlite_connection_args(self) -> dict:
        """Keyword arguments for the SQLite driver's connect() call."""
        return {
            "check_same_thread": self.SQLITE_CHECK_SAME_THREAD,
            "timeout": self.SQLITE_TIMEOUT
        }

    def get_sqlite_pragma_statements(self) -> List[str]:
        """PRAGMA statements to execute on each new SQLite connection."""
        return [
            f"PRAGMA journal_mode={self.SQLITE_JOURNAL_MODE}",
            f"PRAGMA synchronous={self.SQLITE_SYNCHRONOUS}",
            f"PRAGMA cache_size={self.SQLITE_CACHE_SIZE}",
            "PRAGMA foreign_keys=ON",
            f"PRAGMA auto_vacuum={'FULL' if self.SQLITE_AUTO_VACUUM else 'NONE'}"
        ]

    def validate_paths(self) -> None:
        """Create every configured data/log directory (idempotent)."""
        paths = [
            self.FILES_BASE_PATH,
            self.UPLOAD_PATH,
            self.OUTPUT_PATH,
            self.TEMP_PATH,
            self.BACKUP_PATH,
            self.LOG_FILE_PATH
        ]
        for path_str in paths:
            Path(path_str).mkdir(parents=True, exist_ok=True)
        # Also make sure the directory that will hold the SQLite file exists.
        self.sqlite_db_file_path.parent.mkdir(parents=True, exist_ok=True)

    def to_dict(self) -> dict:
        """Dump all settings with secrets masked, plus derived runtime info."""
        data = self.model_dump()
        sensitive_keys = [
            "GROQ_API_KEY",
            "OPENAI_API_KEY",
            "ANTHROPIC_API_KEY",
            "VALID_API_KEYS",
            "REDIS_URL"
        ]
        for key in sensitive_keys:
            if key in data and data[key]:
                data[key] = "***HIDDEN***"
        data["_runtime_info"] = {
            "is_sqlite": self.is_sqlite,
            "is_development": self.is_development,
            "database_file_exists": self.sqlite_db_file_path.exists() if self.is_sqlite else None,
            "database_file_size_mb": (
                self.sqlite_db_file_path.stat().st_size / (1024**2)
                if self.is_sqlite and self.sqlite_db_file_path.exists()
                else None
            )
        }
        return data
@lru_cache()
def get_settings() -> Settings:
    """
    Return the process-wide Settings instance with all data paths created.

    Cached via lru_cache (the import was previously unused) so the .env file
    is parsed and the directories are created only once per process.
    """
    settings = Settings()
    settings.validate_paths()
    return settings
def get_env(key: str, default: Any = None) -> Any:
    """Read environment variable *key*, returning *default* when unset.

    NOTE(review): the annotations previously used the builtin function
    ``any`` instead of ``typing.Any``.
    """
    return os.getenv(key, default)
def is_production() -> bool:
    """Module-level shortcut for the settings' production flag."""
    current = get_settings()
    return current.is_production
def is_development() -> bool:
    """Module-level shortcut for the settings' development flag."""
    current = get_settings()
    return current.is_development
def is_sqlite() -> bool:
    """Module-level shortcut for the settings' SQLite-backend flag."""
    current = get_settings()
    return current.is_sqlite
def get_database_info() -> dict:
    """Summarize the configured database backend for diagnostics endpoints."""
    settings = get_settings()
    backend = "sqlite" if settings.is_sqlite else "postgresql"
    info = {
        "type": backend,
        "url_masked": settings.database_url_masked
    }
    if not settings.is_sqlite:
        return info
    # SQLite-only details: file location, existence, size and PRAGMA config.
    db_path = settings.sqlite_db_file_path
    size_mb = db_path.stat().st_size / (1024**2) if db_path.exists() else 0
    info.update({
        "file_path": str(db_path),
        "file_exists": db_path.exists(),
        "file_size_mb": size_mb,
        "journal_mode": settings.SQLITE_JOURNAL_MODE,
        "synchronous": settings.SQLITE_SYNCHRONOUS
    })
    return info