File size: 3,325 Bytes
17a78b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
from typing import Literal, Optional
from urllib.parse import quote

from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application configuration, loaded from environment variables / .env.

    Field names map to environment variables case-insensitively
    (e.g. ``db_host`` <- ``DB_HOST``); variables not declared here are
    ignored per ``extra="ignore"``.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",  # tolerate unrelated variables in the .env file
    )

    # App mode — "demo" (seeded showcase) or "personal" (real financial data)
    app_mode: Literal["demo", "personal"] = "personal"

    # Database — auto-reads DB_HOST, DB_PORT, etc. from .env
    db_host: str = "localhost"
    db_port: int = 5432
    db_name: str = "financial_db"  # fallback; overridden by resolved_db_name
    db_name_demo: str = "cashy_demo"
    db_name_personal: str = "financial_db"
    db_user: str = "financial_advisor"
    db_password: str = ""
    db_sslmode: str = ""  # "require" for Neon; empty for local

    # LLM provider — set explicitly or auto-detected from API keys
    llm_provider: Optional[Literal["openai", "anthropic", "google", "huggingface", "free-tier", ""]] = None

    # Per-provider API keys (only one needed)
    openai_api_key: str = ""
    anthropic_api_key: str = ""
    google_api_key: str = ""
    hf_token: str = ""

    # Model configuration
    model_name: str = ""  # Optional override; defaults per provider
    model_max_tokens: int = 512
    model_temperature: float = 0.1

    # HuggingFace-specific
    hf_inference_provider: str = "together"

    # LangSmith — auto-reads LANGSMITH_* from .env
    langsmith_tracing: str = "true"
    langsmith_api_key: str = ""
    langsmith_project: str = "cashy-financial-advisor"

    # App
    environment: str = "development"
    debug: bool = True

    @property
    def resolved_db_name(self) -> str:
        """Return the database name for the active app_mode."""
        if self.app_mode == "demo":
            return self.db_name_demo
        return self.db_name_personal

    @property
    def database_url(self) -> str:
        """Build DATABASE_URL from individual DB components.

        User and password are percent-encoded so credentials containing
        URI-reserved characters ("@", ":", "/", "%", ...) still produce a
        valid PostgreSQL connection URI.
        """
        url = (
            f"postgresql://{quote(self.db_user, safe='')}:{quote(self.db_password, safe='')}"
            f"@{self.db_host}:{self.db_port}/{self.resolved_db_name}"
        )
        if self.db_sslmode:
            url += f"?sslmode={self.db_sslmode}"
        return url

    @property
    def database_url_safe(self) -> str:
        """Database URL with the password redacted, suitable for logging.

        Rebuilt from components rather than string-replacing the real URL,
        so the secret cannot leak through a failed substitution (e.g. when
        the password is empty or appears percent-encoded in the URL).
        """
        url = (
            f"postgresql://{quote(self.db_user, safe='')}:***"
            f"@{self.db_host}:{self.db_port}/{self.resolved_db_name}"
        )
        if self.db_sslmode:
            url += f"?sslmode={self.db_sslmode}"
        return url

    @property
    def resolved_provider(self) -> Optional[str]:
        """Return the active LLM provider: explicit setting, auto-detected from keys, or None.

        Auto-detection skips apparent template placeholder values
        (e.g. "sk-...") so an unedited example .env selects no provider.
        """
        if self.llm_provider:  # truthiness already excludes None and ""
            return self.llm_provider

        # Auto-detect from populated API keys (priority order)
        if self.openai_api_key and self.openai_api_key != "sk-...":
            return "openai"
        if self.anthropic_api_key and self.anthropic_api_key != "sk-ant-...":
            return "anthropic"
        if self.google_api_key and self.google_api_key != "AI...":
            return "google"
        if self.hf_token and self.hf_token != "hf_...":
            # In demo mode, default to free-tier (user can switch to huggingface BYOK)
            if self.app_mode == "demo":
                return "free-tier"
            return "huggingface"

        return None


settings = Settings()