File size: 4,299 Bytes
75bea1c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4f92e8f
75bea1c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4f92e8f
 
43256f7
4f92e8f
75bea1c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4f92e8f
 
75bea1c
 
 
 
 
 
 
 
 
4f92e8f
 
75bea1c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
"""Configuration management using Pydantic Settings."""

from functools import lru_cache
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application configuration sourced from environment variables.

    Values are read from the process environment and, when present, a
    local ``.env`` file; unknown variables are ignored.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    # --- LLM provider selection ---
    llm_provider: Literal["openai", "anthropic", "ollama", "huggingface"] = Field(
        default="ollama", description="LLM provider to use"
    )

    # --- OpenAI ---
    openai_api_key: str = Field(default="", description="OpenAI API key")
    openai_model: str = Field(
        default="gpt-4-turbo-preview", description="OpenAI model name"
    )

    # --- Anthropic ---
    anthropic_api_key: str = Field(default="", description="Anthropic API key")
    anthropic_model: str = Field(
        default="claude-3-opus-20240229", description="Anthropic model name"
    )

    # --- Ollama (local server, no key required) ---
    ollama_base_url: str = Field(
        default="http://localhost:11434", description="Ollama server URL"
    )
    ollama_model: str = Field(default="llama3.2", description="Ollama model name")

    # --- Hugging Face ---
    hf_token: str = Field(default="", description="Hugging Face API token")
    hf_model: str = Field(
        default="HuggingFaceH4/zephyr-7b-beta", description="Hugging Face model name"
    )

    # --- Search provider selection ---
    search_provider: Literal["tavily", "serpapi", "duckduckgo"] = Field(
        default="tavily", description="Search provider to use"
    )

    # --- Tavily ---
    tavily_api_key: str = Field(default="", description="Tavily API key")

    # --- SerpAPI ---
    serpapi_api_key: str = Field(default="", description="SerpAPI key")

    # --- Agent behavior ---
    max_iterations: int = Field(default=5, description="Maximum ReACT loop iterations")
    max_search_results: int = Field(
        default=5, description="Maximum search results per query"
    )
    response_timeout: int = Field(default=30, description="Response timeout in seconds")
    enable_reflection: bool = Field(default=True, description="Enable self-reflection")
    max_tokens: int = Field(default=2048, description="Maximum tokens in LLM response")
    temperature: float = Field(default=0.7, description="LLM temperature")

    # --- Logging ---
    log_level: str = Field(default="INFO", description="Logging level")
    log_format: Literal["json", "text"] = Field(default="text", description="Log format")

    # --- API server ---
    api_host: str = Field(default="0.0.0.0", description="API server host")
    api_port: int = Field(default=8000, description="API server port")

    # --- Environment ---
    debug: bool = Field(default=True, description="Debug mode")
    environment: str = Field(default="development", description="Environment name")

    @property
    def llm_api_key(self) -> str:
        """Return the API key for the configured LLM provider.

        Ollama runs locally and needs no key, so it falls through to "".
        """
        provider_keys = {
            "openai": self.openai_api_key,
            "anthropic": self.anthropic_api_key,
            "huggingface": self.hf_token,
        }
        return provider_keys.get(self.llm_provider, "")

    @property
    def llm_model(self) -> str:
        """Return the model name for the configured LLM provider."""
        provider_models = {
            "openai": self.openai_model,
            "anthropic": self.anthropic_model,
            "huggingface": self.hf_model,
        }
        # Default to the Ollama model when no explicit mapping matches.
        return provider_models.get(self.llm_provider, self.ollama_model)

    @property
    def search_api_key(self) -> str:
        """Return the API key for the configured search provider.

        DuckDuckGo requires no key, so it falls through to "".
        """
        key_by_provider = {
            "tavily": self.tavily_api_key,
            "serpapi": self.serpapi_api_key,
        }
        return key_by_provider.get(self.search_provider, "")


@lru_cache(maxsize=None)
def get_settings() -> Settings:
    """Return the shared :class:`Settings` instance, built on first call.

    ``lru_cache`` memoizes the single zero-argument call, so all callers
    observe one and the same settings object for the process lifetime.
    """
    return Settings()


# Eagerly-created shared instance for convenient module-level access.
settings = get_settings()