# ask-the-web-agent / src/utils/config.py
# Author: debashis2007 — commit 43256f7 (verified): "Use Zephyr-7B model (works without token)"
"""Configuration management using Pydantic Settings."""
from functools import lru_cache
from typing import Literal
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Central application configuration.

    Every field may be overridden via an environment variable (or a ``.env``
    file); variable names are matched case-insensitively and unknown
    variables are ignored.  The ``llm_*`` / ``search_*`` properties resolve
    the credential and model name appropriate for the selected provider so
    callers never branch on the provider themselves.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    # --- LLM provider selection -------------------------------------------
    llm_provider: Literal["openai", "anthropic", "ollama", "huggingface"] = Field(
        default="ollama", description="LLM provider to use"
    )

    # --- Per-provider LLM settings ----------------------------------------
    openai_api_key: str = Field(default="", description="OpenAI API key")
    openai_model: str = Field(default="gpt-4-turbo-preview", description="OpenAI model name")

    anthropic_api_key: str = Field(default="", description="Anthropic API key")
    anthropic_model: str = Field(
        default="claude-3-opus-20240229", description="Anthropic model name"
    )

    ollama_base_url: str = Field(default="http://localhost:11434", description="Ollama server URL")
    ollama_model: str = Field(default="llama3.2", description="Ollama model name")

    hf_token: str = Field(default="", description="Hugging Face API token")
    hf_model: str = Field(default="HuggingFaceH4/zephyr-7b-beta", description="Hugging Face model name")

    # --- Search provider selection ----------------------------------------
    search_provider: Literal["tavily", "serpapi", "duckduckgo"] = Field(
        default="tavily", description="Search provider to use"
    )
    tavily_api_key: str = Field(default="", description="Tavily API key")
    serpapi_api_key: str = Field(default="", description="SerpAPI key")

    # --- Agent behaviour ---------------------------------------------------
    max_iterations: int = Field(default=5, description="Maximum ReACT loop iterations")
    max_search_results: int = Field(default=5, description="Maximum search results per query")
    response_timeout: int = Field(default=30, description="Response timeout in seconds")
    enable_reflection: bool = Field(default=True, description="Enable self-reflection")
    max_tokens: int = Field(default=2048, description="Maximum tokens in LLM response")
    temperature: float = Field(default=0.7, description="LLM temperature")

    # --- Logging -----------------------------------------------------------
    log_level: str = Field(default="INFO", description="Logging level")
    log_format: Literal["json", "text"] = Field(default="text", description="Log format")

    # --- API server --------------------------------------------------------
    api_host: str = Field(default="0.0.0.0", description="API server host")
    api_port: int = Field(default=8000, description="API server port")

    # --- Environment -------------------------------------------------------
    debug: bool = Field(default=True, description="Debug mode")
    environment: str = Field(default="development", description="Environment name")

    @property
    def llm_api_key(self) -> str:
        """Return the credential for the active LLM provider ("" for Ollama,
        which runs locally and needs no key)."""
        provider_keys = {
            "openai": self.openai_api_key,
            "anthropic": self.anthropic_api_key,
            "huggingface": self.hf_token,
        }
        return provider_keys.get(self.llm_provider, "")

    @property
    def llm_model(self) -> str:
        """Return the model name for the active LLM provider, falling back
        to the Ollama model."""
        provider_models = {
            "openai": self.openai_model,
            "anthropic": self.anthropic_model,
            "huggingface": self.hf_model,
        }
        return provider_models.get(self.llm_provider, self.ollama_model)

    @property
    def search_api_key(self) -> str:
        """Return the credential for the active search provider ("" for
        DuckDuckGo, which needs no key)."""
        provider_keys = {
            "tavily": self.tavily_api_key,
            "serpapi": self.serpapi_api_key,
        }
        return provider_keys.get(self.search_provider, "")
@lru_cache
def get_settings() -> Settings:
    """Build the :class:`Settings` instance on first call and memoize it.

    Subsequent calls return the same object, so environment/.env parsing
    happens exactly once per process.
    """
    instance = Settings()
    return instance
# Module-level singleton: importing `settings` from this module gives the
# cached Settings built by get_settings() (env/.env is read at import time).
settings = get_settings()