# coding-llm-space / src/config.py
# (uploaded by girish00 via huggingface_hub, commit 07a91a1, verified)
"""Project-level configuration values."""
from dataclasses import dataclass
import os
from dotenv import load_dotenv
# Populate os.environ from a local .env file (if one exists) BEFORE the
# Settings dataclass below evaluates its os.getenv defaults at import time.
load_dotenv()
def env_bool(name: str, default: str = "false") -> bool:
    """Read a boolean environment variable.

    Accepts the common truthy spellings ("true", "1", "yes", "on",
    case-insensitive, surrounding whitespace ignored); everything else is
    False. Previously only the exact lowercase-folded string "true" counted,
    so values such as USE_RAG=1 silently disabled the feature.

    Args:
        name: Environment variable to read.
        default: Fallback string used when the variable is unset.

    Returns:
        The parsed boolean value.
    """
    return os.getenv(name, default).strip().lower() in {"true", "1", "yes", "on"}


@dataclass(frozen=True)
class Settings:
    """Central settings object for model and runtime controls.

    All defaults are read from the environment once, at class-definition
    (import) time; later changes to os.environ are not picked up.
    Malformed numeric env vars (e.g. MAX_NEW_TOKENS="abc") raise
    ValueError at import, which surfaces bad configuration immediately.
    """

    # Primary generation model, with two progressively smaller fallbacks.
    model_name: str = os.getenv("MODEL_NAME", "Qwen/Qwen2.5-Coder-1.5B-Instruct")
    fallback_model_name: str = os.getenv("FALLBACK_MODEL_NAME", "Qwen/Qwen2.5-Coder-0.5B-Instruct")
    final_fallback_model_name: str = os.getenv("FINAL_FALLBACK_MODEL_NAME", "sshleifer/tiny-gpt2")
    # Embedding model used for retrieval (RAG).
    embedding_model: str = os.getenv("EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2")
    # Generation controls.
    max_new_tokens: int = int(os.getenv("MAX_NEW_TOKENS", "256"))
    temperature: float = float(os.getenv("TEMPERATURE", "0.2"))
    top_p: float = float(os.getenv("TOP_P", "0.95"))
    # Feature flags; see env_bool for the accepted truthy spellings.
    use_rag: bool = env_bool("USE_RAG", "true")
    force_mock_mode: bool = env_bool("FORCE_MOCK_MODE", "false")
    # API access / throttling.
    api_key: str = os.getenv("API_KEY", "")
    rate_limit_per_minute: int = int(os.getenv("RATE_LIMIT_PER_MINUTE", "30"))
# Eagerly-constructed module-level instance; intended to be imported by the
# rest of the project so all modules share one configuration snapshot.
settings = Settings()