kgbchatbot / src /config.py
thomascerniglia's picture
Add LLM integration with Qwen2.5-7B-Instruct via HF Inference API
2e0e474
raw
history blame contribute delete
866 Bytes
import os
from pydantic import BaseModel
class Settings(BaseModel):
    """Application configuration for the KGB document chatbot.

    All values are plain field defaults on a pydantic ``BaseModel``; the
    ``os.getenv`` defaults are evaluated once at class-definition (import)
    time, so changing ``HF_TOKEN`` / ``LOCAL_GENERATION_MODEL`` after import
    has no effect on already-defined defaults.
    """

    # --- Filesystem layout -------------------------------------------------
    raw_dir: str = os.path.join("data", "raw")
    storage_dir: str = "storage"
    # NOTE(review): these three repeat the literal "storage" instead of
    # deriving from storage_dir; keep them in sync if the layout changes.
    index_path: str = os.path.join("storage", "index.faiss")
    docs_path: str = os.path.join("storage", "docs.pkl")
    meta_path: str = os.path.join("storage", "meta.json")

    # --- Models and credentials (read from the environment at import) ------
    embedding_model: str = "sentence-transformers/all-MiniLM-L6-v2"
    local_generation_model: str = os.getenv("LOCAL_GENERATION_MODEL", "").strip()
    hf_token: str = os.getenv("HF_TOKEN", "").strip()

    # --- Retrieval / generation parameters ---------------------------------
    top_k: int = 5
    max_context_chars: int = 9000

    # --- UI strings ---------------------------------------------------------
    title: str = "KGB Document Chatbot"
    description: str = "Ask questions about declassified KGB documents using AI-powered search and analysis."

    @property
    def mode(self) -> str:
        """Return "rag" when an LLM backend is configured, else "retrieval".

        Any non-empty HF token or local model name enables generation; with
        neither, the app falls back to retrieval-only search.
        """
        return "rag" if (self.hf_token or self.local_generation_model) else "retrieval"