"""
config.py
─────────
Central configuration for the AstroBot RAG application.
All tuneable parameters live here — change once, affects everywhere.
"""
import os
from dataclasses import dataclass, field
@dataclass
class AppConfig:
    """Central, importable configuration for the AstroBot RAG application.

    All tuneable parameters live here. Secrets (API key, HF token) are read
    from environment variables via ``default_factory`` so they are resolved
    when the config object is created, not baked in at import time.
    """

    # ── Groq LLM ──────────────────────────────────────────────────────────
    # Read from the environment; empty string means "not configured".
    groq_api_key: str = field(default_factory=lambda: os.environ.get("GROQ_API_KEY", ""))
    groq_model: str = "llama-3.1-8b-instant"
    groq_temperature: float = 0.2  # sampling temperature (low = more deterministic)
    groq_max_tokens: int = 1024    # response length cap passed to the LLM

    # ── Hugging Face Dataset ──────────────────────────────────────────────
    hf_dataset: str = field(default_factory=lambda: os.environ.get("HF_DATASET", ""))
    hf_token: str = field(default_factory=lambda: os.environ.get("HF_TOKEN", ""))
    dataset_split: str = "train"
    # Ordered list of candidate column names that hold the raw text.
    # Annotated list[str] (was bare `list`) so type checkers see the element type.
    text_column_candidates: list[str] = field(default_factory=lambda: [
        "text", "content", "body", "page_content", "extracted_text",
    ])

    # ── Embeddings & Retrieval ────────────────────────────────────────────
    embed_model: str = "sentence-transformers/all-MiniLM-L6-v2"
    chunk_size: int = 512    # units per chunk — unit (chars/tokens) set by the splitter
    chunk_overlap: int = 64  # overlap between consecutive chunks
    top_k: int = 5           # number of retrieved chunks per query

    # ── App Meta ──────────────────────────────────────────────────────────
    # NOTE(review): the original title emoji and dashes arrived as mojibake
    # ("π" / "β"); restored as 🔮 and em dashes — confirm intended glyphs.
    app_title: str = "🔮 AstroBot — Astrology Learning Assistant"
    app_description: str = (
        "Ask me anything about astrology concepts — planets, houses, aspects, "
        "signs, transits, chart reading, and more. "
        "**Note:** This bot explains concepts only; no personal predictions are made."
    )
# Module-level singleton — import `cfg` from this module everywhere instead of
# constructing a fresh AppConfig per call site, so the whole app shares one
# configuration object (env vars are read once, here, at import time).
cfg = AppConfig()