"""Gradio front-end for the RAG chatbot.

On import: ensures the data directory exists, runs ingestion once if the
FAISS index or docstore is missing, builds the RAG engine, and defines the
chat UI. Launches the app when run as a script.
"""

import logging
import os

import gradio as gr

from backend_app.config import (
    BOT_NAME,
    BOT_WELCOME,
    DATA_DIR,
    DOCSTORE_PATH,
    FAISS_INDEX_PATH,
)
from backend_app.ingest import run_ingestion
from backend_app.rag_hf import RAGEngineHF

logger = logging.getLogger(__name__)

# ---------- Prepare data once ----------
# Re-run ingestion only if either persisted artifact is missing.
os.makedirs(DATA_DIR, exist_ok=True)
if not (os.path.exists(FAISS_INDEX_PATH) and os.path.exists(DOCSTORE_PATH)):
    run_ingestion()

rag = RAGEngineHF()


def respond(message, history, lang):
    """Answer *message* via the RAG engine, responding in *lang*.

    *history* is accepted because Gradio's ChatInterface passes it, but it
    is not used — each question is answered independently.

    Returns a plain string for the chat UI; empty input yields "".
    """
    message = (message or "").strip()
    if not message:
        return ""
    try:
        result = rag.answer(message, preferred_lang=lang)
        answer = (result.get("answer") or "").strip()
        return answer if answer else (
            "I couldn’t find enough information to answer that. Please rephrase."
        )
    except Exception:
        # Log the full traceback (previously swallowed silently) while still
        # showing a friendly message in the UI.
        logger.exception("RAG answer generation failed")
        return "I ran into a temporary error while generating the answer. Please try again."


with gr.Blocks(title=BOT_NAME) as demo:
    gr.Markdown(f"## {BOT_NAME}")
    gr.Markdown(BOT_WELCOME)

    lang = gr.Dropdown(
        choices=["English", "Sinhala", "Tamil"],
        value="English",
        label="Response language",
    )

    # Minimal ChatInterface for maximum compatibility.
    gr.ChatInterface(
        fn=respond,
        additional_inputs=[lang],
    )

demo.queue()

if __name__ == "__main__":
    demo.launch()