File size: 1,342 Bytes
e0b48b1
 
 
 
 
f5f65f4
65867d3
 
 
 
 
f5f65f4
e0b48b1
d1328b3
e0b48b1
92a2269
e0b48b1
 
 
 
a94a9b7
65867d3
 
d1328b3
 
 
589d766
a94a9b7
65867d3
92a2269
 
a94a9b7
d1328b3
 
65867d3
d1328b3
 
 
 
65867d3
ee68c07
65867d3
 
 
 
 
92a2269
65867d3
 
d1328b3
e0b48b1
ce23a62
d1328b3
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import logging
import os

import gradio as gr

from backend_app.ingest import run_ingestion
from backend_app.rag_hf import RAGEngineHF
from backend_app.config import (
    DATA_DIR,
    FAISS_INDEX_PATH,
    DOCSTORE_PATH,
    BOT_WELCOME,
    BOT_NAME
)

# ---------- Prepare data once ----------
# Make sure the data directory exists before ingestion writes into it.
os.makedirs(DATA_DIR, exist_ok=True)

# Run ingestion only on first start: both the FAISS index file and the
# docstore file must already exist to skip it; if either is missing the
# whole ingestion pipeline is re-run.
if not (os.path.exists(FAISS_INDEX_PATH) and os.path.exists(DOCSTORE_PATH)):
    run_ingestion()

# Single engine instance shared by every chat request for the process lifetime.
rag = RAGEngineHF()


def respond(message, history, lang):
    """Chat callback: answer ``message`` with the RAG engine in ``lang``.

    Parameters:
        message: Raw user input from the chat box; ``None`` or
            whitespace-only input short-circuits to an empty reply.
        history: Chat history supplied by ``gr.ChatInterface`` (unused here;
            the engine is queried per-message).
        lang: Preferred response language from the dropdown
            ("English", "Sinhala", or "Tamil").

    Returns:
        The answer string, a "couldn't find enough information" prompt when
        the engine returns an empty answer, or a generic error message when
        the engine raises.
    """
    message = (message or "").strip()
    if not message:
        return ""

    try:
        result = rag.answer(message, preferred_lang=lang)
        answer = (result.get("answer") or "").strip()
        return answer if answer else "I couldn’t find enough information to answer that. Please rephrase."
    except Exception:
        # Log the full traceback instead of swallowing it silently, but
        # still return a friendly message so the chat UI keeps working.
        logging.getLogger(__name__).exception("RAG answer failed for message=%r", message)
        return "I ran into a temporary error while generating the answer. Please try again."


# ---------- UI ----------
# Build the Gradio app: a title, a welcome message, a language dropdown,
# and a chat interface wired to respond().
with gr.Blocks(title=BOT_NAME) as demo:
    gr.Markdown(f"## {BOT_NAME}")
    gr.Markdown(BOT_WELCOME)

    # The dropdown value is forwarded to respond() as its `lang` argument
    # via additional_inputs below.
    lang = gr.Dropdown(
        choices=["English", "Sinhala", "Tamil"],
        value="English",
        label="Response language"
    )

    # ✅ Minimal ChatInterface for maximum compatibility
    gr.ChatInterface(
        fn=respond,
        additional_inputs=[lang]
    )

# Enable request queuing so concurrent chat requests are handled in order.
demo.queue()

# Launch the web server only when run as a script (not when imported,
# e.g. by a Space runtime or test harness).
if __name__ == "__main__":
    demo.launch()