File size: 2,286 Bytes
1ef6995
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# web/app.py
import os, time, gradio as gr
from langchain_ollama import OllamaEmbeddings, OllamaLLM
from langchain_chroma import Chroma
from langchain.prompts import PromptTemplate
from langchain.chains.combine_documents import create_stuff_documents_chain

# --- Configuration (all overridable via environment variables) ---
HOME = os.path.expanduser("~")
# Root of the project checkout; defaults to ~/PaLi-CANON.
PROJECT = os.path.expanduser(os.environ.get("PALI_PROJECT_ROOT", f"{HOME}/PaLi-CANON"))
# On-disk location of the persisted Chroma vector store.
PERSIST_DIR = os.path.expanduser(os.environ.get("LOTUS_CHROMA_DIR", f"{PROJECT}/chroma"))
COLLECTION  = os.environ.get("LOTUS_CHROMA_COLLECTION", "lotus_canon")
# Ollama model names: one for embeddings, one for answer generation.
EMBED_MODEL = os.environ.get("LOTUS_EMBED_MODEL", "nomic-embed-text")
DEFAULT_LLM = os.environ.get("LOTUS_LLM_MODEL", "mistral")

from functools import lru_cache


@lru_cache(maxsize=1)
def build_db():
    """Return the persisted Chroma vector store for the canon collection.

    The embeddings client and the Chroma handle are created once and
    memoized (``lru_cache(maxsize=1)``) — previously every request
    rebuilt both, paying the client-construction cost per query.

    Returns:
        Chroma: vector store backed by PERSIST_DIR / COLLECTION, using
        the Ollama embedding model named by EMBED_MODEL.
    """
    emb = OllamaEmbeddings(model=EMBED_MODEL)
    return Chroma(embedding_function=emb, persist_directory=PERSIST_DIR, collection_name=COLLECTION)

# Answer prompt: instructs the model to stay grounded in the retrieved
# context. Variables: {question} (user input), {context} (stuffed docs).
PROMPT = PromptTemplate.from_template(
    "You are a calm Theravāda teacher. Use only the provided context.\n\n"
    "Question:\n{question}\n\nContext:\n{context}"
)

def ask(question: str) -> str:
    """Answer *question* with RAG over the Pāli Canon vector store.

    Retrieves up to 12 diverse passages via MMR, stuffs them into PROMPT,
    and queries the local Ollama LLM. The answer is suffixed with the
    wall-clock time the whole round-trip took.

    Args:
        question: user's free-text question.

    Returns:
        The model's answer plus a "⏱ N.NNs" timing footer, or a short
        hint string when the question is empty.
    """
    question = question.strip()
    if not question:
        # Avoid a pointless retrieval + LLM round-trip on empty input.
        return "Please enter a question."
    t0 = time.time()
    db = build_db()
    # MMR trades pure similarity for diversity: sample 50 candidates, keep 12.
    retriever = db.as_retriever(search_type="mmr", search_kwargs={"k": 12, "fetch_k": 50})
    docs = retriever.invoke(question)
    llm = OllamaLLM(model=DEFAULT_LLM, temperature=0.5)
    chain = create_stuff_documents_chain(llm, PROMPT)
    ans = chain.invoke({"question": question, "context": docs})
    return f"{ans}\n\n— ⏱ {time.time()-t0:.2f}s"

CSS = """
:root { color-scheme: light dark; }
body { background: Canvas; color: CanvasText; font-family: system-ui, -apple-system, 'Inter', sans-serif; }
.container { max-width: 820px; margin: 36px auto; }
h1 { text-align:center; margin: 0 0 16px; font-size: 28px; font-weight: 700; }
"""

# UI: a single chat interface inside a styled Blocks page.
with gr.Blocks(css=CSS, title="PaLi-CANON") as demo:
    gr.Markdown("<div class='container'><h1>PaLi-CANON</h1></div>")
    chat = gr.ChatInterface(
        # History is ignored: each question is answered independently, so the
        # wrapper returns just the assistant string for the latest message.
        fn=lambda message, history: ask(message),              # return just the assistant string
        chatbot=gr.Chatbot(type="messages", height=520),       # messages API (no deprecation)
        textbox=gr.Textbox(placeholder="Ask about the Pāli Canon…", lines=1),
        submit_btn="Ask",
        stop_btn="Stop",
    )

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)