|
|
|
|
|
import os, time, gradio as gr |
|
|
from langchain_ollama import OllamaEmbeddings, OllamaLLM |
|
|
from langchain_chroma import Chroma |
|
|
from langchain.prompts import PromptTemplate |
|
|
from langchain.chains.combine_documents import create_stuff_documents_chain |
|
|
|
|
|
def _env(name: str, default: str) -> str:
    """Read configuration value *name* from the environment, or *default*."""
    return os.environ.get(name, default)


# --- Configuration (every value overridable via environment variables) ---
HOME = os.path.expanduser("~")
# Project root holding the canon data; defaults to ~/PaLi-CANON.
PROJECT = os.path.expanduser(_env("PALI_PROJECT_ROOT", f"{HOME}/PaLi-CANON"))
# On-disk location of the persisted Chroma vector store.
PERSIST_DIR = os.path.expanduser(_env("LOTUS_CHROMA_DIR", f"{PROJECT}/chroma"))
# Chroma collection name, Ollama embedding model, and default chat model.
COLLECTION = _env("LOTUS_CHROMA_COLLECTION", "lotus_canon")
EMBED_MODEL = _env("LOTUS_EMBED_MODEL", "nomic-embed-text")
DEFAULT_LLM = _env("LOTUS_LLM_MODEL", "mistral")
|
|
|
|
|
# Cached store: `ask` calls build_db() per question, and without a cache each
# call re-instantiates the embedding client and reopens the Chroma store.
_DB = None


def build_db():
    """Return the Chroma vector store backed by Ollama embeddings.

    Opens the persisted store at PERSIST_DIR / collection COLLECTION,
    embedding queries with EMBED_MODEL. The store is constructed on the
    first call and the same instance is returned on every later call.

    Returns:
        Chroma: the (cached) vector-store client.
    """
    global _DB
    if _DB is None:
        emb = OllamaEmbeddings(model=EMBED_MODEL)
        _DB = Chroma(
            embedding_function=emb,
            persist_directory=PERSIST_DIR,
            collection_name=COLLECTION,
        )
    return _DB
|
|
|
|
|
# Grounding prompt: the model may draw only on the retrieved context.
_PROMPT_TEXT = (
    "You are a calm Theravāda teacher. Use only the provided context."
    "\n\nQuestion:\n{question}\n\nContext:\n{context}"
)
PROMPT = PromptTemplate.from_template(_PROMPT_TEXT)
|
|
|
|
|
def ask(question: str) -> str:
    """Answer *question* from the indexed canon, appending the elapsed time.

    Retrieves up to 12 passages with MMR search (from a candidate pool of
    50), stuffs them into PROMPT, and asks the configured Ollama model for
    an answer grounded in that context.

    Args:
        question: the user's free-form question.

    Returns:
        The model's answer followed by a "— ⏱ N.NNs" timing suffix.
    """
    started = time.time()
    store = build_db()
    # MMR trades pure relevance for diversity among the fetched candidates.
    passages = store.as_retriever(
        search_type="mmr", search_kwargs={"k": 12, "fetch_k": 50}
    ).invoke(question)
    model = OllamaLLM(model=DEFAULT_LLM, temperature=0.5)
    answer = create_stuff_documents_chain(model, PROMPT).invoke(
        {"question": question, "context": passages}
    )
    elapsed = time.time() - started
    return f"{answer}\n\n— ⏱ {elapsed:.2f}s"
|
|
|
|
|
# Minimal page styling: follow the OS light/dark preference via the system
# Canvas/CanvasText colors, and center the content column.
CSS = """


:root { color-scheme: light dark; }


body { background: Canvas; color: CanvasText; font-family: system-ui, -apple-system, 'Inter', sans-serif; }


.container { max-width: 820px; margin: 36px auto; }


h1 { text-align:center; margin: 0 0 16px; font-size: 28px; font-weight: 700; }


"""
|
|
|
|
|
def _respond(message, history):
    """ChatInterface callback — history is ignored; each question stands alone."""
    return ask(message)


# Assemble the single-page chat UI.
with gr.Blocks(css=CSS, title="PaLi-CANON") as demo:
    gr.Markdown("<div class='container'><h1>PaLi-CANON</h1></div>")
    chat = gr.ChatInterface(
        fn=_respond,
        chatbot=gr.Chatbot(type="messages", height=520),
        textbox=gr.Textbox(placeholder="Ask about the Pāli Canon…", lines=1),
        submit_btn="Ask",
        stop_btn="Stop",
    )
|
|
|
|
|
if __name__ == "__main__":
    # Bind on all interfaces so the UI is reachable from other hosts.
    host, port = "0.0.0.0", 7860
    demo.launch(server_name=host, server_port=port)
|
|
|