| from shiny import App, ui, reactive, render |
| import uuid |
| from datetime import datetime |
| import os |
| import sys |
| from pathlib import Path |
| from chatbot import ask_question, clear_conversation |
| from shinyswatch import theme |
|
|
| |
# Top-level page layout, constructed once at module import time.
app_ui = ui.page_fluid(
    ui.panel_title("American University Academic Advisor Chatbot"),

    ui.row(
        # Left column (width 3): settings card with session info, chat
        # controls, retrieval/model sliders, and a usage disclaimer.
        ui.column(3,
            ui.div(
                {"class": "card mb-3"},
                ui.div(
                    {"class": "card-header"},
                    ui.h4("Settings", class_="mb-0")
                ),
                ui.div(
                    {"class": "card-body"},
                    ui.tags.div(
                        {"class": "mb-3"},
                        ui.tags.label("Session ID:", class_="form-label"),
                        # NOTE(review): this UUID is generated once when the
                        # module is imported, so every browser session sees
                        # the same ID — confirm whether a per-session ID
                        # (generated in server()) was intended.
                        ui.tags.div(str(uuid.uuid4()), class_="form-text")
                    ),
                    # Resets the visible transcript (handled in server()).
                    ui.input_action_button("clear_chat", "Clear Chat History",
                        class_="btn btn-warning btn-block mb-3 w-100"),

                    # Exports the transcript as Markdown (handled in server()).
                    ui.download_button("download_chat", "Export Chat History",
                        class_="btn btn-primary btn-block mb-3 w-100"),
                    ui.hr(),
                    ui.h5("Model Parameters"),
                    # Number of documents the RAG retriever fetches per query.
                    ui.input_slider("n_results", "Documents to retrieve",
                        min=0, max=16, value=8),
                    # LLM sampling temperature.
                    ui.input_slider("temperature", "Temperature",
                        min=0.1, max=1.0, value=0.7, step=0.1),
                    ui.hr(),
                    ui.h5("About/Warning"),
                    ui.p("This AI advisor is an on-going student research project using a RAG architecture with Python, a Chroma database and the Mistral 7B LLM. It provides answers to questions about American University's academic offerings related to the Math/Stat Department. While it draws from authoritative sources, it is known to produce some answers that are incomplete, inaccurate, or irrelevant. All responses should be checked with the references and one's human advisor.")
                )
            )
        ),

        # Right column (width 9): the chat transcript itself. The id must
        # match the ui.Chat(id=...) created in server().
        ui.column(9,
            ui.div(
                {"class": "card h-100"},
                ui.div(
                    {"class": "card-body"},
                    ui.chat_ui("academic_chat", width="100%", height="75vh")
                )
            )
        )
    ),

    title="American University Academic Advisor",
    theme=theme.flatly
)
|
|
| |
def server(input, output, session):
    """Per-session server logic: chat lifecycle, transcript export, and
    RAG-backed question answering.

    Parameters follow the standard Shiny server signature; `input` carries
    the slider/button values, `session` the per-client session object.
    """

    # Chat component; the id must match ui.chat_ui("academic_chat") in app_ui.
    chat = ui.Chat(
        id="academic_chat",
        messages=[{
            "role": "assistant",
            "content": "Hello! I'm your American University Academic Advisor. How can I help you today?",
        }],
    )

    @reactive.Effect
    @reactive.event(input.clear_chat)
    async def _():
        # Reset the visible transcript, then greet again.
        await chat.clear()
        await chat.append_message({
            "role": "assistant",
            "content": "Chat history cleared. How can I help you today?",
        })
        # Also reset the backend conversation memory kept by the chatbot module.
        clear_conversation()

    def _transcript_markdown() -> str:
        """Render the current chat transcript as a Markdown document."""
        md_lines = [
            "# American University Academic Advisor Chat",
            f"*Exported on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*\n",
        ]
        for msg in chat.messages():
            role = msg.get("role", "unknown")
            text = msg.get("content", "")
            if role == "user":
                md_lines.append(f"## User\n\n{text}\n")
            elif role == "assistant":
                md_lines.append(f"## Assistant\n\n{text}\n")
        return "\n".join(md_lines)

    # BUG FIX: render.download takes the filename via the decorator's
    # `filename=` argument and expects the body to return a path or yield
    # content. The original returned a (filename, mimetype, bytes) tuple,
    # which Shiny does not understand and which produced a broken download.
    @render.download(
        filename=lambda: f"au_advisor_chat_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md",
        media_type="text/markdown",
    )
    def download_chat():
        yield _transcript_markdown().encode("utf-8")

    @chat.on_user_submit
    async def handle_user_input(user_input: str):
        # NOTE(review): ask_question appears synchronous and will block the
        # event loop while the LLM responds — consider offloading to a
        # thread if response latency becomes a problem.
        result = ask_question(
            query=user_input,
            n_results=input.n_results(),
            temperature=input.temperature(),
        )

        response = result["response"].strip()

        # Append a numbered "Sources" list built from the retrieved contexts.
        contexts = result.get("contexts") or []
        metadata = result.get("metadata") or []
        if contexts and metadata:
            sources = []
            for i, (context, meta) in enumerate(zip(contexts, metadata)):
                # BUG FIX: the original guarded `meta` for None when reading
                # "title" but not "url", which raised AttributeError on a
                # None metadata entry.
                title = meta.get("title", "") if meta else ""
                url = meta.get("url", "") if meta else ""

                # Generic or missing titles get a short content preview instead.
                if title in ["Table Row", "Paragraph"] or not title:
                    description = context.strip()[:50] + "..." if len(context) > 50 else context.strip()
                else:
                    description = title

                if url:
                    sources.append(f"[{i+1}] <a href='{url}' target='_blank'>{description}</a>")
                else:
                    sources.append(f"[{i+1}] {description}")

            # Avoid duplicating a sources section the model already produced.
            if sources and "Sources:" not in response:
                response += "<br><br><strong>Sources:</strong><br>" + "<br>".join(sources)

        await chat.append_message({"role": "assistant", "content": response})
|
|
| |
# ASGI application object, used by `shiny run` / uvicorn-style deployment.
app = App(app_ui, server)


if __name__ == "__main__":
    import shiny

    # BUG FIX: the original called run_app() without an app argument, which
    # only works when this file is named app.py (run_app's default app spec
    # is "app"). Pass the App object explicitly so running this file
    # directly works regardless of the filename.
    shiny.run_app(
        app,
        host="0.0.0.0",
        port=7860,
        launch_browser=False,
    )