Spaces:
Runtime error
| import os | |
| import gradio as gr | |
| from gradio_client import Client | |
| import json, datetime | |
| import gspread | |
| from google.oauth2.service_account import Credentials | |
| import time | |
# === Google Sheets Setup ===
# Service-account credentials come from the GOOGLE_CREDS HF Space secret
# (consumed by _get_worksheet below).
SHEET_ID = "1O0pRqNDp-MkTGfASVwQPSZPLocLJTSOFbx0bjMzkJQg"  # key of the spreadsheet that receives interaction logs
| def _get_worksheet(): | |
| """Authorise Google Sheets once and cache worksheet.""" | |
| creds_json = os.environ.get("GOOGLE_CREDS") | |
| if not creds_json: | |
| raise RuntimeError("Missing GOOGLE_CREDS secret in Hugging Face Space") | |
| creds_dict = json.loads(creds_json) | |
| scopes = ["https://www.googleapis.com/auth/spreadsheets", "https://www.googleapis.com/auth/drive"] | |
| creds = Credentials.from_service_account_info(creds_dict, scopes=scopes) | |
| gc = gspread.authorize(creds) | |
| return gc.open_by_key(SHEET_ID).sheet1 # First worksheet | |
# Cache the worksheet object at import time so we don't re-authorize on
# every logged interaction. Note: if auth fails here, the whole Space fails
# to start (prints bracket the call to make that visible in the Space logs).
print("Loading worksheet")
worksheet = _get_worksheet()
print("worksheet loaded")
def log_interaction(question: str, response: str, routing: str, aura: str, latency: float):
    """Best-effort append of one interaction row to the Google Sheet.

    Row layout: Question | Response | Routing | AURA | Latency (s).
    Any failure is printed and swallowed so logging can never break the chat.
    """
    try:
        worksheet.append_row(
            [question, response, routing, aura, f"{latency:.2f}"]
        )
    except Exception as e:
        print(f"⚠️ Logging failed: {e}")
# === Backend config ===
# Location of the private backend Space and the token used to reach it.
BACKEND_URL = os.environ.get("BACKEND_URL")
TOKEN = os.environ.get("HF_PRIVATE_SPACE_TOKEN")
# Single shared client — the Space process persists across requests, so one
# Client instance is constructed at import time and reused for every call.
client = Client(BACKEND_URL, hf_token=TOKEN)
| def respond(message: str, history: list | None): | |
| """Proxy to private Space /chat endpoint and mirror outputs. | |
| Returns (chat_history, internal_routing_md, aura_md) | |
| """ | |
| try: | |
| t0 = time.perf_counter() | |
| chat, md1, md2 = client.predict( | |
| message=message, | |
| history=history or [], | |
| api_name="/chat", | |
| ) | |
| bot_response = chat[-1][1] if chat else "" | |
| md1 = md1 or "" | |
| md2 = md2 or "" | |
| latency = time.perf_counter() - t0 | |
| # ✅ Log cleanly | |
| try: | |
| print("attempting log") | |
| log_interaction(message, bot_response, md1, md2, latency) | |
| print("success") | |
| except Exception as e: | |
| print(f"Failed to log: {e}") | |
| return chat, md1, md2 | |
| except Exception as e: | |
| chat = (history or []) + [(message, f"⚠️ Backend error: {e}")] | |
| return chat, "", "" | |
def clear_all():
    """Ask the backend /clear endpoint to reset state.

    Returns the cleared (chat, routing_md, aura_md) triple; if the remote
    call fails for any reason, fall back to clearing everything locally.
    """
    try:
        chat, md1, md2 = client.predict(api_name="/clear")
    except Exception:
        # Backend unreachable — clear the UI locally instead.
        return [], "", ""
    return chat or [], md1 or "", md2 or ""
# === Gradio UI ===
with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Project Nightingale - Due Diligence Explorer (Public Proxy)")
    gr.Markdown(
        """
**10.10.25 1700 BST: BT running evals for christopher with additional questions - please message me if you wish to run additional queries as don't want to mess up the latency**
**I will take down this notice as soon as I have finished**
Ask a question — we will:
1. Select sources intelligently with our internal routing agent
2. Search the indexed guideline data using our **G-PRISM** retrieval pipeline
3. Internally verify responses with the **AURA** adversarial critical evaluator mechanism
"""
    )
    with gr.Row():
        # Left column: the chat itself.
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                label="Clinical Knowledge Chat (G-PRISM Retrieval)",
                height=500,
                render_markdown=True,
                # NOTE(review): sanitize_html=False renders raw HTML from the
                # backend — confirm the backend output is trusted.
                sanitize_html=False,
            )
            with gr.Row():
                user_in = gr.Textbox(placeholder="Type your question…", scale=4)
                send_btn = gr.Button("Send", scale=1, variant="primary")
            clear_btn = gr.Button("Clear Chat")
        # Right column: diagnostic panes mirrored from the backend response.
        with gr.Column(scale=2):
            with gr.Group():
                gr.Markdown("## Intelligent source selection (Internal Routing)")
                spec_box = gr.Markdown(value="")
            with gr.Group():
                gr.Markdown("## Critical Appraisal (AURA)")
                critical_box = gr.Markdown(value="")
    # Wire events: Enter in the textbox and the Send button both call respond.
    user_in.submit(respond, inputs=[user_in, chatbot], outputs=[chatbot, spec_box, critical_box])
    send_btn.click(respond, inputs=[user_in, chatbot], outputs=[chatbot, spec_box, critical_box])
    # Clear the input box after sending.
    user_in.submit(lambda: "", None, user_in)
    send_btn.click(lambda: "", None, user_in)
    # Clear everything: chat history plus both markdown panes.
    clear_btn.click(clear_all, outputs=[chatbot, spec_box, critical_box])
if __name__ == "__main__":
    demo.launch()