Rajan Sharma committed (verified)
Commit dddc062 · 1 Parent(s): 00c16d0

Update app.py

Files changed (1)
  1. app.py +38 -14
app.py CHANGED
@@ -4,7 +4,10 @@ import gradio as gr
 import pandas as pd
 from typing import List, Tuple, Dict, Any
 
-from settings import HEALTHCARE_SETTINGS, GENERAL_CONVERSATION_PROMPT, USE_SCENARIO_ENGINE
+from settings import (
+    HEALTHCARE_SETTINGS, GENERAL_CONVERSATION_PROMPT, USE_SCENARIO_ENGINE,
+    DEBUG_PLAN, COHERE_MODEL_PRIMARY, COHERE_TIMEOUT_S, USE_OPEN_FALLBACKS
+)
 from audit_log import log_event
 from privacy import safety_filter, refusal_reply
 from data_registry import DataRegistry
@@ -13,7 +16,7 @@ from healthcare_analysis import HealthcareAnalyzer
 from scenario_planner import parse_to_plan
 from scenario_engine import ScenarioEngine
 from rag import RAGIndex
-from llm_router import generate_narrative, cohere_chat, open_fallback_chat
+from llm_router import generate_narrative, cohere_chat, open_fallback_chat, _co_client
 
 def _sanitize_text(s: str) -> str:
     if not isinstance(s, str): return s
@@ -35,6 +38,19 @@ def is_healthcare_scenario(text: str, has_files: bool) -> bool:
 def _append_msg(history_messages: List[Dict[str, str]], role: str, content: str) -> List[Dict[str, str]]:
     return (history_messages or []) + [{"role": role, "content": content}]
 
+def ping_cohere():
+    try:
+        cli = _co_client()
+        if not cli:
+            return "Cohere client not initialized. Is COHERE_API_KEY set?"
+        from llm_router import cohere_embed
+        vecs = cohere_embed(["hello", "world"])
+        if vecs and len(vecs) == 2:
+            return "Cohere OK ✅ (embed call succeeded)"
+        return "Cohere reachable, but embedding returned no vectors."
+    except Exception as e:
+        return f"Cohere ping failed: {e}"
+
 def handle(user_msg: str, history_messages: List[Dict[str, str]], files: list) -> Tuple[List[Dict[str, str]], str]:
     try:
         safe_in, blocked_in, reason_in = safety_filter(user_msg, mode="input")
@@ -66,19 +82,16 @@ def handle(user_msg: str, history_messages: List[Dict[str, str]], files: list) -> Tuple[List[Dict[str, str]], str]:
             datasets = analyzer.comprehensive_analysis(safe_in)
             catalog = _dataset_catalog(datasets)
 
-            # LLM → plan (Cohere API)
             plan = parse_to_plan(safe_in, catalog)
-
-            # Deterministic execution
             structured_md = ScenarioEngine.execute_plan(plan, datasets)
 
-            # Narrative via Cohere API (fallback only if enabled)
             rag_hits = [txt for txt, _ in rag.retrieve(safe_in, k=6)]
             narrative = generate_narrative(safe_in, structured_md, rag_hits)
 
-            reply = _sanitize_text(f"{structured_md}\n\n# Narrative & Recommendations\n\n{narrative}")
+            debug_note = f"\n\n> **Planner note:** {getattr(plan, 'notes', '')}" if (DEBUG_PLAN and getattr(plan, "notes", None)) else ""
+            reply = _sanitize_text(f"{structured_md}\n\n# Narrative & Recommendations\n\n{narrative}{debug_note}")
         else:
-            # General chat via Cohere API
+            # General conversation via Cohere (fallback if enabled)
            prompt = f"{GENERAL_CONVERSATION_PROMPT}\n\nUser: {safe_in}\nAssistant:"
             reply = cohere_chat(prompt) or open_fallback_chat(prompt) or "How can I help further?"
         reply = _sanitize_text(reply)
@@ -97,12 +110,14 @@ def handle(user_msg: str, history_messages: List[Dict[str, str]], files: list) -> Tuple[List[Dict[str, str]], str]:
 # -------- UI --------
 with gr.Blocks(analytics_enabled=False) as demo:
     gr.Markdown("## Canadian Healthcare AI • Cohere API • Scenario-Agnostic • Deterministic analytics")
-    chat = gr.Chatbot(type="messages", height=520)  # OpenAI-style role/content
-    files = gr.Files(
-        file_count="multiple",
-        type="filepath",
-        file_types=HEALTHCARE_SETTINGS["supported_file_types"]
-    )
+
+    # diagnostics row
+    with gr.Row():
+        ping_btn = gr.Button("Ping Cohere")
+        ping_out = gr.Markdown()
+
+    chat = gr.Chatbot(type="messages", height=520)
+    files = gr.Files(file_count="multiple", type="filepath", file_types=HEALTHCARE_SETTINGS["supported_file_types"])
     msg = gr.Textbox(placeholder="Paste any scenario (Background / Situation / Tasks / Deliverables) or just chat.")
     send = gr.Button("Send")
     clear = gr.Button("Clear")
@@ -111,9 +126,18 @@ with gr.Blocks(analytics_enabled=False) as demo:
         h2, _ = handle(m, h or [], f or [])
         return h2, ""
 
+    ping_btn.click(lambda: ping_cohere(), outputs=[ping_out])
     send.click(_on_send, inputs=[msg, chat, files], outputs=[chat, msg])
     msg.submit(_on_send, inputs=[msg, chat, files], outputs=[chat, msg])
     clear.click(lambda: ([], ""), outputs=[chat, msg])
 
 if __name__ == "__main__":
+    from audit_log import log_event
+    log_event("startup", None, {
+        "cohere_key_present": bool(os.getenv("COHERE_API_KEY")),
+        "cohere_model": COHERE_MODEL_PRIMARY,
+        "timeout_s": COHERE_TIMEOUT_S,
+        "open_fallbacks": USE_OPEN_FALLBACKS
+    })
+    gr.set_static_paths({})
     demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", "7860")))
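
Note on the widened settings import: this commit only imports DEBUG_PLAN, COHERE_MODEL_PRIMARY, COHERE_TIMEOUT_S, and USE_OPEN_FALLBACKS; their definitions live in settings.py, which is not part of the diff. A minimal sketch of what they might look like — the defaults and environment-variable names below are assumptions, not taken from the repo:

    # settings.py — hypothetical sketch; only the four names are known from the import
    import os

    DEBUG_PLAN = os.getenv("DEBUG_PLAN", "0") == "1"                      # surface planner notes in replies
    COHERE_MODEL_PRIMARY = os.getenv("COHERE_MODEL", "command-r-plus")    # assumed chat model id
    COHERE_TIMEOUT_S = float(os.getenv("COHERE_TIMEOUT_S", "30"))         # per-request timeout, seconds
    USE_OPEN_FALLBACKS = os.getenv("USE_OPEN_FALLBACKS", "1") == "1"      # allow open_fallback_chat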
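
Likewise, the new ping_cohere diagnostic depends on two llm_router helpers that app.py only imports: _co_client and cohere_embed. Assuming the standard cohere Python SDK, they could look roughly like this — the model name, lazy-init pattern, and empty-list failure convention are guesses, not the actual implementation:

    # llm_router.py — hypothetical sketch of the two helpers ping_cohere calls
    import os
    from typing import List, Optional

    import cohere

    _client: Optional[cohere.Client] = None

    def _co_client() -> Optional[cohere.Client]:
        """Lazily build a Cohere client; return None when COHERE_API_KEY is unset."""
        global _client
        if _client is None and os.getenv("COHERE_API_KEY"):
            _client = cohere.Client(os.environ["COHERE_API_KEY"])
        return _client

    def cohere_embed(texts: List[str]) -> List[List[float]]:
        """Embed texts; an empty list signals failure so callers can fall back."""
        cli = _co_client()
        if not cli:
            return []
        resp = cli.embed(texts=texts, model="embed-english-v3.0", input_type="search_query")
        return [list(v) for v in resp.embeddings]

Under these assumptions, ping_cohere's check that len(vecs) == 2 for its two probe strings lines up with the list-of-embeddings return shape.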
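
Finally, the startup block re-imports log_event inside the __main__ guard (it is already imported at module top, so the inner import is redundant but harmless) and calls it as log_event("startup", None, {...}). The audit_log module is also outside this diff; a compatible sketch, assuming an (event, user, payload) signature and JSON-lines output — file path and record shape are assumptions:

    # audit_log.py — hypothetical sketch matching the call site in this commit
    import json, os, time
    from typing import Any, Dict, Optional

    def log_event(event: str, user: Optional[str], payload: Dict[str, Any]) -> None:
        """Append one JSON line per event; the log path is configurable via env."""
        record = {"ts": time.time(), "event": event, "user": user, **payload}
        with open(os.getenv("AUDIT_LOG_PATH", "audit.log"), "a", encoding="utf-8") as f:
            f.write(json.dumps(record) + "\n")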