File size: 1,576 Bytes
40db972
76eb61d
c1ff5e2
 
ff957d1
76eb61d
 
d5495e2
76eb61d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import gradio as gr
from settings import HEALTHCARE_SETTINGS, GENERAL_CONVERSATION_PROMPT, USE_SCENARIO_ENGINE
from data_registry import DataRegistry
from upload_ingest import extract_text_from_files
from healthcare_analysis import HealthcareAnalyzer
from rag import RAGIndex
from scenario_planner import plan_from_llm
from scenario_engine import ScenarioEngine
from llm_router import cohere_chat

def is_healthcare_scenario(text, files):
    """Return True when *text* mentions a configured healthcare keyword AND files were uploaded."""
    lowered = text.lower()
    keywords = HEALTHCARE_SETTINGS["healthcare_keywords"]
    keyword_hit = any(keyword in lowered for keyword in keywords)
    return keyword_hit and bool(files)

def handle(msg, history, files):
    """Handle one chat turn: run the healthcare scenario pipeline when the
    message matches healthcare keywords and files are attached, otherwise
    fall through to a general LLM conversation.

    Args:
        msg: user message text from the Textbox.
        history: list of (user, assistant) message tuples shown in the Chatbot.
        files: list of uploaded file paths, or None when nothing was uploaded.

    Returns:
        (updated_history, "") — the empty string clears the input Textbox.
    """
    registry = DataRegistry()
    for path in files or []:
        registry.add_path(path)
    # BUGFIX: pass `files or []` here too — the registry loop above already
    # guards against None, but the original passed the raw (possibly None)
    # value to extract_text_from_files.
    rag = RAGIndex()
    rag.add(extract_text_from_files(files or []).get("chunks", []))
    # NOTE(review): `rag` is populated but never queried below — confirm
    # whether RAGIndex.add has intended side effects or this is dead work.
    if is_healthcare_scenario(msg, files) and USE_SCENARIO_ENGINE:
        analyzer = HealthcareAnalyzer(registry)
        results = analyzer.comprehensive_analysis(msg)
        # Expose only DataFrame-like results (anything with .columns) to the planner.
        catalog = {name: list(df.columns) for name, df in results.items() if hasattr(df, "columns")}
        plan = plan_from_llm(msg, catalog)
        structured = ScenarioEngine.render_plan(plan, results)
        return history + [(msg, structured)], ""
    # General-conversation fallback; "..." stands in for an empty LLM reply.
    out = cohere_chat(f"{GENERAL_CONVERSATION_PROMPT}\n\nUser: {msg}\nAssistant:") or "..."
    return history + [(msg, out)], ""

# UI layout and event wiring: a chat transcript, a multi-file uploader,
# a message box, and a send button. Both the button click and pressing
# Enter in the textbox route through handle(), which returns the updated
# chat history and an empty string to clear the textbox.
with gr.Blocks() as demo:
    chat=gr.Chatbot()
    files=gr.Files(type="filepath", file_count="multiple")  # uploads arrive as local file paths
    msg=gr.Textbox()
    btn=gr.Button("Send")
    btn.click(handle,[msg,chat,files],[chat,msg])
    msg.submit(handle,[msg,chat,files],[chat,msg])

# Launch the Gradio server only when run as a script (not when imported).
if __name__=="__main__":
    demo.launch()