# NOTE(review): the lines below replaced a Hugging Face Spaces page banner
# ("Spaces: Sleeping") that leaked into the file during extraction — not code.
# Third-party
import gradio as gr

# Local application modules
from data_registry import DataRegistry
from healthcare_analysis import HealthcareAnalyzer
from llm_router import cohere_chat
from rag import RAGIndex
from scenario_engine import ScenarioEngine
from scenario_planner import plan_from_llm
from settings import (
    GENERAL_CONVERSATION_PROMPT,
    HEALTHCARE_SETTINGS,
    USE_SCENARIO_ENGINE,
)
from upload_ingest import extract_text_from_files
def is_healthcare_scenario(text, files):
    """Return True when the message mentions a configured healthcare keyword
    and at least one file was uploaded.

    The keyword scan runs first (matching the original short-circuit order),
    then the upload check.
    """
    lowered = text.lower()
    keywords = HEALTHCARE_SETTINGS["healthcare_keywords"]
    mentions_healthcare = any(keyword in lowered for keyword in keywords)
    return mentions_healthcare and bool(files)
def handle(msg, history, files):
    """Process one chat turn and return ``(updated_history, "")``.

    Healthcare-flavored messages with uploaded files (and the scenario
    engine enabled) are answered via the structured scenario pipeline;
    everything else falls through to the general-conversation LLM.

    Args:
        msg: The user's message text.
        history: Current chatbot history as a list of (user, bot) tuples.
        files: Uploaded file paths from the UI, or None when nothing
            was uploaded.

    Returns:
        A pair of (new history, "") — the empty string clears the textbox.
    """
    registry = DataRegistry()
    for f in files or []:
        registry.add_path(f)

    # BUG FIX: guard against files being None — the registry loop above
    # already uses `files or []`, but the original passed raw `files`
    # here, which could hand None to extract_text_from_files.
    rag = RAGIndex()
    rag.add(extract_text_from_files(files or []).get("chunks", []))
    # NOTE(review): `rag` is never queried below — presumably indexing has
    # side effects relied on elsewhere; confirm before removing it.

    if is_healthcare_scenario(msg, files) and USE_SCENARIO_ENGINE:
        analyzer = HealthcareAnalyzer(registry)
        results = analyzer.comprehensive_analysis(msg)
        # Only dataframe-like results (those exposing .columns) enter the
        # catalog handed to the LLM planner.
        catalog = {n: list(df.columns) for n, df in results.items() if hasattr(df, "columns")}
        plan = plan_from_llm(msg, catalog)
        structured = ScenarioEngine.render_plan(plan, results)
        return history + [(msg, structured)], ""
    else:
        # Fall back to free-form chat; "..." covers an empty/None LLM reply.
        out = cohere_chat(f"{GENERAL_CONVERSATION_PROMPT}\n\nUser: {msg}\nAssistant:") or "..."
        return history + [(msg, out)], ""
# Assemble the Gradio UI: a chatbot pane, multi-file upload, a textbox,
# and a send button. Both the button click and textbox submit dispatch
# to the same handler with identical wiring.
with gr.Blocks() as demo:
    chat = gr.Chatbot()
    files = gr.Files(type="filepath", file_count="multiple")
    msg = gr.Textbox()
    btn = gr.Button("Send")

    handler_inputs = [msg, chat, files]
    handler_outputs = [chat, msg]
    btn.click(handle, handler_inputs, handler_outputs)
    msg.submit(handle, handler_inputs, handler_outputs)

if __name__ == "__main__":
    demo.launch()