Spaces:
Running
Running
Delete app_space.py
Browse files- app_space.py +0 -150
app_space.py
DELETED
|
@@ -1,150 +0,0 @@
|
|
| 1 |
-
import io
import json
import os
import sys
import math
from typing import Any, Dict, List

import gradio as gr
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.image as mpimg
import pandas as pd

# Ensure local package is importable when running in Hugging Face Spaces.
# NOTE: this must run BEFORE the `from app...` imports below, which is why
# the local imports are deliberately not grouped at the top of the file.
ROOT = os.path.dirname(os.path.abspath(__file__))
if ROOT not in sys.path:
    sys.path.insert(0, ROOT)

from app.ml.gating import gate_signal
from app.ml.inference import infer_ecg, load_model
from app.rules.engine import evaluate_ecg_rules


# Preload model (uses ./checkpoints/ecg_classifier.pt if present) so the
# first UI request does not pay the model-loading cost.
load_model()
def parse_signal(text: str | List[float]) -> List[float]:
|
| 29 |
-
if isinstance(text, list):
|
| 30 |
-
return [float(x) for x in text]
|
| 31 |
-
try:
|
| 32 |
-
return [float(x) for x in json.loads(text)]
|
| 33 |
-
except Exception:
|
| 34 |
-
raise gr.Error("Provide ECG samples as a JSON list, e.g., [0.1, 0.2, 0.3]")
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
def run_infer(signal_text: str) -> Dict[str, Any]:
    """Run the full pipeline: parse, gate, infer, then apply symbolic rules.

    Returns a JSON-serializable dict with the model label/score, heart rate,
    rule-engine alert level, gating metadata, and textual explanations.
    """
    samples = parse_signal(signal_text)
    gated, meta = gate_signal(samples, return_windows=True)
    model_output: Dict[str, Any] = infer_ecg(gated, original_len=len(samples), gating_meta=meta)

    # Symbolic rules run on top of the raw model output (demo patient context).
    rules_result = evaluate_ecg_rules({"patient_id": "demo"}, model_output)

    # Merge gating explanations (if the model attached any) with rule-engine ones.
    explanations: List[Any] = []
    gating_info = model_output.get("gating")
    if isinstance(gating_info, dict):
        explanations.extend(gating_info.get("explanations", []))
    explanations.extend(rules_result.get("explanations", []))

    summary = f"Windows kept: {meta.get('selected_windows',0)}/{meta.get('total_windows',0)} • ratio={meta.get('ratio',1):.2f}"
    return {
        "label": model_output.get("label"),
        "score": round(float(model_output.get("score", 0.0)), 3),
        "hr": model_output.get("hr"),
        "alert_level": rules_result.get("alert_level", "none"),
        "gated_ratio": round(model_output.get("gated_ratio", 1.0), 3),
        "gating": meta,
        "gating_summary": summary,
        "explanations": explanations,
    }
def plot_gating(signal_text: str):
    """Render raw vs. gated signal and tabulate per-window gating decisions.

    Returns:
        A tuple ``(image, summary, df)``: a numpy image of the two-panel
        plot, a one-line gating summary string, and a pandas DataFrame with
        one row per unique gating window.
    """
    samples = parse_signal(signal_text)
    gated, meta = gate_signal(samples, return_windows=True)

    # Two stacked panels: raw on top, gated below.
    fig, (ax_raw, ax_gated) = plt.subplots(2, 1, figsize=(6, 4))
    ax_raw.plot(samples, color="#0066ff", linewidth=1)
    ax_raw.set_title("Raw signal")
    ax_gated.plot(gated, color="#ff6600", linewidth=1)
    ax_gated.set_title(f"Gated signal (ratio={meta['ratio']:.2f})")
    fig.tight_layout()

    # Rasterize the figure into a numpy array for gr.Image(type="numpy").
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=120)
    plt.close(fig)
    buf.seek(0)
    np_img = mpimg.imread(buf)

    # Build the window table, dropping exact duplicates while keeping
    # first-occurrence order.
    rows = []
    seen = set()
    for win in meta.get("windows", []):
        key = (win.get("start"), win.get("end"), bool(win.get("selected")), bool(win.get("forced", False)))
        if key not in seen:
            seen.add(key)
            rows.append([
                win.get("start"),
                win.get("end"),
                round(float(win.get("significance", 0.0)), 3),
                round(float(win.get("probability", 0.0)), 3),
                bool(win.get("selected")),
                bool(win.get("forced", False)),
            ])
    df = pd.DataFrame(rows, columns=["start", "end", "significance", "prob", "selected", "forced"])
    summary = f"Windows kept: {meta.get('selected_windows',0)}/{meta.get('total_windows',0)} • ratio={meta.get('ratio',1):.2f}"
    return np_img, summary, df
# Demo signals with more structure so gating can skip/keep meaningfully.
# Each trace is 256 samples long; "afib" carries two sine components plus a
# periodic spike so it is visibly busier than the near-flat other two.
demo_normal = [0.05 * math.sin(2 * math.pi * 2 * (n / 256)) for n in range(256)]
demo_afib = [
    0.25 * math.sin(2 * math.pi * 6 * (n / 256))
    + 0.05 * math.sin(2 * math.pi * 15 * (n / 256))
    + 0.15 * (n % 40 == 0)  # spike every 40th sample
    for n in range(256)
]
demo_noise = [
    0.02 * math.sin(2 * math.pi * 1 * (n / 256)) + 0.01 * (n % 13 == 0)
    for n in range(256)
]
# Gradio UI: three tabs sharing the same inference pipeline.
with gr.Blocks(title="Sundew ECG Demo") as demo:
    gr.Markdown("### Neurosymbolic ECG • Sundew Gating + Rules")
    with gr.Tabs():
        # Tab 1: paste a JSON signal and run the full inference pipeline.
        with gr.Tab("Upload/Infer"):
            inp = gr.Textbox(
                label="ECG samples (JSON list)",
                value=json.dumps(demo_afib[:128]),
            )
            out = gr.JSON(label="Inference")
            btn = gr.Button("Run")
            btn.click(run_infer, inputs=inp, outputs=out)
        # Tab 2: visualize which windows the gate keeps vs. skips.
        with gr.Tab("Gating Preview"):
            inp2 = gr.Textbox(
                label="ECG samples (JSON list)",
                value=json.dumps(demo_afib[:128]),
            )
            img = gr.Image(type="numpy", label="Raw vs Gated")
            summary_box = gr.Textbox(label="Gating summary")
            table = gr.Dataframe(
                headers=["start", "end", "significance", "prob", "selected", "forced"],
                datatype=["number", "number", "number", "number", "bool", "bool"],
                wrap=True,
            )
            btn2 = gr.Button("Show gating")
            btn2.click(plot_gating, inputs=inp2, outputs=[img, summary_box, table])
        # Tab 3: one-click canned demos. Hidden textboxes hold the full
        # demo signals so the same run_infer handler can be reused.
        with gr.Tab("Demos"):
            out_demo = gr.JSON()
            btn_n = gr.Button("Normal")
            btn_a = gr.Button("Arrhythmia-ish")
            btn_noise = gr.Button("Noisy baseline")
            hidden_n = gr.Textbox(value=json.dumps(demo_normal), visible=False)
            hidden_a = gr.Textbox(value=json.dumps(demo_afib), visible=False)
            hidden_noise = gr.Textbox(value=json.dumps(demo_noise), visible=False)
            btn_n.click(run_infer, inputs=hidden_n, outputs=out_demo)
            btn_a.click(run_infer, inputs=hidden_a, outputs=out_demo)
            btn_noise.click(run_infer, inputs=hidden_noise, outputs=out_demo)
if __name__ == "__main__":
    # Start the Gradio server when run directly as a script.
    demo.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|