|
|
""" |
|
|
Gradio app to convert user input into a layered Knowledge Graph (IoT + GNN style) |
|
|
Ready to deploy on Hugging Face Spaces (Gradio) |
|
|
|
|
|
Requirements (put in requirements.txt on HF Space): |
|
|
- gradio |
|
|
- networkx |
|
|
- matplotlib |
|
|
- pillow |
|
|
|
|
|
Features: |
|
|
- Build layered Knowledge Graph (IoT → GNN → Actions)
|
|
- Generate 2D pipeline diagram |
|
|
- NEW: Generate Input → GNN → Output scatter plot to visualize how GNN applies (X=inputs, Y=outputs)
|
|
|
|
|
Save this file as app.py in your HF Space repository and add a requirements.txt with the packages above. |
|
|
""" |
|
|
|
|
|
import json |
|
|
import io |
|
|
from typing import List |
|
|
|
|
|
import matplotlib.pyplot as plt |
|
|
import networkx as nx |
|
|
import gradio as gr |
|
|
from PIL import Image |
|
|
|
|
|
|
|
|
# Default comma-separated node lists for each layer of the knowledge graph.
# Keys are layer names (their order defines the left-to-right layer order);
# values are parsed by parse_list() into individual node labels.
DEFAULTS = {


    "Sensors": "Temp, Humidity, Smoke, CO, CO2, Accelerometer, Magnetic, Gas(LEL), HeartRate, SpO2, Vibration, SkinTemp, GPS, Light, Sound, Camera, Mic, Pressure, Proximity, TapButton",


    "Features": "F_Temp, F_Air, F_Motion, F_Sound, F_Medical, F_Image, F_Anomaly",


    "EdgeProcessing": "Edge Processor, Anomaly Detector, Power/Battery, Sensor Health Monitor, Feature Store/DB",


    "AI_Core": "Sensor Fusion, Graph Neural Network (GNN), Model Repo/Explainability, OTA/Update Service, Security/Auth",


    "States": "State_Normal, State_Warning, State_Critical, State_Camera_HELP, State_Voice_HELP, State_Medical_HELP, State_Tap_HELP",


    "Alerts": "LED_Green, LED_Yellow, LED_Red, Buzzer, Camera Capture, Local Storage, SendAlert",


    "Cloud": "Cloud ML & Dashboard, GSM/Cell, Internet, Geolocation Service",


    "Messaging": "WhatsApp, Email, Twitter/SMS",


    "External": "Friend/Contact, Ambulance, Hospital, FireDept, Police, RegionalOffice"


}
|
|
|
|
|
# Fill color (hex) used when drawing nodes of each layer/category.
# Keys must match the layer names used in DEFAULTS / generate_graph();
# unknown categories fall back to "#cccccc" in draw_layered_graph_png().
COLOR_MAP = {


    "Sensors": "#8ecae6",


    "Features": "#bde0a8",


    "EdgeProcessing": "#ffe29a",


    "AI_Core": "#ffb4a2",


    "States": "#f4a261",


    "Alerts": "#e76f51",


    "Cloud": "#89c2d9",


    "Messaging": "#cdb4db",


    "External": "#bfbfbf"


}
|
|
|
|
|
|
|
|
def parse_list(text: str) -> List[str]:
    """Split a comma-separated string into stripped, de-duplicated items.

    Empty fragments are dropped, order of first appearance is preserved,
    and a falsy input (None or "") yields an empty list.
    """
    if not text:
        return []
    tokens = (piece.strip() for piece in text.split(","))
    # dict.fromkeys de-duplicates while keeping insertion order (Python 3.7+).
    return list(dict.fromkeys(tok for tok in tokens if tok))
|
|
|
|
|
|
|
|
def build_graph_from_inputs(inputs: dict) -> nx.DiGraph:
    """Build a layered directed knowledge graph from comma-separated layer texts.

    Each key of *inputs* is a layer name and each value a comma-separated node
    list (parsed with parse_list). Every node gets ``layer`` (its index in the
    dict order) and ``category`` (the layer name) attributes. Adjacent layers
    are wired round-robin, every source additionally connects to the first node
    of the next layer, and a few hand-crafted AI-core edges are added on top.
    """
    G = nx.DiGraph()
    for layer_idx, (layer_name, text) in enumerate(inputs.items()):
        for n in parse_list(text):
            G.add_node(n, layer=layer_idx, category=layer_name)

    layer_order = list(inputs.keys())
    for i in range(len(layer_order) - 1):
        src_nodes = parse_list(inputs[layer_order[i]])
        dst_nodes = parse_list(inputs[layer_order[i + 1]])
        if not src_nodes or not dst_nodes:
            continue
        for si, s in enumerate(src_nodes):
            # Round-robin edge keeps every source attached to *some* target.
            G.add_edge(s, dst_nodes[si % len(dst_nodes)])
            # The first target also acts as a hub every source reaches.
            # (dst_nodes is known non-empty here — the original re-checked it,
            # but the `continue` above already guarantees it.)
            G.add_edge(s, dst_nodes[0])

    # Hand-crafted edges inside the AI core: sensor fusion feeds the GNN,
    # and the GNN drives every state node.
    ai_nodes = parse_list(inputs.get("AI_Core", ""))
    if "Sensor Fusion" in ai_nodes and "Graph Neural Network (GNN)" in ai_nodes:
        G.add_edge("Sensor Fusion", "Graph Neural Network (GNN)")
    if "Graph Neural Network (GNN)" in ai_nodes:
        for s in parse_list(inputs.get("States", "")):
            G.add_edge("Graph Neural Network (GNN)", s)
    return G
|
|
|
|
|
|
|
|
def draw_layered_graph_png(G: nx.DiGraph, inputs: dict, figsize=(1400, 700)) -> bytes:
    """Render the layered graph to PNG bytes.

    *figsize* is in pixels (converted to inches at 100 dpi). Nodes are laid
    out in vertical columns, one column per layer, and colored per category
    via COLOR_MAP. Returns the raw PNG file contents.
    """
    # Group nodes by their layer index (set in build_graph_from_inputs).
    layers = {}
    for n, d in G.nodes(data=True):
        layers.setdefault(d.get("layer", 0), []).append(n)

    # One column per layer; each column is centered vertically around y=0.
    pos = {}
    x_gap = 1.5
    for layer_idx in sorted(layers):
        nodes = layers[layer_idx]
        y_start = -(len(nodes) - 1) / 2
        for j, node in enumerate(nodes):
            pos[node] = (layer_idx * x_gap, y_start + j)

    # Bind the figure explicitly instead of relying on pyplot's implicit
    # "current figure": concurrent Gradio requests could otherwise close or
    # draw into each other's figures.
    fig = plt.figure(figsize=(figsize[0] / 100, figsize[1] / 100), dpi=100)
    ax = fig.gca()
    ax.set_facecolor("white")
    nx.draw_networkx_edges(G, pos, ax=ax, edge_color="#222222", alpha=0.35, arrows=True, arrowsize=12)

    # Group nodes by category so each category gets its own color.
    categories = {}
    for n, d in G.nodes(data=True):
        categories.setdefault(d.get("category", ""), []).append(n)

    for cat, nodes in categories.items():
        color = COLOR_MAP.get(cat, "#cccccc")
        nx.draw_networkx_nodes(G, pos, nodelist=nodes, node_color=color, node_size=1200, edgecolors="#000000", ax=ax)
        nx.draw_networkx_labels(G, pos, labels={n: n for n in nodes}, font_size=8, font_weight="bold", ax=ax)

    # Label each column with its layer name.
    ax.set_xticks([layer_idx * x_gap for layer_idx in range(len(inputs))])
    ax.set_xticklabels(list(inputs.keys()), fontsize=10, weight='bold')
    ax.set_yticks([])
    ax.set_title("Layered Knowledge Graph (IoT -> GNN -> Actions)", fontsize=14, weight="bold")
    fig.tight_layout()

    buf = io.BytesIO()
    fig.savefig(buf, format="png", bbox_inches="tight")
    plt.close(fig)
    buf.seek(0)
    return buf.read()
|
|
|
|
|
|
|
|
def draw_gnn_xy_plot(inputs: dict) -> bytes:
    """Render a three-column scatter plot (Inputs / GNN / Outputs) to PNG bytes.

    Column 0 holds everything upstream of the GNN (Sensors + Features +
    EdgeProcessing), column 1 the AI-core entries whose name contains "GNN",
    and column 2 everything downstream (States + Alerts).
    """
    x_inputs = (parse_list(inputs.get("Sensors", ""))
                + parse_list(inputs.get("Features", ""))
                + parse_list(inputs.get("EdgeProcessing", "")))
    y_outputs = parse_list(inputs.get("States", "")) + parse_list(inputs.get("Alerts", ""))
    gnn_nodes = [n for n in parse_list(inputs.get("AI_Core", "")) if "GNN" in n]

    # Explicit figure handle — avoids pyplot's global current-figure state
    # misbehaving under concurrent Gradio requests.
    fig = plt.figure(figsize=(8, 6))
    ax = fig.gca()

    # (x position, node labels, fill color, marker size, label font size)
    columns = [
        (0, x_inputs, "#8ecae6", 500, 8),
        (1, gnn_nodes, "#ffb4a2", 800, 9),
        (2, y_outputs, "#f4a261", 500, 8),
    ]
    for x, nodes, color, size, fontsize in columns:
        for y, node in enumerate(nodes):
            ax.scatter(x, y, c=color, s=size, edgecolors="k")
            ax.text(x, y, node, ha="center", va="center", fontsize=fontsize, weight="bold")

    ax.set_xticks([0, 1, 2])
    ax.set_xticklabels(["Inputs", "GNN", "Outputs"], fontsize=10, weight="bold")
    ax.set_yticks([])
    # Title previously contained mojibake ("β"); restored to "→" arrows.
    ax.set_title("GNN Input → Hidden → Output Mapping", fontsize=14, weight="bold")
    fig.tight_layout()

    buf = io.BytesIO()
    fig.savefig(buf, format="png", bbox_inches="tight")
    plt.close(fig)
    buf.seek(0)
    return buf.read()
|
|
|
|
|
|
|
|
def graph_to_adj_json(G: nx.DiGraph) -> str:
    """Serialize *G* as a pretty-printed JSON adjacency list (node -> successors)."""
    adjacency = {}
    for node in G.nodes:
        adjacency[node] = list(G.successors(node))
    return json.dumps(adjacency, indent=2)
|
|
|
|
|
|
|
|
def generate_graph(sensors, features, edgeprocessing, ai_core, states, alerts, cloud, messaging, external):
    """Gradio callback: build the graph and return (layered PNG, GNN PNG, adjacency JSON).

    The nine textbox values are zipped with the canonical layer names (order
    matters — it defines the layer order) and fed to the graph/plot builders.
    Images are returned as PIL objects for gr.Image(type="pil") outputs.
    """
    layer_names = ["Sensors", "Features", "EdgeProcessing", "AI_Core",
                   "States", "Alerts", "Cloud", "Messaging", "External"]
    layer_values = [sensors, features, edgeprocessing, ai_core,
                    states, alerts, cloud, messaging, external]
    inputs = dict(zip(layer_names, layer_values))

    G = build_graph_from_inputs(inputs)
    layered_png = draw_layered_graph_png(G, inputs)
    gnn_png = draw_gnn_xy_plot(inputs)
    adj_json = graph_to_adj_json(G)

    layered_img = Image.open(io.BytesIO(layered_png))
    gnn_img = Image.open(io.BytesIO(gnn_png))
    return layered_img, gnn_img, adj_json
|
|
|
|
|
|
|
|
# ---- Gradio UI -------------------------------------------------------------
# NOTE: several user-visible strings previously contained mojibake ("β");
# they are restored to the intended "—" / "→" characters here.
with gr.Blocks() as demo:
    gr.Markdown("# Knowledge Graph Builder — IoT + GNN Converter\nEnter comma-separated node lists for each layer and press Generate.")

    # One textbox per layer, pre-filled from DEFAULTS; rows group them in pairs.
    with gr.Row():
        sensors_in = gr.Textbox(value=DEFAULTS["Sensors"], label="Sensors (comma-separated)", lines=3)
        features_in = gr.Textbox(value=DEFAULTS["Features"], label="Features (comma-separated)", lines=3)
    with gr.Row():
        edge_in = gr.Textbox(value=DEFAULTS["EdgeProcessing"], label="Edge Processing (comma-separated)", lines=3)
        ai_in = gr.Textbox(value=DEFAULTS["AI_Core"], label="AI Core (comma-separated)", lines=3)
    with gr.Row():
        states_in = gr.Textbox(value=DEFAULTS["States"], label="States (comma-separated)", lines=3)
        alerts_in = gr.Textbox(value=DEFAULTS["Alerts"], label="Alerts/Actuators (comma-separated)", lines=3)
    with gr.Row():
        cloud_in = gr.Textbox(value=DEFAULTS["Cloud"], label="Cloud/Comm (comma-separated)", lines=2)
        messaging_in = gr.Textbox(value=DEFAULTS["Messaging"], label="Messaging (comma-separated)", lines=2)
        external_in = gr.Textbox(value=DEFAULTS["External"], label="External Entities (comma-separated)", lines=2)

    generate_btn = gr.Button("Generate Knowledge Graph & GNN Plot")
    output_img1 = gr.Image(type="pil", label="Layered Knowledge Graph")
    output_img2 = gr.Image(type="pil", label="GNN Input → Output Plot")
    output_adj = gr.Textbox(label="Adjacency List (JSON)")

    # Textbox order must match generate_graph's positional parameters.
    generate_btn.click(
        fn=generate_graph,
        inputs=[sensors_in, features_in, edge_in, ai_in, states_in, alerts_in, cloud_in, messaging_in, external_in],
        outputs=[output_img1, output_img2, output_adj],
    )
|
|
|
|
|
|
|
|
# Script entry point: launch the Gradio server when run directly
# (Hugging Face Spaces executes app.py this way).
if __name__ == "__main__":


    demo.launch()
|
|
|