YashsharmaPhD commited on
Commit
35ca794
·
verified ·
1 Parent(s): 5625ab9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +212 -0
app.py ADDED
@@ -0,0 +1,212 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Gradio app to convert user input into a layered Knowledge Graph (IoT + GNN style)
3
+ Ready to deploy on Hugging Face Spaces (Gradio)
4
+
5
+ Requirements (put in requirements.txt on HF Space):
6
+ - gradio
7
+ - networkx
8
+ - matplotlib
9
+ - pillow
10
+
11
+ How it works:
12
+ - User provides comma-separated lists for each category (Sensors, Features, Edge Processing, AI Core, States, Alerts, Cloud, Messaging, External)
13
+ - The app builds a directed NetworkX graph, arranges nodes in layered X-axis positions, draws a clear colored plot, and returns the PNG and a JSON (adj list)
14
+
15
+ Save this file as app.py in your HF Space repository and add a requirements.txt with the packages above.
16
+ """
17
+
18
+ import json
19
+ import io
20
+ from typing import List
21
+
22
+ import matplotlib.pyplot as plt
23
+ import networkx as nx
24
+ import gradio as gr
25
+ from PIL import Image
26
+
27
+
28
+ DEFAULTS = {
29
+ "Sensors": "Temp, Humidity, Smoke, CO, CO2, Accelerometer, Magnetic, Gas(LEL), HeartRate, SpO2, Vibration, SkinTemp, GPS, Light, Sound, Camera, Mic, Pressure, Proximity, TapButton",
30
+ "Features": "F_Temp, F_Air, F_Motion, F_Sound, F_Medical, F_Image, F_Anomaly",
31
+ "EdgeProcessing": "Edge Processor, Anomaly Detector, Power/Battery, Sensor Health Monitor, Feature Store/DB",
32
+ "AI_Core": "Sensor Fusion, Graph Neural Network (GNN), Model Repo/Explainability, OTA/Update Service, Security/Auth",
33
+ "States": "State_Normal, State_Warning, State_Critical, State_Camera_HELP, State_Voice_HELP, State_Medical_HELP, State_Tap_HELP",
34
+ "Alerts": "LED_Green, LED_Yellow, LED_Red, Buzzer, Camera Capture, Local Storage, SendAlert",
35
+ "Cloud": "Cloud ML & Dashboard, GSM/Cell, Internet, Geolocation Service",
36
+ "Messaging": "WhatsApp, Email, Twitter/SMS",
37
+ "External": "Friend/Contact, Ambulance, Hospital, FireDept, Police, RegionalOffice"
38
+ }
39
+
40
+ COLOR_MAP = {
41
+ "Sensors": "#8ecae6",
42
+ "Features": "#bde0a8",
43
+ "EdgeProcessing": "#ffe29a",
44
+ "AI_Core": "#ffb4a2",
45
+ "States": "#f4a261",
46
+ "Alerts": "#e76f51",
47
+ "Cloud": "#89c2d9",
48
+ "Messaging": "#cdb4db",
49
+ "External": "#bfbfbf"
50
+ }
51
+
52
+
53
def parse_list(text: str) -> List[str]:
    """Split a comma-separated string into a de-duplicated list of names.

    Whitespace around each item is stripped, empty items are dropped, and
    first-occurrence order is preserved. Falsy input (None/"") yields [].
    """
    if not text:
        return []
    stripped = (piece.strip() for piece in text.split(","))
    # dict.fromkeys keeps insertion order, giving an ordered de-duplication.
    return list(dict.fromkeys(piece for piece in stripped if piece))
66
+
67
+
68
def build_graph_from_inputs(inputs: dict) -> nx.DiGraph:
    """Build a layered directed graph from per-layer comma-separated texts.

    Parameters:
        inputs: mapping of layer name -> comma-separated node list; the
            mapping's key order defines the pipeline layer order
            (Sensors -> Features -> ... -> External).

    Returns:
        nx.DiGraph with each node carrying ``layer`` (index) and
        ``category`` (layer name) attributes, connected by a round-robin
        fan-out between adjacent layers plus a hub edge to each next
        layer's first node, and a few hand-wired AI-core edges.
    """
    G = nx.DiGraph()

    # Parse every layer exactly once (the original re-parsed each text up
    # to three times: as node source, then as src and dst of the fan-out).
    parsed = {name: parse_list(text) for name, text in inputs.items()}

    # Add nodes tagged with their layer index and category name.
    for layer_idx, (layer_name, nodes) in enumerate(parsed.items()):
        for n in nodes:
            G.add_node(n, layer=layer_idx, category=layer_name)

    # Heuristic pipeline edges between adjacent layers:
    # Sensors -> Features -> EdgeProcessing -> AI_Core -> States -> Alerts -> Cloud -> Messaging -> External
    layer_order = list(parsed)
    for i in range(len(layer_order) - 1):
        src_nodes = parsed[layer_order[i]]
        dst_nodes = parsed[layer_order[i + 1]]
        if not src_nodes or not dst_nodes:
            continue
        # Fan-out heuristic instead of a full mesh: each source connects to a
        # round-robin destination and to the layer's first ("central") node.
        for si, s in enumerate(src_nodes):
            G.add_edge(s, dst_nodes[si % len(dst_nodes)])
            G.add_edge(s, dst_nodes[0])

    # Hand-wired internal AI-core edges, when the well-known nodes exist.
    ai_nodes = parsed.get("AI_Core", [])
    if "Sensor Fusion" in ai_nodes and "Graph Neural Network (GNN)" in ai_nodes:
        G.add_edge("Sensor Fusion", "Graph Neural Network (GNN)")
    if "Graph Neural Network (GNN)" in ai_nodes:
        # Connect the GNN to every declared state.
        for s in parsed.get("States", []):
            G.add_edge("Graph Neural Network (GNN)", s)
    return G
103
+
104
+
105
def draw_layered_graph_png(G: nx.DiGraph, inputs: dict, figsize=(1400, 700)) -> bytes:
    """Render the layered graph to PNG bytes.

    Nodes are placed in columns by their ``layer`` attribute and colored by
    their ``category`` via COLOR_MAP. Uses the matplotlib object-oriented
    API (explicit fig/ax) instead of the global pyplot state: in a Gradio
    server several requests may render concurrently, and a bare
    ``plt.close()`` could close another request's current figure; the
    try/finally also guarantees the figure is released if savefig raises.

    Parameters:
        G: graph whose nodes carry ``layer`` and ``category`` attributes.
        inputs: the layer-name -> text mapping (only key order/names are
            used here, for the x-axis layer labels).
        figsize: (width, height) in pixels; divided by dpi=100 for inches.

    Returns:
        PNG image data as bytes.
    """
    # Group nodes into columns by their layer index.
    layers = {}
    for n, d in G.nodes(data=True):
        layers.setdefault(d.get("layer", 0), []).append(n)

    # Layered positions: x spaced by layer, y centered around 0 per column.
    pos = {}
    x_gap = 1.5
    for layer_idx in sorted(layers):
        nodes = layers[layer_idx]
        y_start = -(len(nodes) - 1) / 2
        for j, node in enumerate(nodes):
            pos[node] = (layer_idx * x_gap, y_start + j)

    fig, ax = plt.subplots(figsize=(figsize[0] / 100, figsize[1] / 100), dpi=100)
    try:
        ax.set_facecolor("white")

        # Edges first so nodes render on top of them.
        nx.draw_networkx_edges(G, pos, ax=ax, edge_color="#222222", alpha=0.35, arrows=True, arrowsize=12)

        # Group nodes by category so each gets its COLOR_MAP color.
        categories = {}
        for n, d in G.nodes(data=True):
            categories.setdefault(d.get("category", ""), []).append(n)

        for cat, nodes in categories.items():
            color = COLOR_MAP.get(cat, "#cccccc")
            nx.draw_networkx_nodes(G, pos, nodelist=nodes, node_color=color, node_size=1200, edgecolors="#000000", ax=ax)
            nx.draw_networkx_labels(G, pos, labels={n: n for n in nodes}, font_size=8, font_weight="bold", ax=ax)

        # Layer names as x-axis tick labels under each column.
        ax.set_xticks([layer_idx * x_gap for layer_idx in range(len(inputs))])
        ax.set_xticklabels(list(inputs.keys()), fontsize=10, weight='bold')
        ax.set_yticks([])
        ax.set_title("Layered Knowledge Graph (IoT -> GNN -> Actions)", fontsize=14, weight="bold")
        fig.tight_layout()

        buf = io.BytesIO()
        fig.savefig(buf, format="png", bbox_inches="tight")
    finally:
        # Always free the figure, even if rendering/saving fails.
        plt.close(fig)
    buf.seek(0)
    return buf.read()
156
+
157
+
158
def graph_to_adj_json(G: nx.DiGraph) -> str:
    """Serialize the graph as a pretty-printed JSON adjacency list.

    Each node maps to the list of its direct successors.
    """
    adjacency = {}
    for node in G.nodes:
        adjacency[node] = list(G.successors(node))
    return json.dumps(adjacency, indent=2)
162
+
163
+
164
+ # Gradio interface function
165
# Gradio callback: one argument per layer textbox, in pipeline order.
def generate_graph(sensors, features, edgeprocessing, ai_core, states, alerts, cloud, messaging, external):
    """Build the graph from the nine layer texts and return (PIL image, adjacency JSON)."""
    layer_names = (
        "Sensors",
        "Features",
        "EdgeProcessing",
        "AI_Core",
        "States",
        "Alerts",
        "Cloud",
        "Messaging",
        "External",
    )
    layer_texts = (sensors, features, edgeprocessing, ai_core, states, alerts, cloud, messaging, external)
    # dict preserves insertion order, which defines the pipeline layer order.
    inputs = dict(zip(layer_names, layer_texts))

    graph = build_graph_from_inputs(inputs)
    png_bytes = draw_layered_graph_png(graph, inputs)

    # Decode the PNG bytes into a PIL image for the gr.Image output.
    image = Image.open(io.BytesIO(png_bytes))
    return image, graph_to_adj_json(graph)
184
+
185
+
186
+ # Build Gradio UI
187
# Assemble the Gradio UI: nine layer textboxes (pre-filled from DEFAULTS),
# a generate button, and two outputs (graph image + adjacency JSON).
with gr.Blocks() as demo:
    gr.Markdown("# Knowledge Graph Builder — IoT + GNN Layered Converter\nEnter comma-separated node lists for each layer and press Generate.")

    with gr.Row():
        sensors_in = gr.Textbox(value=DEFAULTS["Sensors"], label="Sensors (comma-separated)", lines=3)
        features_in = gr.Textbox(value=DEFAULTS["Features"], label="Features (comma-separated)", lines=3)
    with gr.Row():
        edge_in = gr.Textbox(value=DEFAULTS["EdgeProcessing"], label="Edge Processing (comma-separated)", lines=3)
        ai_in = gr.Textbox(value=DEFAULTS["AI_Core"], label="AI Core (comma-separated)", lines=3)
    with gr.Row():
        states_in = gr.Textbox(value=DEFAULTS["States"], label="States (comma-separated)", lines=3)
        alerts_in = gr.Textbox(value=DEFAULTS["Alerts"], label="Alerts/Actuators (comma-separated)", lines=3)
    with gr.Row():
        cloud_in = gr.Textbox(value=DEFAULTS["Cloud"], label="Cloud/Comm (comma-separated)", lines=2)
        messaging_in = gr.Textbox(value=DEFAULTS["Messaging"], label="Messaging (comma-separated)", lines=2)
        external_in = gr.Textbox(value=DEFAULTS["External"], label="External Entities (comma-separated)", lines=2)

    generate_btn = gr.Button("Generate Knowledge Graph")
    output_img = gr.Image(type="pil", label="Generated Graph")
    output_adj = gr.Textbox(label="Adjacency List (JSON)")

    # Inputs must be passed in the same order generate_graph expects them.
    layer_inputs = [
        sensors_in,
        features_in,
        edge_in,
        ai_in,
        states_in,
        alerts_in,
        cloud_in,
        messaging_in,
        external_in,
    ]
    generate_btn.click(fn=generate_graph, inputs=layer_inputs, outputs=[output_img, output_adj])


if __name__ == "__main__":
    demo.launch()