Pream912 commited on
Commit
0db80c6
Β·
verified Β·
1 Parent(s): 3c0d7ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +672 -34
app.py CHANGED
@@ -129,36 +129,388 @@ def remove_colors(img: np.ndarray) -> np.ndarray:
129
  return result
130
 
131
 
132
- def estimate_wall_thickness(binary: np.ndarray, fallback: int = 12) -> int:
133
- h, w = binary.shape
134
- n_cols = min(200, w)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
  col_idx = np.linspace(0, w-1, n_cols, dtype=int)
136
- runs = []
 
137
  for ci in col_idx:
138
- col = (binary[:, ci] > 0).astype(np.int8)
139
  pad = np.concatenate([[0], col, [0]])
140
  d = np.diff(pad.astype(np.int16))
141
  s = np.where(d == 1)[0]
142
  e = np.where(d == -1)[0]
143
  n = min(len(s), len(e))
144
  r = (e[:n] - s[:n]).astype(int)
145
- runs.extend(r[(r >= 2) & (r <= h*0.15)].tolist())
146
  if runs:
147
- return max(6, int(np.median(runs)))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
148
  return fallback
149
 
150
 
151
- def extract_walls_adaptive(img_clean: np.ndarray) -> Tuple[np.ndarray, int]:
152
- h, w = img_clean.shape[:2]
153
- gray = cv2.cvtColor(img_clean, cv2.COLOR_BGR2GRAY)
154
- otsu_t, binary = cv2.threshold(
155
- gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU
156
- )
157
- wall_threshold = int(otsu_t)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
158
  _, binary = cv2.threshold(gray, wall_threshold, 255, cv2.THRESH_BINARY_INV)
159
 
160
  min_line_len = max(8, int(0.012 * w))
161
- body_thickness = estimate_wall_thickness(binary)
162
  body_thickness = int(np.clip(body_thickness, 9, 30))
163
 
164
  k_h = cv2.getStructuringElement(cv2.MORPH_RECT, (min_line_len, 1))
@@ -175,15 +527,13 @@ def extract_walls_adaptive(img_clean: np.ndarray) -> Tuple[np.ndarray, int]:
175
 
176
  collision = cv2.bitwise_and(dil_h, dil_v)
177
  safe_zone = cv2.bitwise_and(collision, orig_walls)
178
- walls = cv2.bitwise_or(
179
- cv2.bitwise_and(walls, cv2.bitwise_not(collision)), safe_zone
180
- )
181
 
182
  dist = cv2.distanceTransform(cv2.bitwise_not(orig_walls), cv2.DIST_L2, 5)
183
- keep_mask = (dist <= body_thickness / 2).astype(np.uint8) * 255
184
  walls = cv2.bitwise_and(walls, keep_mask)
 
185
 
186
- # noise removal
187
  n_lbl, labels, stats, _ = cv2.connectedComponentsWithStats(walls, connectivity=8)
188
  if n_lbl > 1:
189
  areas = stats[1:, cv2.CC_STAT_AREA]
@@ -192,9 +542,272 @@ def extract_walls_adaptive(img_clean: np.ndarray) -> Tuple[np.ndarray, int]:
192
  keep_lut[1:] = (areas >= min_n).astype(np.uint8)
193
  walls = (keep_lut[labels] * 255).astype(np.uint8)
194
 
 
 
195
  return walls, body_thickness
196
 
197
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
198
  def apply_user_lines_to_walls(
199
  walls: np.ndarray,
200
  lines: List[Tuple[int,int,int,int]],
@@ -588,6 +1201,8 @@ def init_state() -> Dict:
588
  "img_cropped": None,
589
  "img_clean": None,
590
  "walls": None,
 
 
591
  "user_lines": [], # [(x1,y1,x2,y2), …]
592
  "draw_start": None, # pending line start pixel
593
  "walls_thickness": 8,
@@ -639,19 +1254,46 @@ def cb_preprocess(state):
639
  if img is None:
640
  return None, None, state, "Load an image first."
641
 
 
642
  cropped = remove_title_block(img)
643
- clean = remove_colors(cropped)
644
 
645
- state["img_cropped"] = cropped
646
- state["img_clean"] = clean
 
 
 
 
 
 
 
 
 
 
 
 
647
 
648
- walls, thick = extract_walls_adaptive(clean)
 
 
 
 
 
 
 
 
 
 
649
  state["walls"] = walls.copy()
 
650
  state["walls_thickness"] = thick
 
651
 
652
  walls_rgb = cv2.cvtColor(walls, cv2.COLOR_GRAY2RGB)
653
- clean_rgb = cv2.cvtColor(clean, cv2.COLOR_BGR2RGB)
654
- return clean_rgb, walls_rgb, state, f"βœ… Walls extracted (thicknessβ‰ˆ{thick}px)"
 
 
 
655
 
656
 
657
  def cb_add_door_line(evt: gr.SelectData, state):
@@ -698,16 +1340,12 @@ def cb_undo_door_line(state):
698
  state["user_lines"].pop()
699
  state["draw_start"] = None
700
 
701
- walls = state.get("walls")
702
- img = state.get("img_clean")
703
- if walls is None:
704
  return None, state, "Re-run preprocessing."
705
 
706
- # recompute from scratch
707
- walls_base, thick = extract_walls_adaptive(state["img_clean"])
708
- walls_upd = apply_user_lines_to_walls(
709
- walls_base, state["user_lines"], thick
710
- )
711
  state["walls"] = walls_upd
712
 
713
  vis = cv2.cvtColor(walls_upd, cv2.COLOR_GRAY2RGB)
 
129
  return result
130
 
131
 
132
+ # ════════════════════════════════════════════════════════════════════════════
133
+ # WALL CALIBRATION (exact port from GeometryAgent WallCalibration)
134
+ # ════════════════════════════════════════════════════════════════════════════
135
+
136
+ from dataclasses import dataclass, field
137
+
138
+ @dataclass
139
+ class WallCalibration:
140
+ stroke_width : int = 3
141
+ min_component_dim : int = 30
142
+ min_component_area: int = 45
143
+ bridge_min_gap : int = 2
144
+ bridge_max_gap : int = 14
145
+ door_gap : int = 41
146
+ max_bridge_thick : int = 15
147
+
148
+
149
+ def calibrate_wall(mask: np.ndarray) -> WallCalibration:
150
+ cal = WallCalibration()
151
+ h, w = mask.shape
152
+
153
+ n_cols = min(200, w)
154
  col_idx = np.linspace(0, w-1, n_cols, dtype=int)
155
+ runs: List[int] = []
156
+ max_run = max(2, int(h * 0.05))
157
  for ci in col_idx:
158
+ col = (mask[:, ci] > 0).astype(np.int8)
159
  pad = np.concatenate([[0], col, [0]])
160
  d = np.diff(pad.astype(np.int16))
161
  s = np.where(d == 1)[0]
162
  e = np.where(d == -1)[0]
163
  n = min(len(s), len(e))
164
  r = (e[:n] - s[:n]).astype(int)
165
+ runs.extend(r[(r >= 1) & (r <= max_run)].tolist())
166
  if runs:
167
+ arr = np.array(runs, dtype=np.int32)
168
+ hist = np.bincount(np.clip(arr, 0, 200))
169
+ cal.stroke_width = max(2, int(np.argmax(hist[1:])) + 1)
170
+
171
+ cal.min_component_dim = max(15, cal.stroke_width * 10)
172
+ cal.min_component_area = max(30, cal.stroke_width * cal.min_component_dim // 2)
173
+
174
+ gap_sizes: List[int] = []
175
+ row_step = max(3, h // 200)
176
+ col_step = max(3, w // 200)
177
+ for row in range(5, h-5, row_step):
178
+ rd = (mask[row, :] > 0).astype(np.int8)
179
+ pad = np.concatenate([[0], rd, [0]])
180
+ dif = np.diff(pad.astype(np.int16))
181
+ ends = np.where(dif == -1)[0]
182
+ starts = np.where(dif == 1)[0]
183
+ for e in ends:
184
+ nxt = starts[starts > e]
185
+ if len(nxt):
186
+ g = int(nxt[0] - e)
187
+ if 1 < g < 200: gap_sizes.append(g)
188
+ for col in range(5, w-5, col_step):
189
+ cd = (mask[:, col] > 0).astype(np.int8)
190
+ pad = np.concatenate([[0], cd, [0]])
191
+ dif = np.diff(pad.astype(np.int16))
192
+ ends = np.where(dif == -1)[0]
193
+ starts = np.where(dif == 1)[0]
194
+ for e in ends:
195
+ nxt = starts[starts > e]
196
+ if len(nxt):
197
+ g = int(nxt[0] - e)
198
+ if 1 < g < 200: gap_sizes.append(g)
199
+
200
+ cal.bridge_min_gap = 2
201
+ if len(gap_sizes) >= 20:
202
+ g = np.array(gap_sizes)
203
+ sm = g[g <= 30]
204
+ if len(sm) >= 10:
205
+ cal.bridge_max_gap = int(np.clip(np.percentile(sm, 75), 4, 20))
206
+ else:
207
+ cal.bridge_max_gap = cal.stroke_width * 4
208
+ door = g[(g > cal.bridge_max_gap) & (g <= 80)]
209
+ if len(door) >= 5:
210
+ raw = int(np.percentile(door, 90))
211
+ else:
212
+ raw = max(35, cal.stroke_width * 12)
213
+ raw = int(np.clip(raw, 25, 80))
214
+ cal.door_gap = raw if raw % 2 == 1 else raw + 1
215
+
216
+ cal.max_bridge_thick = cal.stroke_width * 5
217
+ return cal
218
+
219
+
220
+ # ════════════════════════════════════════════════════════════════════════════
221
+ # SKELETON / TIP HELPERS
222
+ # ════════════════════════════════════════════════════════════════════════════
223
+
224
+ def _skel(binary: np.ndarray) -> np.ndarray:
225
+ try:
226
+ from skimage.morphology import skeletonize as _sk
227
+ return (_sk(binary > 0) * 255).astype(np.uint8)
228
+ except ImportError:
229
+ return _morphological_skeleton(binary)
230
+
231
+
232
+ def _tip_pixels(skel_u8: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
233
+ sb = (skel_u8 > 0).astype(np.float32)
234
+ nbr = cv2.filter2D(sb, -1, np.ones((3,3), np.float32), borderType=cv2.BORDER_CONSTANT)
235
+ return np.where((sb == 1) & (nbr.astype(np.int32) == 2))
236
+
237
+
238
+ def _outward_vectors(ex, ey, skel_u8: np.ndarray, lookahead: int):
239
+ n = len(ex)
240
+ odx = np.zeros(n, np.float32)
241
+ ody = np.zeros(n, np.float32)
242
+ sy, sx = np.where(skel_u8 > 0)
243
+ skel_set = set(zip(sx.tolist(), sy.tolist()))
244
+ D8 = [(-1,0),(1,0),(0,-1),(0,1),(-1,-1),(-1,1),(1,-1),(1,1)]
245
+ for i in range(n):
246
+ ox, oy = int(ex[i]), int(ey[i])
247
+ cx, cy = ox, oy
248
+ px, py = ox, oy
249
+ for _ in range(lookahead):
250
+ moved = False
251
+ for dx, dy in D8:
252
+ nx2, ny2 = cx+dx, cy+dy
253
+ if (nx2, ny2) == (px, py): continue
254
+ if (nx2, ny2) in skel_set:
255
+ px, py = cx, cy; cx, cy = nx2, ny2; moved = True; break
256
+ if not moved: break
257
+ ix, iy = float(cx-ox), float(cy-oy)
258
+ nr = max(1e-6, float(np.hypot(ix, iy)))
259
+ odx[i], ody[i] = -ix/nr, -iy/nr
260
+ return odx, ody
261
+
262
+
263
+ # ════════════════════════════════════════════════════════════════════════════
264
+ # ANALYZE IMAGE CHARACTERISTICS (brightness-aware threshold)
265
+ # ════════════════════════════════════════════════════════════════════════════
266
+
267
+ def analyze_image_characteristics(img: np.ndarray) -> Dict[str, Any]:
268
+ gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
269
+ brightness = float(np.mean(gray))
270
+ contrast = float(np.std(gray))
271
+ otsu_thr, _ = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
272
+ if brightness > 220:
273
+ wall_threshold = max(200, int(otsu_thr * 1.1))
274
+ elif brightness < 180:
275
+ wall_threshold = max(150, int(otsu_thr * 0.9))
276
+ else:
277
+ wall_threshold = int(otsu_thr)
278
+ return {"brightness": brightness, "contrast": contrast,
279
+ "wall_threshold": wall_threshold, "otsu_threshold": otsu_thr}
280
+
281
+
282
+ # ════════════════════════════════════════════════════════════════════════════
283
+ # DOOR ARC DETECTION (exact port from GeometryAgent)
284
+ # ════════════════════════════════════════════════════════════════════════════
285
+
286
+ def detect_and_close_door_arcs(img: np.ndarray) -> np.ndarray:
287
+ R_MIN=60; R_MAX=320; DP=1.2; PARAM1=50; PARAM2=22; MIN_DIST=50
288
+ MAX_ARC=115.0; MIN_ARC=60.0; LEAF_FRAC=0.92; LEAF_THR=0.35
289
+ WALL_R=1.25; WALL_THR=12; SNAP_R=30
290
+ DOUBLE_R_RATIO=1.4; DOUBLE_DIST=1.8; LINE_T=3
291
+
292
+ gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
293
+ h, w = gray.shape
294
+ result = img.copy()
295
+
296
+ _, binary = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
297
+ binary = cv2.morphologyEx(binary, cv2.MORPH_CLOSE, np.ones((3,3), np.uint8))
298
+ blurred = cv2.GaussianBlur(gray, (7,7), 1.5)
299
+
300
+ raw = cv2.HoughCircles(blurred, cv2.HOUGH_GRADIENT, dp=DP, minDist=MIN_DIST,
301
+ param1=PARAM1, param2=PARAM2, minRadius=R_MIN, maxRadius=R_MAX)
302
+ if raw is None:
303
+ return result
304
+
305
+ circles = np.round(raw[0]).astype(np.int32)
306
+
307
+ def sample_ring(cx, cy, r, n=360):
308
+ ang = np.linspace(0, 2*np.pi, n, endpoint=False)
309
+ xs = np.clip((cx + r*np.cos(ang)).astype(np.int32), 0, w-1)
310
+ ys = np.clip((cy + r*np.sin(ang)).astype(np.int32), 0, h-1)
311
+ return ang, xs, ys
312
+
313
+ def arc_span(cx, cy, r):
314
+ ang, xs, ys = sample_ring(cx, cy, r)
315
+ on = ang[binary[ys, xs] > 0]
316
+ if len(on) == 0: return 0.0, np.array([])
317
+ return float(np.degrees(on[-1]-on[0])), on
318
+
319
+ def has_leaf(cx, cy, r):
320
+ lr = r*LEAF_FRAC; n = max(60, int(r))
321
+ ang = np.linspace(0, 2*np.pi, n, endpoint=False)
322
+ xs = np.clip((cx+lr*np.cos(ang)).astype(np.int32), 0, w-1)
323
+ ys = np.clip((cy+lr*np.sin(ang)).astype(np.int32), 0, h-1)
324
+ return float(np.mean(binary[ys,xs]>0)) >= LEAF_THR
325
+
326
+ def wall_outside(cx, cy, r):
327
+ pr = r*WALL_R; ang = np.linspace(0, 2*np.pi, 36, endpoint=False)
328
+ xs = np.clip((cx+pr*np.cos(ang)).astype(np.int32), 0, w-1)
329
+ ys = np.clip((cy+pr*np.sin(ang)).astype(np.int32), 0, h-1)
330
+ return int(np.sum(binary[ys,xs]>0)) >= WALL_THR
331
+
332
+ def endpoints(cx, cy, r, occ):
333
+ gap_t = np.radians(25.0); diffs = np.diff(occ)
334
+ big = np.where(diffs > gap_t)[0]
335
+ if len(big) == 0: sa, ea = occ[0], occ[-1]
336
+ else:
337
+ sp = big[np.argmax(diffs[big])]
338
+ sa, ea = occ[sp+1], occ[sp]
339
+ def snap(a):
340
+ px2 = int(round(cx+r*np.cos(a))); py2 = int(round(cy+r*np.sin(a)))
341
+ y0=max(0,py2-SNAP_R); y1=min(h,py2+SNAP_R+1)
342
+ x0=max(0,px2-SNAP_R); x1=min(w,px2+SNAP_R+1)
343
+ roi = binary[y0:y1, x0:x1]
344
+ wy2, wx2 = np.where(roi>0)
345
+ if len(wx2)==0: return px2, py2
346
+ dd = np.hypot(wx2-(px2-x0), wy2-(py2-y0))
347
+ i = int(np.argmin(dd))
348
+ return int(wx2[i]+x0), int(wy2[i]+y0)
349
+ return snap(sa), snap(ea)
350
+
351
+ valid = []
352
+ for cx, cy, r in circles:
353
+ span, occ = arc_span(cx, cy, r)
354
+ if not (MIN_ARC <= span <= MAX_ARC): continue
355
+ if not has_leaf(cx, cy, r): continue
356
+ if not wall_outside(cx, cy, r): continue
357
+ ep1, ep2 = endpoints(cx, cy, r, occ)
358
+ valid.append((cx, cy, r, ep1, ep2))
359
+
360
+ used = [False]*len(valid)
361
+ double_pairs = []
362
+ for i in range(len(valid)):
363
+ if used[i]: continue
364
+ cx1,cy1,r1,_,_ = valid[i]
365
+ best_j, best_d = -1, 1e9
366
+ for j in range(i+1, len(valid)):
367
+ if used[j]: continue
368
+ cx2,cy2,r2,_,_ = valid[j]
369
+ if max(r1,r2)/(min(r1,r2)+1e-6) > DOUBLE_R_RATIO: continue
370
+ cd = float(np.hypot(cx2-cx1, cy2-cy1))
371
+ if cd < (r1+r2)*DOUBLE_DIST and cd < best_d:
372
+ best_d, best_j = cd, j
373
+ if best_j >= 0:
374
+ double_pairs.append((i, best_j))
375
+ used[i] = used[best_j] = True
376
+
377
+ singles = [i for i in range(len(valid)) if not used[i]]
378
+ for idx in singles:
379
+ cx,cy,r,ep1,ep2 = valid[idx]
380
+ cv2.line(result, ep1, ep2, (0,0,0), LINE_T)
381
+ for i_idx, j_idx in double_pairs:
382
+ cx1,cy1,r1,ep1a,ep1b = valid[i_idx]
383
+ cx2,cy2,r2,ep2a,ep2b = valid[j_idx]
384
+ daa = np.hypot(ep1a[0]-ep2a[0], ep1a[1]-ep2a[1])
385
+ dab = np.hypot(ep1a[0]-ep2b[0], ep1a[1]-ep2b[1])
386
+ if daa <= dab: inner1,outer1,inner2,outer2 = ep1a,ep1b,ep2a,ep2b
387
+ else: inner1,outer1,inner2,outer2 = ep1a,ep1b,ep2b,ep2a
388
+ cv2.line(result, outer1, outer2, (0,0,0), LINE_T)
389
+ cv2.line(result, inner1, inner2, (0,0,0), LINE_T)
390
+
391
+ return result
392
+
393
+
394
+ # ════════════════════════════════════════════════════════════════════════════
395
+ # EXTRACT WALLS ADAPTIVE (exact port β€” brightness-aware + double-line filter)
396
+ # ════════════════════════════════════════════════════════════════════════════
397
+
398
+ def _estimate_wall_body_thickness(binary: np.ndarray, fallback: int = 12) -> int:
399
+ h, w = binary.shape
400
+ n_cols = min(200, w)
401
+ col_idx = np.linspace(0, w-1, n_cols, dtype=int)
402
+ cols = (binary[:, col_idx] > 0).astype(np.int8)
403
+ padded = np.concatenate([np.zeros((1,n_cols),np.int8), cols,
404
+ np.zeros((1,n_cols),np.int8)], axis=0)
405
+ diff = np.diff(padded.astype(np.int16), axis=0)
406
+ run_lengths = []
407
+ for ci in range(n_cols):
408
+ d = diff[:, ci]
409
+ s = np.where(d == 1)[0]
410
+ e = np.where(d == -1)[0]
411
+ if len(s)==0 or len(e)==0: continue
412
+ r = e - s
413
+ r = r[(r >= 2) & (r <= h*0.15)]
414
+ if len(r): run_lengths.append(r)
415
+ if run_lengths:
416
+ return int(np.median(np.concatenate(run_lengths)))
417
  return fallback
418
 
419
 
420
+ def _remove_thin_lines(walls: np.ndarray, min_thickness: int) -> np.ndarray:
421
+ dist = cv2.distanceTransform(walls, cv2.DIST_L2, 5)
422
+ thick_mask = dist >= (min_thickness / 2)
423
+ n_lbl, labels, _, _ = cv2.connectedComponentsWithStats(walls, connectivity=8)
424
+ if n_lbl <= 1: return walls
425
+ thick_labels = labels[thick_mask]
426
+ if len(thick_labels) == 0: return np.zeros_like(walls)
427
+ has_thick = np.zeros(n_lbl, dtype=bool)
428
+ has_thick[thick_labels] = True
429
+ keep_lut = has_thick.astype(np.uint8)*255; keep_lut[0] = 0
430
+ return keep_lut[labels]
431
+
432
+
433
+ def _filter_double_lines_and_thick(walls: np.ndarray) -> np.ndarray:
434
+ MIN_SINGLE_DIM = 20; DOUBLE_GAP = 60; DOUBLE_PCT = 12
435
+
436
+ n_lbl, labels, stats, _ = cv2.connectedComponentsWithStats(walls, connectivity=8)
437
+ if n_lbl <= 1: return walls
438
+
439
+ try:
440
+ skel_full = cv2.ximgproc.thinning(walls, thinningType=cv2.ximgproc.THINNING_ZHANGSUEN)
441
+ except AttributeError:
442
+ skel_full = _morphological_skeleton(walls)
443
+
444
+ skel_bin = skel_full > 0
445
+ keep_ids: set = set()
446
+ thin_cands = []
447
+
448
+ for i in range(1, n_lbl):
449
+ bw = int(stats[i, cv2.CC_STAT_WIDTH]); bh = int(stats[i, cv2.CC_STAT_HEIGHT])
450
+ if min(bw, bh) >= MIN_SINGLE_DIM: keep_ids.add(i)
451
+ else: thin_cands.append(i)
452
+
453
+ if not thin_cands:
454
+ filtered = np.zeros_like(walls)
455
+ for i in keep_ids: filtered[labels==i] = 255
456
+ return filtered
457
+
458
+ skel_labels = labels * skel_bin
459
+ img_h, img_w = labels.shape
460
+ probe_dists = np.arange(3, DOUBLE_GAP+1, 3, dtype=np.float32)
461
+
462
+ for i in thin_cands:
463
+ bys, bxs = np.where(skel_labels == i)
464
+ if len(bys) < 4: continue
465
+ step = max(1, len(bys)//80)
466
+ sy = bys[::step].astype(np.float32); sx = bxs[::step].astype(np.float32)
467
+ n_s = len(sy)
468
+ sy_prev=np.roll(sy,1); sy_prev[0]=sy[0]
469
+ sy_next=np.roll(sy,-1); sy_next[-1]=sy[-1]
470
+ sx_prev=np.roll(sx,1); sx_prev[0]=sx[0]
471
+ sx_next=np.roll(sx,-1); sx_next[-1]=sx[-1]
472
+ dr=(sy_next-sy_prev); dc=(sx_next-sx_prev)
473
+ dlen=np.maximum(1.0, np.hypot(dr, dc))
474
+ pr=(-dc/dlen)[:,np.newaxis]; pc=(dr/dlen)[:,np.newaxis]
475
+ for sign in (1.0, -1.0):
476
+ rr = np.round(sy[:,np.newaxis] + sign*pr*probe_dists).astype(np.int32)
477
+ cc = np.round(sx[:,np.newaxis] + sign*pc*probe_dists).astype(np.int32)
478
+ valid_m = (rr>=0)&(rr<img_h)&(cc>=0)&(cc<img_w)
479
+ safe_rr = np.clip(rr, 0, img_h-1); safe_cc = np.clip(cc, 0, img_w-1)
480
+ lbl_at = labels[safe_rr, safe_cc]
481
+ partner = valid_m & (lbl_at>0) & (lbl_at!=i)
482
+ hit_any = partner.any(axis=1)
483
+ hit_rows = np.where(hit_any)[0]
484
+ if len(hit_rows) == 0: continue
485
+ first_col = partner[hit_rows].argmax(axis=1)
486
+ partner_ids = lbl_at[hit_rows, first_col]
487
+ keep_ids.update(partner_ids.tolist())
488
+ if 100.0*len(hit_rows)/n_s >= DOUBLE_PCT:
489
+ keep_ids.add(i); break
490
+
491
+ if keep_ids:
492
+ ka = np.array(sorted(keep_ids), dtype=np.int32)
493
+ lut = np.zeros(n_lbl, dtype=np.uint8); lut[ka] = 255
494
+ return lut[labels]
495
+ return np.zeros_like(walls)
496
+
497
+
498
+ def extract_walls_adaptive(img_clean: np.ndarray,
499
+ img_stats: Optional[Dict] = None) -> Tuple[np.ndarray, int]:
500
+ h, w = img_clean.shape[:2]
501
+ gray = cv2.cvtColor(img_clean, cv2.COLOR_BGR2GRAY)
502
+
503
+ # brightness-aware threshold (from analyze_image_characteristics)
504
+ if img_stats:
505
+ wall_threshold = img_stats["wall_threshold"]
506
+ else:
507
+ otsu_t, _ = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)
508
+ wall_threshold = int(otsu_t)
509
+
510
  _, binary = cv2.threshold(gray, wall_threshold, 255, cv2.THRESH_BINARY_INV)
511
 
512
  min_line_len = max(8, int(0.012 * w))
513
+ body_thickness = _estimate_wall_body_thickness(binary, fallback=12)
514
  body_thickness = int(np.clip(body_thickness, 9, 30))
515
 
516
  k_h = cv2.getStructuringElement(cv2.MORPH_RECT, (min_line_len, 1))
 
527
 
528
  collision = cv2.bitwise_and(dil_h, dil_v)
529
  safe_zone = cv2.bitwise_and(collision, orig_walls)
530
+ walls = cv2.bitwise_or(cv2.bitwise_and(walls, cv2.bitwise_not(collision)), safe_zone)
 
 
531
 
532
  dist = cv2.distanceTransform(cv2.bitwise_not(orig_walls), cv2.DIST_L2, 5)
533
+ keep_mask = (dist <= body_thickness/2).astype(np.uint8) * 255
534
  walls = cv2.bitwise_and(walls, keep_mask)
535
+ walls = _remove_thin_lines(walls, min_thickness=body_thickness)
536
 
 
537
  n_lbl, labels, stats, _ = cv2.connectedComponentsWithStats(walls, connectivity=8)
538
  if n_lbl > 1:
539
  areas = stats[1:, cv2.CC_STAT_AREA]
 
542
  keep_lut[1:] = (areas >= min_n).astype(np.uint8)
543
  walls = (keep_lut[labels] * 255).astype(np.uint8)
544
 
545
+ walls = _filter_double_lines_and_thick(walls) # ← was missing
546
+
547
  return walls, body_thickness
548
 
549
 
550
+ # ════════════════════════════════════════════════════════════════════════════
551
+ # REMOVE FIXTURE SYMBOLS (exact port from GeometryAgent)
552
+ # ════════════════════════════════════════════════════════════════════════════
553
+
554
+ FIXTURE_MAX_BLOB=80; FIXTURE_MAX_AREA=4000; FIXTURE_MAX_ASP=4.0
555
+ FIXTURE_DENSITY_R=50; FIXTURE_DENSITY_THR=0.35; FIXTURE_MIN_ZONE=1500
556
+
557
+ def remove_fixture_symbols(walls: np.ndarray) -> np.ndarray:
558
+ h, w = walls.shape
559
+ n_lbl, labels, stats, centroids = cv2.connectedComponentsWithStats(walls, connectivity=8)
560
+ if n_lbl <= 1: return walls
561
+
562
+ bw_a=stats[1:,cv2.CC_STAT_WIDTH].astype(np.float32)
563
+ bh_a=stats[1:,cv2.CC_STAT_HEIGHT].astype(np.float32)
564
+ ar_a=stats[1:,cv2.CC_STAT_AREA].astype(np.float32)
565
+ cx_a=np.round(centroids[1:,0]).astype(np.int32)
566
+ cy_a=np.round(centroids[1:,1]).astype(np.int32)
567
+ mx=np.maximum(bw_a,bh_a); mn=np.minimum(bw_a,bh_a)
568
+ asp=mx/(mn+1e-6)
569
+ cand=(bw_a<FIXTURE_MAX_BLOB)&(bh_a<FIXTURE_MAX_BLOB)&(ar_a<FIXTURE_MAX_AREA)&(asp<=FIXTURE_MAX_ASP)
570
+ ci=np.where(cand)[0]; cand_ids=ci+1; ccx=cx_a[ci]; ccy=cy_a[ci]
571
+
572
+ if len(cand_ids)==0: return walls
573
+ heatmap=np.zeros((h,w),dtype=np.float32)
574
+ for x2,y2 in zip(ccx.tolist(), ccy.tolist()):
575
+ cv2.circle(heatmap,(x2,y2),int(FIXTURE_DENSITY_R),1.0,-1)
576
+ bk=max(3,(int(FIXTURE_DENSITY_R)//2)|1)
577
+ density=cv2.GaussianBlur(heatmap,(bk*4+1,bk*4+1),bk)
578
+ dm=float(density.max())
579
+ if dm>0: density/=dm
580
+ zone=(density>=FIXTURE_DENSITY_THR).astype(np.uint8)*255
581
+ nz,zlbl,zst,_=cv2.connectedComponentsWithStats(zone,connectivity=8)
582
+ cz=np.zeros_like(zone)
583
+ if nz>1:
584
+ za=zst[1:,cv2.CC_STAT_AREA]; kz=np.where(za>=FIXTURE_MIN_ZONE)[0]+1
585
+ if len(kz):
586
+ lut2=np.zeros(nz,dtype=np.uint8); lut2[kz]=255; cz=lut2[zlbl]
587
+ zone=cz
588
+ vc=(ccy>=0)&(ccy<h)&(ccx>=0)&(ccx<w)
589
+ in_zone=vc&(zone[ccy.clip(0,h-1), ccx.clip(0,w-1)]>0)
590
+ erase_ids=cand_ids[in_zone]
591
+ result=walls.copy()
592
+ if len(erase_ids):
593
+ el=np.zeros(n_lbl,dtype=np.uint8); el[erase_ids]=1
594
+ result[el[labels].astype(bool)]=0
595
+ return result
596
+
597
+
598
+ # ════════════════════════════════════════════════════════════════════════════
599
+ # WALL RECONSTRUCTION β€” 3-stage calibrated pipeline
600
+ # [5c] remove_thin_lines_calibrated
601
+ # [5d] bridge_wall_endpoints_v2
602
+ # [5e] close_door_openings_v2
603
+ # ════════════════════════════════════════════════════════════════════════════
604
+
605
+ def _remove_thin_lines_calibrated(walls: np.ndarray, cal: WallCalibration) -> np.ndarray:
606
+ n_cc, cc, stats, _ = cv2.connectedComponentsWithStats(walls, connectivity=8)
607
+ if n_cc <= 1: return walls
608
+ bw=stats[1:,cv2.CC_STAT_WIDTH]; bh=stats[1:,cv2.CC_STAT_HEIGHT]
609
+ ar=stats[1:,cv2.CC_STAT_AREA]; mx=np.maximum(bw,bh)
610
+ keep=(mx>=cal.min_component_dim)|(ar>=cal.min_component_area*3)
611
+ lut=np.zeros(n_cc,np.uint8); lut[1:]=keep.astype(np.uint8)*255
612
+ return lut[cc]
613
+
614
+
615
+ def _bridge_wall_endpoints_v2(walls: np.ndarray, cal: WallCalibration,
616
+ angle_tol: float = 15.0) -> np.ndarray:
617
+ try:
618
+ from scipy.spatial import cKDTree as _KDTree
619
+ _SCIPY = True
620
+ except ImportError:
621
+ _SCIPY = False
622
+
623
+ result=walls.copy(); h,w=walls.shape; FCOS=np.cos(np.radians(70.0))
624
+ skel=_skel(walls); ey,ex=_tip_pixels(skel); n_ep=len(ey)
625
+ if n_ep < 2: return result
626
+
627
+ _,cc_map=cv2.connectedComponents(walls,connectivity=8)
628
+ ep_cc=cc_map[ey,ex]
629
+ lookahead=max(8, cal.stroke_width*3)
630
+ out_dx,out_dy=_outward_vectors(ex,ey,skel,lookahead)
631
+ pts=np.stack([ex,ey],axis=1).astype(np.float32)
632
+
633
+ if _SCIPY:
634
+ from scipy.spatial import cKDTree
635
+ pairs=cKDTree(pts).query_pairs(float(cal.bridge_max_gap), output_type='ndarray')
636
+ ii=pairs[:,0].astype(np.int64); jj=pairs[:,1].astype(np.int64)
637
+ else:
638
+ _ii,_jj=np.triu_indices(n_ep,k=1)
639
+ ok=np.hypot(pts[_jj,0]-pts[_ii,0],pts[_jj,1]-pts[_ii,1])<=cal.bridge_max_gap
640
+ ii=_ii[ok].astype(np.int64); jj=_jj[ok].astype(np.int64)
641
+ if len(ii)==0: return result
642
+
643
+ dxij=pts[jj,0]-pts[ii,0]; dyij=pts[jj,1]-pts[ii,1]
644
+ dists=np.hypot(dxij,dyij); safe=np.maximum(dists,1e-6)
645
+ ux,uy=dxij/safe,dyij/safe
646
+ ang=np.degrees(np.arctan2(np.abs(dyij),np.abs(dxij)))
647
+ is_H=ang<=angle_tol; is_V=ang>=(90.0-angle_tol)
648
+ g1=(dists>=cal.bridge_min_gap)&(dists<=cal.bridge_max_gap); g2=is_H|is_V
649
+ g3=((out_dx[ii]*ux+out_dy[ii]*uy)>=FCOS)&((out_dx[jj]*-ux+out_dy[jj]*-uy)>=FCOS)
650
+ g4=ep_cc[ii]!=ep_cc[jj]
651
+ pre_ok=g1&g2&g3&g4; pre_idx=np.where(pre_ok)[0]
652
+
653
+ N_SAMP=9; clr=np.ones(len(pre_idx),dtype=bool)
654
+ for k,pidx in enumerate(pre_idx):
655
+ ia,ib=int(ii[pidx]),int(jj[pidx])
656
+ ax,ay=int(ex[ia]),int(ey[ia]); bx2,by2=int(ex[ib]),int(ey[ib])
657
+ if is_H[pidx]:
658
+ xs=np.linspace(ax,bx2,N_SAMP,np.float32); ys=np.full(N_SAMP,ay,np.float32)
659
+ else:
660
+ xs=np.full(N_SAMP,ax,np.float32); ys=np.linspace(ay,by2,N_SAMP,np.float32)
661
+ sxs=np.clip(np.round(xs[1:-1]).astype(np.int32),0,w-1)
662
+ sys_=np.clip(np.round(ys[1:-1]).astype(np.int32),0,h-1)
663
+ if np.any(walls[sys_,sxs]>0): clr[k]=False
664
+ valid=pre_idx[clr]
665
+ if len(valid)==0: return result
666
+
667
+ vi=ii[valid]; vj=jj[valid]; vd=dists[valid]; vH=is_H[valid]
668
+ order=np.argsort(vd); vi,vj,vd,vH=vi[order],vj[order],vd[order],vH[order]
669
+ used=np.zeros(n_ep,dtype=bool)
670
+ for k in range(len(vi)):
671
+ ia,ib=int(vi[k]),int(vj[k])
672
+ if used[ia] or used[ib]: continue
673
+ ax,ay=int(ex[ia]),int(ey[ia]); bx2,by2=int(ex[ib]),int(ey[ib])
674
+ p1,p2=((min(ax,bx2),ay),(max(ax,bx2),ay)) if vH[k] else ((ax,min(ay,by2)),(ax,max(ay,by2)))
675
+ cv2.line(result,p1,p2,255,cal.stroke_width)
676
+ used[ia]=used[ib]=True
677
+ return result
678
+
679
+
680
+ def _close_door_openings_v2(walls: np.ndarray, cal: WallCalibration) -> np.ndarray:
681
+ gap=cal.door_gap
682
+ def _shape_close(mask, kwh, axis, max_thick):
683
+ k=cv2.getStructuringElement(cv2.MORPH_RECT, kwh)
684
+ cls=cv2.morphologyEx(mask,cv2.MORPH_CLOSE,k)
685
+ new=cv2.bitwise_and(cls,cv2.bitwise_not(mask))
686
+ if not np.any(new): return np.zeros_like(mask)
687
+ n2,lbl2,st2,_=cv2.connectedComponentsWithStats(new,connectivity=8)
688
+ if n2<=1: return np.zeros_like(mask)
689
+ perp=st2[1:,cv2.CC_STAT_HEIGHT if axis=='H' else cv2.CC_STAT_WIDTH]
690
+ keep=perp<=max_thick; lut2=np.zeros(n2,np.uint8); lut2[1:]=keep.astype(np.uint8)*255
691
+ return lut2[lbl2]
692
+ add_h=_shape_close(walls,(gap,1),'H',cal.max_bridge_thick)
693
+ add_v=_shape_close(walls,(1,gap),'V',cal.max_bridge_thick)
694
+ return cv2.bitwise_or(walls, cv2.bitwise_or(add_h,add_v))
695
+
696
+
697
+ def reconstruct_walls(walls: np.ndarray) -> Tuple[np.ndarray, WallCalibration]:
698
+ """Full 3-stage wall repair pipeline (5c/5d/5e)."""
699
+ cal = calibrate_wall(walls)
700
+ walls = _remove_thin_lines_calibrated(walls, cal)
701
+ walls = _bridge_wall_endpoints_v2(walls, cal)
702
+ walls = _close_door_openings_v2(walls, cal)
703
+ return walls, cal
704
+
705
+
706
+ # ════════════════════════════════════════════════════════════════════════════
707
+ # REMOVE DANGLING LINES (exact port from GeometryAgent)
708
+ # ════════════════════════════════════════════════════════════════════════════
709
+
710
+ def remove_dangling_lines(walls: np.ndarray, cal: WallCalibration) -> np.ndarray:
711
+ stroke = cal.stroke_width
712
+ connect_radius = max(6, stroke*3)
713
+ n_cc,cc_map,stats,_ = cv2.connectedComponentsWithStats(walls,connectivity=8)
714
+ if n_cc <= 1: return walls
715
+
716
+ skel=_skel(walls); tip_y,tip_x=_tip_pixels(skel)
717
+ tip_cc=cc_map[tip_y,tip_x]
718
+ free_counts=np.zeros(n_cc,dtype=np.int32)
719
+ for i in range(len(tip_x)): free_counts[tip_cc[i]]+=1
720
+
721
+ remove=np.zeros(n_cc,dtype=bool)
722
+ ker=cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(connect_radius*2+1,connect_radius*2+1))
723
+ for cc_id in range(1,n_cc):
724
+ if free_counts[cc_id]<2: continue
725
+ bw2=int(stats[cc_id,cv2.CC_STAT_WIDTH]); bh2=int(stats[cc_id,cv2.CC_STAT_HEIGHT])
726
+ if max(bw2,bh2) > stroke*40: continue
727
+ cm=(cc_map==cc_id).astype(np.uint8)
728
+ dc=cv2.dilate(cm,ker)
729
+ overlap=cv2.bitwise_and(dc,((walls>0)&(cc_map!=cc_id)).astype(np.uint8))
730
+ if np.count_nonzero(overlap)==0: remove[cc_id]=True
731
+
732
+ lut=np.ones(n_cc,dtype=np.uint8); lut[0]=0; lut[remove]=0
733
+ return (lut[cc_map]*255).astype(np.uint8)
734
+
735
+
736
+ # ════════════════════════════════════════════════════════════════════════════
737
+ # CLOSE LARGE DOOR GAPS (exact port from GeometryAgent 180–320px)
738
+ # ════════════════════════════════════════════════════════════════════════════
739
+
740
def close_large_door_gaps(walls: np.ndarray, cal: WallCalibration) -> np.ndarray:
    """Bridge wide door openings (180–320 px) between facing wall endpoints.

    Exact port from GeometryAgent: endpoints of the wall skeleton are paired
    when they lie 180–320 px apart along a near-horizontal or near-vertical
    axis, point toward each other, belong to different connected components,
    and have no existing wall pixels between them.  Each accepted pair is
    joined by an axis-aligned line; shorter gaps are matched first and every
    endpoint is used at most once.  Returns a new mask; *walls* is untouched.
    """
    try:
        from scipy.spatial import cKDTree
        have_scipy = True
    except ImportError:
        have_scipy = False

    # Accepted gap range and axis-alignment tolerance (degrees).
    gap_min = 180
    gap_max = 320
    angle_tol = 12.0
    # Minimum dot product for a tip's outward direction to count as "facing"
    # the other tip (i.e. within 90° - angle_tol of the gap axis).
    facing_cos = np.cos(np.radians(90.0 - angle_tol))

    stroke = cal.stroke_width
    line_width = max(stroke, 3)
    result = walls.copy()
    h, w = walls.shape

    # Skeletonise the wall mask and locate line-end (tip) pixels.
    skel = _skel(walls)
    tip_y, tip_x = _tip_pixels(skel)
    n_tips = len(tip_x)
    if n_tips < 2:
        return result

    # Component label of each tip — gaps are only bridged ACROSS components.
    _, cc_map = cv2.connectedComponents(walls, connectivity=8)
    tip_cc = cc_map[tip_y, tip_x]

    # Outward direction of the wall stroke at each tip.
    lookahead = max(12, stroke * 4)
    out_dx, out_dy = _outward_vectors(tip_x, tip_y, skel, lookahead)

    pts = np.stack([tip_x, tip_y], axis=1).astype(np.float32)

    # Candidate tip pairs no farther apart than gap_max.
    if have_scipy:
        pairs = cKDTree(pts).query_pairs(float(gap_max), output_type='ndarray')
        ii = pairs[:, 0].astype(np.int64)
        jj = pairs[:, 1].astype(np.int64)
    else:
        # O(n^2) fallback when scipy is unavailable.
        cand_i, cand_j = np.triu_indices(n_tips, k=1)
        near = np.hypot(pts[cand_j, 0] - pts[cand_i, 0],
                        pts[cand_j, 1] - pts[cand_i, 1]) <= gap_max
        ii = cand_i[near].astype(np.int64)
        jj = cand_j[near].astype(np.int64)
    if len(ii) == 0:
        return result

    # Vector geometry for every candidate pair.
    dx = pts[jj, 0] - pts[ii, 0]
    dy = pts[jj, 1] - pts[ii, 1]
    dists = np.hypot(dx, dy)
    safe = np.maximum(dists, 1e-6)          # avoid division by zero
    ux = dx / safe
    uy = dy / safe
    ang = np.degrees(np.arctan2(np.abs(dy), np.abs(dx)))
    is_horiz = ang <= angle_tol
    is_vert = ang >= (90.0 - angle_tol)

    # Four gates: gap size, axis alignment, mutually facing, distinct components.
    in_range = (dists >= gap_min) & (dists <= gap_max)
    axis_ok = is_horiz | is_vert
    faces_ab = (out_dx[ii] * ux + out_dy[ii] * uy) >= facing_cos
    faces_ba = (out_dx[jj] * -ux + out_dy[jj] * -uy) >= facing_cos
    diff_cc = tip_cc[ii] != tip_cc[jj]
    pre_idx = np.where(in_range & axis_ok & faces_ab & faces_ba & diff_cc)[0]

    # Reject pairs whose connecting segment would cross existing wall pixels.
    n_samples = 15
    clear = np.ones(len(pre_idx), dtype=bool)
    for k, pidx in enumerate(pre_idx):
        a = int(ii[pidx])
        b = int(jj[pidx])
        ax, ay = int(tip_x[a]), int(tip_y[a])
        bx, by = int(tip_x[b]), int(tip_y[b])
        if is_horiz[pidx]:
            xs = np.linspace(ax, bx, n_samples, dtype=np.float32)
            ys = np.full(n_samples, (ay + by) / 2.0, dtype=np.float32)
        else:
            xs = np.full(n_samples, (ax + bx) / 2.0, dtype=np.float32)
            ys = np.linspace(ay, by, n_samples, dtype=np.float32)
        # Drop the two end samples — they sit on the walls themselves.
        col = np.clip(np.round(xs[1:-1]).astype(np.int32), 0, w - 1)
        row = np.clip(np.round(ys[1:-1]).astype(np.int32), 0, h - 1)
        if np.any(walls[row, col] > 0):
            clear[k] = False
    valid = pre_idx[clear]
    if len(valid) == 0:
        return result

    # Greedy one-to-one matching: shortest gaps first.
    vi = ii[valid]
    vj = jj[valid]
    vd = dists[valid]
    vh = is_horiz[valid]
    order = np.argsort(vd)
    vi, vj, vd, vh = vi[order], vj[order], vd[order], vh[order]
    used = np.zeros(n_tips, dtype=bool)
    for a, b, horiz in zip(vi, vj, vh):
        a = int(a)
        b = int(b)
        if used[a] or used[b]:
            continue
        ax, ay = int(tip_x[a]), int(tip_y[a])
        bx, by = int(tip_x[b]), int(tip_y[b])
        # Draw an exactly axis-aligned bridge through the midpoint.
        if horiz:
            mid = (ay + by) // 2
            p1 = (min(ax, bx), mid)
            p2 = (max(ax, bx), mid)
        else:
            mid = (ax + bx) // 2
            p1 = (mid, min(ay, by))
            p2 = (mid, max(ay, by))
        cv2.line(result, p1, p2, 255, line_width)
        used[a] = used[b] = True
    return result
809
+
810
+
811
  def apply_user_lines_to_walls(
812
  walls: np.ndarray,
813
  lines: List[Tuple[int,int,int,int]],
 
1201
  "img_cropped": None,
1202
  "img_clean": None,
1203
  "walls": None,
1204
+ "walls_base": None, # walls after full pipeline, before user lines
1205
+ "wall_cal": None, # WallCalibration
1206
  "user_lines": [], # [(x1,y1,x2,y2), …]
1207
  "draw_start": None, # pending line start pixel
1208
  "walls_thickness": 8,
 
1254
  if img is None:
1255
  return None, None, state, "Load an image first."
1256
 
1257
+ # ── Step 1: crop title block ──────────────────────────────────────────
1258
  cropped = remove_title_block(img)
 
1259
 
1260
+ # ── Step 2: remove CAD colours ────────────────────────────────────────
1261
+ img_clean = remove_colors(cropped)
1262
+
1263
+ # ── Step 3: close door arcs (before wall extraction) ─────────────────
1264
+ img_clean = detect_and_close_door_arcs(img_clean)
1265
+
1266
+ # ── Step 4: brightness-aware image stats ─────────────────────────────
1267
+ img_stats = analyze_image_characteristics(cropped)
1268
+
1269
+ # ── Step 5: extract walls adaptive (brightness-aware + double-line filter) ──
1270
+ walls, thick = extract_walls_adaptive(img_clean, img_stats)
1271
+
1272
+ # ── Step 5b: remove fixture symbols (toilets / stalls) ───────────────
1273
+ walls = remove_fixture_symbols(walls)
1274
 
1275
+ # ── Step 5c/5d/5e: calibrated 3-stage wall reconstruction ────────────
1276
+ walls, cal = reconstruct_walls(walls)
1277
+
1278
+ # ── Step 5f: remove dangling unconnected stubs ────────────────────────
1279
+ walls = remove_dangling_lines(walls, cal)
1280
+
1281
+ # ── Step 5g: close large door gaps (180–320 px) ──────────────────────
1282
+ walls = close_large_door_gaps(walls, cal)
1283
+
1284
+ state["img_cropped"] = cropped
1285
+ state["img_clean"] = img_clean
1286
  state["walls"] = walls.copy()
1287
+ state["walls_base"] = walls.copy() # kept for undo recompute
1288
  state["walls_thickness"] = thick
1289
+ state["wall_cal"] = cal
1290
 
1291
  walls_rgb = cv2.cvtColor(walls, cv2.COLOR_GRAY2RGB)
1292
+ clean_rgb = cv2.cvtColor(img_clean, cv2.COLOR_BGR2RGB)
1293
+ msg = (f"βœ… Full pipeline done | strokeβ‰ˆ{cal.stroke_width}px "
1294
+ f"bodyβ‰ˆ{thick}px bridge_gap=[{cal.bridge_min_gap},{cal.bridge_max_gap}]px "
1295
+ f"door_gap={cal.door_gap}px")
1296
+ return clean_rgb, walls_rgb, state, msg
1297
 
1298
 
1299
  def cb_add_door_line(evt: gr.SelectData, state):
 
1340
  state["user_lines"].pop()
1341
  state["draw_start"] = None
1342
 
1343
+ walls_base = state.get("walls_base")
1344
+ if walls_base is None:
 
1345
  return None, state, "Re-run preprocessing."
1346
 
1347
+ thick = state.get("walls_thickness", 8)
1348
+ walls_upd = apply_user_lines_to_walls(walls_base, state["user_lines"], thick)
 
 
 
1349
  state["walls"] = walls_upd
1350
 
1351
  vis = cv2.cvtColor(walls_upd, cv2.COLOR_GRAY2RGB)