# junction.py — handcrafted_submission_2026 (commit 5b6e3d8)
"""Junction-type constraints for 3D roof wireframes.
After merging per-view detections into a 3D graph, we apply simple topology
priors to drop obviously wrong edges/vertices:
1. Collinear merge: if a vertex has degree 2 with two nearly antiparallel edges,
it is most likely a spurious point on a longer edge — merge the edges and
drop the vertex.
2. Duplicate-direction prune: if a vertex has two incident edges that point in
(nearly) the same direction, keep only the stronger one (stronger = higher
confidence score if available, else longer edge).
3. Isolated leaf prune: vertices with degree 1 whose only edge is very short
(< 0.4 m) are dropped — they are almost always noise.
The module is intentionally pure-numpy and side-effect-free so it can be
dropped into both the heuristic and the triangulation pipelines.
"""
from __future__ import annotations
import numpy as np
from typing import Sequence
def _edge_directions(vertices: np.ndarray, edges: np.ndarray) -> np.ndarray:
"""Unit vectors for each edge (from a→b). Shape (E, 3)."""
if len(edges) == 0:
return np.empty((0, 3), dtype=np.float32)
diffs = vertices[edges[:, 1]] - vertices[edges[:, 0]]
norms = np.linalg.norm(diffs, axis=1, keepdims=True)
norms = np.where(norms < 1e-6, 1.0, norms)
return diffs / norms
def _build_adj(n_vertices: int, edges: np.ndarray):
"""Return list[list[(neighbour, edge_index)]]."""
adj = [[] for _ in range(n_vertices)]
for ei, (a, b) in enumerate(edges):
adj[int(a)].append((int(b), ei))
adj[int(b)].append((int(a), ei))
return adj
def apply_junction_constraints(
    vertices: np.ndarray,
    edges: Sequence[tuple],
    edge_scores: np.ndarray | None = None,
    collinear_cos: float = 0.97,
    duplicate_cos: float = 0.985,
    leaf_min_len: float = 0.4,
    max_passes: int = 3,
) -> tuple[np.ndarray, list]:
    """Apply junction-type constraints to a 3D wireframe.

    Each pass performs at most ONE mutation (the first rule that fires wins,
    in priority order: collinear merge > duplicate prune > leaf prune), so
    ``max_passes`` bounds the total number of edits, not merely the number of
    sweeps.

    Parameters
    ----------
    vertices : (N, 3) array of 3D vertex positions.
    edges : list of (i, j) undirected edges.
    edge_scores : optional (E,) array in [0, 1] giving edge confidence.
        When missing (or of mismatched length), all edges are treated as
        equal and ties break by edge length.
    collinear_cos : cosine threshold; two incident edges whose outward
        directions have dot product below ``-collinear_cos`` are considered
        antiparallel -> triggers collinear merge.
    duplicate_cos : cosine threshold above which two incident edges pointing
        the same way are treated as duplicates -> keep only the stronger one.
    leaf_min_len : edges shorter than this feeding a degree-1 vertex get cut.
    max_passes : upper bound on the number of edit passes.

    Returns
    -------
    (vertices_new, edges_new) where vertices_new keeps indices identical to
    the input (we do not reindex; we only return the surviving subset of
    edges). Fully-isolated vertices are filtered by callers that already run
    `prune_not_connected`.
    """
    verts = np.asarray(vertices, dtype=np.float32)
    edges_arr = (
        np.asarray(list(edges), dtype=np.int64)
        if len(edges)
        else np.empty((0, 2), dtype=np.int64)
    )
    if len(edges_arr) == 0 or len(verts) == 0:
        return verts, list(edges)

    if edge_scores is None:
        scores = np.ones(len(edges_arr), dtype=np.float32)
    else:
        scores = np.asarray(edge_scores, dtype=np.float32)
        if len(scores) != len(edges_arr):
            # Length mismatch: fall back to uniform confidence rather than
            # crash — length then becomes the sole tiebreaker.
            scores = np.ones(len(edges_arr), dtype=np.float32)

    alive = np.ones(len(edges_arr), dtype=bool)

    for _ in range(max_passes):
        changed = False
        # Recomputed every pass because a collinear merge rewrites an edge's
        # endpoints in-place.
        lengths = np.linalg.norm(
            verts[edges_arr[:, 1]] - verts[edges_arr[:, 0]], axis=1
        )

        # Adjacency keyed by ABSOLUTE edge index so rules can flip `alive`
        # directly. (The original built a compacted adjacency first and threw
        # it away — that dead pass is removed here.)
        adj = [[] for _ in range(len(verts))]
        for ei, (a, b) in enumerate(edges_arr):
            if not alive[ei]:
                continue
            adj[int(a)].append((int(b), ei))
            adj[int(b)].append((int(a), ei))

        # Set of alive undirected edges, both orientations, built ONCE per
        # pass (previously rebuilt O(E) per collinear candidate).
        existing = set()
        for ei in range(len(edges_arr)):
            if alive[ei]:
                a, b = int(edges_arr[ei, 0]), int(edges_arr[ei, 1])
                existing.add((a, b))
                existing.add((b, a))

        # Rule 1: collinear merge on degree-2 vertices.
        for v in range(len(verts)):
            if len(adj[v]) != 2:
                continue
            (n1, e1), (n2, e2) = adj[v]
            if n1 == n2:
                continue
            # Outward directions from v toward each neighbour.
            d1 = verts[n1] - verts[v]
            d2 = verts[n2] - verts[v]
            l1, l2 = np.linalg.norm(d1), np.linalg.norm(d2)
            if l1 < 1e-6 or l2 < 1e-6:
                continue
            # Antiparallel outward directions = v lies on a straight line.
            if float(np.dot(d1 / l1, d2 / l2)) < -collinear_cos:
                if (n1, n2) in existing:
                    # Merged edge already exists — drop both incident edges
                    # (degenerate triangle through v).
                    alive[e1] = False
                    alive[e2] = False
                else:
                    # Merge: kill e1, reroute e2 to connect (n1, n2).
                    alive[e1] = False
                    edges_arr[e2] = (min(n1, n2), max(n1, n2))
                changed = True
                break
        if changed:
            continue

        # Rule 2: duplicate-direction prune.
        for v in range(len(verts)):
            if len(adj[v]) < 2:
                continue
            nbrs = adj[v]
            # Unit direction for each incident alive edge; None marks a
            # degenerate edge that cannot participate.
            dirs = []
            for nb, ei in nbrs:
                d = verts[nb] - verts[v]
                nrm = np.linalg.norm(d)
                dirs.append(None if nrm < 1e-6 else d / nrm)
            drop_ei = None
            for i in range(len(nbrs)):
                if dirs[i] is None:
                    continue
                for j in range(i + 1, len(nbrs)):
                    if dirs[j] is None:
                        continue
                    if float(np.dot(dirs[i], dirs[j])) > duplicate_cos:
                        ei_i, ei_j = nbrs[i][1], nbrs[j][1]
                        # Keep the higher score; tiebreak by length
                        # (lexicographic tuple comparison).
                        s_i = (scores[ei_i], lengths[ei_i])
                        s_j = (scores[ei_j], lengths[ei_j])
                        drop_ei = ei_j if s_i >= s_j else ei_i
                        break
                if drop_ei is not None:
                    break
            if drop_ei is not None:
                alive[drop_ei] = False
                changed = True
                break
        if changed:
            continue

        # Rule 3: leaf prune (degree-1 vertex fed by a very short edge).
        for v in range(len(verts)):
            if len(adj[v]) != 1:
                continue
            nb, ei = adj[v][0]
            if lengths[ei] < leaf_min_len:
                alive[ei] = False
                changed = True
                break
        if not changed:
            break

    surviving = [
        tuple(map(int, edges_arr[i])) for i in range(len(edges_arr)) if alive[i]
    ]
    return verts, surviving