auto-fix check for opposite directions
Browse files
app.py
CHANGED
|
@@ -19,13 +19,14 @@ def extract_motion_vectors(data):
|
|
| 19 |
|
| 20 |
|
| 21 |
# ============================================================
|
| 22 |
-
# 🧮 2. Dominant Flow Clustering (Cosine-based)
|
| 23 |
# ============================================================
|
| 24 |
def learn_flows_improved(vectors, n_clusters=2):
|
| 25 |
"""Cosine-based clustering of normalized motion directions."""
|
| 26 |
if len(vectors) < n_clusters:
|
| 27 |
return None, None
|
| 28 |
|
|
|
|
| 29 |
norms = np.linalg.norm(vectors, axis=1, keepdims=True)
|
| 30 |
dirs = vectors / (norms + 1e-6)
|
| 31 |
valid = (norms[:, 0] > 1.5)
|
|
@@ -33,11 +34,23 @@ def learn_flows_improved(vectors, n_clusters=2):
|
|
| 33 |
if len(dirs) < n_clusters:
|
| 34 |
return None, None
|
| 35 |
|
|
|
|
| 36 |
kmeans = KMeans(n_clusters=n_clusters, n_init=20, random_state=42)
|
| 37 |
kmeans.fit(dirs)
|
| 38 |
centers = kmeans.cluster_centers_
|
| 39 |
centers = centers / (np.linalg.norm(centers, axis=1, keepdims=True) + 1e-6)
|
| 40 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 41 |
sims = np.dot(vectors / (np.linalg.norm(vectors, axis=1, keepdims=True) + 1e-6), centers.T)
|
| 42 |
labels = np.argmax(sims, axis=1)
|
| 43 |
return labels, centers
|
|
@@ -68,19 +81,21 @@ def draw_flow_overlay(vectors, labels, centers, bg_img=None,
|
|
| 68 |
bg = np.ones((600, 900, 3), dtype=np.uint8) * 40
|
| 69 |
|
| 70 |
overlay = bg.copy()
|
| 71 |
-
colors = [(0, 0, 255), (255, 255, 0)]
|
| 72 |
|
| 73 |
norms = np.linalg.norm(vectors, axis=1, keepdims=True)
|
| 74 |
vectors = np.divide(vectors, norms + 1e-6) * 10
|
| 75 |
|
|
|
|
| 76 |
for i, ((vx, vy), lab) in enumerate(zip(vectors, labels)):
|
| 77 |
if i % 15 != 0:
|
| 78 |
continue
|
| 79 |
start = (np.random.randint(0, overlay.shape[1]),
|
| 80 |
np.random.randint(0, overlay.shape[0]))
|
| 81 |
end = (int(start[0] + vx), int(start[1] + vy))
|
| 82 |
-
cv2.arrowedLine(overlay, start, end, colors[lab %
|
| 83 |
|
|
|
|
| 84 |
h, w = overlay.shape[:2]
|
| 85 |
scale = 300
|
| 86 |
center_pt = (w // 2, h // 2)
|
|
@@ -125,16 +140,16 @@ def process_json(json_file, background=None):
|
|
| 125 |
if len(vectors) == 0:
|
| 126 |
return None, {"error": "No motion vectors found."}
|
| 127 |
|
| 128 |
-
|
|
|
|
| 129 |
if labels is None:
|
| 130 |
return None, {"error": "Insufficient data for clustering."}
|
| 131 |
|
| 132 |
road_angle = estimate_road_angle(centers)
|
| 133 |
|
| 134 |
-
# Optionally define default polygons (can be user-drawn later)
|
| 135 |
drive_zone = [[100, 100], [800, 100], [800, 500], [100, 500]]
|
| 136 |
entry_zones = [
|
| 137 |
-
[[50, 100], [100, 100], [100, 500], [50, 500]]
|
| 138 |
]
|
| 139 |
|
| 140 |
img_path = draw_flow_overlay(vectors, labels, centers,
|
|
@@ -155,9 +170,9 @@ def process_json(json_file, background=None):
|
|
| 155 |
# 🖥️ 6. Gradio Interface
|
| 156 |
# ============================================================
|
| 157 |
description_text = """
|
| 158 |
-
### 🧭 Dominant Flow Learning (Stage 2 — Angle + Zone-Aware)
|
| 159 |
Uploads the **trajectories JSON** from Stage 1 and optionally a background frame.
|
| 160 |
-
Outputs dominant flow directions, estimated road angle, and zone polygons for Stage 3.
|
| 161 |
"""
|
| 162 |
|
| 163 |
example_json = "trajectories_sample.json" if os.path.exists("trajectories_sample.json") else None
|
|
@@ -173,7 +188,7 @@ demo = gr.Interface(
|
|
| 173 |
gr.Image(label="Dominant Flow Overlay"),
|
| 174 |
gr.JSON(label="Flow Stats (Stage 2 Output)")
|
| 175 |
],
|
| 176 |
-
title="🚗 Dominant Flow Learning – Stage 2 (
|
| 177 |
description=description_text,
|
| 178 |
examples=[[example_json, example_bg]] if example_json else None,
|
| 179 |
)
|
|
|
|
| 19 |
|
| 20 |
|
| 21 |
# ============================================================
|
| 22 |
+
# 🧮 2. Dominant Flow Clustering (Cosine-based + Auto-Fix)
|
| 23 |
# ============================================================
|
| 24 |
def learn_flows_improved(vectors, n_clusters=2):
    """Cosine-based clustering of normalized motion directions.

    Parameters
    ----------
    vectors : array-like of shape (N, 2)
        Raw motion vectors (dx, dy), one per tracked point.
    n_clusters : int, optional
        Number of dominant flow directions to learn (default 2).

    Returns
    -------
    labels : np.ndarray of shape (N,), or None
        Index of the closest flow direction for EVERY input vector
        (weak motions included), or None if there is too little data.
    centers : np.ndarray of shape (n_clusters, 2), or None
        Unit-length dominant flow directions.
    """
    vectors = np.asarray(vectors, dtype=float)
    if len(vectors) < n_clusters:
        return None, None

    # --- Normalize & filter weak motions ---
    norms = np.linalg.norm(vectors, axis=1, keepdims=True)
    dirs = vectors / (norms + 1e-6)          # unit directions
    valid = norms[:, 0] > 1.5                # drop near-stationary jitter
    dirs = dirs[valid]
    if len(dirs) < n_clusters:
        return None, None

    # --- Cluster unit directions ---
    kmeans = KMeans(n_clusters=n_clusters, n_init=20, random_state=42)
    kmeans.fit(dirs)
    centers = kmeans.cluster_centers_
    centers = centers / (np.linalg.norm(centers, axis=1, keepdims=True) + 1e-6)

    # --- Auto-fix: ensure the two dominant flows are truly opposite ---
    # BUG FIX vs. previous version: after unit-normalization both centers
    # have norm ~1, so choosing which one to flip by comparing
    # np.linalg.norm(centers[0]) and np.linalg.norm(centers[1]) was
    # arbitrary.  Also, merely negating one center negates the cosine
    # similarity (0.5 -> -0.5), which can still fail the -0.8 threshold.
    # Instead: keep the cluster that explains MORE of the valid samples
    # and force the minority cluster exactly opposite to it, so the pair
    # is guaranteed anti-parallel (dot == -1) afterwards.
    # NOTE(review): only the first two centers are checked, matching the
    # original; with n_clusters > 2 the remaining pairs are left as-is.
    if len(centers) >= 2:
        sim = float(np.dot(centers[0], centers[1]))
        if sim > -0.8:
            counts = np.bincount(kmeans.labels_, minlength=len(centers))
            minor = 0 if counts[0] < counts[1] else 1
            centers[minor] = -centers[1 - minor]

    # --- Assign every input vector (weak ones too) to nearest flow ---
    sims = np.dot(vectors / (norms + 1e-6), centers.T)
    labels = np.argmax(sims, axis=1)
    return labels, centers
|
|
|
|
| 81 |
bg = np.ones((600, 900, 3), dtype=np.uint8) * 40
|
| 82 |
|
| 83 |
overlay = bg.copy()
|
| 84 |
+
colors = [(0, 0, 255), (255, 255, 0), (0, 255, 255), (255, 0, 255)]
|
| 85 |
|
| 86 |
norms = np.linalg.norm(vectors, axis=1, keepdims=True)
|
| 87 |
vectors = np.divide(vectors, norms + 1e-6) * 10
|
| 88 |
|
| 89 |
+
# --- Sampled arrows for visual flow density ---
|
| 90 |
for i, ((vx, vy), lab) in enumerate(zip(vectors, labels)):
|
| 91 |
if i % 15 != 0:
|
| 92 |
continue
|
| 93 |
start = (np.random.randint(0, overlay.shape[1]),
|
| 94 |
np.random.randint(0, overlay.shape[0]))
|
| 95 |
end = (int(start[0] + vx), int(start[1] + vy))
|
| 96 |
+
cv2.arrowedLine(overlay, start, end, colors[lab % len(colors)], 1, tipLength=0.3)
|
| 97 |
|
| 98 |
+
# --- Draw dominant flow arrows ---
|
| 99 |
h, w = overlay.shape[:2]
|
| 100 |
scale = 300
|
| 101 |
center_pt = (w // 2, h // 2)
|
|
|
|
| 140 |
if len(vectors) == 0:
|
| 141 |
return None, {"error": "No motion vectors found."}
|
| 142 |
|
| 143 |
+
# --- Use 2 clusters normally, can bump to 3 if road has multiple flows ---
|
| 144 |
+
labels, centers = learn_flows_improved(vectors, n_clusters=2)
|
| 145 |
if labels is None:
|
| 146 |
return None, {"error": "Insufficient data for clustering."}
|
| 147 |
|
| 148 |
road_angle = estimate_road_angle(centers)
|
| 149 |
|
|
|
|
| 150 |
drive_zone = [[100, 100], [800, 100], [800, 500], [100, 500]]
|
| 151 |
entry_zones = [
|
| 152 |
+
[[50, 100], [100, 100], [100, 500], [50, 500]]
|
| 153 |
]
|
| 154 |
|
| 155 |
img_path = draw_flow_overlay(vectors, labels, centers,
|
|
|
|
| 170 |
# 🖥️ 6. Gradio Interface
|
| 171 |
# ============================================================
|
| 172 |
description_text = """
|
| 173 |
+
### 🧭 Dominant Flow Learning (Stage 2 — Angle + Zone-Aware + Auto-Fix)
|
| 174 |
Uploads the **trajectories JSON** from Stage 1 and optionally a background frame.
|
| 175 |
+
Outputs dominant flow directions (auto-corrected if not opposite), estimated road angle, and zone polygons for Stage 3.
|
| 176 |
"""
|
| 177 |
|
| 178 |
example_json = "trajectories_sample.json" if os.path.exists("trajectories_sample.json") else None
|
|
|
|
| 188 |
gr.Image(label="Dominant Flow Overlay"),
|
| 189 |
gr.JSON(label="Flow Stats (Stage 2 Output)")
|
| 190 |
],
|
| 191 |
+
title="🚗 Dominant Flow Learning – Stage 2 (Auto-Fix for Opposite Flows)",
|
| 192 |
description=description_text,
|
| 193 |
examples=[[example_json, example_bg]] if example_json else None,
|
| 194 |
)
|