Ayesha352 committed on
Commit
666ed4b
·
verified ·
1 Parent(s): f644374

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -93
app.py CHANGED
@@ -1,135 +1,128 @@
1
- import gradio as gr
2
  import cv2
3
  import numpy as np
4
  import json
5
- import math
 
6
 
7
  # === Helper: Rotated rectangle corners ===
8
def get_rotated_rect_corners(x, y, w, h, rotation_deg):
    """Corners of the (x, y, w, h) rectangle rotated about its own center.

    Corner order: top-left, top-right, bottom-right, bottom-left.
    Returns a (4, 2) float32 array of (x, y) points.
    """
    angle = np.deg2rad(rotation_deg)
    cos_a = np.cos(angle)
    sin_a = np.sin(angle)
    cx, cy = x + w / 2.0, y + h / 2.0
    # Walk the four sign combinations of the half-extents and rotate each
    # offset by the standard 2-D rotation before translating to the center.
    corners = []
    for sx, sy in ((-1, -1), (1, -1), (1, 1), (-1, 1)):
        dx, dy = sx * w / 2.0, sy * h / 2.0
        corners.append([cx + dx * cos_a - dy * sin_a,
                        cy + dx * sin_a + dy * cos_a])
    return np.array(corners, dtype=np.float32)
25
 
26
- # === Preprocessing ===
27
def preprocess_gray_clahe(img):
    """Grayscale an RGB image and apply CLAHE for local contrast."""
    grayscale = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # clipLimit=3.0 caps per-tile histogram peaks to limit noise
    # amplification; 8x8 tiles localize the equalization so shadows and
    # highlights are corrected independently.
    return cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8, 8)).apply(grayscale)
31
 
32
- # === Feature detectors ===
33
def get_detector(detector_name):
    """Instantiate the OpenCV feature detector named by *detector_name*.

    Supported names: "SIFT", "ORB", "BRISK", "AKAZE", "KAZE".
    Returns None for any unrecognized name.
    """
    # Dispatch table replaces the if/elif chain; each entry is a zero-arg
    # factory so detectors are only constructed when actually requested.
    factories = {
        "SIFT": lambda: cv2.SIFT_create(nfeatures=5000),
        "ORB": lambda: cv2.ORB_create(5000),
        "BRISK": cv2.BRISK_create,
        "AKAZE": cv2.AKAZE_create,
        "KAZE": cv2.KAZE_create,
    }
    factory = factories.get(detector_name)
    return factory() if factory is not None else None
46
-
47
def detect_and_match(img1_gray, img2_gray, detector_name, ratio_thresh=0.78):
    """Detect keypoints in both grayscale images and match their descriptors.

    Parameters
    ----------
    img1_gray, img2_gray : 2-D grayscale images.
    detector_name : one of "SIFT", "ORB", "BRISK", "AKAZE", "KAZE".
    ratio_thresh : Lowe's ratio-test threshold for keeping a match.

    Returns
    -------
    (kp1, kp2, good) where *good* is the ratio-test-filtered match list.
    Returns empty lists when the detector is unknown or no descriptors
    could be computed, instead of crashing.
    """
    detector = get_detector(detector_name)
    if detector is None:
        return [], [], []
    kp1, des1 = detector.detectAndCompute(img1_gray, None)
    kp2, des2 = detector.detectAndCompute(img2_gray, None)
    # Featureless images yield des == None, and knnMatch(k=2) needs at
    # least two train descriptors; report "no matches" rather than raising.
    if des1 is None or des2 is None or len(des2) < 2:
        return list(kp1 or []), list(kp2 or []), []
    # Float descriptors (SIFT/KAZE) are compared with L2; the binary
    # descriptors (ORB/BRISK/AKAZE) with Hamming distance.
    norm = cv2.NORM_L2 if detector_name in ("SIFT", "KAZE") else cv2.NORM_HAMMING
    matcher = cv2.BFMatcher(norm)
    matches = matcher.knnMatch(des1, des2, k=2)
    # Lowe's ratio test; guard pairs that came back with fewer than 2
    # neighbours (the original `for m, n in matches` unpacking crashed there).
    good = [pair[0] for pair in matches
            if len(pair) == 2 and pair[0].distance < ratio_thresh * pair[1].distance]
    return kp1, kp2, good
58
 
59
- # === Main processing ===
60
def process_images(flat_img, persp_img, json_file):
    """Project the JSON-defined print-area ROI from the flat image onto the
    perspective image with each feature detector.

    Parameters
    ----------
    flat_img, persp_img : RGB numpy images from gr.Image(type="numpy").
    json_file : filepath string (gr.File(type="filepath")) or a file-like
        object with a .name attribute; the JSON must carry
        printAreas[0].{position{x,y}, width, height, rotation}.

    Returns
    -------
    List of annotated RGB images, one per detector that produced a usable
    homography, for display in a Gradio gallery.
    """
    if flat_img is None or persp_img is None or json_file is None:
        return []

    # gr.File(type="filepath") hands a plain path string; older Gradio
    # versions hand a tempfile wrapper exposing .name. The original
    # unconditional `json_file.name` raised AttributeError on the string.
    json_path = json_file if isinstance(json_file, str) else json_file.name
    try:
        with open(json_path, 'r') as f:
            data = json.load(f)
    except Exception as e:
        print("JSON read error:", e)
        return []

    roi = data["printAreas"][0]
    roi_x = roi["position"]["x"]
    roi_y = roi["position"]["y"]
    roi_w = roi["width"]
    roi_h = roi["height"]
    roi_rot_deg = roi["rotation"]

    # Preprocess both images once; the detectors all share the result.
    flat_gray = preprocess_gray_clahe(flat_img)
    persp_gray = preprocess_gray_clahe(persp_img)

    detectors = ["SIFT", "ORB", "BRISK", "AKAZE", "KAZE"]
    gallery_images = []

    for det in detectors:
        kp1, kp2, good_matches = detect_and_match(flat_gray, persp_gray, det)
        # findHomography needs at least 4 correspondences.
        if len(good_matches) < 4:
            print(f"Not enough matches for {det}")
            continue

        src_pts = np.float32([kp1[m.queryIdx].pt for m in good_matches]).reshape(-1, 1, 2)
        dst_pts = np.float32([kp2[m.trainIdx].pt for m in good_matches]).reshape(-1, 1, 2)
        H, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
        # RANSAC can fail to find a consensus model and return None.
        if H is None:
            print(f"Homography failed for {det}")
            continue

        # Map the flat-image ROI corners into the perspective image.
        roi_corners_flat = get_rotated_rect_corners(roi_x, roi_y, roi_w, roi_h, roi_rot_deg)
        roi_corners_persp = cv2.perspectiveTransform(
            roi_corners_flat.reshape(-1, 1, 2), H).reshape(-1, 2)

        # The gradio input is already RGB, so draw directly in RGB and skip
        # the BGR->RGB conversion the original applied (it swapped the
        # photo's red/blue channels).
        persp_out = persp_img.copy()
        cv2.polylines(persp_out, [roi_corners_persp.astype(int)], True, (0, 255, 0), 3)

        # Detector label on a filled black backdrop (yellow in RGB order).
        text = det
        font = cv2.FONT_HERSHEY_SIMPLEX
        scale = 1
        thickness = 2
        text_size = cv2.getTextSize(text, font, scale, thickness)[0]
        text_x, text_y = 10, 30
        cv2.rectangle(persp_out, (text_x - 5, text_y - 25),
                      (text_x + text_size[0] + 5, text_y + 5), (0, 0, 0), -1)
        cv2.putText(persp_out, text, (text_x, text_y), font, scale,
                    (255, 255, 0), thickness)

        gallery_images.append(persp_out)

    return gallery_images
119
 
120
  # === Gradio Interface ===
121
# Wire the processing function to a simple Interface: two numpy image
# inputs, one JSON file input, and a gallery of annotated results.
iface = gr.Interface(
    fn=process_images,
    inputs=[
        gr.Image(type="numpy", label="Flat Image"),
        gr.Image(type="numpy", label="Perspective Image"),
        gr.File(type="filepath", label="JSON File"),
    ],
    outputs=[
        gr.Gallery(label="Detector Results (Perspective Images with ROI & Detector Label)"),
    ],
    title="Feature Detection ROI Projection",
    description=(
        "Shows SIFT, ORB, BRISK, AKAZE, KAZE detector results on the "
        "Perspective image only, with ROI and attractive detector label."
    ),
)

iface.launch()
 
 
1
  import cv2
2
  import numpy as np
3
  import json
4
+ import gradio as gr
5
+ import matplotlib.pyplot as plt
6
 
7
  # === Helper: Rotated rectangle corners ===
8
def get_rotated_rect_corners(x, y, w, h, rotation_deg):
    """Return the four corners of a w x h rectangle anchored at (x, y)
    after rotating it by *rotation_deg* degrees about its own center.

    Corner order: top-left, top-right, bottom-right, bottom-left.
    Returns a (4, 2) float32 array of (x, y) points.
    """
    theta = np.deg2rad(rotation_deg)
    c, s = np.cos(theta), np.sin(theta)
    center = np.array([x + w / 2.0, y + h / 2.0])
    half_w, half_h = w / 2.0, h / 2.0
    # Corner offsets relative to the rectangle center, unrotated.
    offsets = np.array([
        [-half_w, -half_h],
        [half_w, -half_h],
        [half_w, half_h],
        [-half_w, half_h],
    ])
    rot = np.array([[c, -s],
                    [s, c]])
    # Rotate about the center, then translate back into image coordinates.
    return (offsets @ rot.T + center).astype(np.float32)
29
 
30
+ # === Preprocessing: Grayscale + CLAHE ===
31
def preprocess_gray_clahe(img):
    """Convert a BGR image to grayscale and boost local contrast with CLAHE."""
    # CLAHE equalizes contrast per 8x8 tile, clipping the histogram at 3.0
    # to limit noise amplification; this helps feature detectors on
    # unevenly lit photos.
    equalizer = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8, 8))
    return equalizer.apply(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY))
35
 
36
+ # === Detect and match features ===
37
def detect_and_match(img1_gray, img2_gray, method="SIFT", ratio_thresh=0.78):
    """Detect keypoints in two grayscale images and return ratio-test matches.

    Parameters
    ----------
    img1_gray, img2_gray : 2-D grayscale images.
    method : "SIFT" | "ORB" | "BRISK" | "KAZE" | "AKAZE".
    ratio_thresh : Lowe's ratio threshold for keeping a match.

    Returns
    -------
    (kp1, kp2, good) — keypoints from both images plus the good matches;
    (None, None, None) for an unknown method, as before.
    """
    # One table instead of five near-identical branches: each method maps
    # to (detector factory, matcher norm). Float descriptors (SIFT, KAZE)
    # are compared with L2; binary descriptors (ORB, BRISK, AKAZE) with
    # Hamming distance.
    registry = {
        "SIFT": (lambda: cv2.SIFT_create(nfeatures=5000), cv2.NORM_L2),
        "ORB": (lambda: cv2.ORB_create(5000), cv2.NORM_HAMMING),
        "BRISK": (cv2.BRISK_create, cv2.NORM_HAMMING),
        "KAZE": (cv2.KAZE_create, cv2.NORM_L2),
        "AKAZE": (cv2.AKAZE_create, cv2.NORM_HAMMING),
    }
    if method not in registry:
        return None, None, None
    make_detector, norm = registry[method]
    detector = make_detector()
    kp1, des1 = detector.detectAndCompute(img1_gray, None)
    kp2, des2 = detector.detectAndCompute(img2_gray, None)

    # Featureless images produce des == None, and knnMatch(k=2) needs at
    # least two train descriptors; bail out with "no matches" instead of
    # letting cv2 raise.
    if des1 is None or des2 is None or len(des2) < 2:
        return kp1, kp2, []

    matcher = cv2.BFMatcher(norm)
    raw_matches = matcher.knnMatch(des1, des2, k=2)
    # Lowe's ratio test; some entries can come back with fewer than two
    # neighbours (the original `for m, n in raw_matches` crashed on those).
    good = [pair[0] for pair in raw_matches
            if len(pair) == 2 and pair[0].distance < ratio_thresh * pair[1].distance]
    return kp1, kp2, good
72
 
73
+ # === Main function for Gradio ===
74
def _read_bgr(file_or_path):
    """Load an uploaded image as a BGR array; accepts a plain filepath
    string (Gradio 4 gr.File) or a file-like object with .read()."""
    if isinstance(file_or_path, str):
        return cv2.imread(file_or_path, cv2.IMREAD_COLOR)
    raw = file_or_path.read()
    return cv2.imdecode(np.frombuffer(raw, np.uint8), cv2.IMREAD_COLOR)


def _load_mockup(file_or_path):
    """Parse the mockup JSON from a filepath string or a file object."""
    if isinstance(file_or_path, str):
        with open(file_or_path, "r") as f:
            return json.load(f)
    return json.load(file_or_path)


def homography_demo(flat_file, persp_file, json_file):
    """Run every feature detector, estimate flat->perspective homographies,
    and return the perspective image annotated with the projected ROI.

    Parameters
    ----------
    flat_file, persp_file : uploaded image files (path string or file object).
    json_file : uploaded mockup.json with
        printAreas[0].{position{x,y}, width, height, rotation}.

    Returns
    -------
    List of (rgb_image, method_name) tuples for a Gradio gallery; methods
    that fail to produce a homography are skipped instead of crashing.
    """
    flat_img = _read_bgr(flat_file)
    persp_img = _read_bgr(persp_file)
    mockup = _load_mockup(json_file)

    area = mockup["printAreas"][0]
    roi_x = area["position"]["x"]
    roi_y = area["position"]["y"]
    roi_w = area["width"]
    roi_h = area["height"]
    roi_rot_deg = area["rotation"]

    flat_gray = preprocess_gray_clahe(flat_img)
    persp_gray = preprocess_gray_clahe(persp_img)

    methods = ["SIFT", "ORB", "BRISK", "KAZE", "AKAZE"]
    outputs = []

    for method in methods:
        kp1, kp2, good_matches = detect_and_match(flat_gray, persp_gray, method=method)
        # findHomography needs >= 4 correspondences; skip weak detectors
        # (the previous version crashed inside findHomography here).
        if not good_matches or len(good_matches) < 4:
            continue

        src_pts = np.float32([kp1[m.queryIdx].pt for m in good_matches]).reshape(-1, 1, 2)
        dst_pts = np.float32([kp2[m.trainIdx].pt for m in good_matches]).reshape(-1, 1, 2)

        H, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
        if H is None:  # RANSAC found no consensus model
            continue

        # Project the flat-image ROI corners into the perspective view.
        roi_corners_flat = get_rotated_rect_corners(roi_x, roi_y, roi_w, roi_h, roi_rot_deg)
        roi_corners_persp = cv2.perspectiveTransform(
            roi_corners_flat.reshape(-1, 1, 2), H).reshape(-1, 2)

        persp_debug = persp_img.copy()
        cv2.polylines(persp_debug, [roi_corners_persp.astype(int)], True, (0, 255, 0), 2)
        for (px, py) in roi_corners_persp:
            cv2.circle(persp_debug, (int(px), int(py)), 5, (255, 0, 0), -1)

        # Convert BGR to RGB for Gradio display
        persp_debug_rgb = cv2.cvtColor(persp_debug, cv2.COLOR_BGR2RGB)
        outputs.append((persp_debug_rgb, method))

    return outputs
113
 
114
  # === Gradio Interface ===
115
# Build the UI: three file uploads, a result gallery, and a run button.
with gr.Blocks() as demo:
    gr.Markdown("## Homography ROI Demo with Multiple Feature Detectors")
    with gr.Row():
        flat_input = gr.File(label="Upload Flat Image")
        persp_input = gr.File(label="Upload Perspective Image")
        json_input = gr.File(label="Upload mockup.json")
    # Gallery layout is configured via constructor kwargs; the chained
    # .style(grid=..., height=...) API was deprecated and removed in
    # Gradio 4, where it raises AttributeError.
    output_gallery = gr.Gallery(label="Perspective ROI Results",
                                columns=2, height=400)
    run_btn = gr.Button("Run Homography")

    run_btn.click(homography_demo,
                  inputs=[flat_input, persp_input, json_input],
                  outputs=output_gallery)

demo.launch()