# app.py — Hugging Face Space by Ayesha352 (commit dd80a33, 4.53 kB)
# NOTE(review): the lines above were Hub web-UI chrome ("picture / Update app.py /
# raw / history blame") captured by the scrape; condensed into this comment so the
# file is valid Python.
import gradio as gr
import cv2
import numpy as np
import json
import math
import matplotlib.pyplot as plt
# === Helper Functions ===
def get_rotated_rect_corners(x, y, w, h, rotation_deg):
    """Corners of a w x h rectangle at (x, y), rotated about its centre.

    The rotation is `rotation_deg` degrees (counter-clockwise in standard
    axes). Corners come back in TL, TR, BR, BL order of the unrotated
    rectangle, as a (4, 2) float32 array ready for cv2 polygon calls.
    """
    theta = math.radians(rotation_deg)
    c, s = math.cos(theta), math.sin(theta)
    cx, cy = x + w / 2.0, y + h / 2.0
    half_w, half_h = w / 2.0, h / 2.0
    # Rotate each centre-relative corner by hand: (lx, ly) -> R @ (lx, ly),
    # then translate back to the rectangle centre.
    corners = [
        (cx + c * lx - s * ly, cy + s * lx + c * ly)
        for lx, ly in ((-half_w, -half_h), (half_w, -half_h),
                       (half_w, half_h), (-half_w, half_h))
    ]
    return np.array(corners, dtype=np.float32)
def preprocess_gray_clahe(img):
    """Convert an RGB image to grayscale and boost local contrast with CLAHE.

    Uses a 3.0 clip limit on an 8x8 tile grid; returns a single-channel image.
    """
    equalizer = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8, 8))
    return equalizer.apply(cv2.cvtColor(img, cv2.COLOR_RGB2GRAY))
def detect_and_match(img1_gray, img2_gray, detector_type, ratio_thresh=0.78):
    """Detect keypoints in both images and return Lowe-ratio-filtered matches.

    Parameters:
        img1_gray, img2_gray: single-channel (grayscale) images.
        detector_type: one of "SIFT", "BRISK", "ORB", "AKAZE", "KAZE".
        ratio_thresh: Lowe ratio-test threshold for accepting a match.

    Returns:
        (kp1, kp2, good_matches). Returns (None, None, []) for an unknown
        detector name and (kp1, kp2, []) when either image produces no
        descriptors.
    """
    if detector_type == "SIFT":
        detector = cv2.SIFT_create(nfeatures=5000)
        matcher = cv2.BFMatcher(cv2.NORM_L2)       # float descriptors -> L2
    elif detector_type == "BRISK":
        detector = cv2.BRISK_create()
        matcher = cv2.BFMatcher(cv2.NORM_HAMMING)  # binary descriptors -> Hamming
    elif detector_type == "ORB":
        detector = cv2.ORB_create(5000)
        matcher = cv2.BFMatcher(cv2.NORM_HAMMING)
    elif detector_type == "AKAZE":
        detector = cv2.AKAZE_create()
        matcher = cv2.BFMatcher(cv2.NORM_HAMMING)
    elif detector_type == "KAZE":
        detector = cv2.KAZE_create()
        matcher = cv2.BFMatcher(cv2.NORM_L2)
    else:
        return None, None, []
    kp1, des1 = detector.detectAndCompute(img1_gray, None)
    kp2, des2 = detector.detectAndCompute(img2_gray, None)
    if des1 is None or des2 is None:
        return kp1, kp2, []
    raw_matches = matcher.knnMatch(des1, des2, k=2)
    # knnMatch with k=2 may return fewer than 2 neighbours for a query
    # (e.g. when the train descriptor set is very small); unconditional
    # `for m, n in raw_matches` unpacking would raise ValueError there,
    # so guard on pair length before applying the ratio test.
    good = [
        pair[0] for pair in raw_matches
        if len(pair) == 2 and pair[0].distance < ratio_thresh * pair[1].distance
    ]
    return kp1, kp2, good
def get_roi_points_from_json(json_file):
    """Read the first print-area ROI from a JSON spec.

    Accepts any of the shapes gradio's File component may hand over:
    a filesystem path (str / os.PathLike, modern gradio "filepath" mode),
    an already-open file-like object, or a tempfile wrapper exposing a
    ``.name`` path attribute (older gradio versions). The original code
    only handled the open-file case and crashed on a path string.

    Returns:
        (x, y, w, h, rotation) of ``data["printAreas"][0]``.
    """
    if hasattr(json_file, "read"):
        # Already an open file-like object.
        data = json.load(json_file)
    else:
        # Either a plain path or a wrapper with a .name path attribute.
        path = getattr(json_file, "name", json_file)
        with open(path, "r", encoding="utf-8") as fh:
            data = json.load(fh)
    area = data["printAreas"][0]
    return (
        area["position"]["x"],
        area["position"]["y"],
        area["width"],
        area["height"],
        area["rotation"],
    )
def process_images(flat_img, persp_img, json_file):
    """Project a rotated ROI from a flat image into a perspective image.

    For each feature detector, matches keypoints between the two images,
    estimates a homography with RANSAC, draws the ROI on both views, and
    saves a side-by-side figure to ``<detector>_result.png``.

    Returns a 6-tuple matching the Interface's declared outputs: the
    gallery list of result image paths, then the per-detector file paths
    (SIFT, ORB, BRISK, AKAZE, KAZE), each ``None`` when that detector
    produced too few matches. The original returned only the gallery
    list, which mismatched the six declared output components.
    """
    flat_gray = preprocess_gray_clahe(flat_img)
    persp_gray = preprocess_gray_clahe(persp_img)
    x, y, w, h, rot = get_roi_points_from_json(json_file)

    detectors = ["SIFT", "BRISK", "ORB", "AKAZE", "KAZE"]
    gallery_images = []
    result_files = {}
    for det in detectors:
        kp1, kp2, matches = detect_and_match(flat_gray, persp_gray, det)
        if len(matches) < 4:
            continue  # a homography needs at least 4 correspondences
        src_pts = np.float32([kp1[m.queryIdx].pt for m in matches]).reshape(-1, 1, 2)
        dst_pts = np.float32([kp2[m.trainIdx].pt for m in matches]).reshape(-1, 1, 2)
        H, _ = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
        if H is None:
            continue  # RANSAC can fail even with >= 4 matches

        # ROI drawn on the flat image (red).
        roi_flat = get_rotated_rect_corners(x, y, w, h, rot)
        flat_copy = flat_img.copy()
        # int32, not plain int: cv2.polylines rejects int64 arrays on some builds.
        cv2.polylines(flat_copy, [roi_flat.astype(np.int32)], True, (0, 0, 255), 2)

        # ROI projected into the perspective image (green, corner dots in blue).
        roi_persp = cv2.perspectiveTransform(roi_flat.reshape(-1, 1, 2), H).reshape(-1, 2)
        persp_copy = persp_img.copy()
        cv2.polylines(persp_copy, [roi_persp.astype(np.int32)], True, (0, 255, 0), 2)
        for px, py in roi_persp:
            cv2.circle(persp_copy, (int(px), int(py)), 5, (255, 0, 0), -1)

        # Side-by-side figure for this detector, saved to disk for the gallery.
        fig, ax = plt.subplots(1, 2, figsize=(12, 6))
        ax[0].imshow(flat_copy)
        ax[0].set_title(f"Flat ROI - {det}")
        ax[0].axis("off")
        ax[1].imshow(persp_copy)
        ax[1].set_title(f"Perspective ROI - {det}")
        ax[1].axis("off")
        plt.tight_layout()
        filename = f"{det}_result.png"
        plt.savefig(filename)
        plt.close(fig)
        gallery_images.append(filename)
        result_files[det] = filename

    # Order must match the Interface outputs: gallery, then SIFT/ORB/BRISK/AKAZE/KAZE.
    return (
        gallery_images,
        result_files.get("SIFT"),
        result_files.get("ORB"),
        result_files.get("BRISK"),
        result_files.get("AKAZE"),
        result_files.get("KAZE"),
    )
# Gradio UI: two image inputs plus the ROI JSON; outputs are a gallery of all
# detector results and a downloadable file per detector (None entries are
# rendered empty). `type="filepath"` replaces the removed `type="file"` option.
iface = gr.Interface(
    fn=process_images,
    inputs=[
        gr.Image(type="numpy", label="Flat Image"),
        gr.Image(type="numpy", label="Perspective Image"),
        gr.File(type="filepath", label="JSON File"),
    ],
    outputs=[
        gr.Gallery(label="Results"),
        gr.File(label="Download SIFT Result"),
        gr.File(label="Download ORB Result"),
        gr.File(label="Download BRISK Result"),
        gr.File(label="Download AKAZE Result"),
        gr.File(label="Download KAZE Result"),
    ],
    title="Homography & ROI Projection",
    description=(
        "Upload a flat image, a perspective image, and a print-area JSON. "
        "The app matches features with several detectors (SIFT, BRISK, ORB, "
        "AKAZE, KAZE) and projects the rotated ROI into the perspective view."
    ),
)

# The original file never launched the app, so the Space had no UI to serve.
if __name__ == "__main__":
    iface.launch()