# Spaces: Running  (hosting-page header residue from the scrape, kept as a comment so the file parses)
import os
import sys

import cv2
import numpy as np

from finger_detector import FingerDetector
from models import FingerQualityResult
from quality_analyzer import QualityAnalyzer, QualityConfig
from utils import finger_quality_result_to_json
from visualizer import Visualizer
class FingerQualityAssessor:
    """
    End-to-end finger quality computation on single-finger mobile images.

    Pipeline: resize to a fixed width -> YCbCr skin segmentation ->
    largest-contour finger detection -> per-metric scoring (blur,
    illumination, coverage, orientation) -> aggregate quality score
    plus human-readable feedback.
    """

    def __init__(self, config: QualityConfig):
        """Store the config and build the detector/analyzer collaborators."""
        self.config = config
        # 2% minimum contour-area ratio filters out small skin-colored specks.
        self.detector = FingerDetector(min_contour_area_ratio=0.02)
        self.analyzer = QualityAnalyzer(config)

    def _no_finger_result(self) -> FingerQualityResult:
        """Build the all-failing result used when no finger contour is found."""
        return FingerQualityResult(
            blur_score=0.0,
            illumination_score=0.0,
            coverage_ratio=0.0,
            orientation_angle_deg=0.0,
            blur_pass=False,
            illumination_pass=False,
            coverage_pass=False,
            orientation_pass=False,
            quality_score=0.0,
            overall_pass=False,
            bbox=None,
            contour_area=0.0,
        )

    def assess(self, bgr: np.ndarray, draw_debug: bool = False):
        """Assess finger quality on a single BGR image.

        Args:
            bgr: Input image in OpenCV BGR channel order.
            draw_debug: When True, also return an annotated debug image.

        Returns:
            Tuple ``(result, feedback, debug_img)``. ``debug_img`` is None
            unless ``draw_debug`` is True.

        Raises:
            ValueError: If the input image is None or empty.
        """
        if bgr is None or bgr.size == 0:
            raise ValueError("Input image is empty")

        img = self.analyzer.resize_keep_aspect(bgr, self.config.target_width)
        h, w = img.shape[:2]
        frame_area = float(h * w)

        mask = self.detector.segment_skin_ycbcr(img)
        contour = self.detector.find_largest_contour(mask, frame_area)

        if contour is None:
            result = self._no_finger_result()
            feedback = self.analyzer.generate_feedback(result)
            # BUGFIX: embed feedback on the failure path too, mirroring the
            # success path below, so result.feedback is always populated.
            result.feedback = feedback
            return result, feedback, (img if draw_debug else None)

        contour_area = float(cv2.contourArea(contour))
        bbox = self.detector.bounding_box(contour)
        x, y, w_box, h_box = bbox

        # Crop image and mask to the finger bounding box for local metrics.
        roi = img[y:y + h_box, x:x + w_box]
        roi_gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
        mask_roi = mask[y:y + h_box, x:x + w_box]

        blur_score = self.analyzer.blur_score_laplacian(roi_gray)
        illumination_score = float(roi_gray.mean())
        # NOTE(review): coverage is skin pixels inside the bbox divided by the
        # WHOLE frame area (not the bbox area) — confirm this is intended.
        coverage_ratio = float(np.count_nonzero(mask_roi)) / frame_area
        orientation_angle_deg = self.detector.orientation_pca_deg(contour)

        # Per-metric pass/fail against the configured thresholds.
        blur_pass = blur_score >= self.config.blur_min
        illum_pass = self.config.illum_min <= illumination_score <= self.config.illum_max
        coverage_pass = coverage_ratio >= self.config.coverage_min
        orientation_pass = self.analyzer.orientation_pass(orientation_angle_deg)

        quality_score = self.analyzer.compute_quality_score(
            blur_score=blur_score,
            illumination_score=illumination_score,
            coverage_ratio=coverage_ratio,
            orientation_ok=orientation_pass,
        )
        overall_pass = quality_score >= self.config.overall_quality_threshold

        result = FingerQualityResult(
            blur_score=float(blur_score),
            illumination_score=float(illumination_score),
            coverage_ratio=float(coverage_ratio),
            orientation_angle_deg=float(orientation_angle_deg),
            blur_pass=bool(blur_pass),
            illumination_pass=bool(illum_pass),
            coverage_pass=bool(coverage_pass),
            orientation_pass=bool(orientation_pass),
            quality_score=float(quality_score),
            overall_pass=bool(overall_pass),
            bbox=bbox,
            contour_area=contour_area,
            feedback=None,
        )
        feedback = self.analyzer.generate_feedback(result)
        # Embed feedback into the result so callers need only one object.
        result.feedback = feedback

        debug_img = None
        # contour is guaranteed non-None here (failure path returned above).
        if draw_debug and result.bbox is not None:
            debug_img = Visualizer.draw_debug(img, contour, bbox, orientation_angle_deg, result)
        return result, feedback, debug_img
def main():
    """Run the finger-quality demo on one image.

    The image path may be supplied as the first command-line argument;
    with no argument the original hard-coded sample path is used, so
    existing invocations keep working. Writes the quality JSON to
    ``results/finger_quality_result.json``, prints the JSON and the
    per-category feedback, and shows the debug overlay in a window.

    Raises:
        RuntimeError: If the image cannot be read from disk.
    """
    # Generalized: accept the image path on the command line instead of
    # requiring an edit to this hard-coded developer path.
    default_path = r"C:\SagarKV\sol9x\ContactlessFinger\TRACK_A\finger_inputs\OM_TH.jpg"
    image_path = sys.argv[1] if len(sys.argv) > 1 else default_path

    img = cv2.imread(image_path)
    if img is None:
        # cv2.imread returns None (no exception) on missing/unreadable files;
        # include the path so the failure is actionable.
        raise RuntimeError(f"Image not found or path is wrong: {image_path}")

    config = QualityConfig(
        target_width=640,
        blur_min=60.0,
        illum_min=50.0,
        illum_max=200.0,
        coverage_min=0.10,
        orientation_max_deviation=45.0,
        vertical_expected=True,
        overall_quality_threshold=0.70,
    )
    assessor = FingerQualityAssessor(config)
    result, feedback, debug_image = assessor.assess(img, draw_debug=True)

    # Persist the JSON report next to the script.
    os.makedirs("results", exist_ok=True)
    quality_json = finger_quality_result_to_json(result)
    with open("results/finger_quality_result.json", "w", encoding="utf-8") as f:
        f.write(quality_json)

    # Print JSON + feedback
    print(quality_json)
    for m in feedback.messages:
        print(f"[{m.severity.upper()}] {m.category}: {m.message}")
    print("Acceptable:", feedback.is_acceptable)

    if debug_image is not None:
        cv2.imshow("Finger Quality Debug", debug_image)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
| if __name__ == "__main__": | |
| main() | |