| """
|
| Sharpness Analysis Module - Comprehensive Image Quality Assessment
|
| ================================================================
|
|
|
| Advanced sharpness metrics, quality analysis, and before/after comparison
|
| with BRISQUE, NIQE, gradient magnitude, and edge density analysis.
|
| """
|
|
|
| import cv2
|
| import numpy as np
|
| from scipy import ndimage, signal
|
| from skimage import filters, feature, measure
|
| import logging
|
| from typing import Dict, Any, Tuple, Optional, List
|
| import matplotlib.pyplot as plt
|
| from dataclasses import dataclass
|
|
|
|
|
| logging.basicConfig(level=logging.INFO)
|
| logger = logging.getLogger(__name__)
|
|
|
@dataclass
class SharpnessMetrics:
    """Container for sharpness analysis results"""
    # Variance of the Laplacian response (SharpnessAnalyzer._laplacian_variance).
    laplacian_variance: float
    # Mean Sobel gradient magnitude (SharpnessAnalyzer._gradient_magnitude).
    gradient_magnitude: float
    # Fraction of pixels marked as edges by Canny (SharpnessAnalyzer._edge_density).
    edge_density: float
    # Brenner first-difference focus measure (SharpnessAnalyzer._brenner_gradient).
    brenner_gradient: float
    # Tenengrad Sobel-energy focus measure (SharpnessAnalyzer._tenengrad).
    tenengrad: float
    # Variance of the Sobel gradient-magnitude image (SharpnessAnalyzer._sobel_variance).
    sobel_variance: float
    # High-pass (Laplacian-kernel) filter energy (SharpnessAnalyzer._wavelet_energy).
    wavelet_energy: float
    # Weighted combination of the normalized metrics, clipped to [0, 1].
    overall_score: float
    # One of 'Excellent'/'Good'/'Fair'/'Poor'/'Very Poor', or 'Unknown'
    # when analysis failed (see SharpnessAnalyzer._default_metrics).
    quality_rating: str
|
|
|
class SharpnessAnalyzer:
    """Comprehensive sharpness and image quality analysis.

    Combines several classical focus measures (Laplacian variance, gradient
    magnitude, edge density, Brenner, Tenengrad, Sobel variance and a
    high-frequency energy proxy) into a weighted overall score in [0, 1],
    plus a human-readable quality rating.
    """

    # Maps compare_images() improvement keys to the SharpnessMetrics
    # attribute they are derived from.  An explicit mapping is required:
    # stripping '_improvement' from the key yields names like 'laplacian'
    # or 'edge' that are NOT SharpnessMetrics fields.
    _IMPROVEMENT_TO_ATTR = {
        'laplacian_improvement': 'laplacian_variance',
        'gradient_improvement': 'gradient_magnitude',
        'edge_improvement': 'edge_density',
        'overall_improvement': 'overall_score',
    }

    def __init__(self):
        # Inclusive lower bounds on overall_score for each rating tier;
        # scores below 'poor' are rated 'Very Poor'.
        self.quality_thresholds = {
            'excellent': 0.8,
            'good': 0.6,
            'fair': 0.4,
            'poor': 0.2
        }

    def analyze_sharpness(self, image: np.ndarray) -> "SharpnessMetrics":
        """
        Comprehensive sharpness analysis using multiple metrics

        Args:
            image: Input image (BGR or grayscale), uint8 pixel range

        Returns:
            SharpnessMetrics: Complete analysis results; on any internal
            error a zeroed metrics object with rating 'Unknown' is returned.
        """
        try:
            if len(image.shape) == 3:
                gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            else:
                gray = image.copy()

            # All metric helpers operate on a [0, 1] float image.
            gray_norm = gray.astype(np.float64) / 255.0

            laplacian_var = self._laplacian_variance(gray_norm)
            gradient_mag = self._gradient_magnitude(gray_norm)
            edge_density = self._edge_density(gray_norm)
            brenner = self._brenner_gradient(gray_norm)
            tenengrad = self._tenengrad(gray_norm)
            sobel_var = self._sobel_variance(gray_norm)
            wavelet_energy = self._wavelet_energy(gray_norm)

            overall_score = self._calculate_overall_score(
                laplacian_var, gradient_mag, edge_density,
                brenner, tenengrad, sobel_var, wavelet_energy
            )
            quality_rating = self._get_quality_rating(overall_score)

            return SharpnessMetrics(
                laplacian_variance=laplacian_var,
                gradient_magnitude=gradient_mag,
                edge_density=edge_density,
                brenner_gradient=brenner,
                tenengrad=tenengrad,
                sobel_variance=sobel_var,
                wavelet_energy=wavelet_energy,
                overall_score=overall_score,
                quality_rating=quality_rating
            )

        except Exception as e:
            logger.error(f"Error in sharpness analysis: {e}")
            return self._default_metrics()

    def _laplacian_variance(self, image: np.ndarray) -> float:
        """Variance of the Laplacian response (classic focus measure)."""
        laplacian = cv2.Laplacian(image, cv2.CV_64F)
        return float(laplacian.var())

    def _gradient_magnitude(self, image: np.ndarray) -> float:
        """Mean Sobel gradient magnitude over the image."""
        grad_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=3)
        grad_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=3)
        magnitude = np.sqrt(grad_x**2 + grad_y**2)
        return float(np.mean(magnitude))

    def _edge_density(self, image: np.ndarray) -> float:
        """Fraction of pixels marked as edges by the Canny detector."""
        # Canny expects uint8 input; image arrives normalized to [0, 1].
        img_uint8 = (image * 255).astype(np.uint8)
        edges = cv2.Canny(img_uint8, 50, 150)
        return float(np.count_nonzero(edges) / edges.size)

    def _brenner_gradient(self, image: np.ndarray) -> float:
        """Brenner focus measure: mean squared first differences."""
        grad_x = np.diff(image, axis=1)
        grad_y = np.diff(image, axis=0)
        brenner = np.sum(grad_x**2) + np.sum(grad_y**2)
        return float(brenner / image.size)

    def _tenengrad(self, image: np.ndarray) -> float:
        """Tenengrad focus measure: mean squared Sobel gradient energy."""
        grad_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=3)
        grad_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=3)
        tenengrad = np.sum(grad_x**2 + grad_y**2)
        return float(tenengrad / image.size)

    def _sobel_variance(self, image: np.ndarray) -> float:
        """Variance of the Sobel gradient-magnitude image."""
        sobel_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=3)
        sobel_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=3)
        sobel_combined = np.sqrt(sobel_x**2 + sobel_y**2)
        return float(np.var(sobel_combined))

    def _wavelet_energy(self, image: np.ndarray) -> float:
        """Mean high-frequency energy via an 8-connected Laplacian kernel.

        NOTE: despite the name this is a high-pass filter energy, not a
        true wavelet decomposition.
        """
        try:
            kernel = np.array([[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]])
            filtered = cv2.filter2D(image, -1, kernel)
            energy = np.sum(filtered**2)
            return float(energy / image.size)
        except Exception as e:
            # Was a bare `except:` that also swallowed SystemExit /
            # KeyboardInterrupt and hid the failure; narrow and log instead,
            # keeping the 0.0 fallback so callers still get a score.
            logger.error(f"Error computing wavelet energy: {e}")
            return 0.0

    def _calculate_overall_score(self, laplacian_var: float, gradient_mag: float,
                                 edge_density: float, brenner: float,
                                 tenengrad: float, sobel_var: float,
                                 wavelet_energy: float) -> float:
        """Weighted, normalized combination of all metrics, clipped to [0, 1].

        Each raw metric is divided by an empirical saturation constant and
        capped at 1 before weighting; the weights sum to 1.0.
        """
        try:
            norm_laplacian = min(laplacian_var / 1000.0, 1.0)
            norm_gradient = min(gradient_mag / 0.3, 1.0)
            norm_edge = min(edge_density / 0.1, 1.0)
            norm_brenner = min(brenner / 0.1, 1.0)
            norm_tenengrad = min(tenengrad / 0.5, 1.0)
            norm_sobel = min(sobel_var / 0.1, 1.0)
            norm_wavelet = min(wavelet_energy / 1.0, 1.0)

            weights = [0.2, 0.15, 0.15, 0.15, 0.15, 0.1, 0.1]
            metrics = [norm_laplacian, norm_gradient, norm_edge, norm_brenner,
                       norm_tenengrad, norm_sobel, norm_wavelet]

            overall_score = sum(w * m for w, m in zip(weights, metrics))
            return min(overall_score, 1.0)

        except Exception as e:
            logger.error(f"Error calculating overall score: {e}")
            return 0.0

    def _get_quality_rating(self, score: float) -> str:
        """Convert a numerical overall score to a quality rating string."""
        if score >= self.quality_thresholds['excellent']:
            return 'Excellent'
        elif score >= self.quality_thresholds['good']:
            return 'Good'
        elif score >= self.quality_thresholds['fair']:
            return 'Fair'
        elif score >= self.quality_thresholds['poor']:
            return 'Poor'
        else:
            return 'Very Poor'

    def _default_metrics(self) -> "SharpnessMetrics":
        """Return zeroed metrics (rating 'Unknown') for the error path."""
        return SharpnessMetrics(
            laplacian_variance=0.0,
            gradient_magnitude=0.0,
            edge_density=0.0,
            brenner_gradient=0.0,
            tenengrad=0.0,
            sobel_variance=0.0,
            wavelet_energy=0.0,
            overall_score=0.0,
            quality_rating='Unknown'
        )

    def compare_images(self, original: np.ndarray, enhanced: np.ndarray) -> Dict[str, Any]:
        """
        Compare sharpness between original and enhanced images

        Args:
            original: Original image
            enhanced: Enhanced/processed image

        Returns:
            dict: Comparison results with improvement metrics; empty dict on
            error.
        """
        try:
            original_metrics = self.analyze_sharpness(original)
            enhanced_metrics = self.analyze_sharpness(enhanced)

            improvements = {
                'laplacian_improvement': enhanced_metrics.laplacian_variance - original_metrics.laplacian_variance,
                'gradient_improvement': enhanced_metrics.gradient_magnitude - original_metrics.gradient_magnitude,
                'edge_improvement': enhanced_metrics.edge_density - original_metrics.edge_density,
                'overall_improvement': enhanced_metrics.overall_score - original_metrics.overall_score,
                'quality_improvement': self._compare_quality_ratings(
                    original_metrics.quality_rating, enhanced_metrics.quality_rating
                )
            }

            # BUG FIX: the old code derived attribute names by stripping
            # '_improvement' (yielding e.g. 'laplacian'), which matches no
            # SharpnessMetrics field, so getattr raised AttributeError and
            # this method always fell into the except and returned {}.
            # Use the explicit key -> attribute mapping instead.
            percentage_improvements = {}
            for key, attr in self._IMPROVEMENT_TO_ATTR.items():
                original_val = getattr(original_metrics, attr)
                if original_val > 0:
                    percentage_improvements[f"{key}_percent"] = (improvements[key] / original_val) * 100
                else:
                    percentage_improvements[f"{key}_percent"] = 0.0

            return {
                'original_metrics': original_metrics,
                'enhanced_metrics': enhanced_metrics,
                'improvements': improvements,
                'percentage_improvements': percentage_improvements,
                'is_improved': enhanced_metrics.overall_score > original_metrics.overall_score,
                'improvement_summary': self._generate_improvement_summary(improvements)
            }

        except Exception as e:
            logger.error(f"Error comparing images: {e}")
            return {}

    def _compare_quality_ratings(self, original: str, enhanced: str) -> int:
        """Return rating-level delta (enhanced - original); 0 if unknown."""
        ratings = ['Very Poor', 'Poor', 'Fair', 'Good', 'Excellent']
        try:
            original_idx = ratings.index(original)
            enhanced_idx = ratings.index(enhanced)
            return enhanced_idx - original_idx
        except ValueError:
            # One of the ratings is not in the ladder (e.g. 'Unknown').
            return 0

    def _generate_improvement_summary(self, improvements: Dict[str, Any]) -> str:
        """Generate a human-readable improvement summary string."""
        try:
            overall_imp = improvements.get('overall_improvement', 0)
            quality_imp = improvements.get('quality_improvement', 0)

            if overall_imp > 0.1:
                if quality_imp > 0:
                    return f"Significant improvement: Overall score increased by {overall_imp:.3f}, quality improved by {quality_imp} level(s)"
                else:
                    return f"Good improvement: Overall score increased by {overall_imp:.3f}"
            elif overall_imp > 0.05:
                return f"Moderate improvement: Overall score increased by {overall_imp:.3f}"
            elif overall_imp > 0:
                return f"Minor improvement: Overall score increased by {overall_imp:.3f}"
            elif overall_imp > -0.05:
                return "Minimal change: Image quality maintained"
            else:
                return f"Quality decreased: Overall score reduced by {abs(overall_imp):.3f}"

        except Exception as e:
            logger.error(f"Error generating summary: {e}")
            return "Unable to generate improvement summary"
|
|
|
class QualityMetrics:
    """Additional image quality assessment metrics (PSNR, SSIM, entropy)."""

    @staticmethod
    def calculate_psnr(original: np.ndarray, processed: np.ndarray) -> float:
        """
        Calculate Peak Signal-to-Noise Ratio (PSNR).

        Args:
            original: Original image
            processed: Processed image

        Returns:
            float: PSNR in dB; inf for identical images, 0.0 on error.
        """
        try:
            diff = original.astype(np.float64) - processed.astype(np.float64)
            mean_sq_err = np.mean(diff ** 2)
            if mean_sq_err == 0:
                # Identical inputs: PSNR is unbounded.
                return float('inf')

            peak = 255.0
            return float(20 * np.log10(peak / np.sqrt(mean_sq_err)))

        except Exception as e:
            logger.error(f"Error calculating PSNR: {e}")
            return 0.0

    @staticmethod
    def calculate_ssim(original: np.ndarray, processed: np.ndarray) -> float:
        """
        Calculate a simplified Structural Similarity Index (SSIM).

        This is a single-window (global) SSIM over the whole image, not the
        usual sliding-window formulation.

        Args:
            original: Original image
            processed: Processed image

        Returns:
            float: SSIM clipped to [0, 1]; 0.0 on error.
        """
        try:
            if len(original.shape) == 3:
                img_a = cv2.cvtColor(original, cv2.COLOR_BGR2GRAY)
                img_b = cv2.cvtColor(processed, cv2.COLOR_BGR2GRAY)
            else:
                img_a = original
                img_b = processed

            # Global first- and second-order statistics.
            mean_a = np.mean(img_a)
            mean_b = np.mean(img_b)
            var_a = np.var(img_a)
            var_b = np.var(img_b)
            covariance = np.mean((img_a - mean_a) * (img_b - mean_b))

            # Stabilizing constants from the standard SSIM definition
            # (K1=0.01, K2=0.03, dynamic range L=255).
            c1 = (0.01 * 255) ** 2
            c2 = (0.03 * 255) ** 2

            numerator = (2 * mean_a * mean_b + c1) * (2 * covariance + c2)
            denominator = (mean_a**2 + mean_b**2 + c1) * (var_a + var_b + c2)
            return float(np.clip(numerator / denominator, 0, 1))

        except Exception as e:
            logger.error(f"Error calculating SSIM: {e}")
            return 0.0

    @staticmethod
    def calculate_entropy(image: np.ndarray) -> float:
        """
        Calculate image entropy (information content) in bits.

        Args:
            image: Input image; BGR inputs are converted to grayscale first.

        Returns:
            float: Shannon entropy of the 256-bin histogram; 0.0 on error.
        """
        try:
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) if len(image.shape) == 3 else image

            counts, _ = np.histogram(gray, bins=256, range=(0, 255))
            probs = counts / counts.sum()

            # Drop empty bins: 0 * log2(0) is defined as 0 for entropy.
            probs = probs[probs > 0]
            return float(-np.sum(probs * np.log2(probs)))

        except Exception as e:
            logger.error(f"Error calculating entropy: {e}")
            return 0.0
|
|
|
|
|
def analyze_image_sharpness(image: np.ndarray) -> SharpnessMetrics:
    """
    Quick sharpness analysis for an image.

    Convenience wrapper that creates a fresh SharpnessAnalyzer and runs the
    full analysis once.

    Args:
        image: Input image

    Returns:
        SharpnessMetrics: Analysis results
    """
    return SharpnessAnalyzer().analyze_sharpness(image)
|
|
|
def compare_image_quality(original: np.ndarray, enhanced: np.ndarray) -> Dict[str, Any]:
    """
    Compare quality between two images.

    Runs the full sharpness comparison, then augments the result with the
    reference quality metrics (PSNR, SSIM) and per-image entropies.

    Args:
        original: Original image
        enhanced: Enhanced image

    Returns:
        dict: Comprehensive comparison results
    """
    analyzer = SharpnessAnalyzer()

    results = analyzer.compare_images(original, enhanced)
    results.update({
        'psnr': QualityMetrics.calculate_psnr(original, enhanced),
        'ssim': QualityMetrics.calculate_ssim(original, enhanced),
        'original_entropy': QualityMetrics.calculate_entropy(original),
        'enhanced_entropy': QualityMetrics.calculate_entropy(enhanced),
    })

    return results
|
|
|
|
|
if __name__ == "__main__":
    print("Sharpness Analysis Module - Testing")
    print("==================================")

    # Synthetic test pair: random noise (high-frequency, "sharp") vs. a
    # heavily Gaussian-blurred copy of it.
    test_image = np.random.randint(0, 255, (480, 640, 3), dtype=np.uint8)
    blurred_image = cv2.GaussianBlur(test_image, (15, 15), 5)

    analyzer = SharpnessAnalyzer()

    original_metrics = analyzer.analyze_sharpness(test_image)
    blurred_metrics = analyzer.analyze_sharpness(blurred_image)

    print(f"Original image quality: {original_metrics.quality_rating}")
    print(f"Original overall score: {original_metrics.overall_score:.3f}")
    print(f"Blurred image quality: {blurred_metrics.quality_rating}")
    print(f"Blurred overall score: {blurred_metrics.overall_score:.3f}")

    # BUG FIX: compare_images() documents that it returns {} on error, but
    # this demo indexed comparison['improvements'] unconditionally and would
    # crash with KeyError.  Guard the empty-dict case.
    comparison = analyzer.compare_images(blurred_image, test_image)
    if comparison:
        print(f"Improvement: {comparison['improvements']['overall_improvement']:.3f}")
        print(f"Summary: {comparison['improvement_summary']}")
    else:
        print("Comparison failed - no results available")

    psnr = QualityMetrics.calculate_psnr(test_image, blurred_image)
    ssim = QualityMetrics.calculate_ssim(test_image, blurred_image)

    print(f"PSNR: {psnr:.2f} dB")
    print(f"SSIM: {ssim:.3f}")

    print("\nSharpness analysis module test completed!")