# img_comparer/src/metrics_features.py
# Author: Vivek Vaddina — commit 2175bbf: "🐛 Fix imports"
import cv2
import imagehash
import numpy as np
from collections import defaultdict
from skimage.metrics import structural_similarity as ssim
from scipy.spatial.distance import directed_hausdorff
from sklearn.metrics import (
jaccard_score,
f1_score,
)
from src.utils import binarize
from src.config import log
def hausdorff(a, b):
    """Symmetric Hausdorff distance between the nonzero pixels of two masks.

    Parameters
    ----------
    a, b : array-like
        2-D (binary) masks; only the coordinates of nonzero entries are used.

    Returns
    -------
    float
        max of the two directed Hausdorff distances. 0.0 when both masks are
        empty, ``inf`` when exactly one is empty (no meaningful distance).
    """
    # get coordinates of nonzero pixels as (row, col) pairs
    coords_a = np.column_stack(np.nonzero(a))
    coords_b = np.column_stack(np.nonzero(b))
    # directed_hausdorff is undefined for empty point sets — handle explicitly
    if coords_a.size == 0 and coords_b.size == 0:
        return 0.0
    if coords_a.size == 0 or coords_b.size == 0:
        return float("inf")
    # directed Hausdorff both ways; the symmetric distance is the maximum
    d_ab = directed_hausdorff(coords_a, coords_b)[0]
    d_ba = directed_hausdorff(coords_b, coords_a)[0]
    return max(d_ab, d_ba)
def get_hist_sim(ref_mask, sample_mask):
    """Correlation of the full 3-D BGR colour histograms of two images.

    Pure-white pixels (255, 255, 255) are excluded from both histograms
    before min-max normalisation, so a shared white background does not
    inflate the similarity score.
    https://medium.com/scrapehero/exploring-image-similarity-approaches-in-python-b8ca0a3ed5a3

    Returns the OpenCV HISTCMP_CORREL score (1.0 = identical distributions).
    """

    def _white_free_hist(image):
        # PIL images arrive as RGB; OpenCV works in BGR order.
        bgr = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
        hist = cv2.calcHist(
            [bgr], [0, 1, 2], None, [256, 256, 256], [0, 256, 0, 256, 0, 256]
        )
        hist[255, 255, 255] = 0  # ignore all white pixels
        cv2.normalize(hist, hist, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)
        return hist

    hist_ref = _white_free_hist(ref_mask)
    hist_sample = _white_free_hist(sample_mask)
    return cv2.compareHist(hist_ref, hist_sample, cv2.HISTCMP_CORREL)
def get_common_metrics(ref, samples, hash_size=8):
    """Compute image-similarity metrics between a reference and sample image(s).

    Parameters
    ----------
    ref : PIL.Image.Image
        Reference image.
    samples : PIL.Image.Image or list of PIL.Image.Image
        One sample image or a list of them; a single image is wrapped in a list.
    hash_size : int, optional
        Side length of the perceptual-hash grids (default 8).

    Returns
    -------
    defaultdict(list)
        Keys prefixed with ``metric_`` (e.g. ``metric_phash``, ``metric_ssim``),
        each mapping to a list with one value per sample, in input order.
    """
    metrics = defaultdict(list)
    flat_bn_ref = binarize(np.array(ref.convert("L"))).flatten()
    if not isinstance(samples, list):
        samples = [samples]
    # All reference-side quantities are loop-invariant: hash the reference
    # once instead of re-hashing it for every sample.
    ref_arr = np.array(ref)
    ref_avg = imagehash.average_hash(ref, hash_size=hash_size)
    ref_phash = imagehash.phash(ref, hash_size=hash_size)
    ref_phash_simple = imagehash.phash_simple(ref, hash_size=hash_size)
    ref_dhash = imagehash.dhash(ref, hash_size=hash_size)
    ref_dhash_v = imagehash.dhash_vertical(ref, hash_size=hash_size)
    ref_whash = imagehash.whash(ref, hash_size=hash_size)
    ref_crop_hash = imagehash.crop_resistant_hash(ref)
    for sample in samples:
        flat_bn_sample = binarize(np.array(sample.convert("L"))).flatten()
        log.debug("computing image hashes")
        # ImageHash subtraction yields the Hamming distance between hashes.
        metrics["avg_hash"].append(
            ref_avg - imagehash.average_hash(sample, hash_size=hash_size)
        )
        metrics["phash"].append(
            ref_phash - imagehash.phash(sample, hash_size=hash_size)
        )
        metrics["phash_simple"].append(
            ref_phash_simple - imagehash.phash_simple(sample, hash_size=hash_size)
        )
        metrics["dhash"].append(
            ref_dhash - imagehash.dhash(sample, hash_size=hash_size)
        )
        metrics["dhash_vertical"].append(
            ref_dhash_v - imagehash.dhash_vertical(sample, hash_size=hash_size)
        )
        metrics["whash"].append(
            ref_whash - imagehash.whash(sample, hash_size=hash_size)
        )
        metrics["crop_resistant_hash"].append(
            ref_crop_hash - imagehash.crop_resistant_hash(sample)
        )
        log.debug("computing ssim")
        # channel_axis=2 — assumes RGB images; verify callers pass 3-channel input
        metrics["ssim"].append(
            float(ssim(ref_arr, np.array(sample), data_range=255, channel_axis=2))
        )
        # log.debug('computing hausdorff')
        # metrics['haussdorff'].append(hausdorff(ref, sample))
        log.debug("computing dice_coeff")
        metrics["dice_coeff"].append(f1_score(flat_bn_ref, flat_bn_sample, pos_label=1))
        log.debug("computing iou")
        metrics["iou"].append(jaccard_score(flat_bn_ref, flat_bn_sample, pos_label=1))
        log.debug("computing hist_sim")
        metrics["hist_sim"].append(get_hist_sim(ref, sample))
    # Prefix every key with "metric_"; keep the defaultdict(list) return type
    # so existing callers that rely on missing-key behaviour still work.
    return defaultdict(list, {f"metric_{k}": v for k, v in metrics.items()})