| | import os |
| | import json |
| | import cv2 |
| | import glob |
| | import numpy as np |
| | from tqdm import tqdm |
| | from time import time |
| | from sklearn.metrics import jaccard_score, precision_score |
| | from concurrent.futures import ThreadPoolExecutor, as_completed |
| |
|
def mask_to_bbox(mask):
    """Tight bounding box around the foreground (> 0) pixels of a 2D mask.

    Args:
        mask: 2D array; any pixel with value > 0 counts as foreground.

    Returns:
        (x1, y1, x2, y2) inclusive pixel coordinates of the box,
        or None when the mask has no foreground at all.
    """
    ys, xs = np.nonzero(mask > 0)
    if ys.size == 0:
        return None
    return xs.min(), ys.min(), xs.max(), ys.max()
| |
|
def compute_giou(pred, label):
    """Generalized IoU (GIoU) between the tight bounding boxes of two masks.

    Args:
        pred: 2D np.uint8 array, values in {0, 1}.
        label: 2D np.uint8 array, values in {0, 1}.

    Returns:
        float GIoU score; 0.0 when either mask is empty.
    """
    def _bbox(mask):
        # Tight inclusive bounding box of the nonzero region, or None.
        ys, xs = np.nonzero(mask > 0)
        if ys.size == 0:
            return None
        return xs.min(), ys.min(), xs.max(), ys.max()

    box_p = _bbox(pred)
    box_l = _bbox(label)
    if box_p is None or box_l is None:
        return 0.0
    px1, py1, px2, py2 = box_p
    lx1, ly1, lx2, ly2 = box_l

    # Intersection; coordinates are pixel-inclusive, hence the +1 terms.
    iw = max(0, min(px2, lx2) - max(px1, lx1) + 1)
    ih = max(0, min(py2, ly2) - max(py1, ly1) + 1)
    inter = iw * ih
    area_p = (px2 - px1 + 1) * (py2 - py1 + 1)
    area_l = (lx2 - lx1 + 1) * (ly2 - ly1 + 1)
    union = area_p + area_l - inter
    iou = inter / union if union > 0 else 0.0

    # Smallest axis-aligned box enclosing both boxes (GIoU penalty term).
    enclose = (max(px2, lx2) - min(px1, lx1) + 1) * (max(py2, ly2) - min(py1, ly1) + 1)
    return iou - (enclose - union) / enclose if enclose > 0 else iou
| |
|
def compute_ciou(pred, label):
    """IoU with a normalized center-distance penalty between mask bounding boxes.

    NOTE(review): this is the DIoU-style penalty only — the aspect-ratio term
    of full CIoU is absent. Kept as-is to preserve the reported metric values.

    Args:
        pred: 2D np.uint8 array, values in {0, 1}.
        label: 2D np.uint8 array, values in {0, 1}.

    Returns:
        float score; 0.0 when either mask is empty.
    """
    def _bbox(mask):
        # Tight inclusive bounding box of the nonzero region, or None.
        ys, xs = np.nonzero(mask > 0)
        if ys.size == 0:
            return None
        return xs.min(), ys.min(), xs.max(), ys.max()

    box_p = _bbox(pred)
    box_l = _bbox(label)
    if box_p is None or box_l is None:
        return 0.0
    px1, py1, px2, py2 = box_p
    lx1, ly1, lx2, ly2 = box_l

    # Plain box IoU (pixel-inclusive coordinates, hence the +1 terms).
    iw = max(0, min(px2, lx2) - max(px1, lx1) + 1)
    ih = max(0, min(py2, ly2) - max(py1, ly1) + 1)
    inter = iw * ih
    area_p = (px2 - px1 + 1) * (py2 - py1 + 1)
    area_l = (lx2 - lx1 + 1) * (ly2 - ly1 + 1)
    union = area_p + area_l - inter
    iou = inter / union if union > 0 else 0.0

    # Squared distance between box centers, normalized by the squared
    # diagonal of the enclosing box (no +1 here, matching the original).
    d2 = ((px1 + px2) / 2 - (lx1 + lx2) / 2) ** 2 + ((py1 + py2) / 2 - (ly1 + ly2) / 2) ** 2
    c2 = (max(px2, lx2) - min(px1, lx1)) ** 2 + (max(py2, ly2) - min(py1, ly1)) ** 2
    return iou - d2 / c2 if c2 > 0 else iou
| |
|
| |
|
def to_binary_mask(img, flip=False):
    """Normalize a mask stored as 0/1 or 0/255 values into a 0/1 uint8 mask.

    Args:
        img: 2D integer array holding either a 0/1 mask or a 0/255-style mask.
        flip: when True, invert foreground and background.

    Returns:
        2D np.uint8 array with values in {0, 1}.
    """
    peak = img.max()
    if peak <= 0:
        # NOTE(review): an all-zero input yields an all-zero output even when
        # flip=True — presumably a deliberate guard for empty masks; confirm.
        return np.zeros_like(img, dtype=np.uint8)
    if peak == 1:
        binary = img.astype(np.uint8)
        return (1 - binary).astype(np.uint8) if flip else binary
    # 0/255-style encoding: threshold at the midpoint 127.
    return (img <= 127).astype(np.uint8) if flip else (img > 127).astype(np.uint8)
| |
|
def process_sample(sample, result_dir, debug=False, dataset='GeoPixInstruct'):
    """Evaluate a single sample: find its prediction mask in result_dir,
    load prediction and ground-truth label, binarize both, and compute
    IoU / gIoU / cIoU / precision metrics.

    Args:
        sample: dict with at least "image_path" and "label_path" keys.
        result_dir: directory containing predicted masks named after the image.
        debug: print per-sample progress when True.
        dataset: 'GeoPixInstruct', 'EarthReason', or 'GRASP'; selects the
            dataset-specific label-path rewriting below.

    Returns:
        (iou, giou, ciou, precisions) tuple. On any failure every metric is
        0.0 and precisions is padded to 5 zeros (only index 0 is consumed
        by the caller, which iterates thresholds [0.5]).
    """
    base_name = os.path.splitext(os.path.basename(sample["image_path"]))[0]
    try:
        if debug:
            print(f"[DEBUG] Processing sample: {base_name}")
        # Prediction file is matched by image basename prefix; the first
        # glob hit wins if several extensions exist.
        possible_files = glob.glob(os.path.join(result_dir, base_name + "*.*"))
        if not possible_files:
            print(f"[WARN] Prediction file not found for {base_name} in {result_dir}, all metrics set to 0")
            return (0.0, 0.0, 0.0, [0.0, 0.0, 0.0, 0.0, 0.0])
        pred_path = possible_files[0]
        label_path = sample["label_path"]
        # Dataset-specific rewriting of the annotation's label path onto the
        # local filesystem layout. These transforms assume the exact path
        # formats stored in each dataset's JSON — fragile by design.
        if dataset == "GeoPixInstruct":
            # Strip the 'Data-Source/' prefix and a 6-char suffix, drop a
            # trailing underscore, then remap to the local 'gray' PNG tree.
            label_path = label_path.split('Data-Source/')[-1][:-6]
            if label_path.endswith('_'):
                label_path = label_path[:-1]
            label_path = '/home/l/Files/' + label_path.replace('labels_valid', 'gray') + '.png'
        elif dataset == "EarthReason":
            label_path = '/home/l/YanJiafeng/GRASP/' + label_path.split('RS-RS/')[-1]
        elif dataset == "GRASP":
            label_path = '/home/l/Files/GRASP_test/' + label_path

        if debug:
            print(f"[DEBUG] label_path: {label_path}, pred_path: {pred_path}")
        label = cv2.imread(label_path, cv2.IMREAD_GRAYSCALE)
        pred = cv2.imread(pred_path, cv2.IMREAD_GRAYSCALE)
        if label is None:
            print(f"[ERROR] Failed to read label: {label_path}")
            return (0.0, 0.0, 0.0, [0.0, 0.0, 0.0, 0.0, 0.0])
        if pred is None:
            print(f"[ERROR] Failed to read pred: {pred_path}")
            return (0.0, 0.0, 0.0, [0.0, 0.0, 0.0, 0.0, 0.0])

        # GeoPixInstruct labels apparently use inverted polarity (foreground
        # stored as 0), hence flip=True only for that dataset — TODO confirm.
        label_bin = to_binary_mask(label, flip=(dataset=="GeoPixInstruct"))
        pred_bin = to_binary_mask(pred)

        if label_bin.shape != pred_bin.shape:
            print(f"[ERROR] Shape mismatch: label {label_bin.shape}, pred {pred_bin.shape} for {base_name}")
            return (0.0, 0.0, 0.0, [0.0, 0.0, 0.0, 0.0, 0.0])
        # Pixel-level IoU over the flattened masks.
        iou = jaccard_score(label_bin.flatten(), pred_bin.flatten(), zero_division=0)
        giou = compute_giou(pred_bin, label_bin)
        ciou = compute_ciou(pred_bin, label_bin)
        precisions = []
        # NOTE: the loop variable t is unused — precision_score here takes no
        # threshold; the list just mirrors the [0.5] threshold labeling used
        # by the caller's "precision@0.5" key.
        for t in [0.5]:
            p = precision_score(label_bin.flatten(), pred_bin.flatten(), zero_division=0)
            precisions.append(p)
        if debug:
            print(f"[DEBUG] {base_name} done: IoU={iou:.4f}, gIoU={giou:.4f}, cIoU={ciou:.4f}")
        return (iou, giou, ciou, precisions)
    except Exception as e:
        # Best-effort evaluation: log the failure and score the sample as 0
        # rather than aborting the whole run.
        print(f"[EXCEPTION] Error processing {base_name}: {e}")
        import traceback
        traceback.print_exc()
        return (0.0, 0.0, 0.0, [0.0, 0.0, 0.0, 0.0, 0.0])
| | |
| |
|
| | import pandas as pd |
| | import os |
| | from openpyxl import load_workbook |
| |
|
def save_metrics(data_dict, file_path="metrics.xlsx", sheet_name="Sheet1", mode="append"):
    """Incrementally persist a metrics dict as one row of an Excel sheet.

    Args:
        data_dict: mapping of column name -> value, written as a single row.
        file_path: target Excel file path.
        sheet_name: worksheet name (default "Sheet1").
        mode: "append" to add a row to the existing sheet,
            "replace" to overwrite the whole file with just this row.

    Raises:
        RuntimeError: when appending to an existing file fails.
    """
    file_exists = os.path.isfile(file_path)

    # Fresh file, or explicit replace: write a single-row sheet and stop.
    if not file_exists or mode == "replace":
        pd.DataFrame([data_dict]).to_excel(file_path, sheet_name=sheet_name, index=False)
        print(f"{'创建' if not file_exists else '替换'}Excel文件: {file_path}")
        return

    try:
        # Read the current rows of the target sheet (empty frame if absent).
        with pd.ExcelFile(file_path) as xls:
            existing_df = (
                pd.read_excel(xls, sheet_name=sheet_name)
                if sheet_name in xls.sheet_names
                else pd.DataFrame()
            )

        combined_df = pd.concat([existing_df, pd.DataFrame([data_dict])], ignore_index=True)

        # Rewrite only the target sheet; other sheets in the workbook stay intact.
        with pd.ExcelWriter(file_path, engine='openpyxl', mode='a', if_sheet_exists='replace') as writer:
            combined_df.to_excel(writer, sheet_name=sheet_name, index=False)

        print(f"成功使用pandas追加数据到Excel文件: {file_path}")

    except Exception as e2:
        print(f"所有方法失败: {e2}")
        raise RuntimeError("无法保存数据到Excel文件")
| | |
def evaluate_segmentation(qa_json_path, result_dir, num_workers=8, debug=False, dataset='GeoPixInstruct'):
    """Evaluate every sample listed in a QA JSON against predictions in
    result_dir, average the metrics, and append them to the metrics Excel.

    Args:
        qa_json_path: path to a JSON file containing a list of sample dicts.
        result_dir: directory of predicted masks; its path structure is also
            parsed below to derive model/dataset names.
        num_workers: thread-pool size for parallel sample processing.
        debug: forwarded to process_sample for verbose logging.
        dataset: dataset identifier forwarded to process_sample.

    Returns:
        dict of averaged metrics plus 'model_name' and 'dataset_name'.
    """
    with open(qa_json_path, 'r', encoding='utf-8') as f:
        samples = json.load(f)

    # Per-sample metric accumulators; averaged at the end.
    metrics = {
        "mIoU": [],
        "gIoU": [],
        "cIoU": [],
        "precision@0.5": []
    }
    print(f'Currently evaluating: {dataset}')

    start_time = time()
    results = []
    # I/O-bound work (glob + cv2.imread per sample), so threads suffice.
    with ThreadPoolExecutor(max_workers=num_workers) as executor:
        futures = [executor.submit(process_sample, sample, result_dir, debug, dataset) for sample in samples]
        for idx, f in enumerate(tqdm(as_completed(futures), total=len(futures), desc="Evaluating")):
            try:
                res = f.result()
                if res is not None:
                    iou, giou, ciou, precisions = res
                    metrics["mIoU"].append(iou)
                    metrics["gIoU"].append(giou)
                    metrics["cIoU"].append(ciou)
                    # Single threshold today; the loop keeps the key format
                    # "precision@<t>" extensible to more thresholds.
                    for pidx, t in enumerate([0.5]):
                        metrics[f"precision@{t}"].append(precisions[pidx])
                else:
                    if debug:
                        print(f"[DEBUG] Future {idx} returned None")
            except Exception as e:
                print(f"[EXCEPTION] Error in future {idx}: {e}")
                import traceback
                traceback.print_exc()

    elapsed = time() - start_time
    print(f"\n✅ Evaluation completed in {elapsed:.2f} seconds")

    # Mean over all samples; 0.0 when a metric list is empty.
    avg_metrics = {k: float(np.mean(v)) if v else 0.0 for k, v in metrics.items()}
    print("\n📊 Average Metrics:")
    for k, v in avg_metrics.items():
        print(f"{k}: {v:.4f}")

    # Derive model/dataset names from the result_dir layout, assumed to be
    # ".../contrast_test_result/<model>/<dataset>/Binary..." — NOTE(review):
    # this raises if the path has a different shape; confirm all result dirs
    # follow the convention.
    model_dataset_name = result_dir.split("contrast_test_result/")[-1].split("/Binary")[0]
    model_name, dataset_name = model_dataset_name.split("/")
    # A short trailing '-<size>' segment (e.g. '-7b') is treated as a model
    # size suffix and folded into the model name.
    model_size = result_dir.split('-')[-1]
    if len(model_size) <= 3:
        model_name += '-' + model_size
    avg_metrics['model_name'] = model_name
    avg_metrics['dataset_name'] = dataset_name

    # Persist the averaged row to the default metrics.xlsx workbook.
    save_metrics(avg_metrics)

    return avg_metrics
| |
|
| | |
| | if __name__ == "__main__": |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
| | qa_json = { |
| | "GeoPixInstruct": "/home/l/YanJiafeng/GRASP/GRASP-data/in_domain_test/GeoPixInstruct/test.json", |
| | "EarthReason": "/home/l/YanJiafeng/GRASP/GRASP-data/in_domain_test/EarthReason/test.json", |
| | "GRASP": "/home/l/Files/GRASP_test/grasp_test_annotation.json" |
| | } |
| | result_folder = { |
| | "GeoPixInstruct": [ |
| | |
| | '/home/l/Files/GRASP_contrast_results/contrast_test_result/GeoPixel/GeoPixInstruct/Binary', |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | ], |
| | "EarthReason": [ |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | ], |
| | "GRASP": [ |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | ] |
| | } |
| | debug = False |
| |
|
| | for dataset in ['GeoPixInstruct', 'EarthReason', 'GRASP']: |
| | qa = qa_json[dataset] |
| | for result_dir in result_folder[dataset]: |
| | evaluate_segmentation(qa, result_dir, num_workers=16, debug=debug, dataset=dataset) |