From d139f5afcface3956269ab635696a3ddce2c4516 Mon Sep 17 00:00:00 2001
From: myh
Date: Mon, 21 Apr 2025 17:51:32 +0800
Subject: [PATCH] Evaluation metrics
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 image_fusion/evaluate_module/evaluate.py | 73 +++++++++++++++++++++++
 1 file changed, 73 insertions(+)
 create mode 100644 image_fusion/evaluate_module/evaluate.py

diff --git a/image_fusion/evaluate_module/evaluate.py b/image_fusion/evaluate_module/evaluate.py
new file mode 100644
index 0000000..1ac811f
--- /dev/null
+++ b/image_fusion/evaluate_module/evaluate.py
@@ -0,0 +1,73 @@
+import numpy as np
+import cv2
+from skimage.metrics import structural_similarity as ssim
+from skimage.filters import sobel
+
+
+# Mutual information between two grayscale images, computed from their joint histogram
+def evaluate_mutual_information(img1_gray, img2_gray):
+    hist_2d, _, _ = np.histogram2d(img1_gray.ravel(), img2_gray.ravel(), bins=256)
+    pxy = hist_2d / float(np.sum(hist_2d))
+    px = np.sum(pxy, axis=1)
+    py = np.sum(pxy, axis=0)
+    px_py = np.outer(px, py)
+    nzs = pxy > 0
+    mi = np.sum(pxy[nzs] * np.log(pxy[nzs] / px_py[nzs]))
+    return mi
+
+
+# SSIM between two grayscale images (registration quality)
+def evaluate_registration_ssim(img1_gray, img2_gray):
+    return ssim(img1_gray, img2_gray)
+
+
+# Entropy of the fused image's grayscale histogram (fusion quality)
+def evaluate_fusion_entropy(fusion_img):
+    gray = cv2.cvtColor(fusion_img, cv2.COLOR_RGB2GRAY)
+    hist = cv2.calcHist([gray], [0], None, [256], [0, 256])
+    hist = hist.ravel() / hist.sum()
+    entropy = -np.sum(hist * np.log2(hist + 1e-9))
+    return entropy
+
+
+# Mean Sobel edge strength (fusion quality)
+def evaluate_fusion_edges(fusion_img):
+    gray = cv2.cvtColor(fusion_img, cv2.COLOR_RGB2GRAY)
+    edges = sobel(gray.astype(float) / 255.0)
+    return np.mean(edges)
+
+
+# SSIM between the fused image and one of the source images
+def evaluate_fusion_ssim(fusion_img, reference_img):
+    fusion_gray = cv2.cvtColor(fusion_img, cv2.COLOR_RGB2GRAY)
+    ref_gray = cv2.cvtColor(reference_img, cv2.COLOR_RGB2GRAY)
+    return ssim(fusion_gray, ref_gray)
+
+
+# Return all metrics in one place (stub images would be required to test)
+def summarize_evaluation(img1_gray, img2_gray, fusion_img, ref_img_for_ssim):
+    return {
+        "Registration SSIM": evaluate_registration_ssim(img1_gray, img2_gray),
+        "Mutual Information": evaluate_mutual_information(img1_gray, img2_gray),
+        "Fusion Entropy": evaluate_fusion_entropy(fusion_img),
+        "Fusion Edge Strength": evaluate_fusion_edges(fusion_img),
+        "Fusion SSIM (vs Ref)": evaluate_fusion_ssim(fusion_img, ref_img_for_ssim),
+    }
+
+# Wrap all metrics in a single high-level function, evaluate_all
+def evaluate_all(img1_gray, img2_gray, fusion_img, ref_img_for_ssim, verbose=True):
+    """
+    Evaluate image registration and fusion quality in one call.
+    :param img1_gray: visible-light grayscale image (original)
+    :param img2_gray: infrared grayscale image (after registration)
+    :param fusion_img: fused image (RGB)
+    :param ref_img_for_ssim: visible-light RGB image used as the SSIM reference
+    :param verbose: whether to print the results
+    :return: dict of metric results
+    """
+    results = summarize_evaluation(img1_gray, img2_gray, fusion_img, ref_img_for_ssim)
+    if verbose:
+        print("Image evaluation metrics:")
+        for k, v in results.items():
+            print(f"{k}: {v:.4f}")
+    return results
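
A minimal usage sketch for the new evaluate_all entry point (illustrative only, not part of the patch): the file names are hypothetical, and it assumes the visible/infrared pair has already been registered and fused elsewhere in image_fusion, with all images at the same resolution.

    import cv2
    from image_fusion.evaluate_module.evaluate import evaluate_all

    # Hypothetical input files: a registered grayscale pair plus the fused result
    visible_gray = cv2.imread("visible.png", cv2.IMREAD_GRAYSCALE)
    infrared_gray = cv2.imread("infrared_registered.png", cv2.IMREAD_GRAYSCALE)

    # OpenCV loads BGR; the evaluation functions expect RGB input
    fused_rgb = cv2.cvtColor(cv2.imread("fused.png"), cv2.COLOR_BGR2RGB)
    visible_rgb = cv2.cvtColor(cv2.imread("visible.png"), cv2.COLOR_BGR2RGB)

    # Prints each metric and returns them as a dict (verbose defaults to True)
    results = evaluate_all(visible_gray, infrared_gray, fused_rgb, visible_rgb)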