2004
@article{VNK2004,
vgclass = {refpap},
author = {Van der Weken, Dietrich and Nachtegael, Mike and Kerre,
Etienne E.},
title = {Using similarity measures and homogeneity for the
comparison of images},
journal = {Image and Vision Computing},
volume = {22},
number = {9},
pages = {695--702},
year = {2004},
abstract = {Fuzzy techniques can be applied in several domains of
image processing. In this paper, we will show how notions of fuzzy set
theory are used in establishing measures for image comparison.
Objective quality measures or measures of comparison are of great
importance in the field of image processing. These measures serve as a
tool to evaluate and to compare different algorithms designed to solve
problems, such as noise reduction, deblurring, compression, etc.
Consequently these measures serve as a basis on which one algorithm is
preferred to another. It is well known that classical quality measures,
such as the MSE (mean square error) or the PSNR
(peak signal-to-noise ratio), do not always correspond to visual
observations. Therefore, several researchers are, and have been,
looking for new quality measures, better adapted to human
perception.
Van der Weken et al. [Proceedings of ICASSP'2002, Orlando, 2002] gave
an overview of similarity measures, originally introduced to express
the degree of comparison between two fuzzy sets, which can be applied
to images. These similarity measures are all pixel-based, and therefore
do not always yield satisfactory results. To cope with this drawback,
we propose similarity measures based on neighbourhoods, so that the
relevant structures of the images are observed better. In this way, 13
new similarity measures were found to be appropriate for the comparison
of images.},
}
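
For quick reference alongside this entry, the classical quality measures
the abstract contrasts against (MSE and PSNR) can be sketched as follows.
This is a minimal NumPy sketch assuming two equally sized 8-bit grayscale
images; it does not reproduce the paper's fuzzy or neighbourhood-based
similarity measures, and the function names mse/psnr are illustrative.

    import numpy as np

    def mse(a, b):
        # Mean square error between two equally sized grayscale images.
        a = np.asarray(a, dtype=np.float64)
        b = np.asarray(b, dtype=np.float64)
        return float(np.mean((a - b) ** 2))

    def psnr(a, b, max_value=255.0):
        # Peak signal-to-noise ratio in dB; max_value defaults to the
        # 8-bit dynamic range. Higher values mean greater similarity
        # under this measure; identical images give infinity.
        err = mse(a, b)
        if err == 0.0:
            return float("inf")
        return 10.0 * np.log10((max_value ** 2) / err)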