1996
@inproceedings{MaA1996,
vgclass = {refpap},
vgproject = {nn},
author = {Marques, Gon\c{c}alo and Almeida, Lu\'{i}s B.},
title = {An Objective Function for Independence},
booktitle = {Proceedings of the IEEE International Conference on Neural
Networks},
year = {1996},
abstract = {The problem of separating a linear or nonlinear mixture of
independent sources has been the focus of many studies in recent years.
It is well known that the classical principal components analysis
method, which is based on second-order statistics, performs poorly even
in the linear case if the sources do not have Gaussian distributions.
Based on this fact, several algorithms take into account statistics of
order higher than second in their approach to the problem. Other
algorithms use the Kullback-Leibler divergence to find a transformation
that can separate the independent signals. Nevertheless, the great
majority of these algorithms take into account only a finite number of
statistics, usually up to the fourth order, or use some kind of
smoothed approximations. In this paper we present a new class of
objective functions for source separation. The objective functions use
statistics of all orders simultaneously, and have the advantage of
being continuous, differentiable functions that can be computed
directly from the training data. A derivation of the class of functions
for two-dimensional data, some numerical examples illustrating its
performance, and some implementation considerations are described.},
}