Search results for key=LHL1994: 1 match found.

Refereed full papers (journals, book chapters, international conferences)

1994

@inproceedings{LHL1994,
	vgclass =	{refpap},
	vgproject =	{nn},
	author =	{Benny Lautrup and Lars Kai Hansen and Ian Law and Niels
	M{\o}rch and Claus Svarer and Stephen Strother},
	title =	{Massive Weight-Sharing: A Cure for Extremely Ill-Posed
	Problems},
	editor =	{H. Herrmann and D. Wolf and E. P\"{o}ppel},
	booktitle =	{Workshop on Supercomputing in Brain Research: From
	Tomography to Neural Networks, J\"{u}lich, Germany},
	pages =	{137},
	organization =	{HLRZ},
	publisher =	{World Scientific},
	month =	{November},
	year =	{1994},
	abstract =	{In most learning problems, adaptation to given examples is
	well-posed because the number of examples far exceeds the number of
	internal parameters in the learning machine. Extremely ill-posed
	learning problems are, however, common in image and spectral analysis.
	They are characterized by a vast number of highly correlated inputs,
	\emph{e.g.} pixel or pin values, and a modest number of patterns,
	\emph{e.g.} images or spectra. In this paper we show, for the case of a
	set of PET images differing only in the value of one stimulus
	parameter, that it is possible to train a neural network to learn the
	underlying rule without using an excessive number of network weights or
	large amounts of computer time. The method is based upon the
	observation that the standard learning rules conserve the subspace
	spanned by the input images.},
}
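
The closing observation in the abstract, that the standard learning rules conserve the subspace spanned by the input images, can be illustrated with a small numerical sketch. The Python/NumPy snippet below is not taken from the paper; the network size, data, and variable names are invented for illustration. It trains a one-hidden-layer network by gradient descent on a few high-dimensional "images" and then checks that the total change in the input-layer weights has essentially no component orthogonal to the span of the inputs.

import numpy as np

rng = np.random.default_rng(0)
n_inputs, n_patterns, n_hidden = 1000, 8, 4      # many inputs, few patterns
X = rng.normal(size=(n_patterns, n_inputs))      # hypothetical "images" as rows
y = rng.normal(size=n_patterns)                  # one stimulus parameter per image

W = 0.01 * rng.normal(size=(n_hidden, n_inputs)) # input-layer weights
v = 0.01 * rng.normal(size=n_hidden)             # output weights
W0 = W.copy()

lr = 1e-3
for _ in range(200):
    h = np.tanh(X @ W.T)                         # hidden activations, (patterns, hidden)
    err = h @ v - y                              # output errors
    delta = (err[:, None] * v) * (1.0 - h**2)    # backpropagated hidden-unit errors
    W -= lr * (delta.T @ X)                      # each row of this update is a combination of input images
    v -= lr * (h.T @ err)

Q, _ = np.linalg.qr(X.T)                         # orthonormal basis for the span of the inputs
dW = W - W0
residual = dW - (dW @ Q) @ Q.T                   # component of the weight change orthogonal to that span
print("orthogonal residual:", np.linalg.norm(residual))
print("total weight change:", np.linalg.norm(dW))

The printed residual is at the level of numerical round-off, because every gradient update to the input-layer weights is a linear combination of the training images. Roughly speaking, this is what makes weight sharing on this scale possible: the trainable part of the input-layer weights lives in a subspace whose dimension is at most the number of patterns, so it can be parametrized by a small coefficient matrix over the input images instead of one free parameter per pixel.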