%Aigaion2 BibTeX export from Idiap Publications
%Friday 03 May 2024 11:13:17 AM

@TECHREPORT{Thimm-97.3,
         author = {Thimm, Georg and Fiesler, Emile},
       keywords = {generalization performance, high order perceptron, network topology, neural network optimization, optimal network size, pruning},
       projects = {Idiap},
          title = {Pruning of Neural Networks},
           type = {Idiap-RR},
         number = {Idiap-RR-03-1997},
           year = {1997},
    institution = {IDIAP},
       abstract = {Choosing a suitable topology for a neural network, given an application, is a difficult problem. Usually, after a tedious trial-and-error process, an oversized topology is chosen, which is prone to various drawbacks, such as a high demand on computational resources and a high generalization error. One way to solve this is to trim the network size during the training process. This is done with so-called \emph{pruning} methods, of which an overview is given. From these methods, those that are potentially suitable for high order perceptrons are selected and then adapted accordingly. Next, they are tested on a variety of benchmarks by means of a large number of experiments. The conclusions are both of a generic nature, pointing out some pitfalls of neural network pruning in general, and of a more specific nature, identifying the best pruning methods for high order perceptrons.},
            pdf = {https://publications.idiap.ch/attachments/reports/1997/rr97-03.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/1997/rr97-03.ps.gz},
  ipdmembership = {learning},
       nofpages = {17},
}