%Aigaion2 BibTeX export from Idiap Publications
%Monday 29 April 2024 06:14:45 PM

@INPROCEEDINGS{Thimm-96.5,
         author = {Thimm, Georg and Fiesler, Emile},
         editor = {Furuhashi, Takeshi},
       keywords = {generalization, network size, neural network, neural network optimization, parameters, pruning},
       projects = {Idiap},
          month = {8},
          title = {Neural Network Pruning and Pruning Parameters},
      booktitle = {The 1st Workshop on Soft Computing},
           year = {1996},
    organization = {Dept. of Information Electronics, Nagoya University},
        address = {Furo-cho, Chikusa-ku, Nagoya 464-01, Japan},
            note = {Published at http://www.bioele.nuee.nagoya-u.ac.jp/wsc1/},
        abstract = {The default multilayer neural network topology is a fully interlayer-connected one. This simplistic choice facilitates the design but limits the performance of the resulting neural networks. The best-known methods for obtaining partially connected neural networks are the so-called pruning methods, which are used for optimizing both the size and the generalization capabilities of neural networks. Two of the most promising pruning techniques have therefore been selected for a comparative study. It is shown that these novel techniques are hampered by having numerous user-tunable parameters, which can easily nullify the benefits of these advanced methods. Finally, based on the results, conclusions about the execution of experiments and suggestions for conducting future research on neural network pruning are drawn.},
            pdf = {https://publications.idiap.ch/attachments/papers/1996/prune96.pdf},
         ipdhtml = {https://www.idiap.ch/nn-papers/MLP_pruning_parameters/pruning.html},
   ipdmembership = {learning},
     ipdpriority = {7},
}