%Aigaion2 BibTeX export from Idiap Publications
%Wednesday 17 July 2024 11:33:04 AM

@INPROCEEDINGS{Lundin-96,
         author = {Lundin, Tomas and Fiesler, Emile and Moerland, Perry},
       projects = {Idiap},
          title = {Connectionist Quantization Functions},
      booktitle = {Proceedings of the '96 {SIPAR}-Workshop on Parallel and Distributed Computing},
           year = {1996},
       location = {Geneva, Switzerland},
   organization = {Scientific and Parallel Computing Group, University of Geneva},
        abstract = {One of the main strengths of connectionist systems, also known as neural networks, is their massive parallelism. However, most neural networks are simulated on serial computers, where the advantage of massive parallelism is lost. For large and real-world applications, parallel hardware implementations are therefore essential. Since a discretization or quantization of the neural network parameters is of great benefit for both analog and digital hardware implementations, it is the focus of study in this paper. In 1987 a successful weight discretization method was developed, which is flexible and produces networks with few discretization levels and without significant loss of performance. However, recent studies have shown that the chosen quantization function is not optimal. In this paper, new quantization functions are introduced and evaluated to improve the performance of this flexible weight discretization method.},
            pdf = {https://publications.idiap.ch/attachments/reports/1996/96sipar.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/papers/neural/lundin.quant.ps.gz},
           dates = {October 4, 1996},
   ipdmembership = {learning},
}
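% Illustration (not from the paper): the abstract above concerns mapping neural
% network weights onto a small number of discrete levels for hardware
% implementations. The commented-out Python sketch below shows one generic
% uniform weight-quantization function of that kind, assuming NumPy; the
% function name, default level count, and rounding scheme are illustrative
% assumptions, not the quantization functions studied by Lundin et al.
%
%   import numpy as np
%
%   def quantize_weights(weights, n_levels=7):
%       # Map continuous weights onto n_levels equally spaced values
%       # spanning [-max|w|, +max|w|]. Illustrative sketch only.
%       w = np.asarray(weights, dtype=float)
%       w_max = np.max(np.abs(w))
%       if w_max == 0.0 or n_levels < 2:
%           return w.copy()
%       step = 2.0 * w_max / (n_levels - 1)
%       return np.round(w / step) * step
%
%   # Example: a small weight vector reduced to 7 discrete levels.
%   print(quantize_weights([0.12, -0.8, 0.45, 0.0], n_levels=7))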