%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 12:48:32 PM

@article{Bittar_FRONT.NEUROSCI._2022,
         author = {Bittar, Alexandre and Garner, Philip N.},
       projects = {Idiap, NAST},
          month = aug,
          title = {A surrogate gradient spiking baseline for speech command recognition},
        journal = {Frontiers in Neuroscience},
           year = {2022},
            url = {https://www.frontiersin.org/articles/10.3389/fnins.2022.865897/full},
            doi = {10.3389/fnins.2022.865897},
       abstract = {Artificial neural networks (ANNs) are the basis of recent advances in artificial intelligence (AI); they typically use real valued neuron responses. By contrast, biological neurons are known to operate using spike trains. In principle, spiking neural networks (SNNs) may have a greater representational capability than ANNs, especially for time series such as speech; however their adoption has been held back by both a lack of stable training algorithms and a lack of compatible baselines. We begin with a fairly thorough review of literature around the conjunction of ANNs and SNNs. Focusing on surrogate gradient approaches, we proceed to define a simple but relevant evaluation based on recent speech command tasks. After evaluating a representative selection of architectures, we show that a combination of adaptation, recurrence and surrogate gradients can yield light spiking architectures that are not only able to compete with ANN solutions, but also retain a high degree of compatibility with them in modern deep learning frameworks. We conclude tangibly that SNNs are appropriate for future research in AI, in particular for speech processing applications, and more speculatively that they may also assist in inference about biological function.}
}