%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 04:35:17 PM

@TECHREPORT{Luyet_Idiap-RR-04-2016,
         author = {Luyet, Gil and Dighe, Pranay and Asaei, Afsaneh and Bourlard, Herv{\'{e}}},
       keywords = {automatic speech recognition (ASR), deep neural network (DNN), k-nearest neighbor (kNN) search, low-rank representation (LRR), posterior probability},
       projects = {Idiap},
          month = {3},
          title = {Low-Rank Representation of Nearest Neighbor Phone Posterior Probabilities to Enhance DNN Acoustic Modeling},
           type = {Idiap-RR},
         number = {Idiap-RR-04-2016},
           year = {2016},
    institution = {Idiap},
       abstract = {We hypothesize that optimal deep neural network (DNN) class-conditional posterior probabilities live in a union of low-dimensional subspaces. In real test conditions, DNN posteriors encode uncertainties which can be regarded as a superposition of unstructured sparse noise on the optimal posteriors. We aim to investigate different ways to structure the DNN outputs by exploiting low-rank representation (LRR) techniques. Using a large number of training posterior vectors, the underlying low-dimensional subspace is identified through nearest neighbor analysis, and low-rank decomposition enables separation of the ``optimal'' posteriors from the spurious uncertainties at the DNN output. Experiments demonstrate that, by processing subsets of posteriors which possess strong subspace similarity, low-rank representation enables enhancement of posterior probabilities and leads to higher speech recognition accuracy in the hybrid DNN-hidden Markov model (HMM) system.},
            pdf = {https://publications.idiap.ch/attachments/reports/2016/Luyet_Idiap-RR-04-2016.pdf}
}
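
% A minimal sketch (not taken from the report) of the low-rank enhancement idea
% summarized in the abstract above: for each test posterior vector, gather its k
% nearest training posteriors and rebuild it from a rank-r truncated SVD of that
% neighborhood. Names such as lrr_enhance, test_post, train_post, k and rank are
% illustrative assumptions, not the authors' code or parameter choices.

import numpy as np
from sklearn.neighbors import NearestNeighbors

def lrr_enhance(test_post, train_post, k=50, rank=5, eps=1e-12):
    # test_post: (n_test, n_classes) DNN posteriors to enhance (assumed layout)
    # train_post: (n_train, n_classes) posterior vectors collected on training data
    nn = NearestNeighbors(n_neighbors=k).fit(train_post)
    _, idx = nn.kneighbors(test_post)                         # kNN search in posterior space
    enhanced = np.empty_like(test_post)
    for i, neighbors in enumerate(idx):
        X = np.vstack([test_post[i], train_post[neighbors]])  # (k+1, n_classes) neighborhood
        U, s, Vt = np.linalg.svd(X, full_matrices=False)
        s[rank:] = 0.0                                        # keep only the leading subspace
        X_lr = (U * s) @ Vt                                    # low-rank approximation
        p = np.clip(X_lr[0], 0.0, None)                        # reconstructed test-vector row
        enhanced[i] = p / max(p.sum(), eps)                    # renormalize to a distribution
    return enhanced

% The enhanced posteriors would then replace the raw DNN outputs as scaled
% likelihoods in a hybrid DNN-HMM decoder, which is the evaluation setting the
% abstract describes.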