%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 12:55:15 PM

@INPROCEEDINGS{misr03,
         author = {Misra, Hemant and Bourlard, Herv{\'{e}} and Tyagi, Vivek},
       projects = {Idiap},
          month = apr,
          title = {New Entropy Based Combination Rules in {HMM/ANN} Multi-stream {ASR}},
      booktitle = {Proceedings of IEEE International Conference on Acoustics, Speech, and Signal Processing (ICASSP)},
           year = {2003},
        address = {Hong Kong},
           note = {IDIAP-RR 02-31},
       crossref = {misra-rr-02-31},
       abstract = {Classifier performance is often enhanced by combining multiple streams of information. In the context of multi-stream HMM/ANN systems for ASR, a confidence measure widely used in classifier combination is the entropy of the posterior distribution output by each ANN, which generally increases as classification becomes less reliable. The rule most commonly used is to select the ANN with the minimum entropy. However, this is not necessarily the best way to use entropy in classifier combination. In this article, we test three new entropy-based combination rules in a full-combination multi-stream HMM/ANN system for noise-robust speech recognition. The best results were obtained by combining all classifiers whose entropy was below average, with weights proportional to their inverse entropy.},
            pdf = {https://publications.idiap.ch/attachments/reports/2003/misra_2003_icassp.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2003/misra_2003_icassp.ps.gz},
  ipdmembership = {speech},
}
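
% A minimal sketch of the combination rules summarized in the abstract above,
% assuming K parallel ANN streams with frame-level posterior vectors
% P_k(q_j|x_k) and output entropies H_k, and assuming the weights are
% normalized to sum to one; the notation is illustrative and not taken from
% the paper itself.
%
%   minimum-entropy baseline:
%     P(q_j|x) = P_{k^*}(q_j|x_{k^*}),   k^* = \arg\min_k H_k
%
%   inverse-entropy weighting over below-average-entropy streams
%   (the rule the abstract reports as giving the best results):
%     S   = \{ k : H_k < \tfrac{1}{K}\sum_{m=1}^{K} H_m \}
%     w_k = \frac{1/H_k}{\sum_{m \in S} 1/H_m} \;\text{for } k \in S, \qquad w_k = 0 \;\text{otherwise}
%     P(q_j|x) = \sum_{k \in S} w_k \, P_k(q_j|x_k)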



% Cross-referenced publications:
@TECHREPORT{misra-rr-02-31,
         author = {Misra, Hemant and Bourlard, Herv{\'{e}} and Tyagi, Vivek},
       projects = {Idiap},
          title = {Entropy-based Multi-stream Combination},
           type = {Idiap-RR},
         number = {Idiap-RR-31-2002},
           year = {2002},
    institution = {IDIAP},
        address = {Martigny, Switzerland},
           note = {In Proceedings of the IEEE International Conference on Acoustics, Speech, and Signal Processing (ICASSP), 2003},
       abstract = {The full-combination multi-band approach has been proposed in the literature and performs well for band-limited noise, but it fails to deliver in the case of wide-band noise. To overcome this, multi-stream approaches have been proposed in the literature with varying degrees of success. Based on our observation that, for a classifier trained on clean speech, the entropy at the output of the classifier increases in the presence of noise at its input, we use entropy as a confidence measure to weight each classifier's output. In this paper, we propose a new entropy-based combination strategy for the full-combination multi-stream approach. In this approach, each stream is weighted in inverse proportion to the output entropy of its stream-specific classifier. A few variations of this basic approach are also suggested. It is observed that the word error rate (WER) achieved by the proposed combination methods is better across different types of noise and their different signal-to-noise ratios (SNRs). An interesting relationship is observed between the WER performance of the different combination methods and their respective entropies.},
            pdf = {https://publications.idiap.ch/attachments/reports/2002/rr02-31.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2002/rr02-31.ps.gz},
        ipdinar = {2002},
  ipdmembership = {speech},
       language = {English},
}
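
% A minimal sketch of the confidence measure and the basic weighting described
% in the abstract above, assuming each stream-specific classifier outputs a
% posterior vector over J classes at every frame; the notation is illustrative
% and not taken from the report itself.
%
%   H_k      = -\sum_{j=1}^{J} P_k(q_j|x_k) \log P_k(q_j|x_k)   (output entropy of stream k)
%   w_k      \propto 1/H_k, \qquad \sum_{k=1}^{K} w_k = 1        (inverse-entropy weight)
%   P(q_j|x) = \sum_{k=1}^{K} w_k \, P_k(q_j|x_k)                (full-combination posterior)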