%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 01:20:46 PM

@INPROCEEDINGS{McGreevy04b,
         author = {McGreevy, Michael},
       projects = {Idiap},
          month = dec,
          title = {Pseudo-Syntactic Language Modeling for Disfluent Speech Recognition},
      booktitle = {Proceedings of SST 2004 (10th Australian International Conference on Speech Science \& Technology), Sydney, Australia},
           year = {2004},
           note = {IDIAP-RR 04-55},
       crossref = {McGreevy04a},
       abstract = {Language models for speech recognition are generally trained on text corpora. Since these corpora do not contain the disfluencies found in natural speech, there is a train/test mismatch when these models are applied to conversational speech. In this work we investigate a language model (LM) designed to model these disfluencies as a syntactic process. By modeling self-corrections we obtain an improvement over our baseline syntactic model. We also obtain a 30\% relative reduction in perplexity from the best performing standard {N-gram} model when we interpolate it with our syntactically derived models.},
            pdf = {https://publications.idiap.ch/attachments/papers/2004/mcgreevy-sst04.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/papers/speech/mcgreevy-sst04.ps.gz},
  ipdmembership = {speech},
}



% crossreferenced publications:
@TECHREPORT{McGreevy04a,
         author = {McGreevy, Michael},
       projects = {Idiap},
          title = {Pseudo-Syntactic Language Modeling for Disfluent Speech Recognition},
           type = {Idiap-RR},
         number = {Idiap-RR-55-2004},
           year = {2004},
    institution = {IDIAP},
           note = {Published in Proceedings of SST, 2004},
       abstract = {Language models for speech recognition are generally trained on text corpora. Since these corpora do not contain the disfluencies found in natural speech, there is a train/test mismatch when these models are applied to conversational speech. In this work we investigate a language model (LM) designed to model these disfluencies as a syntactic process. By modeling self-corrections we obtain an improvement over our baseline syntactic model. We also obtain a 30\% relative reduction in perplexity from the best performing standard {N-gram} model when we interpolate it with our syntactically derived models.},
            pdf = {https://publications.idiap.ch/attachments/reports/2004/rr04-55.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2004/rr04-55.ps.gz},
  ipdmembership = {speech},
}