% Aigaion2 BibTeX export from Idiap Publications
% Thursday 21 November 2024 12:59:31 PM
@inproceedings{Korchagin_ICME-2_2011,
  author    = {Korchagin, Danil and Motlicek, Petr and Duffner, Stefan and Bourlard, Herv{\'{e}}},
  keywords  = {association rules, data analysis, multimodal signal processing, sensor fusion},
  projects  = {Idiap, TA2},
  month     = jul,
  title     = {Just-in-Time Multimodal Association and Fusion from Home Entertainment},
  booktitle = {Proceedings IEEE International Conference on Multimedia \& Expo},
  year      = {2011},
  location  = {Barcelona, Spain},
  crossref  = {Korchagin_Idiap-RR-10-2011},
  abstract  = {In this paper, we describe a real-time multimodal analysis system with just-in-time multimodal association and fusion for a living room environment, where multiple people may enter, interact and leave the observable world with no constraints. It comprises detection and tracking of up to 4 faces, detection and localisation of verbal and paralinguistic events, their association and fusion. The system is designed to be used in open, unconstrained environments like in next generation video conferencing systems that automatically ``orchestrate'' the transmitted video streams to improve the overall experience of interaction between spatially separated families and friends. Performance levels achieved to date on hand-labelled dataset have shown sufficient reliability at the same time as fulfilling real-time processing requirements.},
  pdf       = {https://publications.idiap.ch/attachments/papers/2011/Korchagin_ICME-2_2011.pdf}
}

crossreferenced publications:

@techreport{Korchagin_Idiap-RR-10-2011,
  author      = {Korchagin, Danil and Motlicek, Petr and Duffner, Stefan and Bourlard, Herv{\'{e}}},
  keywords    = {association rules, data analysis, multimodal signal processing, sensor fusion},
  projects    = {Idiap, TA2},
  month       = may,
  title       = {Just-in-Time Multimodal Association and Fusion from Home Entertainment},
  type        = {Idiap-RR},
  number      = {Idiap-RR-10-2011},
  year        = {2011},
  institution = {Idiap},
  address     = {Martigny, Switzerland},
  abstract    = {In this paper, we describe a real-time multimodal analysis system with just-in-time multimodal association and fusion for a living room environment, where multiple people may enter, interact and leave the observable world with no constraints. It comprises detection and tracking of up to 4 faces, detection and localisation of verbal and paralinguistic events, their association and fusion. The system is designed to be used in open, unconstrained environments like in next generation video conferencing systems that automatically ``orchestrate'' the transmitted video streams to improve the overall experience of interaction between spatially separated families and friends. Performance levels achieved to date on hand-labelled dataset have shown sufficient reliability at the same time as fulfilling real-time processing requirements.},
  pdf         = {https://publications.idiap.ch/attachments/reports/2011/Korchagin_Idiap-RR-10-2011.pdf}
}