% Aigaion2 BibTeX export from Idiap Publications
% Thursday 21 November 2024 12:53:37 PM

@inproceedings{Anemueller_ICMI2008_2008,
  author    = {Anemueller, Joern and Back, Joerg-Henrik and Caputo, Barbara and Havlena, Michal and Luo, Jie and Kayser, Hendrik and Leibe, Bastian and Motlicek, Petr and Pajdla, Tomas and Pavel, Misha and Torii, Akihiko and Van Gool, Luc and Zweig, Alon and Hermansky, Hynek},
  projects  = {Idiap, DIRAC},
  title     = {The {DIRAC} {AWEAR} Audio-Visual Platform for Detection of Unexpected and Incongruent Events},
  booktitle = {Proceedings of the International Conference on Multimodal Interfaces},
  year      = {2008},
  crossref  = {Anemueller_Idiap-RR-41-2010},
  abstract  = {It is of prime importance in everyday human life to cope with and respond appropriately to events that are not foreseen by prior experience. Machines to a large extent lack the ability to respond appropriately to such inputs. An important class of unexpected events is defined by incongruent combinations of inputs from different modalities and therefore multimodal information provides a crucial cue for the identification of such events, e.g., the sound of a voice is being heard while the person in the field-of-view does not move her lips. In the project DIRAC (``Detection and Identification of Rare Audio-visual Cues'') we have been developing algorithmic approaches to the detection of such events, as well as an experimental hardware platform to test it. An audio-visual platform (``AWEAR'' -- audio-visual wearable device) has been constructed with the goal to help users with disabilities or a high cognitive load to deal with unexpected events. Key hardware components include stereo panoramic vision sensors and 6-channel worn-behind-the-ear (hearing aid) microphone arrays. Data have been recorded to study audio-visual tracking, a/v scene/object classification and a/v detection of incongruencies.},
  pdf       = {https://publications.idiap.ch/attachments/papers/2008/Anemueller_ICMI2008_2008.pdf},
}

% crossreferenced publications:
% NOTE: classic BibTeX requires the crossref parent to appear AFTER all
% entries that reference it, so this @techreport must stay below the
% @inproceedings entry above.
@techreport{Anemueller_Idiap-RR-41-2010,
  author      = {Anemueller, Joern and Back, Joerg-Henrik and Caputo, Barbara and Havlena, Michal and Luo, Jie and Kayser, Hendrik and Leibe, Bastian and Motlicek, Petr and Pajdla, Tomas and Pavel, Misha and Torii, Akihiko and Van Gool, Luc and Hermansky, Hynek and Zweig, Alon},
  projects    = {Idiap, DIRAC},
  month       = nov,
  title       = {The {DIRAC} {AWEAR} Audio-Visual Platform for Detection of Unexpected and Incongruent Events},
  type        = {Idiap-RR},
  number      = {Idiap-RR-41-2010},
  year        = {2010},
  institution = {Idiap},
  abstract    = {It is of prime importance in everyday human life to cope with and respond appropriately to events that are not foreseen by prior experience. Machines to a large extent lack the ability to respond appropriately to such inputs. An important class of unexpected events is defined by incongruent combinations of inputs from different modalities and therefore multimodal information provides a crucial cue for the identification of such events, e.g., the sound of a voice is being heard while the person in the field-of-view does not move her lips. In the project DIRAC (``Detection and Identification of Rare Audio-visual Cues'') we have been developing algorithmic approaches to the detection of such events, as well as an experimental hardware platform to test it. An audio-visual platform (``AWEAR'' -- audio-visual wearable device) has been constructed with the goal to help users with disabilities or a high cognitive load to deal with unexpected events. Key hardware components include stereo panoramic vision sensors and 6-channel worn-behind-the-ear (hearing aid) microphone arrays. Data have been recorded to study audio-visual tracking, a/v scene/object classification and a/v detection of incongruencies.},
  pdf         = {https://publications.idiap.ch/attachments/reports/2008/Anemueller_Idiap-RR-41-2010.pdf},
}