<?xml version="1.0" encoding="UTF-8"?>
<collection xmlns="http://www.loc.gov/MARC21/slim">
	<record>
		<datafield tag="980" ind1=" " ind2=" ">
			<subfield code="a">CONF</subfield>
		</datafield>
		<datafield tag="970" ind1=" " ind2=" ">
			<subfield code="a">Korchagin_UCM_2009/IDIAP</subfield>
		</datafield>
		<datafield tag="245" ind1=" " ind2=" ">
			<subfield code="a">Memoirs of Togetherness from Audio Logs</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Korchagin, Danil</subfield>
		</datafield>
		<datafield tag="653" ind1="1" ind2=" ">
			<subfield code="a">confidence estimation</subfield>
		</datafield>
		<datafield tag="653" ind1="1" ind2=" ">
			<subfield code="a">pattern matching</subfield>
		</datafield>
		<datafield tag="653" ind1="1" ind2=" ">
			<subfield code="a">time-frequency analysis</subfield>
		</datafield>
		<datafield tag="856" ind1="4" ind2="0">
			<subfield code="i">EXTERNAL</subfield>
			<subfield code="u">http://publications.idiap.ch/attachments/papers/2009/Korchagin_UCM_2009.pdf</subfield>
			<subfield code="x">PUBLIC</subfield>
		</datafield>
		<datafield tag="856" ind1="4" ind2=" ">
			<subfield code="u">http://publications.idiap.ch/index.php/publications/showcite/Korchagin_Idiap-RR-36-2009</subfield>
			<subfield code="z">Related documents</subfield>
		</datafield>
		<datafield tag="711" ind1="2" ind2=" ">
			<subfield code="a">Proceedings International ICST Conference on User Centric Media</subfield>
			<subfield code="c">Venice, Italy</subfield>
		</datafield>
		<datafield tag="260" ind1=" " ind2=" ">
			<subfield code="c">2009</subfield>
			<subfield code="a">P.O. Box 592, CH-1920 Martigny, Switzerland</subfield>
		</datafield>
		<datafield tag="771" ind1="2" ind2=" ">
			<subfield code="d">December 2009</subfield>
		</datafield>
		<datafield tag="520" ind1=" " ind2=" ">
			<subfield code="a">In this paper, we propose a new concept of how tempo-social information about moments of togetherness within a social group of people can be retrieved in the palm of the hand from social context. The social context is digitised by audio logging of the same user centric device, such as a mobile phone. Being asynchronously driven, it allows automatic logging of social events with involved parties and thus helps to feel at home anywhere anytime and to nurture user to group relationships. The core of the algorithm is based on perceptual time-frequency analysis via a confidence estimate of dynamic cepstral pattern matching between audio logs of people within a social group. The results show robust retrieval and surpass the performance of cross correlation while keeping lower system requirements.</subfield>
		</datafield>
	</record>
	<record>
		<datafield tag="980" ind1=" " ind2=" ">
			<subfield code="a">REPORT</subfield>
		</datafield>
		<datafield tag="970" ind1=" " ind2=" ">
			<subfield code="a">Korchagin_Idiap-RR-36-2009/IDIAP</subfield>
		</datafield>
		<datafield tag="245" ind1=" " ind2=" ">
			<subfield code="a">Memoirs of Togetherness from Audio Logs</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Korchagin, Danil</subfield>
		</datafield>
		<datafield tag="856" ind1="4" ind2="0">
			<subfield code="i">EXTERNAL</subfield>
			<subfield code="u">http://publications.idiap.ch/attachments/reports/2009/Korchagin_Idiap-RR-36-2009.pdf</subfield>
			<subfield code="x">PUBLIC</subfield>
		</datafield>
		<datafield tag="088" ind1=" " ind2=" ">
			<subfield code="a">Idiap-RR-36-2009</subfield>
		</datafield>
		<datafield tag="260" ind1=" " ind2=" ">
			<subfield code="c">2009</subfield>
			<subfield code="b">Idiap</subfield>
			<subfield code="a">P.O. Box 592, CH-1920 Martigny, Switzerland</subfield>
		</datafield>
		<datafield tag="771" ind1="2" ind2=" ">
			<subfield code="d">December 2009</subfield>
		</datafield>
		<datafield tag="520" ind1=" " ind2=" ">
			<subfield code="a">In this paper, we propose a new concept of how tempo-social information about moments of togetherness within a social group of people can be retrieved in the palm of the hand from social context. The social context is digitised by audio logging of the same user centric device, such as a mobile phone. Being asynchronously driven, it allows automatic logging of social events with involved parties and thus helps to feel at home anywhere anytime and to nurture user to group relationships. The core of the algorithm is based on perceptual time-frequency analysis via a confidence estimate of dynamic cepstral pattern matching between audio logs of people within a social group. The results show robust retrieval and surpass the performance of cross correlation while keeping lower system requirements.</subfield>
		</datafield>
	</record>
</collection>