<?xml version="1.0" encoding="UTF-8"?>
<collection xmlns="http://www.loc.gov/MARC21/slim">
	<record>
		<datafield tag="980" ind1=" " ind2=" ">
			<subfield code="a">CONF</subfield>
		</datafield>
		<datafield tag="970" ind1=" " ind2=" ">
			<subfield code="a">ruihu_ACMMM_2014/IDIAP</subfield>
		</datafield>
		<datafield tag="245" ind1=" " ind2=" ">
			<subfield code="a">Automatic Maya Hieroglyph Retrieval Using Shape and Context Information</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Hu, Rui</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Pallan, Carlos</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Krempel, Guido</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Odobez, Jean-Marc</subfield>
		</datafield>
		<datafield tag="700" ind1=" " ind2=" ">
			<subfield code="a">Gatica-Perez, Daniel</subfield>
		</datafield>
		<datafield tag="856" ind1="4" ind2="0">
			<subfield code="i">EXTERNAL</subfield>
			<subfield code="u">http://publications.idiap.ch/attachments/papers/2014/ruihu_ACMMM_2014.pdf</subfield>
			<subfield code="x">PUBLIC</subfield>
		</datafield>
		<datafield tag="711" ind1="2" ind2=" ">
			<subfield code="a">ACM MM</subfield>
		</datafield>
		<datafield tag="260" ind1=" " ind2=" ">
			<subfield code="c">2014</subfield>
		</datafield>
		<datafield tag="773" ind1=" " ind2=" ">
			<subfield code="c">4</subfield>
		</datafield>
		<datafield tag="856" ind1="4" ind2=" ">
			<subfield code="u">http://www.idiap.ch/dataset/maya-codex</subfield>
			<subfield code="z">URL</subfield>
		</datafield>
		<datafield tag="520" ind1=" " ind2=" ">
			<subfield code="a">We propose an automatic Maya hieroglyph retrieval method integrating shape and glyph context information. Two recent local shape descriptors, Gradient Field Histogram of Oriented Gradients (GF-HOG) and Histogram of Orientation Shape Context (HOOSC), are evaluated. To encode the context information, we propose to convert each Maya glyph block into a first-order Markov chain and apply the co-occurrence of neighbouring glyphs. The retrieval results obtained based on visual matching are therefore re-ranked. Experimental results show that our method can significantly improve the glyph retrieval accuracy even with a basic co-occurrence model. Furthermore, two unique glyph datasets are contributed which can be used as novel shape benchmarks in future research.</subfield>
		</datafield>
	</record>
</collection>