%Aigaion2 BibTeX export from Idiap Publications
%Monday 29 April 2024 02:43:51 AM

@TECHREPORT{Mrini_Idiap-RR-26-2017,
         author = {Mrini, Khalil and Pappas, Nikolaos and Popescu-Belis, Andrei},
       keywords = {document labeling, multilingual hierarchical networks},
       projects = {Idiap, SUMMA},
          month = {9},
          title = {Cross-lingual Transfer for News Article Labeling: Benchmarking Statistical and Neural Models},
           type = {Idiap-RR},
         number = {Idiap-RR-26-2017},
           year = {2017},
    institution = {Idiap},
        address = {Rue Marconi 19, CH-1920 Martigny},
            note = {Report of an EPFL semester project carried out by Khalil Mrini (first-year I\&C MSc student), supervised by N. Pappas and A. Popescu-Belis.},
        abstract = {Cross-lingual transfer has been shown to increase the performance of a text classification model thanks to the use of Multilingual Hierarchical Attention Networks (MHAN), on which this work is based. Firstly, we compared the performance of monolingual and multilingual HANs with three types of bag-of-words models. We found that, when it uses the full vocabulary and hence many more parameters than the neural models, the Binary Unigram model outperforms the HAN model with Dense encoders in 6 out of 8 languages and ties against MHAN with Dense encoders. However, this no longer holds when we limit the number of parameters and/or increase the sophistication of the neural encoders to GRU or biGRU. Secondly, new configurations of parameter sharing were tested. We found that sharing attention at the sentence level was the best configuration by a small margin when transferring from 5 out of 7 languages to English, as well as for cross-lingual transfer between English and Spanish, Russian, and Arabic. The tests were performed on the Deutsche Welle news corpus with 8 languages and 600k documents.},
            pdf = {https://publications.idiap.ch/attachments/reports/2017/Mrini_Idiap-RR-26-2017.pdf}
}