%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 07:09:02 PM

@INPROCEEDINGS{Habibi_ACMRECOMMENDERSYSTEMS2012_2012,
         author = {Habibi, Maryam and Popescu-Belis, Andrei},
       projects = {Idiap, IM2},
          month = sep,
          title = {Using Crowdsourcing to Compare Document Recommendation Strategies for Conversations},
      booktitle = {Proceedings of the ACM RecSys Workshop on Recommendation Utility Evaluation (RUE 2012)},
           year = {2012},
          pages = {15--20},
       location = {Dublin, Ireland},
       crossref = {Habibi_Idiap-RR-14-2012},
       abstract = {This paper explores a crowdsourcing approach to the evaluation of a document recommender system intended for use in meetings. The system uses words from the conversation to perform just-in-time document retrieval. We compare several versions of the system, including the use of keywords, retrieval using semantic similarity, and the possibility for user initiative. The system's results are submitted for comparative evaluations to workers recruited via a crowdsourcing platform, Amazon's Mechanical Turk. We introduce a new method, Pearson Correlation Coefficient-Information Entropy (PCC-H), to abstract over the quality of the workers' judgments and produce system-level scores. We measure the workers' reliability by the inter-rater agreement of each of them against the others, and use entropy to weight the difficulty of each comparison task. The proposed evaluation method is shown to be reliable, and the results show that adding user initiative improves the relevance of recommendations.},
            pdf = {https://publications.idiap.ch/attachments/papers/2016/Habibi_ACMRECOMMENDERSYSTEMS2012_2012.pdf}
}



% Cross-referenced publications:
@TECHREPORT{Habibi_Idiap-RR-14-2012,
         author = {Habibi, Maryam and Popescu-Belis, Andrei},
       keywords = {Amazon Mechanical Turk, comparative evaluation, crowdsourcing, document recommender system, user initiative},
       projects = {Idiap, IM2},
          month = jun,
          title = {Using Crowdsourcing to Compare Document Recommendation Strategies for Conversations},
           type = {Idiap-RR},
         number = {Idiap-RR-14-2012},
           year = {2012},
    institution = {Idiap},
       abstract = {This paper explores a crowdsourcing approach to the evaluation of a document recommender system intended for use in meetings. The system uses words from the conversation to perform just-in-time document retrieval. We compare several versions of the system, including the use of keywords, retrieval using semantic similarity, and the possibility for user initiative. The system's results are submitted for comparative evaluations to workers recruited via a crowdsourcing platform, Amazon's Mechanical Turk. We introduce a new method, Pearson Correlation Coefficient-Information Entropy (PCC-H), to abstract over the quality of the workers' judgments and produce system-level scores. We measure the workers' reliability by the inter-rater agreement of each of them against the others, and use entropy to weight the difficulty of each comparison task. The proposed evaluation method is shown to be reliable, and the results demonstrate that adding user initiative improves the relevance of recommendations.},
            pdf = {https://publications.idiap.ch/attachments/reports/2012/Habibi_Idiap-RR-14-2012.pdf}
}