%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 06:10:31 PM

@ARTICLE{Mohammadshahi_TACL-2_2020,
  author = {Mohammadshahi, Alireza and Henderson, James},
  projects = {Idiap},
  month = oct,
  title = {Recursive Non-Autoregressive Graph-to-Graph Transformer for Dependency Parsing with Iterative Refinement},
  journal = {Transactions of the Association for Computational Linguistics},
  year = {2020},
  url = {https://arxiv.org/abs/2003.13118},
  crossref = {Mohammadshahi_TACL_2020},
  abstract = {We propose the Recursive Non-autoregressive Graph-to-graph Transformer architecture (RNG-Tr) for the iterative refinement of arbitrary graphs through the recursive application of a non-autoregressive Graph-to-Graph Transformer and apply it to syntactic dependency parsing. The Graph-to-Graph Transformer architecture of \newcite{mohammadshahi2019graphtograph} has previously been used for autoregressive graph prediction, but here we use it to predict all edges of the graph independently, conditioned on a previous prediction of the same graph. We demonstrate the power and effectiveness of RNG-Tr on several dependency corpora, using a refinement model pre-trained with BERT. We also introduce Dependency BERT (DepBERT), a non-recursive parser similar to our refinement model. RNG-Tr is able to improve the accuracy of a variety of initial parsers on 13 languages from the Universal Dependencies Treebanks and the English and Chinese Penn Treebanks, even improving over the new state-of-the-art results achieved by DepBERT, significantly improving the state-of-the-art for all corpora tested.},
  pdf = {https://publications.idiap.ch/attachments/papers/2020/Mohammadshahi_TACL-2_2020.pdf}
}

crossreferenced publications:

@ARTICLE{Mohammadshahi_TACL_2020,
  author = {Mohammadshahi, Alireza and Henderson, James},
  keywords = {Natural language processing, NLP, Parsing, Transformer},
  projects = {Idiap},
  title = {Recursive Non-Autoregressive Graph-to-Graph Transformer for Dependency Parsing with Iterative Refinement},
  journal = {Transactions of the Association for Computational Linguistics (under submission)},
  year = {2020},
  abstract = {We propose the Recursive Non-autoregressive Graph-to-graph Transformer architecture (RNG-Tr) for the iterative refinement of arbitrary graphs through the recursive application of a non-autoregressive Graph-to-Graph Transformer and apply it to syntactic dependency parsing. The Graph-to-Graph Transformer architecture of \newcite{mohammadshahi2019graphtograph} has previously been used for autoregressive graph prediction, but here we use it to predict all edges of the graph independently, conditioned on a previous prediction of the same graph. We demonstrate the power and effectiveness of RNG-Tr on several dependency corpora, using a refinement model pre-trained with BERT~\cite{devlin2018bert}. We also introduce Dependency BERT (DepBERT), a non-recursive parser similar to our refinement model. RNG-Tr is able to improve the accuracy of a variety of initial parsers on 13 languages from the Universal Dependencies Treebanks and the English and Chinese Penn Treebanks, even improving over the new state-of-the-art results achieved by DepBERT, significantly improving the state-of-the-art for all corpora tested.}
}
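
% The abstract above describes an iterative refinement loop: a non-autoregressive
% Graph-to-Graph Transformer repredicts all edges of the dependency graph at once,
% conditioned on a previous prediction of the same graph, applied recursively from
% some initial parse. The Python sketch below is a minimal illustration of that
% loop only; `initial_parser`, `g2g_transformer`, `max_steps`, and the fixed-point
% stopping criterion are illustrative assumptions, not the authors' code or API.

def refine(sentence, initial_parser, g2g_transformer, max_steps=3):
    """Hedged sketch of an RNG-Tr-style refinement driver (assumed interface)."""
    graph = initial_parser(sentence)  # any initial dependency parse
    for _ in range(max_steps):
        # Non-autoregressive step: predict every edge of the new graph
        # independently, conditioned on the previous graph as a whole.
        new_graph = g2g_transformer(sentence, graph)
        if new_graph == graph:  # assumes comparable graph representations;
            break               # stop once refinement reaches a fixed point
        graph = new_graph
    return graph

% In this sketch the refinement model plays both roles the abstract mentions:
% called once on an empty initial graph it behaves like the non-recursive
% DepBERT-style parser; called repeatedly it performs RNG-Tr-style refinement.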