Jingfei Du, Edouard Grave, Beliz Gunel, Vishrav Chaudhary, Onur Celebi, Michael Auli, Veselin Stoyanov, Alexis Conneau. Self-training Improves Pre-training for Natural Language Understanding. In Kristina Toutanova, Anna Rumshisky, Luke Zettlemoyer, Dilek Hakkani-Tür, Iz Beltagy, Steven Bethard, Ryan Cotterell, Tanmoy Chakraborty, Yichao Zhou, editors, Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, NAACL-HLT 2021, Online, June 6-11, 2021. pages 5408–5418, Association for Computational Linguistics, 2021. [doi]
@inproceedings{DuGGCCASC21,
  author    = {Du, Jingfei and Grave, Edouard and Gunel, Beliz and Chaudhary, Vishrav and Celebi, Onur and Auli, Michael and Stoyanov, Veselin and Conneau, Alexis},
  title     = {Self-training Improves Pre-training for Natural Language Understanding},
  booktitle = {Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, {NAACL-HLT} 2021, Online, June 6-11, 2021},
  editor    = {Toutanova, Kristina and Rumshisky, Anna and Zettlemoyer, Luke and Hakkani-T{\"u}r, Dilek and Beltagy, Iz and Bethard, Steven and Cotterell, Ryan and Chakraborty, Tanmoy and Zhou, Yichao},
  pages     = {5408--5418},
  publisher = {Association for Computational Linguistics},
  year      = {2021},
  isbn      = {978-1-954085-46-6},
  url       = {https://www.aclweb.org/anthology/2021.naacl-main.426/},
  researchr = {https://researchr.org/publication/DuGGCCASC21},
  cites     = {0},
  citedby   = {0},
}