Daniel Bis, Maksim Podkorytov, Xiuwen Liu. Too Much in Common: Shifting of Embeddings in Transformer Language Models and its Implications. In Kristina Toutanova, Anna Rumshisky, Luke Zettlemoyer, Dilek Hakkani-Tür, Iz Beltagy, Steven Bethard, Ryan Cotterell, Tanmoy Chakraborty, Yichao Zhou, editors, Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, NAACL-HLT 2021, Online, June 6–11, 2021. pages 5117–5130, Association for Computational Linguistics, 2021. [doi]
@inproceedings{BisPL21,
  author    = {Bis, Daniel and Podkorytov, Maksim and Liu, Xiuwen},
  title     = {Too Much in Common: Shifting of Embeddings in {Transformer} Language Models and its Implications},
  booktitle = {Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, {NAACL-HLT} 2021, Online, June 6--11, 2021},
  editor    = {Toutanova, Kristina and Rumshisky, Anna and Zettlemoyer, Luke and Hakkani-T{\"u}r, Dilek and Beltagy, Iz and Bethard, Steven and Cotterell, Ryan and Chakraborty, Tanmoy and Zhou, Yichao},
  pages     = {5117--5130},
  publisher = {Association for Computational Linguistics},
  year      = {2021},
  isbn      = {978-1-954085-46-6},
  doi       = {10.18653/v1/2021.naacl-main.403},
  url       = {https://www.aclweb.org/anthology/2021.naacl-main.403/},
  researchr = {https://researchr.org/publication/BisPL21},
  cites     = {0},
  citedby   = {0},
}