@inproceedings{d18bced19d5a4afd8f3bd049d0b99338,
title = "Zero-shot Cross-lingual Transfer is Under-specified Optimization",
abstract = "Pretrained multilingual encoders enable zero-shot cross-lingual transfer, but often produce unreliable models that exhibit high performance variance on the target language. We postulate that this high variance results from zero-shot cross-lingual transfer solving an under-specified optimization problem. We show that any linearly interpolated model between the source language monolingual model and the source + target bilingual model has equally low source language generalization error, yet the target language generalization error decreases smoothly and linearly as we move from the monolingual to the bilingual model, suggesting that the model struggles to identify good solutions for both source and target languages using the source language alone. Additionally, we show that the zero-shot solution lies in a non-flat region of the target language generalization error surface, causing the high variance.",
author = "Wu, Shijie and {Van Durme}, Benjamin and Dredze, Mark",
note = "Publisher Copyright: {\textcopyright} 2022 Association for Computational Linguistics; 7th Workshop on Representation Learning for NLP, RepL4NLP 2022 at ACL 2022; Conference date: 26-05-2022",
year = "2022",
language = "English (US)",
series = "Proceedings of the Annual Meeting of the Association for Computational Linguistics",
publisher = "Association for Computational Linguistics (ACL)",
pages = "236--248",
booktitle = "Proceedings of the 7th Workshop on Representation Learning for NLP (RepL4NLP 2022)",
address = "United States",
}