Zhou et al. "MoE-LPR: Multilingual Extension of Large Language Models Through Mixture-of-Experts with Language Priors Routing." AAAI Conference on Artificial Intelligence, 2025. doi:10.1609/aaai.v39i24.34805
Markdown
[Zhou et al. "MoE-LPR: Multilingual Extension of Large Language Models Through Mixture-of-Experts with Language Priors Routing." AAAI Conference on Artificial Intelligence, 2025.](https://mlanthology.org/aaai/2025/zhou2025aaai-moe/) doi:10.1609/aaai.v39i24.34805
BibTeX
@inproceedings{zhou2025aaai-moe,
title = {{MoE-LPR: Multilingual Extension of Large Language Models Through Mixture-of-Experts with Language Priors Routing}},
author = {Zhou, Hao and Wang, Zhijun and Huang, Shujian and Huang, Xin and Han, Xue and Feng, Junlan and Deng, Chao and Luo, Weihua and Chen, Jiajun},
booktitle = {AAAI Conference on Artificial Intelligence},
year = {2025},
pages = {26092--26100},
doi = {10.1609/aaai.v39i24.34805},
url = {https://mlanthology.org/aaai/2025/zhou2025aaai-moe/}
}
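To cite this paper from a LaTeX document, save the BibTeX record to a bibliography file and reference it by its key. A minimal sketch follows, assuming the entry is stored in a file named references.bib (a hypothetical filename) and the document is compiled with pdflatex followed by bibtex.

\documentclass{article}
\begin{document}
MoE-LPR extends large language models to additional languages via
Mixture-of-Experts with language priors routing~\cite{zhou2025aaai-moe}.
\bibliographystyle{plain}
% Assumes the BibTeX entry above is saved as references.bib
\bibliography{references}
\end{document}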