@inproceedings{bi-etal-2019-multi,
    title     = {Multi-agent Learning for {Neural Machine Translation}},
    author    = {Bi, Tianchi and
                 Xiong, Hao and
                 He, Zhongjun and
                 Wu, Hua and
                 Wang, Haifeng},
    editor    = {Inui, Kentaro and
                 Jiang, Jing and
                 Ng, Vincent and
                 Wan, Xiaojun},
    booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)},
    month     = nov,
    year      = {2019},
    address   = {Hong Kong, China},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/D19-1079/},
    doi       = {10.18653/v1/D19-1079},
    pages     = {856--865},
    abstract  = {Conventional Neural Machine Translation (NMT) models benefit from the training with an additional agent, e.g., dual learning, and bidirectional decoding with one agent decoding from left to right and the other decoding in the opposite direction. In this paper, we extend the training framework to the multi-agent scenario by introducing diverse agents in an interactive updating process. At training time, each agent learns advanced knowledge from others, and they work together to improve translation quality. Experimental results on NIST Chinese-English, IWSLT 2014 German-English, WMT 2014 English-German and large-scale Chinese-English translation tasks indicate that our approach achieves absolute improvements over the strong baseline systems and shows competitive performance on all tasks.},
}
Markdown (Informal)
[Multi-agent Learning for Neural Machine Translation](https://aclanthology.org/D19-1079/) (Bi et al., EMNLP-IJCNLP 2019)
ACL
- Tianchi Bi, Hao Xiong, Zhongjun He, Hua Wu, and Haifeng Wang. 2019. Multi-agent Learning for Neural Machine Translation. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 856–865, Hong Kong, China. Association for Computational Linguistics.