@inproceedings{zhang-etal-2023-bridging-gap,
title = "Bridging The Gap: Entailment Fused-T5 for Open-retrieval Conversational Machine Reading Comprehension",
author = "Zhang, Xiao and
Huang, Heyan and
Chi, Zewen and
Mao, Xian-Ling",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.acl-long.857/",
doi = "10.18653/v1/2023.acl-long.857",
pages = "15374--15386",
abstract = "Open-retrieval conversational machine reading comprehension (OCMRC) simulates real-life conversational interaction scenes. Machines are required to make a decision of {\textquotedblleft}Yes/No/Inquire{\textquotedblright} or generate a follow-up question when the decision is {\textquotedblleft}Inquire{\textquotedblright} based on retrieved rule texts, user scenario, user question and dialogue history. Recent studies try to reduce the information gap between decision-making and question generation, in order to improve the performance of generation. However, the information gap still persists because these methods are still limited in pipeline framework, where decision-making and question generation are performed separately, making it hard to share the entailment reasoning used in decision-making across all stages. To tackle the above problem, we propose a novel one-stage end-to-end framework, called Entailment Fused-T5 (EFT), to bridge the information gap between decision-making and question generation in a global understanding manner. The extensive experimental results demonstrate that our proposed framework achieves new state-of-the-art performance on the OR-ShARC benchmark. Our model and code are publicly available at an anonymous link."
}