@inproceedings{di-liello-etal-2023-context,
title = "Context-Aware Transformer Pre-Training for Answer Sentence Selection",
author = "Di Liello, Luca and
Garg, Siddhant and
Moschitti, Alessandro",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.acl-short.40/",
doi = "10.18653/v1/2023.acl-short.40",
pages = "458--468",
abstract = "Answer Sentence Selection (AS2) is a core component for building an accurate Question Answering pipeline. AS2 models rank a set of candidate sentences based on how likely they answer a given question. The state of the art in AS2 exploits pre-trained transformers by transferring them on large annotated datasets, while using local contextual information around the candidate sentence. In this paper, we propose three pre-training objectives designed to mimic the downstream fine-tuning task of contextual AS2. This allows for specializing LMs when fine-tuning for contextual AS2. Our experiments on three public and two large-scale industrial datasets show that our pre-training approaches (applied to RoBERTa and ELECTRA) can improve baseline contextual AS2 accuracy by up to 8{\%} on some datasets."
}