@inproceedings{assem-etal-2021-dtafa,
title = "{DTAFA}: Decoupled Training Architecture for Efficient {FAQ} Retrieval",
author = "Assem, Haytham and
Dutta, Sourav and
Burgin, Edward",
editor = "Li, Haizhou and
Levow, Gina-Anne and
Yu, Zhou and
Gupta, Chitralekha and
Sisman, Berrak and
Cai, Siqi and
Vandyke, David and
Dethlefs, Nina and
Wu, Yan and
Li, Junyi Jessy",
booktitle = "Proceedings of the 22nd Annual Meeting of the Special Interest Group on Discourse and Dialogue",
month = jul,
year = "2021",
address = "Singapore and Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.sigdial-1.44/",
doi = "10.18653/v1/2021.sigdial-1.44",
pages = "423--430",
    abstract = "Automated Frequently Asked Question (FAQ) retrieval offers an effective way to deliver prompt responses to natural-language queries, giving large-scale service-providing companies an efficient platform for presenting readily available information that answers customers' questions. We propose DTAFA, a novel multi-lingual FAQ retrieval system that aims to improve top-1 retrieval accuracy with the fewest parameters. We propose two decoupled deep learning architectures trained for (i) candidate generation via text classification of a user question, and (ii) learning fine-grained semantic similarity between user questions and the FAQ repository for candidate refinement. We validate our system on real-life enterprise data as well as an open-source dataset. Empirically, we show that DTAFA achieves better accuracy than the existing state of the art while requiring nearly 30{\texttimes} fewer training parameters."
}
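
The abstract above describes a two-stage pipeline: a classifier first generates candidate FAQs for a user question, and a separate similarity model then reranks those candidates. The sketch below is purely illustrative and is not the authors' DTAFA code; the toy FAQ data is hypothetical, and TF-IDF with logistic regression and cosine similarity stand in for the paper's neural classification and semantic-similarity models, solely to keep the example small and runnable.

```python
# Illustrative two-stage FAQ retrieval sketch (not the authors' DTAFA implementation).
# Stage 1: candidate generation via text classification over FAQ labels.
# Stage 2: candidate refinement by scoring similarity between the user query
#          and each candidate FAQ question.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics.pairwise import cosine_similarity

# Hypothetical toy FAQ repository: each FAQ question carries a class label.
faq_questions = [
    "How do I reset my password?",
    "How can I change my password?",
    "What payment methods do you accept?",
    "How do I cancel my subscription?",
]
faq_labels = ["password", "password", "payments", "subscription"]

vectorizer = TfidfVectorizer().fit(faq_questions)
X = vectorizer.transform(faq_questions)

# Stage 1 model: proposes the most likely FAQ classes for an incoming query.
classifier = LogisticRegression(max_iter=1000).fit(X, faq_labels)

def retrieve(query, top_classes=2):
    q = vectorizer.transform([query])
    # Candidate generation: keep FAQs whose label is among the top predicted classes.
    probs = classifier.predict_proba(q)[0]
    ranked = sorted(zip(classifier.classes_, probs), key=lambda p: -p[1])
    candidate_labels = {label for label, _ in ranked[:top_classes]}
    candidates = [i for i, lbl in enumerate(faq_labels) if lbl in candidate_labels]
    # Candidate refinement: pick the candidate most similar to the query.
    sims = cosine_similarity(q, X[candidates])[0]
    best = candidates[int(sims.argmax())]
    return faq_questions[best]

print(retrieve("I forgot my password, how do I reset it?"))
```

Decoupling the two stages, as the paper proposes, lets the cheap classifier prune the FAQ repository before the finer-grained (and more expensive) similarity model is applied only to the surviving candidates.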