@inproceedings{mahmoudi-etal-2023-gym,
    title     = "{GYM} at Qur{'}an {QA} 2023 Shared Task: Multi-Task Transfer Learning for {Q}uranic Passage Retrieval and Question Answering with Large Language Models",
    author    = "Mahmoudi, Ghazaleh and
      Morshedzadeh, Yeganeh and
      Eetemadi, Sauleh",
    editor    = "Sawaf, Hassan and
      El-Beltagy, Samhaa and
      Zaghouani, Wajdi and
      Magdy, Walid and
      Abdelali, Ahmed and
      Tomeh, Nadi and
      Abu Farha, Ibrahim and
      Habash, Nizar and
      Khalifa, Salam and
      Keleg, Amr and
      Haddad, Hatem and
      Zitouni, Imed and
      Mrini, Khalil and
      Almatham, Rawan",
    booktitle = "Proceedings of ArabicNLP 2023",
    month     = dec,
    year      = "2023",
    address   = "Singapore (Hybrid)",
    publisher = "Association for Computational Linguistics",
    url       = "https://aclanthology.org/2023.arabicnlp-1.79/",
    doi       = "10.18653/v1/2023.arabicnlp-1.79",
    pages     = "714--719",
    abstract  = "This work addresses the challenges of question answering for vintage texts like the Quran. It introduces two tasks: passage retrieval and reading comprehension. For passage retrieval, it employs unsupervised fine-tuning sentence encoders and supervised multi-task learning. In reading comprehension, it fine-tunes an Electra-based model, demonstrating significant improvements over baseline models. Our best AraElectra model achieves 46.1{\%} partial Average Precision (pAP) on the unseen test set, outperforming the baseline by 23{\%}."
}
@comment{Markdown (Informal):
[GYM at Qur'an QA 2023 Shared Task: Multi-Task Transfer Learning for Quranic Passage Retrieval and Question Answering with Large Language Models](https://aclanthology.org/2023.arabicnlp-1.79/) (Mahmoudi et al., ArabicNLP 2023)
ACL}