@inproceedings{kumar-etal-2022-augmenting,
  title     = {Augmenting {eBooks} with recommended questions using contrastive fine-tuned {T5}},
  author    = {Kumar, Shobhan and
               Chauhan, Arun and
               Kumar, Pavan},
  editor    = {Akhtar, Md. Shad and
               Chakraborty, Tanmoy},
  booktitle = {Proceedings of the 19th International Conference on Natural Language Processing (ICON)},
  month     = dec,
  year      = {2022},
  address   = {New Delhi, India},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.icon-main.15/},
  pages     = {109--115},
  abstract  = {The recent advances in AI made generation of questions from natural language text possible, the approach is completely excludes human in the loop while generating the appropriate questions which improves the students learning engagement. The ever growing amount of educational content renders it increasingly difficult to manually generate sufficient practice or quiz questions to accompany it. Reading comprehension can be improved by asking the right questions, So, this work offers a Transformer based question generation model for autonomously producing quiz questions from educational information, such as eBooks. This work proposes an contrastive training approach for `Text-to-Text Transfer Transformer' (T5) model where the model (T5-eQG) creates the summarized text for the input document and then automatically generates the questions. Our model shows promising results over earlier neural network-based and rules-based models for question generating task on benchmark datasets and NCERT eBooks.},
}
Markdown (Informal)
[Augmenting eBooks with recommended questions using contrastive fine-tuned T5](https://aclanthology.org/2022.icon-main.15/) (Kumar et al., ICON 2022)
ACL