@inproceedings{mangrulkar-etal-2018-context,
title = "A Context-aware Convolutional Natural Language Generation model for Dialogue Systems",
author = "Mangrulkar, Sourab and
Shrivastava, Suhani and
Thenkanidiyoor, Veena and
Aroor Dinesh, Dileep",
booktitle = "Proceedings of the 19th Annual {SIG}dial Meeting on Discourse and Dialogue",
month = jul,
year = "2018",
address = "Melbourne, Australia",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-5020",
doi = "10.18653/v1/W18-5020",
pages = "191--200",
abstract = "Natural language generation (NLG) is an important component in spoken dialog systems (SDSs). A model for NLG involves sequence to sequence learning. State-of-the-art NLG models are built using recurrent neural network (RNN) based sequence to sequence models (Ond{\v{r}}ej Du{\v{s}}ek and Filip Jur{\v{c}}{\'\i}{\v{c}}ek, 2016a). Convolutional sequence to sequence based models have been used in the domain of machine translation but their application as Natural Language Generators in dialogue systems is still unexplored. In this work, we propose a novel approach to NLG using convolutional neural network (CNN) based sequence to sequence learning. CNN-based approach allows to build a hierarchical model which encapsulates dependencies between words via shorter path unlike RNNs. In contrast to recurrent models, convolutional approach allows for efficient utilization of computational resources by parallelizing computations over all elements, and eases the learning process by applying constant number of nonlinearities. We also propose to use CNN-based reranker for obtaining responses having semantic correspondence with input dialogue acts. The proposed model is capable of entrainment. Studies using a standard dataset shows the effectiveness of the proposed CNN-based approach to NLG.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="mangrulkar-etal-2018-context">
<titleInfo>
<title>A Context-aware Convolutional Natural Language Generation model for Dialogue Systems</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sourab</namePart>
<namePart type="family">Mangrulkar</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Suhani</namePart>
<namePart type="family">Shrivastava</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Veena</namePart>
<namePart type="family">Thenkanidiyoor</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dileep</namePart>
<namePart type="family">Aroor Dinesh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 19th Annual SIGdial Meeting on Discourse and Dialogue</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Melbourne, Australia</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Natural language generation (NLG) is an important component in spoken dialog systems (SDSs). A model for NLG involves sequence-to-sequence learning. State-of-the-art NLG models are built using recurrent neural network (RNN) based sequence-to-sequence models (Ondřej Dušek and Filip Jurčíček, 2016a). Convolutional sequence-to-sequence models have been used in the domain of machine translation, but their application as natural language generators in dialogue systems is still unexplored. In this work, we propose a novel approach to NLG using convolutional neural network (CNN) based sequence-to-sequence learning. The CNN-based approach allows building a hierarchical model that encapsulates dependencies between words via shorter paths than RNNs. In contrast to recurrent models, the convolutional approach allows for efficient utilization of computational resources by parallelizing computations over all elements, and it eases the learning process by applying a constant number of nonlinearities. We also propose a CNN-based reranker for obtaining responses that semantically correspond to the input dialogue acts. The proposed model is capable of entrainment. Studies using a standard dataset show the effectiveness of the proposed CNN-based approach to NLG.</abstract>
<identifier type="citekey">mangrulkar-etal-2018-context</identifier>
<identifier type="doi">10.18653/v1/W18-5020</identifier>
<location>
<url>https://aclanthology.org/W18-5020</url>
</location>
<part>
<date>2018-07</date>
<extent unit="page">
<start>191</start>
<end>200</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T A Context-aware Convolutional Natural Language Generation model for Dialogue Systems
%A Mangrulkar, Sourab
%A Shrivastava, Suhani
%A Thenkanidiyoor, Veena
%A Aroor Dinesh, Dileep
%S Proceedings of the 19th Annual SIGdial Meeting on Discourse and Dialogue
%D 2018
%8 July
%I Association for Computational Linguistics
%C Melbourne, Australia
%F mangrulkar-etal-2018-context
%X Natural language generation (NLG) is an important component in spoken dialog systems (SDSs). A model for NLG involves sequence-to-sequence learning. State-of-the-art NLG models are built using recurrent neural network (RNN) based sequence-to-sequence models (Ondřej Dušek and Filip Jurčíček, 2016a). Convolutional sequence-to-sequence models have been used in the domain of machine translation, but their application as natural language generators in dialogue systems is still unexplored. In this work, we propose a novel approach to NLG using convolutional neural network (CNN) based sequence-to-sequence learning. The CNN-based approach allows building a hierarchical model that encapsulates dependencies between words via shorter paths than RNNs. In contrast to recurrent models, the convolutional approach allows for efficient utilization of computational resources by parallelizing computations over all elements, and it eases the learning process by applying a constant number of nonlinearities. We also propose a CNN-based reranker for obtaining responses that semantically correspond to the input dialogue acts. The proposed model is capable of entrainment. Studies using a standard dataset show the effectiveness of the proposed CNN-based approach to NLG.
%R 10.18653/v1/W18-5020
%U https://aclanthology.org/W18-5020
%U https://doi.org/10.18653/v1/W18-5020
%P 191-200
Markdown (Informal)
[A Context-aware Convolutional Natural Language Generation model for Dialogue Systems](https://aclanthology.org/W18-5020) (Mangrulkar et al., 2018)
ACL
Sourab Mangrulkar, Suhani Shrivastava, Veena Thenkanidiyoor, and Dileep Aroor Dinesh. 2018. A Context-aware Convolutional Natural Language Generation model for Dialogue Systems. In Proceedings of the 19th Annual SIGdial Meeting on Discourse and Dialogue, pages 191–200, Melbourne, Australia. Association for Computational Linguistics.
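
For readers who want a concrete picture of the architecture the abstract sketches, below is a minimal, hypothetical PyTorch sketch of a gated convolutional encoder of the kind used in convolutional sequence-to-sequence models: stacked Conv1d blocks with GLU gates and residual connections process all positions in parallel and reach long-range word dependencies through a short hierarchical path. This is not the authors' implementation; the layer count, channel width, kernel size, and gating choice are assumptions made for illustration.

```python
# Hypothetical sketch, not the paper's code: a gated convolutional encoder
# in the spirit of convolutional sequence-to-sequence models.
import torch
import torch.nn as nn
import torch.nn.functional as F


class GatedConvEncoderLayer(nn.Module):
    """One block: Conv1d -> gated linear unit (GLU) -> residual connection."""

    def __init__(self, channels: int, kernel_size: int = 3):
        super().__init__()
        # Produce 2 * channels outputs: one half is content, the other gates it.
        self.conv = nn.Conv1d(channels, 2 * channels, kernel_size,
                              padding=kernel_size // 2)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, channels, seq_len); all positions are convolved in parallel.
        y = F.glu(self.conv(x), dim=1)
        return x + y  # residual keeps the path between distant words short


class ConvEncoder(nn.Module):
    """Embeds token ids and applies a fixed stack of gated convolutional layers."""

    def __init__(self, vocab_size: int, channels: int = 256, num_layers: int = 4):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, channels)
        self.layers = nn.ModuleList(
            [GatedConvEncoderLayer(channels) for _ in range(num_layers)]
        )

    def forward(self, tokens: torch.Tensor) -> torch.Tensor:
        x = self.embed(tokens).transpose(1, 2)  # (batch, channels, seq_len)
        for layer in self.layers:
            x = layer(x)  # a constant number of nonlinearities per position
        return x


if __name__ == "__main__":
    encoder = ConvEncoder(vocab_size=1000)
    dummy_tokens = torch.randint(0, 1000, (2, 12))  # e.g. two encoded dialogue-act sequences
    print(encoder(dummy_tokens).shape)  # torch.Size([2, 256, 12])
```

A full NLG system along these lines would pair such an encoder with a convolutional decoder and attention, plus the reranking step the abstract mentions; those components are omitted from this sketch.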