@inproceedings{elder-etal-2020-make,
  title     = {How to Make Neural Natural Language Generation as Reliable as Templates in Task-Oriented Dialogue},
  author    = {Elder, Henry and O{'}Connor, Alexander and Foster, Jennifer},
  editor    = {Webber, Bonnie and Cohn, Trevor and He, Yulan and Liu, Yang},
  booktitle = {Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.emnlp-main.230},
  doi       = {10.18653/v1/2020.emnlp-main.230},
  pages     = {2877--2888},
  abstract  = {Neural Natural Language Generation (NLG) systems are well known for their unreliability. To overcome this issue, we propose a data augmentation approach which allows us to restrict the output of a network and guarantee reliability. While this restriction means generation will be less diverse than if randomly sampled, we include experiments that demonstrate the tendency of existing neural generation approaches to produce dull and repetitive text, and we argue that reliability is more important than diversity for this task. The system trained using this approach scored 100{\%} in semantic accuracy on the E2E NLG Challenge dataset, the same as a template system.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="elder-etal-2020-make">
<titleInfo>
<title>How to Make Neural Natural Language Generation as Reliable as Templates in Task-Oriented Dialogue</title>
</titleInfo>
<name type="personal">
<namePart type="given">Henry</namePart>
<namePart type="family">Elder</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="family">O’Connor</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jennifer</namePart>
<namePart type="family">Foster</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Bonnie</namePart>
<namePart type="family">Webber</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Trevor</namePart>
<namePart type="family">Cohn</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yulan</namePart>
<namePart type="family">He</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yang</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Neural Natural Language Generation (NLG) systems are well known for their unreliability. To overcome this issue, we propose a data augmentation approach which allows us to restrict the output of a network and guarantee reliability. While this restriction means generation will be less diverse than if randomly sampled, we include experiments that demonstrate the tendency of existing neural generation approaches to produce dull and repetitive text, and we argue that reliability is more important than diversity for this task. The system trained using this approach scored 100% in semantic accuracy on the E2E NLG Challenge dataset, the same as a template system.</abstract>
<identifier type="citekey">elder-etal-2020-make</identifier>
<identifier type="doi">10.18653/v1/2020.emnlp-main.230</identifier>
<location>
<url>https://aclanthology.org/2020.emnlp-main.230</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>2877</start>
<end>2888</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T How to Make Neural Natural Language Generation as Reliable as Templates in Task-Oriented Dialogue
%A Elder, Henry
%A O’Connor, Alexander
%A Foster, Jennifer
%Y Webber, Bonnie
%Y Cohn, Trevor
%Y He, Yulan
%Y Liu, Yang
%S Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)
%D 2020
%8 November
%I Association for Computational Linguistics
%C Online
%F elder-etal-2020-make
%X Neural Natural Language Generation (NLG) systems are well known for their unreliability. To overcome this issue, we propose a data augmentation approach which allows us to restrict the output of a network and guarantee reliability. While this restriction means generation will be less diverse than if randomly sampled, we include experiments that demonstrate the tendency of existing neural generation approaches to produce dull and repetitive text, and we argue that reliability is more important than diversity for this task. The system trained using this approach scored 100% in semantic accuracy on the E2E NLG Challenge dataset, the same as a template system.
%R 10.18653/v1/2020.emnlp-main.230
%U https://aclanthology.org/2020.emnlp-main.230
%U https://doi.org/10.18653/v1/2020.emnlp-main.230
%P 2877-2888
Markdown (Informal)
[How to Make Neural Natural Language Generation as Reliable as Templates in Task-Oriented Dialogue](https://aclanthology.org/2020.emnlp-main.230) (Elder et al., EMNLP 2020)
ACL