@inproceedings{yu-etal-2020-towards,
title = "Towards Context-Aware Code Comment Generation",
author = "Yu, Xiaohan and
Huang, Quzhe and
Wang, Zheng and
Feng, Yansong and
Zhao, Dongyan",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2020",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.findings-emnlp.350",
doi = "10.18653/v1/2020.findings-emnlp.350",
pages = "3938--3947",
abstract = "Code comments are vital for software maintenance and comprehension, but many software projects suffer from the lack of meaningful and up-to-date comments in practice. This paper presents a novel approach to automatically generate code comments at a function level by targeting object-oriented programming languages. Unlike prior work that only uses information locally available within the target function, our approach leverages broader contextual information by considering all other functions of the same class. To propagate and integrate information beyond the scope of the target function, we design a novel learning framework based on the bidirectional gated recurrent unit and a graph attention network with a pointer mechanism. We apply our approach to produce code comments for Java methods and compare it against four strong baseline methods. Experimental results show that our approach outperforms most methods by a large margin and achieves a comparable result with the state-of-the-art method.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="yu-etal-2020-towards">
<titleInfo>
<title>Towards Context-Aware Code Comment Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xiaohan</namePart>
<namePart type="family">Yu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Quzhe</namePart>
<namePart type="family">Huang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zheng</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yansong</namePart>
<namePart type="family">Feng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dongyan</namePart>
<namePart type="family">Zhao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-nov</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Findings of the Association for Computational Linguistics: EMNLP 2020</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Code comments are vital for software maintenance and comprehension, but many software projects suffer from the lack of meaningful and up-to-date comments in practice. This paper presents a novel approach to automatically generate code comments at a function level by targeting object-oriented programming languages. Unlike prior work that only uses information locally available within the target function, our approach leverages broader contextual information by considering all other functions of the same class. To propagate and integrate information beyond the scope of the target function, we design a novel learning framework based on the bidirectional gated recurrent unit and a graph attention network with a pointer mechanism. We apply our approach to produce code comments for Java methods and compare it against four strong baseline methods. Experimental results show that our approach outperforms most methods by a large margin and achieves a comparable result with the state-of-the-art method.</abstract>
<identifier type="citekey">yu-etal-2020-towards</identifier>
<identifier type="doi">10.18653/v1/2020.findings-emnlp.350</identifier>
<location>
<url>https://aclanthology.org/2020.findings-emnlp.350</url>
</location>
<part>
<date>2020-nov</date>
<extent unit="page">
<start>3938</start>
<end>3947</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Towards Context-Aware Code Comment Generation
%A Yu, Xiaohan
%A Huang, Quzhe
%A Wang, Zheng
%A Feng, Yansong
%A Zhao, Dongyan
%S Findings of the Association for Computational Linguistics: EMNLP 2020
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F yu-etal-2020-towards
%X Code comments are vital for software maintenance and comprehension, but many software projects suffer from the lack of meaningful and up-to-date comments in practice. This paper presents a novel approach to automatically generate code comments at a function level by targeting object-oriented programming languages. Unlike prior work that only uses information locally available within the target function, our approach leverages broader contextual information by considering all other functions of the same class. To propagate and integrate information beyond the scope of the target function, we design a novel learning framework based on the bidirectional gated recurrent unit and a graph attention network with a pointer mechanism. We apply our approach to produce code comments for Java methods and compare it against four strong baseline methods. Experimental results show that our approach outperforms most methods by a large margin and achieves a comparable result with the state-of-the-art method.
%R 10.18653/v1/2020.findings-emnlp.350
%U https://aclanthology.org/2020.findings-emnlp.350
%U https://doi.org/10.18653/v1/2020.findings-emnlp.350
%P 3938-3947
Markdown (Informal)
[Towards Context-Aware Code Comment Generation](https://aclanthology.org/2020.findings-emnlp.350) (Yu et al., Findings 2020)

ACL
Xiaohan Yu, Quzhe Huang, Zheng Wang, Yansong Feng, and Dongyan Zhao. 2020. Towards Context-Aware Code Comment Generation. In Findings of the Association for Computational Linguistics: EMNLP 2020, pages 3938–3947, Online. Association for Computational Linguistics.
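
The abstract describes a framework built from a bidirectional GRU, a graph attention network over the other functions of the same class, and a pointer mechanism. The following is a minimal, illustrative sketch of those ingredients in PyTorch, not the authors' implementation: the class names (`ContextAwareEncoder`, `pointer_mixture`), the single-head attention over sibling-function summaries, the mean-pooled function summary, and all dimensions are assumptions made for illustration only; the paper's actual graph construction and decoder are not reproduced here.

```python
# Illustrative sketch (not the paper's code): a BiGRU token encoder, a simple
# attention step over sibling functions of the same class, and a pointer-style
# copy/generate mixture. All names and dimensions are hypothetical.
import torch
import torch.nn as nn
import torch.nn.functional as F

class ContextAwareEncoder(nn.Module):
    def __init__(self, vocab_size, emb_dim=128, hid_dim=256):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_dim)
        # Bidirectional GRU over the token sequence of a function.
        self.bigru = nn.GRU(emb_dim, hid_dim // 2, batch_first=True, bidirectional=True)
        # Projection used to score sibling-function summaries (single-head attention).
        self.attn_proj = nn.Linear(hid_dim, hid_dim)

    def encode_function(self, tokens):
        # tokens: (batch, seq_len) token ids of one function
        states, _ = self.bigru(self.embed(tokens))   # (batch, seq_len, hid_dim)
        summary = states.mean(dim=1)                 # crude function-level summary
        return states, summary

    def class_context(self, target_summary, sibling_summaries):
        # target_summary: (batch, hid_dim); sibling_summaries: (batch, n_sibs, hid_dim)
        q = self.attn_proj(target_summary).unsqueeze(1)           # (batch, 1, hid_dim)
        scores = torch.bmm(q, sibling_summaries.transpose(1, 2))  # (batch, 1, n_sibs)
        weights = F.softmax(scores, dim=-1)
        context = torch.bmm(weights, sibling_summaries).squeeze(1)
        return context                                            # (batch, hid_dim)

def pointer_mixture(gen_probs, copy_attn, src_ids, vocab_size, p_gen):
    # Mix a generation distribution with a copy distribution over source tokens,
    # as in a standard pointer mechanism (illustrative, not the paper's exact form).
    # gen_probs: (batch, vocab_size); copy_attn, src_ids: (batch, src_len)
    copy_probs = torch.zeros(gen_probs.size(0), vocab_size, device=gen_probs.device)
    copy_probs.scatter_add_(1, src_ids, copy_attn)
    return p_gen * gen_probs + (1.0 - p_gen) * copy_probs
```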