@inproceedings{84ba1d86435e49caabdd1657a59787ac,
  author    = {Chan, Ka Hou and Ke, Wei and Im, Sio Kei},
  title     = {{CARU}: A Content-Adaptive Recurrent Unit for the Transition of Hidden State in {NLP}},
  booktitle = {Neural Information Processing - 27th International Conference, ICONIP 2020, Proceedings},
  editor    = {Yang, Haiqin and Pasupa, Kitsuchart and Leung, Andrew Chi-Sing and Kwok, James T. and Chan, Jonathan H. and King, Irwin},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  pages     = {693--703},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  address   = {Germany},
  year      = {2020},
  doi       = {10.1007/978-3-030-63830-6_58},
  isbn      = {9783030638290},
  language  = {English},
  keywords  = {Content-adaptive, Gate recurrent unit, Long-Short Term Memory, Natural Language Processing, Recurrent neural network},
  abstract  = {This article introduces a novel RNN unit inspired by GRU, namely the Content-Adaptive Recurrent Unit (CARU). The design of CARU contains all the features of GRU but requires fewer training parameters. We make use of the concept of weights in our design to analyze the transition of hidden states. At the same time, we also describe how the content adaptive gate handles the received words and alleviates the long-term dependence problem. As a result, the unit can improve the accuracy of the experiments, and the results show that CARU not only has better performance than GRU, but also produces faster training. Moreover, the proposed unit is general and can be applied to all RNN related neural network models.},
  note      = {Publisher Copyright: {\textcopyright} 2020, Springer Nature Switzerland AG.; 27th International Conference on Neural Information Processing, ICONIP 2020 ; Conference date: 18-11-2020 Through 22-11-2020},
}