@inproceedings{e5f7ebaff28d4fa0a4a05da999c42a2c,
  title     = {Applying and Optimizing {NLP} Model with {CARU}},
  abstract  = {RNN for language models can solve the problem of sparse content and high-dimensional features in traditional N-gram models. However, due to the problems of overfitting and gradient disappearance, the original RNN still lacks long-term content dependence and noise interference. This paper proposes an improved method based on a context word vector for RNN with CARU. In order to alleviate the overfitting problem, a modified DropConnect layer is employed in the proposed model. In addition, the multilayer CARU is used to add contextual word vectors to the model with the feature layer to strengthen the ability to learn long-distance information during the training process. Experimental results show that the proposed method effectively improves the performance of RNN-based language model.},
  keywords  = {DropConnect, Language Model, Model Analysis, Multilayer CARU, NLP, Word Embedding},
  author    = {Chan, Ka Hou and Im, Sio Kei and Pau, Giovanni},
  note      = {Publisher Copyright: {\textcopyright} 2022 IEEE.; 8th International Conference on Advanced Computing and Communication Systems, ICACCS 2022 ; Conference date: 25-03-2022 Through 26-03-2022},
  year      = {2022},
  doi       = {10.1109/ICACCS54159.2022.9785075},
  language  = {English},
  series    = {8th International Conference on Advanced Computing and Communication Systems, {ICACCS} 2022},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {1018--1022},
  booktitle = {8th International Conference on Advanced Computing and Communication Systems, {ICACCS} 2022},
  address   = {United States},
}