@inproceedings{78791a8c913949d5ae8cfdd04a5f30dc,
title = "Self-Adaptive Layer: An Application of Function Approximation Theory to Enhance Convergence Efficiency in Neural Networks",
abstract = "Neural networks provide a general architecture to model complex nonlinear systems, but the source data are often mixed with a lot of noise and interference information. One way to offer a smoother alternative for addressing this issue in training is to increase the neural or layer size. In this paper, a new self-adaptive layer is developed to overcome the problems of neural networks so as to achieve faster convergence and avoid local minimum. We incorporate function approximation theory into the layer element arrangement, so that the training process and the network approximation properties can be investigated via linear algebra, where the precision of adaptation can be controlled by the order of polynomials being used. Experimental results show that our proposed layer leads to significantly faster performance in convergence. As a result, this new layer greatly enhances the training accuracy. Moreover, the design and implementation can be easily deployed in most current systems.",
keywords = "Function Approximation, Neural Network, Orthogonal Polynomial, Self-Adaptive",
author = "Chan, {Ka Hou} and Im, {Sio Kei} and Wei Ke",
note = "Publisher Copyright: {\textcopyright} 2020 IEEE.; 34th International Conference on Information Networking, ICOIN 2020 ; Conference date: 07-01-2020 Through 10-01-2020",
year = "2020",
month = jan,
doi = "10.1109/ICOIN48656.2020.9016534",
language = "English",
series = "International Conference on Information Networking",
publisher = "IEEE Computer Society",
pages = "447--452",
booktitle = "34th International Conference on Information Networking, ICOIN 2020",
address = "United States",
}