@article{scholars17103,
  author    = {Khan, F. U. and Aziz, I.},
  title     = {{PrimeNet}: Adaptive Multi-Layer Deep Neural Structure for Enhanced Feature Selection in Early Convolution Stage},
  journal   = {Applied Sciences (Switzerland)},
  publisher = {MDPI},
  year      = {2022},
  volume    = {12},
  number    = {4},
  doi       = {10.3390/app12041842},
  issn      = {2076-3417},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85124731648&doi=10.3390\%2fapp12041842&partnerID=40&md5=80f329af124e88013c5f6ea8c43cd596},
  note      = {cited By 0},
  abstract  = {The colossal depths of the deep neural network sometimes suffer from ineffective backpropagation of the gradients through all its depths, whereas the strong performance of shallower multilayer neural structures proves their ability to increase the gradient signals in the early stages of training, which easily gets backpropagated for global loss corrections. Shallow neural structures are always a good starting point for encouraging the sturdy feature characteristics of the input. In this research, a shallow, deep neural structure called PrimeNet is proposed. PrimeNet is aimed to dynamically identify and encourage the quality visual indicators from the input to be used by the subsequent deep network layers and increase the gradient signals in the lower stages of the training pipeline. In addition to this, the layer-wise training is performed with the help of locally generated errors, which means the gradient is not backpropagated to previous layers, and the hidden layer weights are updated during the forward pass, making this structure a backpropagation free variant. PrimeNet has obtained state-of-the-art results on various image datasets, attaining the dual objective of: (1) a compact dynamic deep neural structure, which (2) eliminates the problem of backward-locking. The PrimeNet unit is proposed as an alternative to traditional convolution and dense blocks for faster and memory-efficient training, outperforming previously reported results aimed at adaptive methods for parallel and multilayer deep neural systems. {\copyright} 2022 by the authors. Licensee MDPI, Basel, Switzerland.},
}