@article{10.3844/jcssp.2008.631.637,
  article_type = {journal},
  title = {Self-Generation ART-1 Neural Network with Gradient-Descent Method Aid for Latin Alphabet Recognition},
  author = {Zacharie, Mbaïtiga},
  volume = {4},
  number = {8},
  year = {2008},
  month = {Aug},
  pages = {631-637},
  doi = {10.3844/jcssp.2008.631.637},
  url = {https://thescipub.com/abstract/jcssp.2008.631.637},
  abstract = {Problem statement: This study presents a self-generation ART-1 neural network, an efficient algorithm that emulates self-organizing pattern recognition and was developed to avoid the stability-plasticity dilemma in competitive network learning, applied to Latin alphabet recognition for use in a vision system for road sign recognition. Approach: The first step of our approach deals with the training process, where a set of input vectors is presented sequentially to the preprocessor to specify the inputs for the network. Secondly, the mean squared error is used to select the candidate output in the recognition phase. Thirdly, to move down the large error surface created by the delta rule during the search phase, gradient descent is applied, changing each weight by an amount proportional to the negative of the slope of the sigmoid function. Results: In simulation tests, the system self-organizes in real time, producing stable recognition even for input patterns beyond those originally stored. It preserves its previously learned knowledge while retaining its ability to learn new patterns. Conclusions: The results suggest that the proposed system is suitable for practical use.},
  journal = {Journal of Computer Science},
  publisher = {Science Publications}
}