@article{10.3844/jmssp.2016.312.316,
  article_type = {journal},
  title = {Robust Linear Discriminant Analysis},
  author = {Yahaya, Sharipah Soaad Syed and Lim, Yai-Fung and Ali, Hazlina and Omar, Zurni},
  volume = {12},
  year = {2017},
  month = {Jan},
  pages = {312--316},
  doi = {10.3844/jmssp.2016.312.316},
  url = {https://thescipub.com/abstract/jmssp.2016.312.316},
  abstract = {Linear Discriminant Analysis (LDA) is one of the most commonly employed methods for classification. The method, which constructs a linear discriminant function, yields an optimal classification rule for two or more groups under the assumptions of normality and homoscedasticity (equal covariance matrices). However, parametric LDA relies heavily on the sample mean vectors and the pooled sample covariance matrix, which are sensitive to non-normality. To overcome the sensitivity of the method to non-normality and heteroscedasticity, this study proposes two new robust LDA models. In these models, an automatic trimmed mean and its corresponding winsorized mean replace the mean vector in the parametric LDA. For the covariance matrix, this study introduces two robust approaches, namely winsorization and the multiplication of Spearman's rho with the corresponding robust scale estimator used in the trimming process. Simulated and real financial data are used to assess the performance of the proposed methods in terms of misclassification rate. The numerical results show that the new methods perform better than the parametric LDA and the robust LDA based on the S-estimator. Thus, the new models can be recommended as alternatives to the parametric LDA when non-normality and heteroscedasticity (unequal covariance matrices) exist.},
  journal = {Journal of Mathematics and Statistics},
  publisher = {Science Publications}
}
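
For a concrete picture of the plug-in idea sketched in the abstract, the fragment below is a minimal Python illustration: classical two-group LDA in which the usual mean vector and pooled covariance are replaced by robust estimates. The fixed 10% trimming proportion, the use of SciPy's trim_mean and winsorize, and the simple winsorized covariance are assumptions made here for illustration only; they stand in for, and do not reproduce, the paper's automatic trimming procedure and its Spearman's-rho-based covariance.

```python
# Sketch of robust plug-in LDA: replace the sample mean and pooled covariance
# with a trimmed-mean location and a winsorized covariance (illustrative
# stand-ins for the estimators proposed in the paper).
import numpy as np
from scipy.stats import trim_mean
from scipy.stats.mstats import winsorize

def robust_location_scatter(X, prop=0.1):
    """Trimmed-mean location and winsorized covariance for one group."""
    loc = trim_mean(X, proportiontocut=prop, axis=0)            # robust location
    Xw = np.column_stack([np.asarray(winsorize(X[:, j], limits=(prop, prop)))
                          for j in range(X.shape[1])])          # winsorize each variable
    scatter = np.cov(Xw, rowvar=False)                          # winsorized covariance
    return loc, scatter

def fit_robust_lda(X1, X2, prop=0.1):
    """Two-group linear discriminant rule with robust plug-in estimates."""
    m1, S1 = robust_location_scatter(X1, prop)
    m2, S2 = robust_location_scatter(X2, prop)
    n1, n2 = len(X1), len(X2)
    Sp = ((n1 - 1) * S1 + (n2 - 1) * S2) / (n1 + n2 - 2)        # pooled robust scatter
    w = np.linalg.solve(Sp, m1 - m2)                            # discriminant coefficients
    c = 0.5 * w @ (m1 + m2)                                     # cut-off point
    return w, c

def classify(X, w, c):
    """Assign group 1 if the discriminant score exceeds the cut-off, else group 2."""
    return np.where(X @ w > c, 1, 2)
```

Here fit_robust_lda returns the coefficient vector and cut-off of the linear rule, and classify assigns observations to group 1 when their discriminant score exceeds the cut-off, mirroring the form of the parametric rule described in the abstract.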