@Article{OriolRamosTerrades2009,
  author   = "Oriol Ramos Terrades and Ernest Valveny and Salvatore Tabbone",
  title    = "Optimal Classifier Fusion in a Non-Bayesian Probabilistic Framework",
  journal  = "IEEE Transactions on Pattern Analysis and Machine Intelligence",
  year     = "2009",
  volume   = "31",
  number   = "9",
  pages    = "1630--1644",
  abstract = "Combining the outputs of several classifiers is one strategy for improving classification rates in general-purpose classification systems, and some of the most common approaches can be explained using Bayes' formula. In this paper, we tackle the problem of classifier combination within a non-Bayesian probabilistic framework. This approach permits us to derive two linear combination rules that minimize misclassification rates under some constraints on the distribution of classifiers. To show the validity of this approach, we compare it with other popular combination rules, both theoretically on a synthetic data set and experimentally on two standard databases: the MNIST handwritten digit database and the GREC symbol database. Results on the synthetic data set confirm the theoretical analysis, and results on real data show that the proposed methods outperform other common combination schemes.",
  optnote  = "DAG",
  issn     = "0162-8828",
  doi      = "10.1109/TPAMI.2008.224"
}