@InProceedings{AgataLapedriza2014,
  author    = "Agata Lapedriza and David Masip and D. Sanchez",
  title     = "Emotions Classification using Facial Action Units Recognition",
  booktitle = "17th International Conference of the Catalan Association for Artificial Intelligence",
  year      = "2014",
  volume    = "269",
  pages     = "55--64",
  abstract  = "In this work we build a system for automatic emotion classification from image sequences. We analyze subtle changes in facial expressions by detecting a subset of 12 representative facial action units (AUs). We then classify emotions based on the output of these AU classifiers, i.e. the presence or absence of each AU. The AU classification relies on a set of spatio-temporal geometric and appearance features for facial representation, which are fused within the emotion classifier. A decision tree is trained for emotion classification, making the resulting model easy to interpret by capturing the combinations of AU activations that lead to a particular emotion. On the Cohn-Kanade database, the proposed system classifies 7 emotions with a mean accuracy of nearly 90\%, attaining a recognition accuracy similar to that of non-interpretable models that are not based on AU detection.",
  optnote   = "OR;MV",
  isbn      = "978-1-61499-451-0",
  doi       = "10.3233/978-1-61499-452-7-55"
}