@Article{JianzhyGuo2018,
  author   = "Jianzhu Guo and Zhen Lei and Jun Wan and Egils Avots and Noushin Hajarolasvadi and Boris Knyazev and Artem Kuharenko and Julio C. S. Jacques Junior and Xavier Baro and Hasan Demirel and Sergio Escalera and Juri Allik and Gholamreza Anbarjafari",
  title    = "Dominant and Complementary Emotion Recognition from Still Images of Faces",
  journal  = "IEEE Access",
  year     = "2018",
  volume   = "6",
  pages    = "26391--26403",
  abstract = "Emotion recognition plays a key role in affective computing. Recently, fine-grained emotion analysis, such as compound facial expressions of emotion, has attracted strong interest from researchers in affective computing. A compound facial emotion combines a dominant and a complementary emotion (e.g., happily-disgusted and sadly-fearful) and is therefore more detailed than the seven classical facial emotions (e.g., happy, disgust, and so on). Current studies on compound emotions are limited to data sets with few categories and unbalanced class distributions, whose labels are obtained automatically by machine learning-based algorithms and may therefore be inaccurate. To address these problems, we released the iCV-MEFED data set, which includes 50 classes of compound emotions with labels assessed by psychologists. The task is challenging due to the high similarity of compound facial emotions from different categories. In addition, we organized a challenge based on the proposed iCV-MEFED data set, held at the FG 2017 workshop. In this paper, we analyze the top three winning methods and perform further detailed experiments on the proposed data set. Experiments indicate that pairs of compound emotions (e.g., surprisingly-happy vs. happily-surprised) are more difficult to recognize than the seven basic emotions. However, we hope the proposed data set can help pave the way for further research on compound facial emotion recognition.",
  optnote  = "HUPBA; no proj",
  doi      = "10.1109/ACCESS.2018.2831927"
}