@Article{MarcalRusinol2018,
  author="Mar{\c{c}}al Rusi{\~n}ol and J. Chazalon and Katerine Diaz",
  title="Augmented Songbook: an Augmented Reality Educational Application for Raising Music Awareness",
  journal="Multimedia Tools and Applications",
  year="2018",
  volume="77",
  number="11",
  pages="13773--13798",
  optkeywords="Augmented reality; Document image matching; Educational applications",
  abstract="This paper presents the development of an Augmented Reality mobile application that aims at sensitizing young children to abstract concepts of music, such as musical notation or the idea of rhythm. Recent studies in Augmented Reality for education suggest that such technologies have multiple benefits for students, including younger ones. As document image acquisition and processing gain maturity on mobile platforms, we explore how to build a markerless, real-time application that augments physical documents with didactic animations and interactive virtual content. Given a standard image processing pipeline, we compare the performance of different local descriptors at two key stages of the process. Results suggest alternatives to SIFT local descriptors, in terms of both result quality and computational efficiency, for document model identification as well as perspective transform estimation. All experiments are performed on an original, public dataset that we introduce here.",
  optnote="DAG; ADAS; 600.084; 600.121; 600.118; 600.129",
  doi="10.1007/s11042-017-4991-4",
  file=":http://refbase.cvc.uab.es/files/RCD2017.pdf:PDF"
}
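A minimal sketch, assuming OpenCV with SIFT, of the kind of two-stage pipeline the abstract alludes to: document model identification by local-descriptor matching, followed by perspective transform (homography) estimation. The file names, ratio-test threshold, and match-count cutoff are illustrative placeholders; this is not the authors' implementation.

# Illustrative sketch only (not the paper's code): markerless document matching
# with local descriptors, covering the two pipeline stages named in the abstract.
import cv2
import numpy as np

# Placeholder inputs: a reference document image and a captured mobile frame.
model = cv2.imread("songbook_page_model.png", cv2.IMREAD_GRAYSCALE)
frame = cv2.imread("camera_frame.png", cv2.IMREAD_GRAYSCALE)

sift = cv2.SIFT_create()
kp_m, des_m = sift.detectAndCompute(model, None)
kp_f, des_f = sift.detectAndCompute(frame, None)

# Stage 1: identify the document model by counting good descriptor matches
# (Lowe ratio test); the application would repeat this against every known page.
matcher = cv2.BFMatcher(cv2.NORM_L2)
knn = matcher.knnMatch(des_m, des_f, k=2)
good = [m for m, n in knn if m.distance < 0.75 * n.distance]

if len(good) > 10:
    # Stage 2: estimate the perspective transform between model and frame with
    # RANSAC, so virtual content can be warped onto the physical page.
    src = np.float32([kp_m[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    dst = np.float32([kp_f[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
    H, inliers = cv2.findHomography(src, dst, cv2.RANSAC, 5.0)
    print("Estimated homography:\n", H)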