@InProceedings{KaiWang2022,
  author      = "Kai Wang and Xialei Liu and Andrew Bagdanov and Luis Herranz and Shangling Jui and Joost Van de Weijer",
  title       = "Incremental Meta-Learning via Episodic Replay Distillation for Few-Shot Image Recognition",
  booktitle   = "CVPR 2022 Workshop on Continual Learning (CLVision, 3rd Edition)",
  year        = "2022",
  pages       = "3728--3738",
  optkeywords = "Training; Computer vision; Image recognition; Upper bound; Conferences; Pattern recognition; Task analysis",
  abstract    = "In this paper we consider the problem of incremental meta-learning, in which classes are presented incrementally in discrete tasks. We propose Episodic Replay Distillation (ERD), which mixes classes from the current task with exemplars from previous tasks when sampling episodes for meta-learning. To allow training to benefit from as large a variety of classes as possible, which leads to more generalizable feature representations, we propose the cross-task meta loss. Furthermore, we propose episodic replay distillation, which also exploits exemplars for improved knowledge distillation. Experiments on four datasets demonstrate that ERD surpasses the state-of-the-art. In particular, on the more challenging one-shot, long-task-sequence scenarios, we reduce the gap between incremental meta-learning and the joint-training upper bound from 3.5\% / 10.1\% / 13.4\% / 11.7\% with the current state-of-the-art to 2.6\% / 2.9\% / 5.0\% / 0.2\% with our method on Tiered-ImageNet / Mini-ImageNet / CIFAR100 / CUB, respectively.",
  optnote     = "LAMP; 600.147",
  doi         = "10.1109/CVPRW56347.2022.00417",
  file        = ":http://refbase.cvc.uab.es/files/WLB2022.pdf:PDF"
}