@Article{LuYu2022,
  author      = "Lu Yu and Xialei Liu and Joost van de Weijer",
  title       = "Self-Training for Class-Incremental Semantic Segmentation",
  journal     = "IEEE Transactions on Neural Networks and Learning Systems",
  year        = "2022",
  optkeywords = "Class-incremental learning; Self-training; Semantic segmentation",
  abstract    = "In class-incremental semantic segmentation, we have no access to the labeled data of previous tasks. Therefore, when incrementally learning new classes, deep neural networks suffer from catastrophic forgetting of previously learned knowledge. To address this problem, we propose a self-training approach that leverages unlabeled data for rehearsal of previous knowledge. Specifically, we first learn a temporary model for the current task; pseudo-labels for the unlabeled data are then computed by fusing information from the old model of the previous task and the current temporary model. In addition, conflict reduction is proposed to resolve conflicts between the pseudo-labels generated by the old and temporary models. We show that maximizing self-entropy can further improve results by smoothing overconfident predictions. Interestingly, our experiments show that the auxiliary data can differ from the training data and that even general-purpose but diverse auxiliary data can lead to large performance gains. The experiments demonstrate state-of-the-art results, with a relative gain of up to 114\% on Pascal VOC 2012 and 8.5\% on the more challenging ADE20K compared to previous state-of-the-art methods.",
  optnote     = "LAMP; 600.147; 611.008",
  doi         = "10.1109/TNNLS.2022.3155746",
  file        = ":http://refbase.cvc.uab.es/files/YLW2022.pdf:PDF"
}
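
The pseudo-label fusion, conflict reduction, and self-entropy steps summarized in the
abstract can be illustrated with a minimal sketch. The snippet below is a hypothetical
NumPy illustration, not the authors' implementation: the confidence threshold, the
IGNORE index, and the function names (fuse_pseudo_labels, self_entropy) are assumptions
made for the example. (BibTeX ignores text between entries, so this annotation does not
affect parsing.)

import numpy as np

IGNORE = 255  # common "ignore" index in segmentation losses (assumption)

def fuse_pseudo_labels(p_old, p_tmp, conf=0.9):
    """Fuse per-pixel predictions of the old and temporary models.

    p_old: (C_old, H, W) softmax map of the old model over previous classes.
    p_tmp: (C, H, W) softmax map of the temporary model over all classes (C >= C_old).
    Returns an (H, W) pseudo-label map with conflicting pixels set to IGNORE.
    """
    y_old, c_old = p_old.argmax(axis=0), p_old.max(axis=0)
    y_tmp, c_tmp = p_tmp.argmax(axis=0), p_tmp.max(axis=0)
    # keep confident predictions of the temporary model, else ignore
    pseudo = np.where(c_tmp >= conf, y_tmp, IGNORE)
    # where the old model is confident about a previous class, trust it
    trust_old = c_old >= conf
    pseudo = np.where(trust_old, y_old, pseudo)
    # conflict reduction (simplified): both models confident but disagreeing
    # -> mark the pixel as IGNORE instead of picking a side
    conflict = trust_old & (c_tmp >= conf) & (y_old != y_tmp)
    pseudo[conflict] = IGNORE
    return pseudo

def self_entropy(p, eps=1e-8):
    """Mean per-pixel entropy of a softmax map p of shape (C, H, W).

    Adding -lambda * self_entropy(p) to the training loss maximizes entropy,
    smoothing overconfident predictions as described in the abstract.
    """
    return float(-(p * np.log(p + eps)).sum(axis=0).mean())

# toy usage with random softmax maps
rng = np.random.default_rng(0)
def softmax(x):
    e = np.exp(x - x.max(axis=0))
    return e / e.sum(axis=0)
p_old = softmax(rng.normal(size=(3, 4, 4)))   # old model: 3 previous classes
p_tmp = softmax(rng.normal(size=(5, 4, 4)))   # temporary model: 5 classes total
print(fuse_pseudo_labels(p_old, p_tmp))
print(self_entropy(p_tmp))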