@Misc{Md.MostafaKamalSarker2019,
  author   = "Md. Mostafa Kamal Sarker and Hatem A. Rashwan and Mohamed Abdel-Nasser and Vivek Kumar Singh and Syeda Furruka Banu and Farhan Akram and Forhad U. H. Chowdhury and Kabir Ahmed Choudhury and Sylvie Chambon and Petia Radeva and Domenec Puig",
  title    = "MobileGAN: Skin Lesion Segmentation Using a Lightweight Generative Adversarial Network",
  year     = "2019",
  note     = "CoRR abs/1907.00856",
  abstract = "Skin lesion segmentation in dermoscopic images is a challenge due to their blurry and irregular boundaries. Most segmentation approaches based on deep learning are time and memory consuming because of their hundreds of millions of parameters. Consequently, it is difficult to apply them to real dermatoscope devices with limited GPU and memory resources. In this paper, we propose a lightweight and efficient Generative Adversarial Network (GAN) model, called MobileGAN, for skin lesion segmentation. More precisely, MobileGAN combines 1D non-bottleneck factorization networks with position and channel attention modules in a GAN model. The proposed model is evaluated on the test dataset of the ISBI 2017 challenge and the validation dataset of the ISIC 2018 challenge. Although the proposed network has only 2.35 million parameters, it is still comparable with the state of the art. The experimental results show that our MobileGAN obtains comparable performance with an accuracy of 97.61\%.",
  optnote  = "MILAB; not mentioned; exported from refbase (http://refbase.cvc.uab.es/show.php?record=3384), last updated on Thu, 28 Jan 2021 10:29:04 +0100",
  opturl   = "https://arxiv.org/abs/1907.00856"
}