@conference {cManas, title = {Seasonal Contrast: Unsupervised Pre-Training from Uncurated Remote Sensing Data}, booktitle = {International Conference on Computer Vision (ICCV)}, year = {2021}, publisher = {IEEE/CVF}, organization = {IEEE/CVF}, address = {Virtual}, abstract = {

Remote sensing and automatic earth monitoring are key to solving global-scale challenges such as disaster prevention, land use monitoring, or tackling climate change. Although there exist vast amounts of remote sensing data, most of it remains unlabeled and thus inaccessible for supervised learning algorithms. Transfer learning approaches can reduce the data requirements of deep learning algorithms. However, most of these methods are pre-trained on ImageNet and their generalization to remote sensing imagery is not guaranteed due to the domain gap. In this work, we propose Seasonal Contrast (SeCo), an effective pipeline to leverage unlabeled data for in-domain pre-training of remote sensing representations. The SeCo pipeline is composed of two parts. First, a principled procedure to gather large-scale, unlabeled and uncurated remote sensing datasets containing images from multiple Earth locations at different timestamps. Second, a self-supervised algorithm that takes advantage of time and position invariance to learn transferable representations for remote sensing applications. We empirically show that models trained with SeCo achieve better performance than their ImageNet pre-trained counterparts and state-of-the-art self-supervised learning methods on multiple downstream tasks. The datasets and models in SeCo will be made public to facilitate transfer learning and enable rapid progress in remote sensing applications.

}, url = {https://arxiv.org/abs/2103.16607}, author = {Ma{\~n}as, Oscar and Lacoste, Alexandre and Gir{\'o}-i-Nieto, Xavier and Vazquez, David and Rodr{\'\i}guez, Pau} }