@inproceedings{discovery10151383,
  author    = {Wang, Ruohan and Pontil, Massimiliano and Ciliberto, Carlo},
  title     = {The Role of Global Labels in Few-Shot Classification and How to Infer Them},
  booktitle = {Proceedings of the 35th Conference on Neural Information Processing Systems ({NeurIPS} 2021)},
  series    = {Advances in Neural Information Processing Systems},
  volume    = {34},
  pages     = {27160--27170},
  year      = {2021},
  publisher = {Curran Associates, Inc.},
  url       = {https://proceedings.neurips.cc/paper/2021/file/e3b6fb0fd4df098162eede3313c54a8d-Paper.pdf},
  note      = {This version is the version of record. For information on re-use, please refer to the publisher's terms and conditions.},
  abstract  = {Few-shot learning is a central problem in meta-learning, where learners must quickly adapt to new tasks given limited training data. Recently, feature pre-training has become a ubiquitous component in state-of-the-art meta-learning methods and is shown to provide significant performance improvement. However, there is limited theoretical understanding of the connection between pre-training and meta-learning. Further, pre-training requires global labels shared across tasks, which may be unavailable in practice. In this paper, we show why exploiting pre-training is theoretically advantageous for meta-learning, and in particular the critical role of global labels. This motivates us to propose Meta Label Learning (MeLa), a novel meta-learning framework that automatically infers global labels to obtain robust few-shot models. Empirically, we demonstrate that MeLa is competitive with existing methods and provide extensive ablation experiments to highlight its key properties.},
}