@inproceedings{discovery10079536,
            title = {Thermostat-assisted continuously-tempered {Hamiltonian} {Monte Carlo} for {Bayesian} learning},
           author = {Luo, Rui and Wang, Jianhong and Yang, Yaodong and Zhu, Zhanxing and Wang, Jun},
        booktitle = {Advances in Neural Information Processing Systems 31 ({NIPS} 2018)},
           editor = {Bengio, Samy and Wallach, Hanna and Larochelle, Hugo and Grauman, Kristen and Cesa-Bianchi, Nicol{\`o} and Garnett, Roman},
           series = {Advances in Neural Information Processing Systems},
           volume = {31},
        publisher = {Neural Information Processing Systems Foundation, Inc.},
          address = {Montreal, Canada},
            month = dec,
             year = {2018},
             issn = {1049-5258},
              url = {https://papers.nips.cc/paper/8266-thermostat-assisted-continuously-tempered-hamiltonian-monte-carlo-for-bayesian-learning},
             note = {This version is the version of record. For information on re-use, please refer to the publisher's terms and conditions.},
         abstract = {We propose a new sampling method, the thermostat-assisted continuously-tempered
Hamiltonian Monte Carlo, for Bayesian learning on large datasets and multimodal
distributions. It simulates the Nos{\'e}-Hoover dynamics of a continuously-tempered
Hamiltonian system built on the distribution of interest. A significant advantage of
this method is that it is not only able to efficiently draw representative i.i.d. samples
when the distribution contains multiple isolated modes, but capable of adaptively
neutralising the noise arising from mini-batches and maintaining accurate sampling.
While the properties of this method have been studied using synthetic distributions,
experiments on three real datasets also demonstrated the gain of performance over
several strong baselines with various types of neural networks plunged in.}
}