@inproceedings{ee8e2dbeeab44a47b6418aef7ec96693,
  title     = {Asynchronous Stochastic {Quasi-Newton} {MCMC} for Non-Convex Optimization Supplementary Document},
  author    = {Simsekli, Umut and Yildiz, Cagatay and Nguyen, Thanh Huy and Richard, Gael and Cemgil, A. Taylan},
  editor    = {Krause, Andreas and Dy, Jennifer},
  booktitle = {35th International Conference on Machine Learning, {ICML} 2018},
  series    = {35th International Conference on Machine Learning, {ICML} 2018},
  publisher = {International Machine Learning Society ({IMLS})},
  pages     = {7452--7459},
  year      = {2018},
  month     = jan,
  day       = {1},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2018 by the Authors All rights reserved.; 35th International Conference on Machine Learning, ICML 2018 ; Conference date: 10-07-2018 Through 15-07-2018},
  abstract  = {Recent studies have illustrated that stochastic gradient Markov Chain Monte Carlo techniques have a strong potential in non-convex optimization, where local and global convergence guarantees can be shown under certain conditions. By building up on this recent theory, in this study, we develop an asynchronous-parallel stochastic L-BFGS algorithm for non-convex optimization. The proposed algorithm is suitable for both distributed and shared-memory settings. We provide formal theoretical analysis and show that the proposed method achieves an ergodic convergence rate of {$\mathcal{O}(1/\sqrt{N})$} (N being the total number of iterations) and it can achieve a linear speedup under certain conditions. We perform several experiments on both synthetic and real datasets. The results support our theory and show that the proposed algorithm provides a significant speedup over the recently proposed synchronous distributed L-BFGS algorithm.},
}