@inproceedings{564549f369374074aa962d29e90f9034,
  title     = {Smooth Minimization of Nonsmooth Functions with Parallel Coordinate Descent Methods},
  abstract  = {We study the performance of a family of randomized parallel coordinate descent methods for minimizing a nonsmooth nonseparable convex function. The problem class includes as a special case L1-regularized L1 regression and the minimization of the exponential loss (``AdaBoost problem''). We assume that the input data defining the loss function is contained in a sparse $m \times n$ matrix $A$ with at most $\omega$ nonzeros in each row and that the objective function has a ``max structure'', allowing us to smooth it. Our main contribution consists in identifying parameters with a closed-form expression that guarantees a parallelization speedup that depends on basic quantities of the problem (like its size and the number of processors). The theory relies on a fine study of the Lipschitz constant of the smoothed objective restricted to low dimensional subspaces and shows an increased acceleration for sparser problems.},
  keywords  = {Coordinate descent, Lipschitz constant, Parallel computing, Smoothing},
  author    = {Fercoq, Olivier and Richt{\'a}rik, Peter},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; Modeling and Optimization: Theory and Applications Conference, MOPTA 2017 ; Conference date: 16-08-2017 Through 18-08-2017},
  year      = {2019},
  month     = jan,
  day       = {1},
  doi       = {10.1007/978-3-030-12119-8_4},
  language  = {English},
  isbn      = {9783030121181},
  series    = {Springer Proceedings in Mathematics and Statistics},
  publisher = {Springer New York LLC},
  pages     = {57--96},
  editor    = {Pint{\'e}r, J{\'a}nos D. and Terlaky, Tam{\'a}s},
  booktitle = {Modeling and Optimization},
}