Convex learning algorithms, such as Support Vector Machines (SVMs), are often seen as highly desirable because they offer strong practical properties and are amenable to theoretical analysis. However, in this work we show how nonconvexity can provide scalability advantages over convexity. We show how concave-convex programming can be applied to produce (i) faster SVMs where training errors are no longer support vectors, and (ii) much faster Transductive SVMs.
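For context, the chapter's first application replaces the SVM hinge loss with the ramp loss R_s(z) = H_1(z) - H_s(z), where H_a(z) = max(0, a - z) is a shifted hinge, and optimizes it with the concave-convex procedure (CCCP): each outer iteration replaces the concave term -H_s by its tangent, leaving an ordinary convex hinge-loss problem. The snippet below is a minimal, illustrative primal-space sketch of that iteration for a linear classifier; the helper names (fit_ramp_svm, hinge) and the use of SciPy's generic L-BFGS-B solver are assumptions of this sketch, not the chapter's kernelized dual implementation.

```python
# Illustrative sketch only: a linear ramp-loss SVM trained with CCCP.
# The helper names and the generic L-BFGS-B solver are assumptions for
# this sketch; the chapter itself works with kernelized dual SVM solvers.
import numpy as np
from scipy.optimize import minimize

def hinge(z, a=1.0):
    """Shifted hinge H_a(z) = max(0, a - z)."""
    return np.maximum(0.0, a - z)

def fit_ramp_svm(X, y, C=1.0, s=-1.0, n_outer=10):
    """CCCP outer loop for J(w) = 0.5*||w||^2 + C * sum_i R_s(y_i w.x_i),
    with R_s = H_1 - H_s.  Each iteration linearizes the concave part
    -C*H_s at the current w, then solves the remaining convex problem."""
    n, d = X.shape
    w = np.zeros(d)
    for _ in range(n_outer):
        margins = y * (X @ w)
        # Gradient of the linearized concave part: examples whose margin
        # falls below s contribute C * y_i * x_i, so the convex subproblem
        # effectively gives up on them; this is how training errors can
        # cease to be support vectors.
        beta = C * (margins < s).astype(float)
        grad_concave = (beta * y) @ X

        def convex_objective(v):
            m = y * (X @ v)
            return 0.5 * v @ v + C * hinge(m).sum() + grad_concave @ v

        w = minimize(convex_objective, w, method="L-BFGS-B").x
    return w

# Toy usage: two Gaussian blobs with a few flipped labels; the ramp loss
# caps the influence of those outliers.
rng = np.random.default_rng(0)
signs = rng.choice([-1.0, 1.0], size=(200, 1))
X = rng.normal(size=(200, 2)) + 1.5 * signs
y = np.sign(X[:, 0] + X[:, 1])
y[:10] *= -1  # label noise
w_hat = fit_ramp_svm(X, y)
print("learned weights:", w_hat)
```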
Author(s): Collobert, R. and Sinz, F. and Weston, J. and Bottou, L.
Book Title: Large Scale Kernel Machines
Pages: 275-300
Year: 2007
Month: September
Series: Neural Information Processing
Editors: Bottou, L., Chapelle, O., DeCoste, D., Weston, J.
Publisher: MIT Press
BibTeX Type: Book Chapter (inbook)
Address: Cambridge, MA, USA
Language: en
Organization: Max-Planck-Gesellschaft
School: Biologische Kybernetik
BibTeX
@inbook{4435,
  title        = {Trading Convexity for Scalability},
  booktitle    = {Large Scale Kernel Machines},
  abstract     = {Convex learning algorithms, such as Support Vector Machines (SVMs), are often seen as highly desirable because they offer strong practical properties and are amenable to theoretical analysis. However, in this work we show how nonconvexity can provide scalability advantages over convexity. We show how concave-convex programming can be applied to produce (i) faster SVMs where training errors are no longer support vectors, and (ii) much faster Transductive SVMs.},
  pages        = {275-300},
  series       = {Neural Information Processing},
  editor       = {Bottou, L. and Chapelle, O. and DeCoste, D. and Weston, J.},
  publisher    = {MIT Press},
  organization = {Max-Planck-Gesellschaft},
  school       = {Biologische Kybernetik},
  address      = {Cambridge, MA, USA},
  month        = sep,
  year         = {2007},
  author       = {Collobert, R. and Sinz, F. and Weston, J. and Bottou, L.}
}