Back
Training Support Vector Machines with Multiple Equality Constraints
In this paper we present a primal-dual decomposition algorithm for support vector machine training. As with existing methods that use very small working sets (such as Sequential Minimal Optimization (SMO), Successive Over-Relaxation (SOR) or the Kernel Adatron (KA)), our method scales well, is straightforward to implement, and does not require an external QP solver. Unlike SMO, SOR and KA, the method is applicable to a large number of SVM formulations regardless of the number of equality constraints involved. The effectiveness of our algorithm is demonstrated on a more difficult SVM variant in this respect, namely semi-parametric support vector regression.
@inproceedings{3511,
  author       = {Kienzle, W. and Sch{\"o}lkopf, B.},
  title        = {Training Support Vector Machines with Multiple Equality Constraints},
  booktitle    = {Proceedings of the 16th European Conference on Machine Learning ({ECML} 2005)},
  series       = {Lecture Notes in Computer Science},
  volume       = {3720},
  editor       = {Carbonell, J. G. and Siekmann, J.},
  pages        = {182--193},
  publisher    = {Springer},
  address      = {Berlin, Germany},
  month        = nov,
  year         = {2005},
  organization = {Max-Planck-Gesellschaft},
  abstract     = {In this paper we present a primal-dual decomposition algorithm for support vector machine training. As with existing methods that use very small working sets (such as Sequential Minimal Optimization (SMO), Successive Over-Relaxation (SOR) or the Kernel Adatron (KA)), our method scales well, is straightforward to implement, and does not require an external QP solver. Unlike SMO, SOR and KA, the method is applicable to a large number of SVM formulations regardless of the number of equality constraints involved. The effectiveness of our algorithm is demonstrated on a more difficult SVM variant in this respect, namely semi-parametric support vector regression.},
  slug         = {3511},
}