Back
Multivariate Regression via Stiefel Manifold Constraints
We introduce a learning technique for regression between high-dimensional spaces. Standard methods typically reduce this task to many one-dimensional problems, with each output dimension considered independently. By contrast, in our approach the feature construction and the regression estimation are performed jointly, directly minimizing a loss function that we specify, subject to a rank constraint. A major advantage of this approach is that the loss is no longer chosen according to the algorithmic requirements, but can be tailored to the characteristics of the task at hand; the features will then be optimal with respect to this objective, and dependence between the outputs can be exploited.
@comment{DAGM Symposium 2004 paper, published in Springer LNCS vol. 3175.
  Fixed here: proceedings title moved from journal -> booktitle, LNCS series/volume
  split into series + volume, invalid field name "editors" -> "editor" (the old
  name is ignored by BibTeX), page range hyphen -> en-dash (--), editor names in
  unambiguous "Last, F." form. Key and author spellings kept unchanged so
  existing \cite{2845} references still work.}
@inproceedings{2845,
  title        = {Multivariate Regression via {Stiefel} Manifold Constraints},
  author       = {BakIr, G. and Gretton, A. and Franz, M. and Sch{\"o}lkopf, B.},
  booktitle    = {Pattern Recognition, Proceedings of the 26th {DAGM} Symposium},
  series       = {Lecture Notes in Computer Science},
  volume       = {3175},
  pages        = {262--269},
  editor       = {Rasmussen, C. E. and B{\"u}lthoff, H. H. and Sch{\"o}lkopf, B. and Giese, M. A.},
  publisher    = {Springer},
  address      = {Berlin, Germany},
  year         = {2004},
  organization = {Max-Planck-Gesellschaft},
  school       = {Biologische Kybernetik},
  abstract     = {We introduce a learning technique for regression between high-dimensional spaces. Standard methods typically reduce this task to many one-dimensional problems, with each output dimension considered independently. By contrast, in our approach the feature construction and the regression estimation are performed jointly, directly minimizing a loss function that we specify, subject to a rank constraint. A major advantage of this approach is that the loss is no longer chosen according to the algorithmic requirements, but can be tailored to the characteristics of the task at hand; the features will then be optimal with respect to this objective, and dependence between the outputs can be exploited.},
  slug         = {2845},
}