Back
Learning low-rank output kernels
Output kernel learning techniques allow one to simultaneously learn a vector-valued function and a positive semidefinite matrix which describes the relationships between the outputs. In this paper, we introduce a new formulation that imposes a low-rank constraint on the output kernel and operates directly on a factor of the kernel matrix. First, we investigate the connection between output kernel learning and a regularization problem for an architecture with two layers. Then, we show that a variety of methods such as nuclear norm regularized regression, reduced-rank regression, principal component analysis, and low-rank matrix approximation can be seen as special cases of the output kernel learning framework. Finally, we introduce a block coordinate descent strategy for learning low-rank output kernels.
@inproceedings{DinuzzoF2011,
  title         = {Learning Low-Rank Output Kernels},
  author        = {Dinuzzo, F. and Fukumizu, K.},
  booktitle     = {JMLR Workshop and Conference Proceedings},
  volume        = {20},
  pages         = {181--196},
  editor        = {Hsu, C.-N. and Lee, W. S.},
  publisher     = {JMLR},
  address       = {Cambridge, MA, USA},
  month         = nov,
  year          = {2011},
  abstract      = {Output kernel learning techniques allow to simultaneously learn a vector-valued function and a positive semidefinite matrix which describes the relationships between the outputs. In this paper, we introduce a new formulation that imposes a low-rank constraint on the output kernel and operates directly on a factor of the kernel matrix. First, we investigate the connection between output kernel learning and a regularization problem for an architecture with two layers. Then, we show that a variety of methods such as nuclear norm regularized regression, reduced-rank regression, principal component analysis, and low rank matrix approximation can be seen as special cases of the output kernel learning framework. Finally, we introduce a block coordinate descent strategy for learning low-rank output kernels.},
  slug          = {dinuzzof2011},
  month_numeric = {11},
}