Griebel, Michael; Oswald, Peter: Convergence analysis of online algorithms for vector-valued kernel regression. In: INS Preprints, 2302.
Online-Ausgabe in bonndoc: https://hdl.handle.net/20.500.11811/11572
@techreport{handle:20.500.11811/11572,
  author      = {Griebel, Michael and Oswald, Peter},
  title       = {Convergence Analysis of Online Algorithms for Vector-Valued Kernel Regression},
  institution = {Institut für Numerische Simulation},
  type        = {INS Preprint},
  number      = {2302},
  year        = {2023},
  month       = sep,
  abstract    = {We consider the problem of approximating the regression function from noisy vector-valued data by an online learning algorithm using an appropriate reproducing kernel Hilbert space ({RKHS}) as prior. In an online algorithm, i.i.d. samples become available one by one by a random process and are successively processed to build approximations to the regression function. We are interested in the asymptotic performance of such online approximation algorithms and show that the expected squared error in the {RKHS} norm can be bounded by {$C^2 (m+1)^{-s/(2+s)}$}, where {$m$} is the current number of processed data, the parameter {$0 < s \le 1$} expresses an additional smoothness assumption on the regression function and the constant {$C$} depends on the variance of the input noise, the smoothness of the regression function and further parameters of the algorithm.},
  url         = {https://hdl.handle.net/20.500.11811/11572},
}