We consider the problem of adaptation to the margin in binary classification. We suggest a penalized empirical risk minimization classifier that adaptively attains, up to a logarithmic factor, fast optimal rates of convergence for the excess risk, that is, rates that can be faster than n^{-1/2}, where n is the sample size. We show that our method also gives adaptive estimators for the problem of edge estimation.
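For orientation, the display below is a generic sketch of a penalized empirical risk minimization classifier over a class \mathcal{F} of candidate decision functions, together with the excess risk by which it is evaluated; the class \mathcal{F}, the penalty \operatorname{pen}(\cdot) and the 0-1 loss are illustrative placeholders only and are not the authors' specific square root penalty construction.

% Generic penalized ERM sketch; pen(f) is a placeholder, not the paper's square root penalty.
\[
\hat f_n \in \operatorname*{arg\,min}_{f \in \mathcal{F}}
\left\{ \frac{1}{n}\sum_{i=1}^{n} \mathbf{1}\{Y_i \neq f(X_i)\} + \operatorname{pen}(f) \right\},
\qquad
\mathcal{E}(\hat f_n) = \mathbb{P}\{Y \neq \hat f_n(X)\} - \inf_{f}\, \mathbb{P}\{Y \neq f(X)\}.
\]

"Fast rates" here means that the excess risk \mathcal{E}(\hat f_n) can decay faster than n^{-1/2} under a margin (low-noise) condition.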
@article{1120224100,
author = {Tsybakov, A. B. and van de Geer, S. A.},
title = {Square root penalty: Adaptation to the margin in classification and in edge estimation},
journal = {Ann. Statist.},
volume = {33},
number = {3},
year = {2005},
pages = {1203--1224},
language = {en},
url = {http://dml.mathdoc.fr/item/1120224100}
}
Tsybakov, A. B.; van de Geer, S. A. Square root penalty: Adaptation to the margin in classification and in edge estimation. Ann. Statist., Vol. 33 (2005), no. 3, pp. 1203-1224. http://gdmltest.u-ga.fr/item/1120224100/