Keywords: quasiconvex functional.
@article{TIMM_2021_27_4_a12,
author = {F. S. Stonyakin and S. S. Ablaev and I. V. Baran},
title = {Adaptive gradient-type methods for optimization problems with relative error and sharp minimum},
journal = {Trudy Instituta matematiki i mehaniki},
pages = {175--188},
year = {2021},
volume = {27},
number = {4},
language = {ru},
url = {http://geodesic.mathdoc.fr/item/TIMM_2021_27_4_a12/}
}
TY  - JOUR
AU  - F. S. Stonyakin
AU  - S. S. Ablaev
AU  - I. V. Baran
TI  - Adaptive gradient-type methods for optimization problems with relative error and sharp minimum
JO  - Trudy Instituta matematiki i mehaniki
PY  - 2021
SP  - 175
EP  - 188
VL  - 27
IS  - 4
UR  - http://geodesic.mathdoc.fr/item/TIMM_2021_27_4_a12/
LA  - ru
ID  - TIMM_2021_27_4_a12
ER  -
%0 Journal Article
%A F. S. Stonyakin
%A S. S. Ablaev
%A I. V. Baran
%T Adaptive gradient-type methods for optimization problems with relative error and sharp minimum
%J Trudy Instituta matematiki i mehaniki
%D 2021
%P 175-188
%V 27
%N 4
%U http://geodesic.mathdoc.fr/item/TIMM_2021_27_4_a12/
%G ru
%F TIMM_2021_27_4_a12
F. S. Stonyakin; S. S. Ablaev; I. V. Baran. Adaptive gradient-type methods for optimization problems with relative error and sharp minimum. Trudy Instituta Matematiki i Mekhaniki UrO RAN, Vol. 27 (2021), no. 4, pp. 175–188. http://geodesic.mathdoc.fr/item/TIMM_2021_27_4_a12/
[1] Gasnikov A. V., Modern Numerical Optimization Methods: The Universal Gradient Descent Method, MTsNMO, Moscow, 2021, 272 pp.
[2] Nemirovskii A. S., Yudin D. B., Problem Complexity and Method Efficiency in Optimization, Nauka, Moscow, 1979, 384 pp.
[3] Nesterov Yu. E., Algorithmic Convex Optimization, Dr. Sci. (Phys.-Math.) dissertation, Moscow Institute of Physics and Technology, Moscow, 2013, 367 pp.
[4] Nesterov Yu. E., Effective Methods in Nonlinear Programming, Radio i Svyaz, Moscow, 1989, 301 pp.
[5] Nesterov Yu. E., Convex Optimization Methods, MTsNMO, Moscow, 2010, 281 pp.
[6] Polyak B. T., "Minimization of nonsmooth functionals", Zhurn. Vychisl. Matematiki i Mat. Fiziki, 9:3 (1969), 509–521
[7] Tyurin A. I., Gasnikov A. V., "Fast gradient descent for convex minimization problems with an oracle that yields a $(\delta, L)$-model of the function at the requested point", Zhurn. Vychisl. Matematiki i Mat. Fiziki, 59:7 (2019), 1137–1150
[8] Devolder O., Glineur F., Nesterov Yu., "First-order methods of smooth convex optimization with inexact oracle", Math. Programming, 146:1 (2014), 37–75
[9] Hardt M., Ma T., Recht B., “Gradient descent learns linear dynamical systems”, J. of Machine Learning Research, 19:29 (2018), 1–44
[10] Hinder O., Sidford A., Sohoni N. S., “Near-optimal methods for minimizing star-convex functions and beyond”, Proceedings of Machine Learning Research, 125 (2020), 1894–1938
[11] Hu Y., Li J., Yu C. K. W., Convergence rates of subgradient methods for quasi-convex optimization problems, 2019, 28 pp., arXiv: 1910.10879
[12] Jiang R., Li X., Hölderian error bounds and Kurdyka–Łojasiewicz inequality for the trust region subproblem, 2020, 30 pp., arXiv: 1911.11955
[13] Johnstone P. R., Moulin P., Faster subgradient methods for functions with Hölderian growth, 2018, 50 pp., arXiv: 1704.00196
[14] Liu M., Yang T., "Adaptive accelerated gradient converging methods under Hölderian error bound condition", Proc. 31st Intern. Conf. on Neural Information Processing Systems (NIPS'17), 2017, 3107–3117
[15] Nesterov Yu., "Rounding of convex sets and efficient gradient methods for linear programming problems", Optimization Methods and Software, 23:1 (2008), 109–128
[16] Nesterov Yu., "Unconstrained convex minimization in relative scale", Math. Oper. Res., 34:1 (2009), 180–193
[17] Nesterov Yu., "Universal gradient methods for convex optimization problems", Math. Programming, 152:1-2(A) (2015), 381–404