Keywords: artificial neurons
@article{VUU_2018_28_2_a11,
  author   = {Nazarov, M. N.},
  title    = {Neural networks with dynamical coefficients and adjustable connections on the basis of integrated backpropagation},
  journal  = {Vestnik Udmurtskogo universiteta. Matematika, mehanika, kompʹûternye nauki},
  pages    = {260--274},
  year     = {2018},
  volume   = {28},
  number   = {2},
  language = {en},
  url      = {http://geodesic.mathdoc.fr/item/VUU_2018_28_2_a11/},
}
TY - JOUR AU - M. N. Nazarov TI - Neural networks with dynamical coefficients and adjustable connections on the basis of integrated backpropagation JO - Vestnik Udmurtskogo universiteta. Matematika, mehanika, kompʹûternye nauki PY - 2018 SP - 260 EP - 274 VL - 28 IS - 2 UR - http://geodesic.mathdoc.fr/item/VUU_2018_28_2_a11/ LA - en ID - VUU_2018_28_2_a11 ER -
%0 Journal Article %A M. N. Nazarov %T Neural networks with dynamical coefficients and adjustable connections on the basis of integrated backpropagation %J Vestnik Udmurtskogo universiteta. Matematika, mehanika, kompʹûternye nauki %D 2018 %P 260-274 %V 28 %N 2 %U http://geodesic.mathdoc.fr/item/VUU_2018_28_2_a11/ %G en %F VUU_2018_28_2_a11
M. N. Nazarov. Neural networks with dynamical coefficients and adjustable connections on the basis of integrated backpropagation. Vestnik Udmurtskogo universiteta. Matematika, mehanika, kompʹûternye nauki, Tome 28 (2018) no. 2, pp. 260-274. http://geodesic.mathdoc.fr/item/VUU_2018_28_2_a11/
[1] Dreyfus S.E., “Artificial neural networks, back propagation, and the Kelley–Bryson gradient procedure”, Journal of Guidance, Control and Dynamics, 13:5 (1990), 926–928 | DOI | MR
[2] Broomhead D.S., Lowe D., “Multivariable functional interpolation and adaptive networks”, Complex Systems, 2 (1988), 321–355 http://sci2s.ugr.es/keel/pdf/algorithm/articulo/1988-Broomhead-CS.pdf | MR | Zbl
[3] Lecun Y., Bottou L., Bengio Y., Haffner P., “Gradient-based learning applied to document recognition”, Proceedings of the IEEE, 86:11 (1998), 2278–2324 | DOI
[4] Greff K., Srivastava R.K., Koutnik J., Steunebrink B.R., Schmidhuber J., “LSTM: A search space odyssey”, IEEE Transactions on Neural Networks and Learning Systems, 28:10 (2017), 2222–2232 | DOI | MR
[5] Chen G., A gentle tutorial of recurrent neural network with error backpropagation, 2016, arXiv: 1610.02583v3 [cs]
[6] Krizhevsky A., Sutskever I., Hinton G.E., “ImageNet classification with deep convolutional neural networks”, Communications of the ACM, 60:6 (2017), 84–90 | DOI
[7] Girshick R., Donahue J., Darrell T., Malik J., Rich feature hierarchies for accurate object detection and semantic segmentation, 2014, arXiv: 1311.2524v5 [cs]
[8] Park J., Sandberg I.W., “Universal approximation using radial-basis-function networks”, Neural Computation, 3:2 (1991), 246–257 | DOI
[9] Pham V., Bluche T., Kermorvant C., Louradour J., Dropout improves recurrent neural networks for handwriting recognition, 2013, arXiv: 1312.4569v2 [cs]
[10] Graves A., Generating sequences with recurrent neural networks, 2014, arXiv: 1308.0850v5 [cs] | MR
[11] Sutskever I., Vinyals O., Le Q.V., Sequence to sequence learning with neural networks, 2014, arXiv: 1409.3215v3 [cs]
[12] Sak H., Senior A., Beaufays F., “Long short-term memory recurrent neural network architectures for large scale acoustic modeling”, Proceedings of the Annual Conference of the International Speech Communication Association (Singapore, 2014), 338–342 https://static.googleusercontent.com/media/research.google.com/en//pubs/archive/43905.pdf
[13] Fan Y., Qian Y., Xie F., Soong F.K., “TTS synthesis with bidirectional LSTM based recurrent neural networks”, Proceedings of the Annual Conference of the International Speech Communication Association (Singapore, 2014), 1964–1968 https://pdfs.semanticscholar.org/564f/ed868f652f361bb3e345f6f94073d8f6f261.pdf
[14] Donahue J., Hendricks L.A., Guadarrama S., Rohrbach M., Venugopalan S., Saenko K., Darrell T., Long-term recurrent convolutional networks for visual recognition and description, 2016, arXiv: 1411.4389v4 [cs]
[15] Nazarov M.N., “Artificial neural network with modulation of synaptic coefficients”, Vestn. Samar. Gos. Tekhn. Univ., Ser. Fiz.-Mat. Nauki, 2:31 (2013), 58–71 | DOI
[16] Maslennikov O.V., Nekorkin V.I., “Adaptive dynamical networks”, Physics-Uspekhi, 60:7 (2017), 694–704 | DOI
[17] Srivastava N., Hinton G., Krizhevsky A., Sutskever I., Salakhutdinov R., “Dropout: A simple way to prevent neural networks from overfitting”, Journal of Machine Learning Research, 15 (2014), 1929–1958 http://www.cs.toronto.edu/~hinton/absps/JMLRdropout.pdf | MR | Zbl