@article{PMFA_2024_69_4_a0,
  author   = {Buk, Zden\v{e}k},
  title    = {Nobelova cena za z\'asadn{\'\i} objevy a inovace v oblasti um\v{e}l\'ych neuronov\'ych s{\'\i}t{\'\i}. {Od} biologick\'e inspirace k modern{\'\i} um\v{e}l\'e inteligenci},
  journal  = {Pokroky matematiky, fyziky a astronomie},
  year     = {2024},
  volume   = {69},
  number   = {4},
  pages    = {193--219},
  language = {cs},
  url      = {http://geodesic.mathdoc.fr/item/PMFA_2024_69_4_a0/},
}
TY - JOUR AU - Buk, Zdeněk TI - Nobelova cena za zásadní objevy a inovace v oblasti umělých neuronových sítí. Od biologické inspirace k moderní umělé inteligenci JO - Pokroky matematiky, fyziky a astronomie PY - 2024 SP - 193 EP - 219 VL - 69 IS - 4 UR - http://geodesic.mathdoc.fr/item/PMFA_2024_69_4_a0/ LA - cs ID - PMFA_2024_69_4_a0 ER -
%0 Journal Article %A Buk, Zdeněk %T Nobelova cena za zásadní objevy a inovace v oblasti umělých neuronových sítí. Od biologické inspirace k moderní umělé inteligenci %J Pokroky matematiky, fyziky a astronomie %D 2024 %P 193-219 %V 69 %N 4 %U http://geodesic.mathdoc.fr/item/PMFA_2024_69_4_a0/ %G cs %F PMFA_2024_69_4_a0
Buk, Zdeněk. Nobelova cena za zásadní objevy a inovace v oblasti umělých neuronových sítí. Od biologické inspirace k moderní umělé inteligenci. Pokroky matematiky, fyziky a astronomie, Tome 69 (2024) no. 4, pp. 193-219. http://geodesic.mathdoc.fr/item/PMFA_2024_69_4_a0/
[1] Buk, Z., Koutník, J., Šnorek, M.: NEAT in HyperNEAT substituted with genetic programming. In: Kolehmainen, M., Toivanen, P., Beliczynski, B. (eds.): Adaptive and Natural Computing Algorithms, Springer, 2009, 243–252.
[2] Hinton, G. E., Sejnowski, T. J.: Optimal perceptual inference. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, 1983.
[3] Hinton, G. E., Sejnowski, T. J.: Learning and relearning in Boltzmann machines. In: Rumelhart, D. E., McClelland, J. L. (eds.): Parallel Distributed Processing: Explorations in the Microstructure of Cognition, Volume 1: Foundations, MIT Press, 1986, 282–317.
[4] Goodfellow, I. J., Pouget-Abadie, J., Mirza, M., Xu, B., Warde-Farley, D., Ozair, S., Courville, A., Bengio, Y.: Generative adversarial networks. [online]. | arXiv
[5] Haykin, S. S.: Neural networks and learning machines. Third edition, Pearson Education, 2009.
[6] He, K., Zhang, X., Ren, S., Sun, J.: Deep residual learning for image recognition. In: 2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), 2016, 770–778.
[7] Hebb, D.: The organization of behavior: A neuropsychological theory. John Wiley, 1949.
[8] Hochreiter, S., Schmidhuber, J.: Long short-term memory. Neural Comput. 9 (1997), 1735–1780. | DOI
[9] Holland, J. H.: Adaptation in natural and artificial systems. MIT Press, 1992. | MR
[10] Kohonen, T.: Self-organized formation of topologically correct feature maps. Biol. Cybernet. 43 (1982), 59–69. | DOI
[11] Krizhevsky, A., Sutskever, I., Hinton, G. E.: ImageNet classification with deep convolutional neural networks. Commun. ACM 60 (2017), 84–90. | DOI
[12] LeCun, Y., Boser, B., Denker, J. S., Henderson, D., Howard, R. E., Hubbard, W., Jackel, L. D.: Backpropagation applied to handwritten zip code recognition. Neural Comput. 1 (1989), 541–551. | DOI
[13] Lefkowitz, M.: Professor’s perceptron paved the way for AI – 60 years too soon. [online]. https://news.cornell.edu/stories/2019/09/professors-perceptron-paved-way-ai-60-years-too-soon
[14] Cramer, N. L.: A representation for the adaptive generation of simple sequential programs. In: Grefenstette, J. J. (ed.): Proceedings of the 1st International Conference on Genetic Algorithms, L. Erlbaum Associates, Inc., 1985, 183–187.
[15] McCulloch, W. S., Pitts, W.: A logical calculus of the ideas immanent in nervous activity. Bull. Math. Biophys. 5 (1943), 115–133. | DOI | MR
[16] Minsky, M., Papert, S.: Perceptrons. MIT Press, 1969.
[17] Park, J., Sandberg, I. W.: Universal approximation using radial-basis-function networks. Neural Comput. 3 (1991), 246–257. | DOI
[18] Rosenblatt, F.: The perceptron: A probabilistic model for information storage and organization in the brain. Psychol. Rev. 65 (1958), 386–408. | DOI | MR
[19] Rumelhart, D. E., Hinton, G. E., Williams, R. J.: Learning representations by back-propagating errors. Nature 323 (1986), 533–536. | DOI
[20] Shi, X., Chen, Z., Wang, H., Yeung, D.-Y., Wong, W.-K., Woo, W.-Ch.: Convolutional LSTM network: A machine learning approach for precipitation nowcasting. In: Cortes, C., Lawrence, N., Lee, D., Sugiyama, M., Garnett, R. (eds.): Advances in Neural Information Processing Systems, vol. 28, Curran Associates, Inc., 2015.
[21] Stanley, K. O., D’Ambrosio, D. B., Gauci, J.: A hypercube-based encoding for evolving large-scale neural networks. Artif. Life 15 (2009), 185–212. | DOI
[22] Stanley, K. O., Miikkulainen, R.: Evolving neural networks through augmenting topologies. Evol. Comput. 10 (2002), 99–127. | DOI
[23] Vaswani, A., Shazeer, N., Parmar, N., Uszkoreit, J., Jones, L., Gomez, A. N., Kaiser, L., Polosukhin, I.: Attention is all you need. [online]. | arXiv