@article{ZNSL_2021_499_a12,
  author   = {Malykh, V. and Lyalin, V.},
  title    = {Improving Classification Robustness for Noisy Texts with Robust Word Vectors},
  journal  = {Zapiski Nauchnykh Seminarov POMI},
  year     = {2021},
  volume   = {499},
  pages    = {236--247},
  language = {en},
  url      = {http://geodesic.mathdoc.fr/item/ZNSL_2021_499_a12/},
}
V. Malykh; V. Lyalin. Improving classification robustness for noisy texts with robust word vectors. Zapiski Nauchnykh Seminarov POMI, Investigations on applied mathematics and informatics. Part I, Tome 499 (2021), pp. 236–247. http://geodesic.mathdoc.fr/item/ZNSL_2021_499_a12/
[1] A. Joulin, E. Grave, P. Bojanowski, T. Mikolov, Bag of Tricks for Efficient Text Classification, 2016, arXiv: 1607.01759
[2] V. Malykh, Robust Word Vectors: Embeddings for Noisy Texts, 2018, arXiv: 1607.01759 | Zbl
[3] Y. Kim, Y. Jernite, D. Sontag, A. M. Rush, “Character-Aware Neural Language Models”, AAAI 2016, 2741–2749
[4] A. L. Maas, R. E. Daly, P. T. Pham, D. Huang, A. Y. Ng, Ch. Potts, “Learning Word Vectors for Sentiment Analysis”, Proceedings of the 49th Annual Meeting of the Association for Computational Linguistics, 2011, 142–150
[5] S. Cucerzan, E. Brill, “Spelling correction as an iterative process that exploits the collective knowledge of web users”, Proceedings of the 2004 Conference on Empirical Methods in Natural Language Processing, 2004
[6] A. Joulin, E. Grave, P. Bojanowski, T. Mikolov, Bag of tricks for efficient text classification, 2016, arXiv: 1607.01759
[7] J. Howard, S. Ruder, Fine-tuned Language Models for Text Classification, 2018, arXiv: 1801.06146
[8] A. Vaswani, N. Shazeer, N. Parmar, J. Uszkoreit, L. Jones, A. N. Gomez, Ł. Kaiser, I. Polosukhin, “Attention is all you need”, Advances in Neural Information Processing Systems, 2017, 6000–6010
[9] X. Zhang, J. J. Zhao, Y. LeCun, Character-level Convolutional Networks for Text Classification, 2015, arXiv: 1509.01626
[10] Y. Kim, Convolutional Neural Networks for Sentence Classification, 2014, arXiv: 1408.5882
[11] K. Cho, B. van Merrienboer, D. Bahdanau, Y. Bengio, “On the Properties of Neural Machine Translation: Encoder-Decoder Approaches”, 2014, arXiv: 1409.1259
[12] D. Bahdanau, K. Cho, Y. Bengio, Neural Machine Translation by Jointly Learning to Align and Translate, 2014, arXiv: 1409.0473
[13] Y. Bengio, P. Simard, P. Frasconi, “Learning long-term dependencies with gradient descent is difficult”, IEEE Transactions on Neural Networks, 5:2 (1994), 157–166 | DOI
[14] E. Tutubalina, S. Nikolenko, “Inferring sentiment-based priors in topic models”, Mexican International Conference on Artificial Intelligence, 2015, 92–104
[15] J. Niu, Y. Yang, S. Zhang, Z. Sun, W. Zhang, “Multi-task Character-Level Attentional Networks for Medical Concept Normalization”, Neural Processing Letters, 2018, 1–18
[16] Y. Li, T. Cohn, T. Baldwin, “Learning robust representations of text”, Proceedings of the 2016 Conference on Empirical Methods in Natural Language Processing, 2016, 1979–1985
[17] N. V. Loukachevitch, et al., “SentiRuEval: Testing Object-oriented Sentiment Analysis Systems in Russian”, Proceedings of International Conference “Dialog”, 2015
[18] V. Malykh, “Generalizable Architecture for Robust Word Vectors Tested by Noisy Paraphrases”, Supplementary Proceedings of the Sixth International Conference on Analysis of Images, Social Networks and Texts, AIST 2017 (Moscow, Russia, 2017)