See the record of this article from the source Math-Net.Ru
@article{PDM_2023_1_a6,
    author = {P. D. Shtykov and A. G. Dyakonov},
    title = {A generalized dialogue graph construction and~visualization based on a corpus of dialogues},
    journal = {Prikladna\^a diskretna\^a matematika},
    pages = {111--127},
    publisher = {mathdoc},
    number = {1},
    year = {2023},
    language = {ru},
    url = {http://geodesic.mathdoc.fr/item/PDM_2023_1_a6/}
}
TY  - JOUR
AU  - P. D. Shtykov
AU  - A. G. Dyakonov
TI  - A generalized dialogue graph construction and visualization based on a corpus of dialogues
JO  - Prikladnaâ diskretnaâ matematika
PY  - 2023
SP  - 111
EP  - 127
IS  - 1
PB  - mathdoc
UR  - http://geodesic.mathdoc.fr/item/PDM_2023_1_a6/
LA  - ru
ID  - PDM_2023_1_a6
ER  -
P. D. Shtykov; A. G. Dyakonov. A generalized dialogue graph construction and visualization based on a corpus of dialogues. Prikladnaâ diskretnaâ matematika, no. 1 (2023), pp. 111–127. http://geodesic.mathdoc.fr/item/PDM_2023_1_a6/
[1] Chotimongkol A., Learning the structure of task-oriented conversations from the corpus of in-domain dialogs, PhD thesis, Carnegie Mellon University, 2008
[2] Tang D., Li X., Gao J., et al., “Subgoal discovery for hierarchical dialogue policy learning”, Proc. EMNLP (Brussels, Belgium, 2018), 2298–2309
[3] Shi W., Zhao T., and Yu Z., Unsupervised Dialog Structure Learning, 2019, arXiv: 1904.03736
[4] Qiu L., Zhao Y., Shi W., et al., Structured Attention for Unsupervised Dialogue Structure Induction, 2020, arXiv: 2009.08552
[5] Chung J., Kastner K., Dinh L., et al., A Recurrent Latent Variable Model for Sequential Data, 2015, arXiv: 1506.02216
[6] Vaswani A., Shazeer N., Parmar N., et al., Attention Is All You Need, 2017, arXiv: 1706.03762
[7] Xu J., Lei Z., Wang H., et al., Discovering Dialog Structure Graph for Open-Domain Dialog Generation, 2020, arXiv: 2012.15543
[8] Yusupov I. F., Trofimova M. V., and Burtsev M. S., “Unsupervised graph extraction for improvement of multi-domain task-oriented dialogue modelling”, Proc. MIPT, 21:3 (2020), 75–86 (in Russian)
[9] Fel'dina E. A. and Makhnytkina O. V., “Automatic construction of the dialog tree based on unmarked text corpora in Russian”, Nauchno-Tekhnicheskiy Vestnik Informatsionnykh Tekhnologiy, Mekhaniki i Optiki, 21:5 (2021), 709–719 (in Russian)
[10] Nath A. and Kubba A., TSCAN: Dialog Structure Discovery using SCAN, 2021, arXiv: 2107.06426
[11] Van Gansbeke W., Vandenhende S., Georgoulis S., et al., SCAN: Learning to Classify Images without Labels, 2020, arXiv: 2005.12320
[12] Devlin J., Chang M.-W., Lee K., and Toutanova K., BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding, 2018, arXiv: 1810.04805
[13] Reimers N. and Gurevych I., Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks, 2019, arXiv: 1908.10084
[14] Bishop C., Pattern Recognition and Machine Learning, Springer, N.Y., 2006, 738 pp.
[15] Blei D., Ng A., and Jordan M., “Latent Dirichlet allocation”, J. Machine Learning Res., 3 (2003), 993–1022
[16] Construction and visualization of a generalized dialogue graph from a corpus of dialogues, 2022, https://github.com/PavelShtykov/generalized_dialogue_graph
[17] Mosig J., Mehri S., and Kober T., STAR: A Schema-Guided Dialog Dataset for Transfer Learning, 2020, arXiv: 2010.11853
[18] Customer Support on Twitter, 2022, http://www.kaggle.com/datasets/thoughtvector/customer-support-on-twitter
[19] Li Y., Su H., Shen X., et al., “DailyDialog: A manually labelled multi-turn dialogue dataset”, Proc. Eighth Int. Joint Conf. Natural Language Processing (Taipei, Taiwan, 2017), v. 1, 986–995
[20] Natural Language Toolkit, 2022, https://www.nltk.org
[21] Liu Y., Ott M., Goyal N., et al., RoBERTa: A Robustly Optimized BERT Pretraining Approach, 2019, arXiv: 1907.11692
[22] Van der Maaten L. and Hinton G., “Visualizing data using t-SNE”, J. Machine Learning Res., 9 (2008), 2579–2605
[23] Song K., Tan X., Qin T., et al., MPNet: Masked and Permuted Pre-training for Language Understanding, 2020, arXiv: 2004.09297
[24] Sanh V., Debut L., Chaumond J., and Wolf T., DistilBERT, a Distilled Version of BERT: Smaller, Faster, Cheaper and Lighter, 2019, arXiv: 1910.01108
[25] Wang W., Wei F., Dong L., et al., MiniLM: Deep Self-Attention Distillation for Task-Agnostic Compression of Pre-Trained Transformers, 2020, arXiv: 2002.10957
[26] Rousseeuw P. J., “Silhouettes: A graphical aid to the interpretation and validation of cluster analysis”, J. Comput. Appl. Math., 20 (1987), 53–65
[27] Caliński T. and Harabasz J., “A dendrite method for cluster analysis”, Commun. in Statistics - Theory and Methods, 3:1 (1974), 1–27
[28] Davies D. L. and Bouldin D. W., “A cluster separation measure”, IEEE Trans. Pattern Analysis and Machine Intelligence, 1:2 (1979), 224–227
[29] Spärck Jones K., “A statistical interpretation of term specificity and its application in retrieval”, J. Documentation, 60 (2004), 493–502
[30] Graphviz: open source graph visualization software, 2022, https://graphviz.org