See the record of this article from the source Library of Science
@article{IJAMCS_2018_28_4_a7,
     author = {Kantavat, P. and Kijsirikul, B. and Songsiri, P. and Fukui, K. I. and Numao, M.},
     title = {Efficient decision trees for multi-class support vector machines using entropy and generalization error estimation},
     journal = {International Journal of Applied Mathematics and Computer Science},
     pages = {705--717},
     publisher = {mathdoc},
     volume = {28},
     number = {4},
     year = {2018},
     language = {en},
     url = {http://geodesic.mathdoc.fr/item/IJAMCS_2018_28_4_a7/}
}
TY - JOUR
AU - Kantavat, P.
AU - Kijsirikul, B.
AU - Songsiri, P.
AU - Fukui, K. I.
AU - Numao, M.
TI - Efficient decision trees for multi-class support vector machines using entropy and generalization error estimation
JO - International Journal of Applied Mathematics and Computer Science
PY - 2018
SP - 705
EP - 717
VL - 28
IS - 4
PB - mathdoc
UR - http://geodesic.mathdoc.fr/item/IJAMCS_2018_28_4_a7/
LA - en
ID - IJAMCS_2018_28_4_a7
ER -
%0 Journal Article
%A Kantavat, P.
%A Kijsirikul, B.
%A Songsiri, P.
%A Fukui, K. I.
%A Numao, M.
%T Efficient decision trees for multi-class support vector machines using entropy and generalization error estimation
%J International Journal of Applied Mathematics and Computer Science
%D 2018
%P 705-717
%V 28
%N 4
%I mathdoc
%U http://geodesic.mathdoc.fr/item/IJAMCS_2018_28_4_a7/
%G en
%F IJAMCS_2018_28_4_a7
Kantavat, P.; Kijsirikul, B.; Songsiri, P.; Fukui, K. I.; Numao, M. Efficient decision trees for multi-class support vector machines using entropy and generalization error estimation. International Journal of Applied Mathematics and Computer Science, Volume 28 (2018), No. 4, pp. 705-717. http://geodesic.mathdoc.fr/item/IJAMCS_2018_28_4_a7/
[1] Bala, M. and Agrawal, R.K. (2011). Optimal decision tree based multi-class support vector machine, Informatica 35(2): 197–209.
[2] Bartlett, P.L. and Shawe-Taylor, J. (1999). Generalization performance of support vector machines and other pattern classifiers, in B. Schölkopf et al. (Eds.), Advances in Kernel Methods, MIT Press, Cambridge, MA, pp. 43–54.
[3] Blake, C.L. and Merz, C.J. (1998). UCI Repository of Machine Learning Databases, University of California, Irvine, CA, http://archive.ics.uci.edu/ml/.
[4] Bredensteiner, E.J. and Bennett, K.P. (1999). Multicategory classification by support vector machines, Computational Optimization and Applications 12(1–3): 53–79.
[5] Burges, C.J.C. (1998). A tutorial on support vector machines for pattern recognition, Data Mining and Knowledge Discovery 2(2): 121–167.
[6] Chen, J., Wang, C. and Wang, R. (2009). Adaptive binary tree for fast SVM multiclass classification, Neurocomputing 72(13–15): 3370–3375.
[7] Cheong, S., Oh, S.H. and Lee, S.-Y. (2004). Support vector machines with binary tree architecture for multi-class classification, Neural Information Processing: Letters and Reviews 2(3): 47–51.
[8] Chmielnicki, W. and Stąpor, K. (2016). Using the one-versus-rest strategy with samples balancing to improve pairwise coupling classification, International Journal of Applied Mathematics and Computer Science 26(1): 191–201, DOI: 10.1515/amcs-2016-0013.
[9] Crammer, K. and Singer, Y. (2002). On the learnability and design of output codes for multiclass problems, Machine Learning 47(2–3): 201–233.
[10] Dong, C., Zhou, B. and Hu, J. (2015). A hierarchical SVM-based multiclass classification by using similarity clustering, International Joint Conference on Neural Networks, Killarney, Ireland, pp. 1–6.
[11] Fei, B. and Liu, J. (2006). Binary tree of SVM: A new fast multiclass training and classification algorithm, IEEE Transactions on Neural Networks 17(3): 696–704.
[12] Friedman, J. (1996). Another approach to polychotomous classification, Technical report, Stanford University, Stanford, CA.
[13] García, S., Fernández, A., Luengo, J. and Herrera, F. (2010). Advanced nonparametric tests for multiple comparisons in the design of experiments in computational intelligence and data mining: Experimental analysis of power, Information Sciences 180(10): 2044–2064.
[14] Hastie, T. and Tibshirani, R. (1998). Classification by pairwise coupling, Annals of Statistics 26(2): 451–471.
[15] Hsu, C. and Lin, C. (2002). A comparison of methods for multiclass support vector machines, IEEE Transactions on Neural Networks 13(2): 415–425.
[16] Joachims, T. (1999). Making large-scale SVM learning practical, in B. Schölkopf et al. (Eds.), Advances in Kernel Methods: Support Vector Learning, MIT Press, Cambridge, MA.
[17] Kijsirikul, B. and Ussivakul, N. (2002). Multiclass support vector machines using adaptive directed acyclic graph, International Joint Conference on Neural Networks, Honolulu, HI, USA, pp. 980–985.
[18] Knerr, S., Personnaz, L. and Dreyfus, G. (1990). Single-layer learning revisited: A stepwise procedure for building and training a neural network, in F. Fogelman Soulié and J. Hérault (Eds.), Neurocomputing: Algorithms, Architectures and Applications, NATO ASI Series F, Vol. 68, Springer, Berlin, pp. 41–50.
[19] Kumar, M.A. and Gopal, M. (2010). Fast multiclass SVM classification using decision tree based one-against-all method, Neural Processing Letters 32(3): 311–323.
[20] Kumar, M.A. and Gopal, M. (2011). Reduced one-against-all method for multiclass SVM classification, Expert Systems with Applications 38(11): 14238–14248.
[21] Lei, H. and Govindaraju, V. (2005). Half-against-half multi-class support vector machines, in N.C. Oza et al. (Eds.), Multiple Classifier Systems, MCS 2005, Lecture Notes in Computer Science, Vol. 3541, Springer, Berlin/Heidelberg, pp. 156–164.
[22] Liu, B., Cao, L., Yu, P.S. and Zhang, C. (2008). Multi-space-mapped SVMs for multi-class classification, Proceedings of the 8th IEEE International Conference on Data Mining, Washington, DC, USA, Vol. 8, pp. 911–916.
[23] Madzarov, G., Gjorgjevikj, D. and Chorbev, I. (2009). A multi-class SVM classifier utilizing binary decision tree support vector machines for pattern recognition, Electrical Engineering 33(1): 233–241.
[24] Platt, J., Cristianini, N. and Shawe-Taylor, J. (2000). Large margin DAGs for multiclass classification, in S.A. Solla et al. (Eds.), Advances in Neural Information Processing Systems, MIT Press, Cambridge, MA, pp. 547–553.
[25] Songsiri, P., Kijsirikul, B. and Phetkaew, T. (2008). Information-based dichotomizer: A method for multiclass support vector machines, IEEE International Joint Conference on Neural Networks, Hong Kong, China, pp. 3284–3291.
[26] Songsiri, P., Phetkaew, T. and Kijsirikul, B. (2015). Enhancement of multi-class support vector machine construction from binary learners using generalization performance, Neurocomputing 151(P1): 434–448.
[27] Takahashi, F. and Abe, S. (2002). Decision-tree-based multiclass support vector machines, Proceedings of the 9th International Conference on Neural Information Processing, ICONIP’02, Singapore, Vol. 3, pp. 1418–1422.
[28] Vapnik, V.N. (1998). Statistical Learning Theory, John Wiley & Sons, New York, NY.
[29] Vapnik, V.N. (1999). An overview of statistical learning theory, IEEE Transactions on Neural Networks 10(5): 988–999.
[30] Vapnik, V.N. and Chervonenkis, A.Ya. (1974). Teoriya Raspoznavaniya Obrazov: Statisticheskie Problemy Obucheniya (Theory of Pattern Recognition: Statistical Problems of Learning), Nauka, Moscow.
[31] Yang, X., Yu, Q., He, L. and Guo, T. (2013). The one-against-all partition based binary tree support vector machine algorithms for multi-class classification, Neurocomputing 113(3): 1–7.