Voir la notice de l'article provenant de la source Library of Science
@article{IJAMCS_2019_29_4_a12,
  author    = {Cestnik, Bojan},
  title     = {Revisiting the optimal probability estimator from small samples for data mining},
  journal   = {International Journal of Applied Mathematics and Computer Science},
  year      = {2019},
  volume    = {29},
  number    = {4},
  pages     = {783--796},
  publisher = {mathdoc},
  language  = {en},
  url       = {http://geodesic.mathdoc.fr/item/IJAMCS_2019_29_4_a12/},
}
TY - JOUR AU - Cestnik, Bojan TI - Revisiting the optimal probability estimator from small samples for data mining JO - International Journal of Applied Mathematics and Computer Science PY - 2019 SP - 783 EP - 796 VL - 29 IS - 4 PB - mathdoc UR - http://geodesic.mathdoc.fr/item/IJAMCS_2019_29_4_a12/ LA - en ID - IJAMCS_2019_29_4_a12 ER -
%0 Journal Article %A Cestnik, Bojan %T Revisiting the optimal probability estimator from small samples for data mining %J International Journal of Applied Mathematics and Computer Science %D 2019 %P 783-796 %V 29 %N 4 %I mathdoc %U http://geodesic.mathdoc.fr/item/IJAMCS_2019_29_4_a12/ %G en %F IJAMCS_2019_29_4_a12
Cestnik, Bojan. Revisiting the optimal probability estimator from small samples for data mining. International Journal of Applied Mathematics and Computer Science, Tome 29 (2019) no. 4, pp. 783-796. http://geodesic.mathdoc.fr/item/IJAMCS_2019_29_4_a12/
[1] Berger, J.O. (1985). Statistical Decision Theory and Bayesian Analysis, Springer, New York, NY.
[2] Bouguila, N. (2013). On the smoothing of multinomial estimates using Liouville mixture models and applications, Pattern Analysis and Applications 16(3): 349–363.
[3] Breiman, L., Friedman, J.H., Olshen, R.A. and Stone, C.J. (1984). Classification and Regression Trees, Wadsworth, Belmont.
[4] Calvo, B. and Santafé, G. (2016). SCMAMP: Statistical comparison of multiple algorithms in multiple problems, The R Journal 8(1): 248–256.
[5] Cestnik, B. (1990). Estimating probabilities: A crucial task in machine learning, Proceedings of the 9th European Conference on Artificial Intelligence, London, UK, pp. 147–149.
[6] Cestnik, B. (2018). Experimental framework in R for experimenting with probability estimations from small samples, https://github.com/BojanCestnik/probability-estimation.R.
[7] Cestnik, B. and Bratko, I. (1991). On estimating probabilities in tree pruning, Proceedings of the European Working Session on Learning, Porto, Portugal, pp. 138–150.
[8] Chan, J.C.C. and Kroese, D.P. (2011). Rare-event probability estimation with conditional Monte Carlo, Annals of Operations Research 189(1): 43–61.
[9] Chandra, B. and Gupta, M. (2011). Robust approach for estimating probabilities in naïve-Bayes classifier for gene expression data, Expert Systems with Applications 38(3): 1293–1298.
[10] DasGupta, A. (2011). Probability for Statistics and Machine Learning: Fundamentals and Advanced Topics, Springer, New York, NY.
[11] DeGroot, M. and Schervish, M. (2012). Probability and Statistics, Addison-Wesley, Boston, MA.
[12] Demšar, J. (2006). Statistical comparisons of classifiers over multiple data sets, Journal of Machine Learning Research 7(1): 1–30.
[13] Domingos, P. and Pazzani, M. (1997). On the optimality of the simple Bayesian classifier under zero-one loss, Machine Learning 29(2): 103–130.
[14] Džeroski, S., Cestnik, B. and Petrovski, I. (1993). Using the m-estimate in rule induction, Journal of Computing and Information Technology 1(1): 37–46.
[15] Feller, W. (1968). An Introduction to Probability Theory and Its Applications, Wiley, Hoboken, NJ.
[16] Fienberg, S.E. and Holland, P.W. (1972). On the choice of flattening constants for estimating multinomial probabilities, Journal of Multivariate Analysis 2(1): 127–134.
[17] Flach, P. (2012). Machine Learning: The Art and Science of Algorithms that Make Sense of Data, Cambridge University Press, New York, NY.
[18] Fürnkranz, J. and Flach, P.A. (2005). ROC ‘n’ rule learning—towards a better understanding of covering algorithms, Machine Learning 58(1): 39–77.
[19] García, S., Fernández, A., Luengo, J. and Herrera, F. (2010). Advanced nonparametric tests for multiple comparisons in the design of experiments in computational intelligence and data mining: Experimental analysis of power, Information Sciences 180(10): 2044–2064.
[20] García, S. and Herrera, F. (2008). An extension on statistical comparisons of classifiers over multiple data sets for all pairwise comparisons, Journal of Machine Learning Research 9(12): 2677–2694.
[21] Good, I.J. (1965). The Estimation of Probabilities: An Essay on Modern Bayesian Methods, MIT Press, Cambridge, MA.
[22] Good, I.J. (1966). How to estimate probabilities, IMA Journal of Applied Mathematics 2(4): 364–383.
[23] Good, P. and Hardin, J. (2012). Common Errors in Statistics (and How to Avoid Them), Wiley, Hoboken, NJ.
[24] Grover, J. (2012). Strategic Economic Decision-Making: Using Bayesian Belief Networks to Solve Complex Problems, Springer, New York, NY.
[25] Gudder, S. (1988). Quantum Probability, Academic Press, Boston, MA.
[26] Laplace, P.-S. (1814). Essai philosophique sur les probabilités, Courcier, Paris.
[27] Larose, D. (2010). Discovering Statistics, W.H. Freeman, New York, NY.
[28] Mitchell, T.M. (1997). Machine Learning, McGraw-Hill, Maidenhead.
[29] Piegat, A. and Landowski, M. (2012). Optimal estimator of hypothesis probability for data mining problems with small samples, International Journal of Applied Mathematics and Computer Science 22(3): 629–645, DOI: 10.2478/v10006-012-0048-z.
[30] Piegat, A. and Landowski, M. (2013). Mean square error optimal completeness estimator eph2 of probability, Journal of Theoretical and Applied Computer Science 7(3): 3–20.
[31] Piegat, A. and Landowski, M. (2014). Specialized, MSE-optimal m-estimators of the rule probability especially suitable for machine learning, Control and Cybernetics 43(1): 133–160.
[32] R Core Team (2018). R: A Language and Environment for Statistical Computing, R Foundation for Statistical Computing, Vienna, https://www.R-project.org/.
[33] Rudas, T. (2008). Handbook of Probability: Theory and Applications, SAGE Publications, Thousand Oaks, CA.
[34] Starbird, M. (2006). What Are the Chances? Probability Made Clear, The Teaching Company, Chantilly, VA.
[35] Sulzmann, J.N. and Fürnkranz, J. (2009). An empirical comparison of probability estimation techniques for probabilistic rules, in J. Gama et al. (Eds), Discovery Science, Springer, Heidelberg, pp. 317–331.
[36] Webb, J. (2007). Game Theory: Decisions, Interaction and Evolution, Springer, London.