See the article record from the source Library of Science
@article{IJAMCS_2004_14_3_a3,
     author = {Miquelez, T. and Bengoetxea, E. and Larranaga, P.},
     title = {Evolutionary computation based on {Bayesian} classifiers},
     journal = {International Journal of Applied Mathematics and Computer Science},
     pages = {335--349},
     publisher = {mathdoc},
     volume = {14},
     number = {3},
     year = {2004},
     language = {en},
     url = {http://geodesic.mathdoc.fr/item/IJAMCS_2004_14_3_a3/}
}
TY  - JOUR
AU  - Miquelez, T.
AU  - Bengoetxea, E.
AU  - Larranaga, P.
TI  - Evolutionary computation based on Bayesian classifiers
JO  - International Journal of Applied Mathematics and Computer Science
PY  - 2004
SP  - 335
EP  - 349
VL  - 14
IS  - 3
PB  - mathdoc
UR  - http://geodesic.mathdoc.fr/item/IJAMCS_2004_14_3_a3/
LA  - en
ID  - IJAMCS_2004_14_3_a3
ER  -
%0 Journal Article
%A Miquelez, T.
%A Bengoetxea, E.
%A Larranaga, P.
%T Evolutionary computation based on Bayesian classifiers
%J International Journal of Applied Mathematics and Computer Science
%D 2004
%P 335-349
%V 14
%N 3
%I mathdoc
%U http://geodesic.mathdoc.fr/item/IJAMCS_2004_14_3_a3/
%G en
%F IJAMCS_2004_14_3_a3
Miquelez, T.; Bengoetxea, E.; Larranaga, P. Evolutionary computation based on Bayesian classifiers. International Journal of Applied Mathematics and Computer Science, Volume 14 (2004) no. 3, pp. 335-349. http://geodesic.mathdoc.fr/item/IJAMCS_2004_14_3_a3/
[1] Baluja S. (1994): Population-based incremental learning: A method for integrating genetic search based function optimization and competitive learning. — Techn. Rep., Carnegie Mellon, CMU-CS-94-163.
[2] Baluja S. and Davies S. (1997): Using optimal dependency-trees for combinatorial optimization: Learning the structure of the search space. — Techn. Rep., Carnegie Mellon, CMU-CS-97-107.
[3] Cantú-Paz E. (2001): Supervised and unsupervised discretization methods for evolutionary algorithms. — Proc. Genetic and Evolutionary Computation Conference (GECCO’2001), Workshop Optimization by Building and Using Probabilistic Models, San Francisco, California, pp. 213–216.
[4] Cestnik B., Kononenko I. and Bratko I. (1987): ASSISTANT-86: A knowledge elicitation tool for sophisticated users, In: Progress in Machine Learning (I. Bratko and N. Lavrac, Eds.). — Wilmslow, U.K.: Sigma Press, pp. 31–45.
[5] Cheng J. and Greiner R. (1999): Comparing Bayesian network classifiers. — Proc. 15th Conf. Uncertainty in Artificial Intelligence, San Francisco, CA: Morgan Kaufmann Publishers, pp. 101–107.
[6] Chow C. and Liu C. (1968): Approximating discrete probability distributions with dependence trees. — IEEE Trans. Inf. Theory, Vol. 14, No. 3, pp. 462–467.
[7] de Bonet J.S., Isbell C.L. and Viola P. (1997): MIMIC: Finding optima by estimating probability densities, In: Advances in Neural Information Processing Systems (M. Mozer, M. Jordan and Th. Petsche, Eds.). — Cambridge, MA: The MIT Press, Vol. 9, pp. 424–431.
[8] Domingos P. and Pazzani M. (1997): On the optimality of the simple Bayesian classifier under zero-one loss. — Mach. Learn., Vol. 29, No. 2–3, pp. 103–130.
[9] Duda R. and Hart P. (1973): Pattern Classification and Scene Analysis. — New York: Wiley.
[10] Etxeberria R. and Larrañaga P. (1999): Global optimization with Bayesian networks. — Proc. 2nd Symp. Artificial Intelligence, CIMAF99, La Habana, Cuba, pp. 332–339.
[11] Friedman N., Geiger D. and Goldszmidt M. (1997): Bayesian network classifiers. — Mach. Learn., Vol. 29, No. 2–3, pp. 131–163.
[12] Gammerman A. and Thatcher A.R. (1991): Bayesian diagnostic probabilities without assuming independence of symptoms. — Meth. Inf. Medic., Vol. 30, No. 1, pp. 15–22.
[13] Goldberg D.E. (1989): Genetic Algorithms in Search, Optimization, and Machine Learning. — Reading: Addison-Wesley.
[14] Harik G. (1999): Linkage learning via probabilistic modeling in the ECGA. — Techn. Rep., University of Illinois, Urbana, IlliGAL Report No. 99010.
[15] Harik G., Lobo F.G. and Goldberg D.E. (1998): The compact genetic algorithm. — Proc. IEEE Conf. Evolutionary Computation, Piscataway, NJ, pp. 523–528.
[16] Holland J.H. (1975): Adaptation in Natural and Artificial Systems. — Michigan: The University of Michigan Press.
[17] Inza I., Larrañaga P., Etxeberria R. and Sierra B. (2000): Feature subset selection by Bayesian network-based optimization. — Artif. Intell., Vol. 123, No. 1–2, pp. 157–184.
[18] Kaufman K. and Michalski R. (1999): The AQ18 machine learning and data mining system: An implementation and user’s guide. — Techn. Rep., Machine Learning and Inference Laboratory, George Mason University, Fairfax, Virginia.
[19] Kohavi R. and John G. (1997): Wrappers for feature subset selection. — Artif. Intell., Vol. 97, No. 1–2, pp. 273–324.
[20] Kononenko I. (1990): Comparison of inductive and naïve Bayesian learning approaches to automatic knowledge acquisition, In: Current Trends in Knowledge Acquisition (B. Wielinga, J. Boose, B. Gaines, G. Schreiber and M. van Someren, Eds.). — Amsterdam: IOS Press, pp. 190–197.
[21] Kononenko I. (1991): Semi-naïve Bayesian classifiers. — Proc. 6th Europ. Working Session on Learning, Porto, Portugal, pp. 206–219.
[22] Kontkanen P., Myllymäki P., Tirri H. and Valtonen K. (2000): Bayesian multinet classifiers. — Proc. 10th Int. Conf. Computing and Information (ICCI’2000).
[23] Langley P. and Sage S. (1994): Induction of selective Bayesian classifiers. — Proc. 10th Conf. Uncertainty in Artificial Intelligence, Seattle, WA, pp. 399–406.
[24] Larrañaga P. and Lozano J.A. (2001): Estimation of Distribution Algorithms. A New Tool for Evolutionary Computation. — Kluwer.
[25] Liu H. and Motoda H. (1998): Feature Selection for Knowledge Discovery and Data Mining. — Boston: Kluwer.
[26] Llorà X. and Goldberg D.E. (2003): Wise Breeding GA via Machine Learning Techniques for Function Optimization. — Proc. Genetic and Evolutionary Computation Conference GECCO-03, Part I, Chicago, Illinois, pp. 1172–1183.
[27] Michalewicz Z. (1992): Genetic Algorithms + Data Structures = Evolution Programs. — Berlin: Springer Verlag.
[28] Michalski R.S. (2000): Learnable evolution model: Evolutionary processes guided by machine learning. — Mach. Learn., Vol. 38, pp. 9–40.
[29] Minsky M. (1961): Steps toward artificial intelligence. — Trans. Inst. Radio Eng., Vol. 49, pp. 8–30.
[30] Mühlenbein H. (1998): The equation for response to selection and its use for prediction. — Evolut. Comput., Vol. 5, No. 3, pp. 303–346.
[31] Mühlenbein H. and Mahnig T. (1999): FDA—A scalable evolutionary algorithm for the optimization of additively decomposed functions. — Evolut. Comput., Vol. 7, No. 4, pp. 353–376.
[32] Mühlenbein H., Mahnig T. and Ochoa A. (1999): Schemata, distributions and graphical models in evolutionary optimization. — J. Heurist., Vol. 5, pp. 215–247.
[33] Mühlenbein H. and Paaß G. (1996): From recombination of genes to the estimation of distributions, I. Binary parameters, In: Parallel Problem Solving from Nature—PPSN IV (M. Voigt, W. Ebeling, I. Rechenberg and H.-P. Schwefel, Eds.). — Lecture Notes in Computer Science 1141, Berlin: Springer, pp. 178–187.
[34] Muñoz A. (2003): LEM algorithms. — Final year project, Computer Engineering Faculty, University of the Basque Country (in Spanish).
[35] Neapolitan R.E. (2003): Learning Bayesian Networks. — Prentice Hall.
[36] Ohmann C., Yang Q., Kunneke M., Stolzing H., Thon K. and Lorenz W. (1988): Bayes theorem and conditional dependence of symptoms: Different models applied to data of upper gastrointestinal bleeding. — Meth. Inf. Medic., Vol. 27, No. 2, pp. 73–83.
[37] Pazzani M. (1997): Searching for dependencies in Bayesian classifiers, In: Learning from Data: Artificial Intelligence and Statistics V (D. Fisher and H.-J. Lenz, Eds.). — New York: Springer, pp. 239–248.
[38] Pelikan M., Goldberg D.E. and Cantú-Paz E. (1999): BOA: The Bayesian optimization algorithm. — Proc. Genetic and Evolutionary Computation Conference GECCO-99, Orlando, pp. 525–532.
[39] Pelikan M. and Mühlenbein H. (1999): The bivariate marginal distribution algorithm, In: Advances in Soft Computing: Engineering Design and Manufacturing (R. Roy, T. Furuhashi and P.K. Chawdhry, Eds.). — London: Springer-Verlag, pp. 521–535.
[40] Sahami M. (1996): Learning limited dependence Bayesian classifiers. — Proc. 2nd Int. Conf. Knowledge Discovery and Data Mining, Portland, OR, pp. 335–338.
[41] Santana R. (2004): Estimation of distribution algorithms with Kikuchi approximations. — (submitted).
[42] Syswerda G. (1993): Simulated crossover in genetic algorithms, In: Foundations of Genetic Algorithms (L.D. Whitley, Ed.). — Vol. 2, San Mateo: Morgan Kaufmann, pp. 239–255.
[43] Thierens D. and Bosman P.A.N. (2001): Multi-objective mixture-based iterated density estimation evolutionary algorithms. — Proc. Genetic and Evolutionary Computation Conference, GECCO’2001, San Francisco, pp. 663–670.
[44] Todd B.S. and Stamper R. (1994): The relative accuracy of a variety of medical diagnostic programs. — Meth. Inf. Medic., Vol. 33, No. ??, pp. 402–416.
[45] Ventura S., Herrera F., Berná J.N. and Hervás C. (2002): Evolución guiada mediante aprendizaje. Comparación en problemas de optimización (Learning-guided evolution: A comparison on optimization problems). — Proc. Conf. Algoritmos Evolutivos y Bioinspirados, AEB’02, pp. 430–436 (in Spanish).
[46] Whitley D. and Kauth J. (1988): GENITOR: A different genetic algorithm. — Proc. Rocky Mountain Conf. Artificial Intelligence, USA, Vol. 2, pp. 118–130.
[47] Zitzler E., Deb K. and Thiele L. (1999): Comparison of multiobjective evolutionary algorithms on test functions of different difficulty. — Proc. 1999 Genetic and Evolutionary Computation Conf., Orlando, FL, pp. 121–122.