See the article record from the Library of Science source
@article{IJAMCS_2010_20_1_a11,
  author = {Tro\'c, M. and Unold, O.},
  title = {Self-adaptation of parameters in a learning classifier system ensemble machine},
  journal = {International Journal of Applied Mathematics and Computer Science},
  pages = {157--174},
  publisher = {mathdoc},
  volume = {20},
  number = {1},
  year = {2010},
  language = {en},
  url = {http://geodesic.mathdoc.fr/item/IJAMCS_2010_20_1_a11/}
}
TY - JOUR
AU - Troć, M.
AU - Unold, O.
TI - Self-adaptation of parameters in a learning classifier system ensemble machine
JO - International Journal of Applied Mathematics and Computer Science
PY - 2010
SP - 157
EP - 174
VL - 20
IS - 1
PB - mathdoc
UR - http://geodesic.mathdoc.fr/item/IJAMCS_2010_20_1_a11/
LA - en
ID - IJAMCS_2010_20_1_a11
ER -
%0 Journal Article
%A Troć, M.
%A Unold, O.
%T Self-adaptation of parameters in a learning classifier system ensemble machine
%J International Journal of Applied Mathematics and Computer Science
%D 2010
%P 157-174
%V 20
%N 1
%I mathdoc
%U http://geodesic.mathdoc.fr/item/IJAMCS_2010_20_1_a11/
%G en
%F IJAMCS_2010_20_1_a11
Troć, M.; Unold, O. Self-adaptation of parameters in a learning classifier system ensemble machine. International Journal of Applied Mathematics and Computer Science, Tome 20 (2010) no. 1, pp. 157-174. http://geodesic.mathdoc.fr/item/IJAMCS_2010_20_1_a11/
[1] Bahler, D. and Navarro, L. (2000). Methods for combining heterogeneous sets of classifiers, Proceedings of the 17th National Conference on Artificial Intelligence (AAAI 2000), Workshop on New Research Problems for Machine Learning, Austin, TX, USA, http://www4.ncsu.edu/~bahler/aaai2000/aaai2000.pdf.
[2] Breiman, L. (1996). Bagging predictors, Machine Learning 24(2): 123-140.
[3] Bull, L., Bernado-Mansilla, E. and Holmes, J. (Eds) (2008). Learning Classifier Systems in Data Mining, Springer, Berlin/Heidelberg.
[4] Bull, L., Studley, M., Bagnall, A. and Whittley, I. (2007). Learning classifier system ensembles with rule-sharing, IEEE Transactions on Evolutionary Computation 11(4): 496-502.
[5] Butz, M. V. (1999). An implementation of the XCS classifier system in C, Technical Report 99021, Illinois Genetic Algorithms Laboratory, University of Illinois, Urbana-Champaign, IL.
[6] Butz, M. V., Sastry, K. and Goldberg, D. E. (2003). Tournament selection in XCS, Proceedings of the Fifth Genetic and Evolutionary Computation Conference (GECCO-2003), pp. 1857-1869.
[7] Butz, M. V., Goldberg, D. E. and Lanzi, P. L. (2005). Gradient descent methods in learning classifier systems: Improving XCS performance in multistep problems, IEEE Transactions on Evolutionary Computation 9(5): 452-473.
[8] Butz, M. V., Goldberg, D. E. and Tharakunnel, K. (2003). Analysis and improvement of fitness exploitation in XCS: Bounding models, tournament selection, and bilateral accuracy, Evolutionary Computation 11(3): 239-277.
[9] Butz, M. V., Kovacs, T., Lanzi, P. L. and Wilson, S. W. (2004). Toward a theory of generalization and learning in XCS, IEEE Transactions on Evolutionary Computation 8(1): 28-46.
[10] Butz, M. V. and Pelikan, M. (2001). Analyzing the evolutionary pressures in XCS, in L. Spector, E. Goodman, A. Wu, W. Langdon, H. Voigt, M. Gen, S. Sen, M. Dorigo, S. Pezeshk, M. Garzon and E. Burke (Eds), Proceedings of the Genetic and Evolutionary Computation Conference (GECCO-2001), Morgan Kaufmann, San Francisco, CA, pp. 935-942.
[11] Butz, M. V. and Pelikan, M. (2006). Studying XCS/BOA learning in boolean functions: Structure encoding and random boolean functions, GECCO '06: Proceedings of the 8th Annual Conference on Genetic and Evolutionary Computation, Seattle, WA, USA, pp. 1449-1456.
[12] Dam, H. H., Abbass, H. A. and Lokan, C. (2005). DXCS: An XCS system for distributed data mining, in H.-G. Beyer and U.-M. O'Reilly (Eds), GECCO, ACM, New York, NY, pp. 1883-1890.
[13] Dam, H. H., Lokan, C. and Abbass, H. A. (2007). Evolutionary online data mining: An investigation in a dynamic environment, in S. Yang, Y.-S. Ong and Y. Jin (Eds), Evolutionary Computation in Dynamic and Uncertain Environments, Studies in Computational Intelligence, Vol. 51, Springer, Berlin/Heidelberg, pp. 153-178.
[14] Dawson, D. (2002). Improving extended classifier system performance in resource-constrained configurations, Master's thesis, California State University, Chico, CA.
[15] Dietterich, T. (2000). An experimental comparison of three methods for constructing ensembles of decision trees: Bagging, boosting, and randomization, Machine Learning 40(2): 139-158.
[16] Eiben, A., Schut, M. and de Wilde, A. (2006a). Boosting genetic algorithms with (self-) adaptive selection, Proceedings of the IEEE Congress on Evolutionary Computation (CEC 2006), Vancouver, BC, Canada, pp. 1584-1589.
[17] Eiben, A., Schut, M. and de Wilde, A. (2006b). Is self-adaptation of selection pressure and population size possible? A case study, in T. Runarsson, H.-G. Beyer, E. Burke, J. J. Merelo-Guervós, L. D. Whitley and X. Yao (Eds), Parallel Problem Solving from Nature (PPSN IX), Lecture Notes in Computer Science, Vol. 4193, Springer, Berlin/Heidelberg, pp. 900-909.
[18] Fogel, D. B. (1992). Evolving artificial intelligence, Ph.D. thesis, UC San Diego, La Jolla, CA.
[19] Gao, Y., Huang, J. Z. and Wu, L. (2007). Learning classifier system ensemble and compact rule set, Connection Science 19(4): 321-337.
[20] Goldberg, D. E. (1989). Genetic Algorithms in Search, Optimization, and Machine Learning, Addison-Wesley Professional, Reading, MA.
[21] Grefenstette, J. J. (1986). Optimization of control parameters for genetic algorithms, IEEE Transactions on Systems, Man, and Cybernetics SMC-16(1): 122-128.
[22] Holland, J. (1976). Adaptation, in R. Rosen (Ed.), Progress in Theoretical Biology, Plenum Press, New York, NY, pp. 263-293.
[23] Holmes, J. H., Lanzi, P. L., Stolzmann, W. and Wilson, S. W. (2002). Learning classifier systems: New models, successful applications, Information Processing Letters 82(1): 23-30.
[24] Howard, D., Bull, L. and Lanzi, P. (2008). Self-adaptive constructivism in neural XCS and XCSF, in M. Keijzer, G. Antoniol, C. Congdon, K. Deb, N. Doerr, N. Hansen, J. Holmes, G. Hornby, D. Howard, J. Kennedy, S. Kumar and F. Lobo (Eds), GECCO-2008: Proceedings of the Genetic and Evolutionary Computation Conference, Atlanta, GA, USA, pp. 1389-1396.
[25] Huang, C.-Y. and Sun, C.-T. (2004). Parameter adaptation within co-adaptive learning classifier systems, in K. Deb, R. Poli, W. Banzhaf, H.-G. Beyer, E. Burke, P. Darwen, D. Dasgupta, D. Floreano, J. Foster, M. Harman, O. Holland, P. L. Lanzi, L. Spector, A. Tettamanzi, D. Thierens and A. Tyrrell (Eds), Genetic and Evolutionary Computation-GECCO-2004, Part II, Lecture Notes in Computer Science, Vol. 3103, Springer-Verlag, Berlin/Heidelberg, pp. 774-784.
[26] Hurst, J. and Bull, L. (2002). A self-adaptive XCS, IWLCS '01: Revised Papers from the 4th International Workshop on Advances in Learning Classifier Systems, Lecture Notes in Artificial Intelligence, Vol. 2321, Springer-Verlag, London, pp. 57-73.
[27] Hurst, J. and Bull, L. (2003). Self-adaptation in classifier system controllers, Artificial Life and Robotics 5(2): 109-119.
[28] Kharbat, F., Bull, L. and Odeh, M. (2005). Revisiting genetic selection in the XCS learning classifier system, Congress on Evolutionary Computation, Vancouver, BC, Canada, pp. 2061-2068.
[29] Kuncheva, L. I. and Whitaker, C. J. (2003). Measures of diversity in classifier ensembles, Machine Learning 51(2): 181-207.
[30] Llorà, X. and Sastry, K. (2006). Fast rule matching for learning classifier systems via vector instructions, GECCO '06: Proceedings of the 8th Annual Conference on Genetic and Evolutionary Computation, Seattle, WA, USA, pp. 1513-1520.
[31] Meyer-Nieberg, S. and Beyer, H.-G. (2007). Self-adaptation in evolutionary algorithms, in F. G. Lobo, C. F. Lima and Z. Michalewicz (Eds), Parameter Setting in Evolutionary Algorithms, Springer, Berlin.
[32] Opitz, D. and Maclin, R. (1999). Popular ensemble methods: An empirical study, Journal of Artificial Intelligence Research 11: 169-198.
[33] Opitz, D. W. and Shavlik, J. W. (1996). Actively searching for an effective neural-network ensemble, Connection Science 8(3-4): 337-353.
[34] Orriols-Puig, A., Bernado-Mansilla, E., Goldberg, D. E., Sastry, K. and Lanzi, P. L. (2009). Facetwise analysis of XCS for problems with class imbalances, IEEE Transactions on Evolutionary Computation 13(5): 1093-1119.
[35] Spears, W. M. (1995). Adapting crossover in evolutionary algorithms, in J. R. McDonnell, R. G. Reynolds and D. B. Fogel (Eds), Proceedings of the Fourth Annual Conference on Evolutionary Programming, San Diego, CA, USA, pp. 367-384.
[36] Stout, M., Bacardit, J., Hirst, J. and Krasnogor, N. (2008a). Prediction of recursive convex hull class assignment for protein residues, Bioinformatics 24(7): 916-923.
[37] Stout, M., Bacardit, J., Hirst, J. and Krasnogor, N. (2008b). Prediction of topological contacts in proteins using learning classifier systems, Journal of Soft Computing 13(3): 245-258.
[38] Sutton, R. S. (1991). Reinforcement learning architectures for animats, in J. Meyer and S. W. Wilson (Eds), From Animals to Animats: Proceedings of the First International Conference on Simulation of Adaptive Behavior, MIT Press, Cambridge, MA, pp. 288-296.
[39] Takashima, E., Murata, Y., Shibata, N. and Ito, M. (2003). Self adaptive island GA, Proceedings of the 2003 Congress on Evolutionary Computation (CEC 2003), Newport Beach, CA, USA, Vol. 2, pp. 1072-1079.
[40] Tongchim, S. and Chongstitvatana, P. (2002). Parallel genetic algorithm with parameter adaptation, Information Processing Letters 82(1): 47-54.
[41] Troć, M. and Unold, O. (2008). Self-adaptation of parameters in a XCS-based ensemble machine, Proceedings of the Eighth International Conference on Hybrid Intelligent Systems (HIS 2008), Barcelona, Spain, pp. 893-898.
[42] Tsoumakas, G., Katakis, I. and Vlahavas, I. (2004). Effective voting of heterogeneous classifiers, Proceedings of the 15th European Conference on Machine Learning, Lecture Notes in Artificial Intelligence, Vol. 3201, Springer, Berlin/Heidelberg, pp. 465-476.
[43] Unold, O. and Tuszynski, K. (2008). Mining knowledge from data using anticipatory classifier system, Knowledge-Based Systems 21(5): 363-370.
[44] Widrow, B. and Hoff, M. E. (1960). Adaptive switching circuits, 1960 IRE WESCON Convention Record, pp. 96-104.
[45] Wilson, S. W. (1995). Classifier fitness based on accuracy, Evolutionary Computation 3(2): 149-175.
[46] Wilson, S. W. (2000). Get real! XCS with continuous-valued inputs, in P. L. Lanzi, W. Stolzmann and S. W. Wilson (Eds), Learning Classifier Systems, From Foundations to Applications, Lecture Notes in Artificial Intelligence, Vol. 1813, Springer-Verlag, Berlin/Heidelberg, pp. 209-219.