@article{KYB_1995_31_4_a2,
  author   = {Esteban, Mar{\'\i}a Dolores and Morales, Domingo},
  title    = {A summary on entropy statistics},
  journal  = {Kybernetika},
  year     = {1995},
  volume   = {31},
  number   = {4},
  pages    = {337--346},
  language = {en},
  mrnumber = {1357348},
  zbl      = {0857.62002},
  url      = {http://geodesic.mathdoc.fr/item/KYB_1995_31_4_a2/},
}
Esteban, María Dolores; Morales, Domingo. A summary on entropy statistics. Kybernetika, Tome 31 (1995) no. 4, pp. 337-346. http://geodesic.mathdoc.fr/item/KYB_1995_31_4_a2/
[1] J. Aczél, Z. Daróczy: Charakterisierung der Entropien positiver Ordnung und der Shannonschen Entropie. Acta Math. Acad. Sci. Hungar. 14 (1963), 95-121. | MR
[2] S. Arimoto: Information-theoretical considerations on estimation problems. Inform. and Control 19 (1971), 181-194. | MR | Zbl
[3] M. Belis, S. Guiasu: A quantitative-qualitative measure of information in cybernetic systems. IEEE Trans. Inform. Theory IT-14 (1968), 593-594.
[4] J. Feistauerova, I. Vajda: Testing system entropy and prediction error probability. IEEE Trans. Systems Man Cybernet. 23 (1993), 1352-1358.
[5] C. Ferreri: Hypoentropy and related heterogeneity divergence measures. Statistica 40 (1980), 55-118. | MR
[6] P. Gil: Medidas de incertidumbre e informacion en problemas de decision estadistica. Rev. Real Acad. Cienc. Exact. Fis. Natur. Madrid LXIX (1975), 549-610. | MR
[7] J. Havrda, F. Charvát: Concept of structural $\alpha$-entropy. Kybernetika 3 (1967), 30-35. | MR
[8] J. N. Kapur: Generalized entropy of order $\alpha$ and type $\beta$. The Math. Seminar 4 (1967), 78-82. | MR
[9] H. B. Mann, A. Wald: On the choice of the number of class intervals in the application of the chi-squared test. Ann. Math. Statist. 13 (1942), 306-317. | MR
[10] C. F. Picard: The use of Information theory in the study of the diversity of biological populations. In: Proc. Fifth Berk. Symp. IV, 1979, pp. 163-177.
[11] C. R. Rao: Linear Statistical Inference and its Applications. Second edition. J. Wiley, New York 1973. | MR | Zbl
[12] A. Rényi: On the measures of entropy and information. In: Proc. 4th Berkeley Symp. Math. Statist. and Prob., 1, 1961, pp. 547-561. | MR
[13] A. P. Sant'anna, I. J. Taneja: Trigonometric entropies, Jensen difference divergences and error bounds. Inform. Sci. 35 (1985), 145-156. | MR
[14] C. E. Shannon: A mathematical theory of communication. Bell System Tech. J. 27 (1948), 379-423. | MR | Zbl
[15] B. D. Sharma, D. P. Mittal: New non-additive measures of relative information. J. Combin. Inform. System Sci. 2 (1975), 122-133. | MR
[16] B. D. Sharma, I. J. Taneja: Entropy of type ($\alpha$, $\beta$) and other generalized measures in information theory. Metrika 22 (1975), 205-215. | MR | Zbl
[17] B. D. Sharma, I. J. Taneja: Three generalized additive measures of entropy. Elektron. Informationsverarb. Kybernet. 13 (1977), 419-433. | MR | Zbl
[18] I. J. Taneja: A Study of Generalized Measures in Information Theory. Ph.D. Thesis, University of Delhi 1975.
[19] R. S. Varma: Generalizations of Rényi's entropy of order $\alpha$. J. Math. Sci. 1 (1966), 34-48. | MR | Zbl