@article{ZNSL_2009_363_a3,
author = {K. Mattheou and A. Karagrigoriou},
title = {On new developments in divergence statistics},
journal = {Zapiski Nauchnykh Seminarov POMI},
pages = {48--65},
year = {2009},
volume = {363},
language = {en},
url = {http://geodesic.mathdoc.fr/item/ZNSL_2009_363_a3/}
}
K. Mattheou; A. Karagrigoriou. On new developments in divergence statistics. Zapiski Nauchnykh Seminarov POMI, Probability and Statistics. Part 14.1, Vol. 363 (2009), pp. 48–65. http://geodesic.mathdoc.fr/item/ZNSL_2009_363_a3/
[1] A. Agresti, Analysis of Ordinal Categorical Data, John Wiley, 1984 | MR | Zbl
[2] S. M. Ali, S. D. Silvey, “A general class of coefficients of divergence of one distribution from another”, J. R. Statist. Soc. B, 28 (1966), 131–142 | MR | Zbl
[3] A. Basu, I. R. Harris, N. L. Hjort, M. C. Jones, “Robust and efficient estimation by minimising a density power divergence”, Biometrika, 85 (1998), 549–559 | DOI | MR | Zbl
[4] D. R. Cox, The Analysis of Binary Data, Methuen, London, 1970 | MR | Zbl
[5] N. Cressie, T. R. C. Read, “Multinomial goodness-of-fit tests”, J. R. Statist. Soc. B, 46 (1984), 440–464 | MR
[6] N. Cressie, T. R. C. Read, Goodness-of-Fit Statistics for Discrete Multivariate Data, Springer-Verlag, New York, 1988 | MR | Zbl
[7] I. Csiszár, “Eine informationstheoretische Ungleichung und ihre Anwendung auf den Beweis der Ergodizität von Markoffschen Ketten”, Magyar Tud. Akad. Mat. Kutató Int. Közl., 8 (1963), 85–108 | MR | Zbl
[8] I. Csiszár, “Why least squares and maximum entropy? An axiomatic approach to inference for linear inverse problems”, Ann. Statist., 19 (1991), 2032–2066 | DOI | MR | Zbl
[9] D. Hunter, Lecture Notes on Asymptotic Tools, Penn State Univ., 2002
[10] L. K. Jones, C. L. Byrne, “General entropy criteria for inverse problems, with applications to data compression, pattern classification, and cluster analysis”, IEEE Trans. Inform. Theory, 36 (1990), 23–30 | DOI | MR | Zbl
[11] A. M. Kagan, “On the theory of Fisher's amount of information”, Dokl. Akad. Nauk SSSR, 151 (1963), 277–278 | MR
[12] F. Liese, I. Vajda, Convex Statistical Distances, Teubner, Leipzig, 1987 | MR | Zbl
[13] K. Matusita, “On the notion of affinity of several distributions and some of its applications”, Ann. Inst. Statist. Math., 19 (1967), 181–192 | DOI | MR | Zbl
[14] D. Morales, L. Pardo, I. Vajda, “Some new statistics for testing hypotheses in parametric models”, J. Multivariate Anal., 62:1 (1997), 137–168 | DOI | MR | Zbl
[15] L. Pardo, “Generalized divergence measures: statistical applications”, Encyclopedia of Microcomputers, Marcel Dekker, 1999, 163–191
[16] L. Pardo, Statistical Inference Based on Divergence Measures, Chapman and Hall/CRC, 2006 | MR | Zbl
[17] C. R. Rao, Linear Statistical Inference and Its Applications, John Wiley, 1973 | MR | Zbl
[18] A. Rényi, “On measures of entropy and information”, Proc. 4th Berkeley Symp. on Math. Statist. Prob., Vol. 1, Univ. California Press, 1961, 547–561 | MR
[19] R. J. Serfling, Approximation Theorems of Mathematical Statistics, John Wiley, 1980 | MR | Zbl
[20] K. Zografos, K. Ferentinos, T. Papaioannou, “$\Phi$-divergence statistics: Sampling properties, multinomial goodness of fit and divergence tests”, Commun. Statist. Theory Methods, 19:5 (1990), 1785–1802 | DOI | MR | Zbl