@article{KYB_1990_26_4_a0,
author = {Vajda, Igor},
title = {Generalization of discrimination-rate theorems of {Chernoff} and {Stein}},
journal = {Kybernetika},
pages = {273--288},
year = {1990},
volume = {26},
number = {4},
mrnumber = {1080281},
zbl = {0727.62026},
language = {en},
url = {http://geodesic.mathdoc.fr/item/KYB_1990_26_4_a0/}
}
Vajda, Igor. Generalization of discrimination-rate theorems of Chernoff and Stein. Kybernetika, Volume 26 (1990) no. 4, pp. 273-288. http://geodesic.mathdoc.fr/item/KYB_1990_26_4_a0/
[1] P. H. Algoet, T. M. Cover: A sandwich proof of the Shannon-McMillan-Breiman theorem. Ann. Probab. 16 (1988), 899-909. | MR | Zbl
[2] A. Bhattacharyya: On some analogues of the amount of information and their use in statistical estimation. Sankhya 8 (1946), 1-14. | MR
[3] H. Chernoff: A measure of asymptotic efficiency for tests of a hypothesis based on the sum of observations. Ann. Math. Statist. 23 (1952), 493-507. | MR
[4] H. Chernoff: Large sample theory: Parametric case. Ann. Math. Statist. 27 (1956), 1-22. | MR | Zbl
[5] R. S. Ellis: Large deviations for a general class of random vectors. Ann. Probab. 12 (1984), 1-12. | MR | Zbl
[6] R. S. Ellis: Entropy, Large Deviations, and Statistical Mechanics. Springer-Verlag, Berlin-Heidelberg-New York 1985. | Zbl
[7] I. I. Gikhman, A. V. Skorokhod: Stochastic Differential Equations (in Russian). Naukova Dumka, Kiev 1986.
[8] J. Hájek: A property of J-divergences of marginal probability distributions. Czechoslovak Math. J. 8 (1958), 460-463. | MR
[9] J. Hájek: On a property of normal distributions of an arbitrary stochastic process. Czechoslovak Math. J. 8 (1958), 610-618. | MR
[10] A. Janssen: Asymptotic properties of Neyman-Pearson tests for infinite Kullback-Leibler information. Ann. Statist. 14 (1986), 1068-1079. | MR
[11] M. Janžura: Divergences of Gauss-Markov random fields with application to statistical inference. Kybernetika 24 (1988), 6, 401-412. | MR
[12] S. Kakutani: On equivalence of infinite product measures. Ann. of Math. 49 (1948), 214-226. | MR | Zbl
[13] E. I. Kolomietz: Asymptotic behaviour of type II errors of the Neyman-Pearson test (in Russian). Teor. Veroyatnost. i Primenen. 33 (1986), 503-522.
[14] L. H. Koopmans: Asymptotic rate of discrimination for Markov processes. Ann. Math. Statist. 31 (1960), 982-994. | MR | Zbl
[15] O. Krafft, D. Plachky: Bounds for the power of likelihood ratio tests and their asymptotic properties. Ann. Math. Statist. 41 (1970), 1646-1654. | MR
[16] S. Kullback, J. C. Keegel, J. H. Kullback: Topics in Statistical Information Theory. Springer-Verlag, Berlin-Heidelberg-New York 1987. | MR
[17] H. Künsch: Thermodynamics and statistical analysis of Gaussian random fields. Z. Wahrsch. verw. Geb. 58 (1981), 407-421. | MR
[18] E. L. Lehmann: Testing Statistical Hypotheses. J. Wiley, New York 1959. | MR
[19] F. Liese: Hellinger integrals of diffusion processes. Statistics 17 (1986), 63-78. | MR | Zbl
[20] F. Liese, I. Vajda: Convex Statistical Distances. Teubner, Leipzig 1987. | MR | Zbl
[21] J. Mémin, A. N. Shiryayev: Distances de Hellinger-Kakutani des lois correspondant à accroissements indépendants. Z. Wahrsch. verw. Geb. 70 (1985), 67-89. | MR
[22] T. Nemetz: On the $\alpha$-divergence rate for Markov-dependent hypotheses. Problems Control Inform. Theory 5 (1974), 147-155. | MR | Zbl
[23] C. M. Newman: The inner product of path space measures corresponding to random processes with independent increments. Bull. Amer. Math. Soc. 78 (1972), 268-272. | MR
[24] C. M. Newman: The orthogonality of independent increment processes. In: Topics in Probability Theory (D. W. Stroock, S. R. S. Varadhan, eds.), Courant Inst. of Math. Sciences, New York 1973, pp. 93-111. | MR | Zbl
[25] C. M. Newman, B. W. Stuck: Chernoff bounds for discriminating between two Markov processes. Stochastics 2 (1979), 139-153. | MR | Zbl
[26] J. Oosterhoff, W. R. van Zwet: A note on contiguity and Hellinger distance. In: Contributions to Statistics (J. Hájek Memorial Volume, J. Jurečková, ed.), Reidel, Dordrecht 1979. | MR
[27] A. Perez: Notions généralisées d'incertitude, d'entropie et d'information du point de vue de la théorie des martingales. In: Trans. 1st Prague Conf. on Inform. Theory, Statist. Dec. Functions, Random Processes. Publ. House Czechosl. Acad. Sci., Prague 1957, pp. 183-206. | MR
[28] A. Perez: Generalization of Chernoff's result on the asymptotic discernibility of two random processes. Colloq. Math. Soc. János Bolyai 9 (1974), 619-632. | MR
[29] M. S. Pinsker: Information and Information Stability of Random Variables and Processes. Holden-Day, San Francisco 1964. | MR | Zbl
[30] A. Rényi: On measures of entropy and information. In: Proc. 4th Berkeley Symp. on Math. Statist. and Probability, Vol. 1, Berkeley Univ. Press, Berkeley 1961, pp. 547-561. | MR
[31] I. Vajda: Limit theorems for total variation of Cartesian product measures. Studia Sci. Math. Hungar. 6 (1971), 317-333. | MR
[32] I. Vajda: Theory of Statistical Inference and Information. Kluwer, Dordrecht-Boston 1989. | Zbl
[33] A. Wald: Statistical Decision Functions. J. Wiley, New York 1961. | MR