Define $Y_0$ to be a geometric convolution of $X$ if $Y_0$ is the sum of $N_0$ i.i.d. random variables distributed as $X$, where $N_0$ is geometrically distributed and independent of $X$. It is known that if $X$ is nonnegative with finite second moment, then as $p \rightarrow 0$, $Y_0/EY_0$ converges in distribution to an exponential distribution with mean 1. We derive an upper bound for $d(Y_0)$, the sup norm distance between $Y_0$ and an exponential with mean $EY_0$. This upper bound is $d(Y_0) \leq cp$ for $0 < p \leq \frac{1}{2}$, where $c = EX^2/(EX)^2$. It is asymptotically ($p \rightarrow 0$) tight. Also derived is a bound for $d(Y_0 + Z)$, where $Z$ is independent of $Y_0$.
@article{1176990750,
  author   = {Brown, Mark},
  title    = {Error Bounds for Exponential Approximations of Geometric Convolutions},
  journal  = {Ann. Probab.},
  volume   = {18},
  number   = {4},
  year     = {1990},
  pages    = {1388--1402},
  language = {en},
  url      = {http://dml.mathdoc.fr/item/1176990750},
}
Brown, Mark. Error Bounds for Exponential Approximations of Geometric Convolutions. Ann. Probab., Tome 18 (1990) no. 4, pp. 1388-1402. http://gdmltest.u-ga.fr/item/1176990750/