The Cram\'er-Rao inequality

Handout

\begin{thm}[The Cram\'er-Rao inequality]
Assume that $\mathbf{X} = (X_1,\dots,X_n)' \sim f_\theta$ where
$f_\theta$ is a density function and that the random variable
$W(\mathbf{X})$ is such that
\begin{equation} \tag{*} \frac{d}{d\theta} E_\theta[W(\mathbf{X})]
  = \int_{\mathbf{X}(\Omega)} \frac{\partial}{\partial \theta}
  \left[W(\mathbf{x})f_\theta(\mathbf{x})\right]d\mathbf{x}
    \end{equation}
    and that $V_\theta[W(\mathbf{X})] < \infty$.

Then
    $$V_\theta[W(\mathbf{X})] \geq \frac{\left(\frac{d}{d\theta}E_\theta[W(\mathbf{X})]\right)^2}{E_\theta\left[\left\{\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})\right\}^2\right]}$$
\end{thm}
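For a concrete illustration (the model is assumed here only for this example), let $X_1,\dots,X_n$ be i.i.d.\ $N(\theta,\sigma^2)$ with $\sigma^2$ known. Then
$$\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})
  = \frac{\partial}{\partial \theta}\left[-\frac{n}{2}\ln(2\pi\sigma^2) - \frac{1}{2\sigma^2}\sum_{i=1}^n (X_i-\theta)^2\right]
  = \frac{1}{\sigma^2}\sum_{i=1}^n (X_i-\theta),$$
and since the sum has mean zero and variance $n\sigma^2$,
$$E_\theta\left[\left\{\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})\right\}^2\right]
  = \frac{1}{\sigma^4}\, E_\theta\left[\left(\sum_{i=1}^n (X_i-\theta)\right)^2\right]
  = \frac{n\sigma^2}{\sigma^4} = \frac{n}{\sigma^2}.$$
For the unbiased estimator $W(\mathbf{X}) = \bar{X}$ the numerator equals $1$, so the theorem gives $V_\theta[\bar{X}] \geq \sigma^2/n$; since in fact $V_\theta[\bar{X}] = \sigma^2/n$, the bound is attained in this case.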
\begin{note}
It is worth noting that
    \begin{itemize}
        \item[(1)] the condition (*) looks rather technical, but it can be
              shown to hold for very many distributions, including the
              exponential family; it can fail when the support of
              $f_\theta$ depends on $\theta$, as for the uniform
              distribution on $(0,\theta)$
        \item[(2)] The denominator contains the quantity
            $$\ln f_\theta (\mathbf{X}),$$
            the log-likelihood, which is a function of both the
            parameter and the random variables (see also the i.i.d.\
            simplification after this note).
        \item[(3)] If $W(\mathbf{X})$ is an unbiased estimator for
 $\theta$ then $E_\theta[W(\mathbf{X})] = \theta$ and the numerator
 is then the constant $1$.
        \item[(4)] If $W(\mathbf{X})$ is unbiased and achieves
              this lower bound, then $W$ is UMVUE (see also the
              remark after the proof).
        \item[(5)] $E_\theta \frac{\partial}{\partial \theta} \ln f_\theta (\mathbf{X}) = 0$ since
            \begin{equation*}
            \begin{split}
                E_\theta \frac{\partial}{\partial \theta} \ln f_\theta (\mathbf{X}) &= \int\left(\frac{\partial}{\partial \theta} \ln f_\theta (\mathbf{x})\right) f_\theta(\mathbf{x})d\mathbf{x}\\
                &= \int\frac{\frac{\partial}{\partial \theta} f_\theta (\mathbf{x})}{f_\theta(\mathbf{x})}f_\theta(\mathbf{x})d\mathbf{x}\\
                &= \int\frac{\partial}{\partial \theta} f_\theta(\mathbf{x})d\mathbf{x} = \frac{\partial}{\partial \theta} \underbrace{\int f_\theta(\mathbf{x})d\mathbf{x}}_{=1} = 0,
            \end{split}
            \end{equation*}
        where the second-to-last step (interchanging differentiation and integration) is valid when a condition of type (*) holds, here with $W \equiv 1$.
    \end{itemize}
\end{note}
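If, in addition, $X_1,\dots,X_n$ are assumed i.i.d.\ with common marginal density $g_\theta$, so that $f_\theta(\mathbf{x}) = \prod_{i=1}^n g_\theta(x_i)$ (the symbol $g_\theta$ is introduced only for this remark; the theorem itself does not require independence), the denominator simplifies. The score of the sample splits into a sum of independent terms,
$$\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X}) = \sum_{i=1}^n \frac{\partial}{\partial \theta} \ln g_\theta(X_i),$$
each of which has mean zero by (5) applied to a single observation, so
$$E_\theta\left[\left\{\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})\right\}^2\right]
  = n\, E_\theta\left[\left\{\frac{\partial}{\partial \theta} \ln g_\theta(X_1)\right\}^2\right] = n\,I(\theta),$$
where $I(\theta)$ denotes the Fisher information of one observation. For an unbiased estimator of $\theta$ the bound then reads $V_\theta[W(\mathbf{X})] \geq \frac{1}{n I(\theta)}$.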
\begin{proof}
    By (*) and the fact that $W(\mathbf{x})$ does not depend on $\theta$,
    \begin{equation*}
            \begin{split}
                \frac{d}{d\theta}E_\theta[W(\mathbf X)] &= \int W(\mathbf{x}) \frac{\partial}{\partial \theta} f_\theta(\mathbf{x}) d\mathbf{x}\\
                & = \int W(\mathbf{x}) \frac{\frac{\partial}{\partial \theta} f_\theta(\mathbf{x})}{f_\theta(\mathbf{x})} f_\theta(\mathbf{x}) d\mathbf{x}\\
                & = \int W(\mathbf{x})\left[\frac{\partial}{\partial \theta} \ln f_\theta (\mathbf{x})\right]f_\theta(\mathbf{x})d\mathbf{x}\\
                & = E_\theta \left[\underbrace{W(\mathbf{X})}_{W} \underbrace{\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})}_{U_\theta}\right] = E_\theta[WU_\theta]\\
                & = E_\theta[WU_\theta] - E_\theta[W] \underbrace{E_\theta[U_\theta]}_{=0 \text{ by (5)}} = Cov_\theta (W,U_\theta)
            \end{split}
    \end{equation*}

    We also have $V_\theta[U_\theta] = E_\theta[U_\theta^2] - \underbrace{(E_\theta U_\theta)^2}_{=0} = E_\theta[U_\theta^2]$
    and thus
    \begin{equation*}
            \begin{split}
                1 \geq \rho^2_{W,U_\theta} &= \frac{Cov_\theta(W,U_\theta)^2}{V_\theta[W] \cdot V_\theta[U_\theta]}\\
                & = \frac{(E_\theta[WU_\theta])^2}{V_\theta[W] \cdot E_\theta[U_\theta^2]}
            \end{split}
    \end{equation*}
    $\Rightarrow V_\theta[W] \geq \frac{(E_\theta[WU_\theta])^2}{E_\theta[U_\theta^2]} = \frac{\left(\frac{d}{d\theta}E_\theta[W]\right)^2}{E_\theta[U_\theta^2]}$\\
    $\Rightarrow V_\theta[W] \geq \frac{\left(\frac{d}{d\theta}E_\theta[W]\right)^2}{E_\theta\left[\left(\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{X})\right)^2\right]}$
\end{proof}
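As a closing remark, here is a sketch of when the bound of note (4) can be attained (the function $a(\theta)$ below is introduced only for this sketch). In the last step of the proof, equality requires $\rho^2_{W,U_\theta} = 1$, which holds exactly when $U_\theta$ is almost surely an affine function of $W$, i.e.
$$\frac{\partial}{\partial \theta} \ln f_\theta(\mathbf{x}) = a(\theta)\left[W(\mathbf{x}) - E_\theta W(\mathbf{X})\right]$$
for some function $a(\theta)$ not depending on $\mathbf{x}$. Integrating this relation with respect to $\theta$ shows that attainment for every $\theta$ forces $f_\theta$ into exponential-family form with $W$ as the natural statistic.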