Diffstat (limited to 'houdre')
-rw-r--r--  houdre/hw8.tex  154
1 file changed, 154 insertions(+), 0 deletions(-)
diff --git a/houdre/hw8.tex b/houdre/hw8.tex
new file mode 100644
index 0000000..930979e
--- /dev/null
+++ b/houdre/hw8.tex
@@ -0,0 +1,154 @@
+% Script (rsfs) and blackboard-bold (msbm) math font families
+\newfam\rsfs
+\newfam\bbold
+\def\scr#1{{\fam\rsfs #1}}
+\def\bb#1{{\fam\bbold #1}}
+\let\oldcal\cal
+\def\cal#1{{\oldcal #1}}
+\font\rsfsten=rsfs10
+\font\rsfssev=rsfs7
+\font\rsfsfiv=rsfs5
+\textfont\rsfs=\rsfsten
+\scriptfont\rsfs=\rsfssev
+\scriptscriptfont\rsfs=\rsfsfiv
+\font\bbten=msbm10
+\font\bbsev=msbm7
+\font\bbfiv=msbm5
+\textfont\bbold=\bbten
+\scriptfont\bbold=\bbsev
+\scriptscriptfont\bbold=\bbfiv
+
+% Blackboard-bold probability and expectation symbols
+\def\Pr{\bb P}
+\def\E{\bb E}
+% \q<n> typesets a bold numbered question heading
+\newcount\qnum
+\def\q{\afterassignment\qq\qnum=}
+\def\qq{\qqq{\number\qnum}}
+\def\qqq#1{\bigskip\goodbreak\noindent{\bf#1)}\smallskip}
+% Multi-column display alignment built on \halign
+\def\align#1{\vcenter{\halign{$\displaystyle##\hfil$\tabskip1em&&
+ $\hfil\displaystyle##$\cr#1}}}
+% Shorthands: fractions, variance and covariance operators,
+% the integral over the whole line, and partial derivatives
+\def\fr#1#2{{#1\over #2}}
+\def\var{\mathop{\rm var}\nolimits}
+\def\cov{\mathop{\rm cov}\nolimits}
+\def\infint=\int_{-\infty}^\infty
+\def\pa#1#2{\partial#1/\partial#2}
+
+\q1
+
+(a)
+
+$$\Pr(|Z_n-a|>\epsilon) = \Pr(Z_n<a-\epsilon) = \Pr(X_i<a-\epsilon)^n,$$
+using that $Z_n\leq a$ surely. Since
+$\Pr(X_i<a-\epsilon)=(a-\epsilon)/a=1-\epsilon/a$ for $0<\epsilon<a$
+(for $\epsilon\geq a$ the probability is already zero), this
+$\to 0$ as $n\to\infty$ for all $\epsilon>0.$
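+
+To make the rate explicit (a small aside, using the elementary bound
+$1-x\leq e^{-x}$):
+$$(1-\epsilon/a)^n\leq e^{-n\epsilon/a}\to 0\qquad\hbox{\it as }
+n\to\infty.$$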
+
+(b)
+
+$$\Pr(|\sqrt{Z_n}-\sqrt{a}|>\epsilon) =
+\Pr(\sqrt{Z_n}<\sqrt{a}-\epsilon) =
+\Pr(Z_n<a-2\sqrt{a}\epsilon+\epsilon^2) =
+\Pr(X_i<a-2\sqrt{a}\epsilon+\epsilon^2)^n =
+(1-\epsilon(2\sqrt{a}-\epsilon)/a)^n \to 0,$$
+as $n\to\infty,$ provided $\epsilon<\sqrt{a};$ for
+$\epsilon\geq\sqrt{a}$ the probability is zero outright, since
+$\sqrt{Z_n}\geq 0$ makes the event $\sqrt{Z_n}<\sqrt{a}-\epsilon$
+impossible.
+
+(c)
+
+$$\Pr(U_n\leq x) = \Pr(1-Z_n\leq x/n) = \Pr(Z_n\geq 1-x/n) =
+1-\Pr(Z_n < 1-x/n) = 1-\Pr(X_i < 1-x/n)^n = 1-(1-x/n)^n,$$
+for $0<x<n$ (with the $X_i$ uniform on $(0,1)$), a constraint that
+relaxes to $x>0$ as $n\to\infty;$ the limit is $1-e^{-x},$ the
+exponential distribution function.
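+
+The limit itself can be checked by taking logarithms (a standard step,
+spelled out for completeness):
+$$n\log(1-x/n) = -x+O(x^2/n)\to -x,\qquad\hbox{so }
+(1-x/n)^n\to e^{-x}.$$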
+
+\q2
+
+Let $Z_n = (S_n-np)/\sqrt{npq}$ be the normalized version of the sum
+$S_n$ of $n$ independent Bernoulli variables, each with parameter $p$
+and thus mean $p$ and variance $pq.$
+$$\Pr(|Z_n|\leq x) = \Pr(-x\leq Z_n\leq x) =
+\Pr(np-\sqrt{npq}x\leq S_n\leq np+\sqrt{npq}x) =
+\sum_{|k-np|\leq x\sqrt{npq}}{n\choose k}p^k q^{n-k}.$$
+By the central limit theorem, as $n\to\infty,$
+$$\Pr(|Z_n|\leq x) = \Pr(Z_n\leq x) - \Pr(Z_n\leq -x) \to \int_{-x}^x
+{1\over\sqrt{2\pi}}e^{-\fr12 u^2}du = 2\int_0^x
+{1\over\sqrt{2\pi}}e^{-\fr12 u^2}du,$$
+by symmetry of the integrand.
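+
+As a rough numerical check (not part of the problem): with $n=100,$
+$p=q=\fr12,$ and $x=1.96,$ we get $np=50$ and $\sqrt{npq}=5,$ so the
+approximation reads
+$$\Pr(40.2\leq S_n\leq 59.8)\approx 2\int_0^{1.96}
+{1\over\sqrt{2\pi}}e^{-\fr12 u^2}du\approx 0.95.$$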
+
+\q3
+
+$X_n,$ being binomial with parameters $n$ and $p,$ is equal in
+distribution to the sum of $n$ independent Bernoulli variables $B_i$
+with parameter $p.$
+
+$$\E([n^{-1}X_n-p]^2) = \E(n^{-2}[(B_1-p)+(B_2-p)+\cdots+(B_n-p)]^2) =
+n^{-2}(n\var(B_i)) = p(1-p)/n,$$
+
+where the cross terms $\E([B_i-p][B_j-p]),$ $i\neq j,$ vanish by
+independence. The bound $\to 0$ as $n\to\infty.$ Convergence in mean
+square implies convergence in probability, so $n^{-1}X_n$ converges in
+probability to $p.$
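+
+The implication used in the last step is Markov's inequality applied
+to the squared deviation:
+$$\Pr(|n^{-1}X_n-p|>\epsilon) = \Pr([n^{-1}X_n-p]^2>\epsilon^2)\leq
+{\E([n^{-1}X_n-p]^2)\over\epsilon^2} = {p(1-p)\over n\epsilon^2}\to 0.$$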
+
+\q5
+
+With $P_n$ a Poisson random variable with parameter $n,$
+$$\Pr(P_n = k) = {n^k e^{-n}\over k!},$$
+$$e^{-n}\left(1+n+{n^2\over 2!}+\cdots+{n^n\over n!}\right) =
+\sum_{k=0}^n \Pr(P_n=k) = \Pr(P_n\leq n).$$
+$P_n = X_1+X_2+\cdots+X_n$ in distribution, where each $X_i$ has the
+Poisson distribution with parameter $1.$
+The normalized version of $P_n$ is $(P_n-n)/\sqrt{n},$ because the mean
+and variance of $X_i$ are both $1.$
+As $n\to\infty,$ this distribution approaches the standard normal (by
+the central limit theorem), and
+$\Pr(P_n\leq n) \to \Pr(N(0,1)\leq0) = 1/2.$
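+
+Written out, the final step is
+$$\Pr(P_n\leq n) = \Pr\left({P_n-n\over\sqrt{n}}\leq 0\right)\to
+\Pr(N(0,1)\leq 0) = \fr12.$$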
+
+\q7
+
+$$\E([X_n+Y_n-(X+Y)]^2) = \E([X_n-X]^2) + \E([Y_n-Y]^2) +
+2\E([Y_n-Y][X_n-X]).$$
+
+The first two terms $\to 0$ as $n\to\infty,$ because $X_n\to X$ and
+$Y_n\to Y$ in mean square. For the cross term, the Cauchy-Schwarz
+inequality gives
+$$|\E([Y_n-Y][X_n-X])| \leq \sqrt{\E([Y_n-Y]^2)\E([X_n-X]^2)} \to 0,$$
+so the whole expression $\to 0$ and $X_n+Y_n \to X+Y$ in mean square.
+
+\q8
+
+$X_n$ converges to $X$ in mean square, so
+$$\E([X_n-X]^2)\to0\qquad\hbox{\it as }n\to\infty.$$
+By the Cauchy-Schwarz inequality with the constant variable $V=1,$
+$$\E(X_n-X)^2 = \E([X_n-X]V)^2\leq\E([X_n-X]^2)\E(V^2) =
+\E([X_n-X]^2),$$
+so $\E(X_n-X)\to 0$ as $n\to\infty.$
+
+By linearity of expectation, $\E(X_n)-\E(X) = \E(X_n-X)\to 0,$ so
+$$\E(X_n)\to\E(X)\qquad\hbox{\it as }n\to\infty.$$
+
+The corresponding statement fails under convergence in probability: if
+$\Pr(X_n=0) = 1-{1\over n}$ and $\Pr(X_n=n) = {1\over n},$ then
+$X_n\to X=0$ in probability, since $\Pr(|X_n|>\epsilon) = {1\over n}
+\to 0$ as $n\to\infty$ for any fixed $\epsilon>0,$ but $\E(X) = 0$
+while $\E(X_n) = n\cdot{1\over n} = 1$ for every $n.$
+
+\q11
+
+$$\Pr(|X|\geq a) = \Pr(g(|X|)\geq g(a)) = \Pr(g(X)\geq g(a)),$$
+where the first equality holds because $g$ is strictly increasing on
+$x>0,$ and the second because $g$ is symmetric, so $g(|X|)=g(X).$ Then
+$$\Pr(g(X)\geq g(a)) \leq {\E(g(X))\over g(a)},$$
+by Markov's inequality applied to the nonnegative random variable
+$g(X),$ using $g(a)>0.$ Combining the two displays gives the claimed
+inequality.
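+
+For example, taking $g(x)=x^2$ (symmetric, strictly increasing on
+$x>0$) recovers Chebyshev's inequality:
+$$\Pr(|X|\geq a)\leq{\E(X^2)\over a^2}.$$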
+
+\q14
+
+$X_n$ converges in mean square to the random variable $X,$ so
+$$\E([X_n-X]^2) \to 0\qquad\hbox{\it as }n\to\infty.$$
+
+$$\E([X_n-X_m]^2) = \E([X_n-X-(X_m-X)]^2) =
+\E([X_n-X]^2)-2\E([X_n-X][X_m-X])+\E([X_m-X]^2),$$
+where the first and last terms $\to 0$ as $n,m\to\infty$ by the
+hypothesis above.
+
+By the Cauchy-Schwarz inequality, the cross term also vanishes:
+$$\E([X_n-X][X_m-X])^2\leq \E([X_n-X]^2)\E([X_m-X]^2) \to 0,$$
+as $n,m\to\infty,$ so $\E([X_n-X_m]^2)\to 0$ as well.
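+
+Alternatively, the same conclusion follows in one line from the
+triangle (Minkowski) inequality for the mean-square norm:
+$$\sqrt{\E([X_n-X_m]^2)}\leq\sqrt{\E([X_n-X]^2)}+\sqrt{\E([X_m-X]^2)}
+\to 0.$$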
+
+With $\mu=\E(X)$ and $\sigma^2=\var(X)$ (assumed positive, so that
+$\rho$ is defined), question 8 gives $\E(X_n)\to\mu,$ so
+$$\cov(X_n,X_m) = \E([X_n-\E(X_n)][X_m-\E(X_m)])$$
+has the same limit as $\E([X_n-\mu][X_m-\mu]).$ Writing
+$X_n-\mu = (X_n-X)+(X-\mu)$ and expanding,
+$$\E([X_n-\mu][X_m-\mu]) = \E([X_n-X][X_m-X]) + \E([X_n-X][X-\mu]) +
+\E([X-\mu][X_m-X]) + \E([X-\mu]^2),$$
+where the first three terms $\to 0$ by the Cauchy-Schwarz argument
+above, so $\cov(X_n,X_m)\to\sigma^2.$ The same expansion with $m=n$
+gives $\var(X_n)\to\sigma^2,$ and therefore
+$$\rho(X_n,X_m) = {\cov(X_n,X_m)\over\sqrt{\var(X_n)\var(X_m)}}\to
+{\sigma^2\over\sigma^2} = 1.$$
+
+\bye