% This is a LaTeX file.
% Homework for the course "Stochastic Calculus",
% Fall semester, 2004, Jonathan Goodman.
\documentclass[12pt]{article}
% The page format, somewhat wider and taller page than in art12.sty.
\topmargin -0.1in \headsep 0in \textheight 8.9in \footskip 0.6in
\oddsidemargin 0in \evensidemargin 0in \textwidth 6.5in
\begin{document}
% The title and header.
\noindent
{\scriptsize Stochastic Calculus, Fall 2004
(http://www.math.nyu.edu/faculty/goodman/teaching/StochCalc2004/)} \hfill
\begin{center}
\large
Assignment 2.
\normalsize
\end{center}
\noindent
Given September 9, due September 23.\\
Last revised, September 10.
\vspace{.3in}
\noindent
{\bf Objective:} Conditioning and Markov chains.
\vspace{.5cm}
% The questions
\begin{description}
\item[1.]
Suppose that $\cal F$ and $\cal G$ are two algebras of sets and that
$\cal F$ adds information to $\cal G$ in the sense that any $\cal G$
measurable event is also $\cal F$ measurable:
$\cal G \subset \cal F$.
Suppose that the probability space $\Omega$ is discrete (finite or countable)
and that $X(\omega)$ is a variable defined on
$\Omega$ (that is, a function of the random variable $\omega$).
The conditional expectations (in the modern sense) of $X$ with
respect to $\cal F$ and $\cal G$ are $Y = E[X\mid {\cal F}]$ and
$Z = E[X\mid {\cal G}]$. In each case below, state whether the
statement is true or false and explain your answer with a proof or
a counterexample.
\begin{description}
\item[a.] $Z \in \cal F$.
\item[b.] $Y \in \cal G$.
\item[c.] $Z = E[ Y \mid {\cal G}]$.
\item[d.] $Y = E[ Z \mid {\cal F}]$.
\end{description}
\item[2.] Let $\Omega$ be a discrete probability space and $\cal F$ a
$\sigma$-algebra. Let $X(\omega)$ be a (function of a) random variable
with $E[X^2] < \infty$. Let $Y=E[X \mid {\cal F}]$. The variance of
$X$ is $\mbox{var}(X) = E[(X - \overline{X})^2]$, where
$\overline{X} = E[X]$.
\begin{description}
\item[a.] Show directly from the (modern) definition of conditional
expectation that
\begin{equation}
E[X^2] = E[(X-Y)^2] + E[Y^2] \;.
\end{equation}
Note that this equation also could be written
$$
E[X^2] = E\left[ \left( X - E[X\mid {\cal F}]\right)^2\right]
+ E\left[ \left( E[X \mid {\cal F}] \right)^2 \right] \; .
$$
\item[b.] Use this to show that
$\mbox{var}(X) = \mbox{var}(X-Y) + \mbox{var}(Y)$.
\item[c.] If we interpret conditional expectation as an orthogonal projection
in a vector space, what theorem about orthogonality does part a represent?
\item[d.] We have $n$ independent coin tosses with each equally likely to
be H or T. Take $X$ to be the indicator function of the event that the first
toss is H. Take $\cal F$ to be the algebra generated by the number of H
tosses in all. Calculate each of the three quantities in (1) from scratch
and check that the equation holds. Both of the terms on the right are
easiest to do using the law of total probability, which is pretty obvious
in this case.
\end{description}
\item[3.] (Bayesian identification of a Markov chain) We have a state space
of size $m$ and two $m \times m$ stochastic matrices, $Q$ and $R$.
First we pick one of the matrices, choosing $Q$ with probability $f$ and
$R$ with probability $1-f$.
Then we use the chosen matrix to run a Markov chain $X$, starting with
$X(0) = 1$ up to time $T$.
\begin{description}
\item[a.] Describe the probability space $\Omega$ appropriate for this
situation.
\item[b.] Let $\cal F$ be the algebra generated by the chain itself, without
knowing whether $Q$ or $R$ was chosen. Find a formula for
$P(Q \mid {\cal F})$ (which would be $P(Q \mid X=x)$ in classical
notation). Though this formula might be ugly, it is easy to program.
\end{description}
\item[4.] Suppose we have a 3 state Markov chain with transition matrix
$$
P = \left( \begin{array}{ccc} .6 & .2 & .2 \\
.3 & .5 & .2 \\
.1 & .2 & .7 \end{array} \right)
$$
and suppose that $X(0) = 1$. For any $t>0$, the algebras ${\cal F}_t$
and ${\cal G}_t$ are as in the notes, and ${\cal H}_t$ is the algebra
generated by $X(s)$ for $t \leq s \leq T$.
\begin{description}
\item[a.] Show that the probability distribution of the first $t$ steps
conditioned on ${\cal G}_{t+1}$ is the same as that conditioned on
${\cal H}_{t+1}$. This is a kind of backwards Markov property: a forward
Markov chain is a backward Markov chain also.
\item[b.] Calculate $P(X(3) = 2 \mid {\cal G}_4)$. This consists of 3 numbers.
\end{description}
\end{description}
\end{document}