Multivariate Distributions

\section{Joint Probability Distributions}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}
Let X,Y be two random variables defined on the same sample space. Let A,B be the sets of possible values for X,Y respectively. The function

    \begin{align*} p(x,y) &= P(X = x, Y = y) \end{align*}

is called
\begin{enumerate}
\item The Joint Probability Mass Function (jpmf) of X,Y when discrete, with

    \begin{align*} 	\sum_{x \in A}\sum_{y \in B} p(x,y) = 1 	\end{align*}

\item The Joint Probability Density Function (jpdf) of X,Y when continuous, denoted by f(x,y); in this case f(x,y) is not itself a probability, but rather satisfies $P\left((X,Y) \in C\right) = \iint_C f(x,y)dxdy$ for subsets $C \subseteq \Real^2$, with

    \begin{align*} 	\int_{-\infty}^{\infty}\int_{-\infty}^{\infty} f(x,y)dxdy = 1 	\end{align*}

\end{enumerate}
\end{defn}
}}
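\hfill\break

As a running toy example (chosen purely for illustration), suppose X,Y each take values in \{0,1\} with

    \begin{align*} p(0,0) = 0.4, \quad p(0,1) = 0.1, \quad p(1,0) = 0.2, \quad p(1,1) = 0.3, \end{align*}

so the probabilities sum to 0.4 + 0.1 + 0.2 + 0.3 = 1. A continuous counterpart is the jpdf f(x,y) = x + y on the unit square ($0 \leq x,y \leq 1$, and 0 elsewhere), since

    \begin{align*} \int_{0}^{1}\int_{0}^{1} (x+y)dxdy = \frac{1}{2} + \frac{1}{2} = 1. \end{align*}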

\section{Marginal Distributions}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}[Discrete]
Let X,Y have jpmf p(x,y), with sets of possible values A,B respectively, then

    \begin{align*} p_X(x) &= \sum_{y \in B} p(x,y) \\ p_Y(y) &= \sum_{x \in A} p(x,y) \end{align*}

are called the marginal probability mass functions of X and Y.
\end{defn}
}}
\hfill\break
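For the toy jpmf above, summing over y for fixed x (and over x for fixed y) gives

    \begin{align*} p_X(0) = 0.4 + 0.1 = 0.5, \quad p_X(1) = 0.5, \qquad p_Y(0) = 0.4 + 0.2 = 0.6, \quad p_Y(1) = 0.4, \end{align*}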

and for the continuous case,

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}[Continuous]
Let X,Y have jpdf f(x,y), then

    \begin{align*} f_X(x) &= \int_{-\infty}^{\infty} f(x,y)dy \\ f_Y(y) &= \int_{-\infty}^{\infty} f(x,y)dx \end{align*}

are called the marginal probability density functions of X and Y.
\end{defn}
}}
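\hfill\break

For the jpdf f(x,y) = x + y on the unit square,

    \begin{align*} f_X(x) = \int_{0}^{1}(x+y)dy = x + \tfrac{1}{2}, \quad 0 \leq x \leq 1, \end{align*}

and by symmetry $f_Y(y) = y + \tfrac{1}{2}$ for $0 \leq y \leq 1$.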

\section{Expected Values}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}[Expected Value (Discrete)]
We define the expected values of X and Y to be

    \begin{align*} E[X] &= \sum_{x \in A} xp_X(x) \\ E[Y] &= \sum_{y \in B} yp_Y(y) \end{align*}

\end{defn}
}}
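\hfill\break

With the marginals from the toy example, $E[X] = 0(0.5) + 1(0.5) = 0.5$ and $E[Y] = 0(0.6) + 1(0.4) = 0.4$.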

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let p(x,y) be the jpmf of X,Y. If $h:\Real^2 \rightarrow \Real$, then h(X,Y) is a discrete r.v. with

    \begin{align*} E\left[h(X,Y)\right] &= \sum_{x \in A} \sum_{y \in B} h(x,y) p(x,y) \end{align*}

\end{thm}
}}
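\hfill\break

For instance, taking h(x,y) = xy in the toy example, only the (1,1) cell contributes, so

    \begin{align*} E[XY] = \sum_{x \in A}\sum_{y \in B} xy\, p(x,y) = (1)(1)(0.3) = 0.3. \end{align*}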

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}[Expected Value (Continuous)]

    \begin{align*} E[X] &= \int_{-\infty}^{\infty}x f_X(x)dx \\ E[Y] &= \int_{-\infty}^{\infty}y f_Y(y)dy \end{align*}

\end{defn}
}}
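\hfill\break

For f(x,y) = x + y on the unit square,

    \begin{align*} E[X] = \int_{0}^{1} x\left(x + \tfrac{1}{2}\right)dx = \frac{1}{3} + \frac{1}{4} = \frac{7}{12}, \end{align*}

and by symmetry $E[Y] = \tfrac{7}{12}$ as well.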

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let f(x,y) be the jpdf of X,Y. If $h:\Real^2 \rightarrow \Real$, then h(X,Y) is a r.v. with

    \begin{align*} E\left[h(X,Y)\right] &= \int_{-\infty}^{\infty} \int_{-\infty}^{\infty} h(x,y) f(x,y)dxdy \end{align*}

\end{thm}
}}
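\hfill\break

Taking h(x,y) = xy with f(x,y) = x + y on the unit square,

    \begin{align*} E[XY] = \int_{0}^{1}\int_{0}^{1} xy(x+y)dxdy = \frac{1}{3}\cdot\frac{1}{2} + \frac{1}{2}\cdot\frac{1}{3} = \frac{1}{3}. \end{align*}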

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{cor}
By Theorems 3.2 and 3.4 we have

    \begin{align*} E[X + Y] &= E[X] + E[Y] \end{align*}

\end{cor}
}}
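\hfill\break

As a check in the toy example, taking h(x,y) = x + y gives $E[X+Y] = 0(0.4) + 1(0.1) + 1(0.2) + 2(0.3) = 0.9$, which indeed equals $E[X] + E[Y] = 0.5 + 0.4$.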

\section{Independent Random Variables}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}[Independence]
Two random variables X,Y are called independent if, for all sets A,B,

    \begin{align*} P(X \in A, Y\in B) &= P(X \in A) P(Y \in B) \\ \shortintertext{or equivalently, for all $a,b \in \Real$,} P(X \leq a, Y \leq b) &= P(X \leq a)P(Y \leq b) \end{align*}

\end{defn}
}}
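\hfill\break

For example, if X and Y are the indicators of two separate fair coin tosses, then $P(X = 1, Y = 1) = \tfrac{1}{4} = P(X = 1)P(Y = 1)$, and the same factorisation holds for every choice of events, so X and Y are independent.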

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let X,Y be two r.v.s. If F is the joint distribution function of X and Y, then X and Y are independent if and only if $\forall t,u \in \Real$

    \begin{align*} F(t,u) &= F_X(t)F_Y(u) \end{align*}

\end{thm}
}}
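\hfill\break

For instance, the jpdf f(x,y) = 4xy on the unit square has joint distribution function $F(t,u) = t^2 u^2$ for $0 \leq t,u \leq 1$, which factors as $F_X(t)F_Y(u) = t^2 \cdot u^2$, so X and Y are independent here.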

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let X,Y be discrete r.v.s with jpmf p(x,y). Then X,Y are independent if and only if $\forall x,y \in \Real$

    \begin{align*} p(x,y) &= p_X(x)p_Y(y) \end{align*}

\end{thm}
}}
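\hfill\break

The toy jpmf from earlier is not independent: $p(0,0) = 0.4$, whereas $p_X(0)p_Y(0) = (0.5)(0.6) = 0.3$, so the factorisation fails.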

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let X,Y be independent r.v.s and $g,h : \Real \rightarrow \Real$. Then g(X) and h(Y) are also independent r.v.s.
\end{thm}
}}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{thm}
Let X,Y be independent r.v.s. Then $\forall g,h:\Real \rightarrow \Real$,

    \begin{align*} E[g(X)h(Y)] &= E[g(X)] E[h(Y)] \\ \shortintertext{Most importantly,} E[XY] &= E[X]E[Y] \end{align*}

\end{thm}
}}
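\hfill\break

For the independent example f(x,y) = 4xy above, $E[X] = E[Y] = \tfrac{2}{3}$ and $E[XY] = \tfrac{4}{9} = E[X]E[Y]$, as the theorem requires. By contrast, the (dependent) toy jpmf has $E[XY] = 0.3 \neq (0.5)(0.4) = E[X]E[Y]$, which by the contrapositive re-confirms that it is not independent.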

\section{Conditional Distributions}

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}
The conditional probability mass function of X given that Y = y, defined whenever $p_Y(y) > 0$, is

    \begin{align*} p_{X|Y}(x|y) &= P(X = x | Y = y) = \frac{P(X = x, Y = y)}{P(Y = y)} \\ &= \frac{p(x,y)}{p_Y(y)} \end{align*}

\end{defn}
}}
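\hfill\break

In the toy example, conditioning on Y = 0 gives

    \begin{align*} p_{X|Y}(0|0) = \frac{0.4}{0.6} = \frac{2}{3}, \qquad p_{X|Y}(1|0) = \frac{0.2}{0.6} = \frac{1}{3}. \end{align*}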

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}
The conditional expectation of X given that Y = y is

    \begin{align*} E[X | Y = y] = \sum_{x \in A}xP(X = x | Y = y) = \sum_{x \in A} x p_{X|Y}(x|y) \end{align*}

\end{defn}
}}
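\hfill\break

Continuing the toy example, $E[X | Y = 0] = 0\cdot\tfrac{2}{3} + 1\cdot\tfrac{1}{3} = \tfrac{1}{3}$, while $E[X | Y = 1] = \tfrac{0.3}{0.4} = \tfrac{3}{4}$.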

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}
The conditional probability density function of X given that Y = y, defined whenever $f_Y(y) > 0$, is

    \begin{align*} f_{X|Y}(x|y) &= \frac{f(x,y)}{f_Y(y)} \end{align*}

\end{defn}
}}
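\hfill\break

For f(x,y) = x + y on the unit square,

    \begin{align*} f_{X|Y}(x|y) = \frac{x+y}{y + \tfrac{1}{2}}, \quad 0 \leq x \leq 1, \end{align*}

for each fixed $0 \leq y \leq 1$.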

\hfill\break
\hspace{-1cm}\fbox{\parbox{\textwidth + 1cm}{
\begin{defn}
In the continuous case, the conditional expectation of X given that Y = y is

    \begin{align*} E[X | Y = y] = \int_{-\infty}^{\infty}xf_{X|Y}(x|y) dx \end{align*}

\end{defn}
}}
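\hfill\break

For the same jpdf,

    \begin{align*} E[X | Y = y] = \int_{0}^{1} x \cdot \frac{x+y}{y+\tfrac{1}{2}}\, dx = \frac{\tfrac{1}{3} + \tfrac{y}{2}}{y + \tfrac{1}{2}} = \frac{2 + 3y}{3(2y+1)}, \quad 0 \leq y \leq 1. \end{align*}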