% 260s20Assignment1.tex                 REVIEW
\documentclass[12pt]{article} 
%\usepackage{amsbsy} % for \boldsymbol and \pmb 
%\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
%\usepackage{fullpage}
\oddsidemargin=0in                  % Good for US Letter paper
\evensidemargin=0in
\textwidth=6.3in
\topmargin=-1in
\headheight=0.2in
\headsep=0.5in
\textheight=9.4in

%\pagestyle{empty} % No page numbers


\begin{document}
%\enlargethispage*{1000 pt} 

\begin{center}   
{\Large \textbf{STA 260s20 Assignment One: Mostly Review}}%\footnote{Copyright information is at the end of the last page.}
%\vspace{1 mm}
\end{center}

\noindent
These homework problems are not to be handed in.  They are preparation for Quiz 1 and Term Test 1. \textbf{Please try each question before looking at the solution}.

%\vspace{5mm}
\begin{enumerate} 

\item Let the continuous random variable $X$ have density $f_{_X}(x) = 2x \, e^{-x^2} \, I(x>0)$. 
    \begin{enumerate}
        \item Write the cumulative distribution function $F_{_X}(x)$ using indicator functions. Show your work.
        \item Calculate $P(X>\frac{1}{2})$. My answer is 0.7788.
    \end{enumerate}

\item The discrete random variable $X$ has probability mass function 
\begin{displaymath}
    p_{_X}(x) = \frac{|x|}{20} I(x = -4, \ldots, 4).
\end{displaymath}
Let $Y=X^2-1$.
        \begin{enumerate}
            \item What is $E(X)$? The answer is a number. Show some work. My answer is zero.
            \item Calculate the variance of $X$. The answer is a number. My answer is 10.
            \item What is $P(Y=8)$? My answer is 0.30.
            \item What is $P(Y=-1)$? My answer is zero.
            \item What is $P(Y=-4)$? My answer is zero.
            \item What is the probability distribution of $Y$? Give the $y$ values with their probabilities for $y$ with $p_{_Y}(y)>0$.
\begin{verbatim} 
        y        0       3       8       15
        p(y)    0.1     0.2     0.3     0.4
\end{verbatim}
            \item What is $E(Y)$? The answer is a number. My answer is 9.
            \item What is $Var(Y)$? The answer is a number. My answer is 30.
        \end{enumerate}

\item Let $f_{_X}(x) = \frac{1}{2} I(-1<x<1)$, and $Y=X^2$. Find $f_{_Y}(y)$. This is a valuable workout in the use of indicator functions.

\item Let $X \sim N(\mu,\sigma^2)$. Show $Z = \frac{X-\mu}{\sigma} \sim N(0,1)$.

\item Let $X_1, \ldots, X_n$ be independent and identically distributed $N(\mu,\sigma^2)$ random variables. Find the distribution of $Y = a + \sum_{i=1}^n b_iX_i$. Show your work.

\item Let $Z \sim N(0,1)$. Show $Z^2 \sim \chi^2(1)$. 

\item Let $Y_1, \ldots, Y_n$ be independent $\chi^2(\nu_i)$ random variables. Show $Y = \sum_{i=1}^n Y_i \sim \chi^2 \left( \sum_{i=1}^n \nu_i\right)$. 

\item Let $X_1, \ldots, X_n$ be independent random variables with expected value $\mu$ and variance $\sigma^2$, and denote the sample mean by $\overline{X}_n = \frac{1}{n}\sum_{i=1}^n X_i$. 
    \begin{enumerate}
        \item Calculate $E(\overline{X}_n)$. Show your work.
        \item Calculate $Var(\overline{X}_n)$. Show your work.
    \end{enumerate}

\pagebreak

    \item The discrete random variables $X$ and $Y$ have joint distribution 
\begin{center}
\begin{tabular}{c|ccc} 
        &  $x=1$   & $x=2$    & $x=3$   \\  \hline
$y=1$   &  $3/12$  & $1/12$   & $3/12$  \\
$y=2$   &  $1/12$  & $3/12$   & $1/12$  \\ 
\end{tabular}
\end{center} 

    \begin{enumerate}
        \item Calculate $Cov(X,Y)$. Show your work.
        \item Are $X$ and $Y$ independent? Answer Yes or No and prove it.
    \end{enumerate}

\item Starting with the definition, show $Var(X) = E(X^2)-[E(X)]^2$.

\item Starting with the definition, show $Cov(X,Y) = E(XY)-E(X)E(Y)$.

\item Let $X$ and $Y$ be discrete random variables. Either prove that the following proposition is true in general, or show that it is not by giving a simple counter-example: If $X$ and $Y$ are independent, then $Cov(X,Y)=0$.

\item Let $X$ and $Y$ be discrete random variables. Either prove that the following proposition is true in general, or show that it is not by giving a simple counter-example: If $Cov(X,Y)=0$, then $X$ and $Y$ are independent.

\item Find $Cov(X,Y+Z)$. Use the definition of covariance. What fact on the formula sheet could you have used instead?

\item Let the random variable $X$ have distribution function $F_{_X}(x) = 1$ for all real $x$. Is this possible? Answer Yes or No and briefly explain. 

\item Let the continuous random variable $X$ have density  $f_{_X}(x)$. What's wrong with this? 
\begin{displaymath}
    F_{_X}(x) = \int_{-\infty}^\infty f_{_X}(t) \, dt
\end{displaymath}

\item What's wrong with this? $F_{_{X|Y}}(x|y) = \frac{F_{_{X,Y}}(x,y)}{F_{_Y}(y)}$. To see it more easily, let $X$ and $Y$ be discrete.

\item Let $X$ be a continuous random variable. Either prove that the following proposition is true in general, or show that it is not by giving a simple counter-example: \\
$E\left( \frac{1}{X}\right) = \frac{1}{E(X)}$. 

\item What's wrong with this? 
$Var(X) = E\left( (X-\mu)^2 \right)
        = \left( E(X-\mu) \right)^2
        = \left( E(X)-E(\mu) \right)^2
        = \left( \mu-\mu \right)^2 = 0$. 

% Cov(X,Y)=0 as above.

\end{enumerate}

\vspace{2mm}

\noindent
\begin{center}\begin{tabular}{l}
\hspace{6in} \\ \hline
\end{tabular}\end{center}
This assignment was prepared by  \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner},
Department of Mathematical and Computational Sciences, University of Toronto. It is licensed under a 
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
     {Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website: 
     
\begin{center}
\href{http://www.utstat.toronto.edu/~brunner/oldclass/260s20} {\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/260s20}}
\end{center}

\end{document}

Maybe put on Assignment 2

\item Let $X_1, \ldots, X_n$ be independent Uniform $(0,\theta)$ random variables, where $\theta>0$.
    \begin{enumerate}
        \item Write the cumulative distribution function $F_{_{X_i}}(x)$ using indicator functions. Show your work.
        \item Let $T_n = $
        \item 
        \item 
        \item 
    \end{enumerate}
