% (removed non-LaTeX file-manager residue that preceded the preamble)
\documentclass[12pt]{article}%\makeatletter
\oddsidemargin .0in \evensidemargin .0in \textwidth 6.5in
\topmargin -.50in \textheight 24cm
\usepackage{amsmath,amssymb,amsthm,mathrsfs,booktabs,graphicx,float,mathtools}
\usepackage[mathscr]{euscript}
\usepackage{setspace}
\usepackage{enumitem}
\usepackage{tikz}
\newcommand{\boldr}{\boldsymbol{r}}
\newcommand{\boldi}{\boldsymbol{i}}
\newcommand{\boldj}{\boldsymbol{j}}
\newcommand{\boldk}{\boldsymbol{k}}
\newcommand{\boldv}{\boldsymbol{v}}
\newcommand{\rt}[1]{\textcolor{red}{#1}}
\newcommand{\rh}[1]{\hfill{\rt{{#1}}}}
% =============================================================== %
%      List environment with lower case alphabetic counter        %
% =============================================================== %
\newcounter{list-counter}
\newenvironment{listalph}
{\begin{list}{(\alph{list-counter})~}{\usecounter{list-counter}}
%\setlength{\itemsep}{-0.1in}
%\setlength{\topsep}{0in}
}
{\end{list}}


\begin{document}

\parskip = 10pt
\begin{center}
\textbf{\underline {INDIAN INSTITUTE OF INFORMATION TECHNOLOGY, ALLAHABAD}}
\\\textbf{End Sem Exam (Paper Code: SPAS230C)}\\
Marking Scheme
\end{center}
\hrule 
\singlespacing
\thispagestyle{empty}
\begin{enumerate}
\item \begin{enumerate}
\item Let $A$ and $B$ be two events such that $P(A)= 1/3$ and $P(B) = 1/4$. Then find an interval of minimal length such that $P(A\cup B)$ lies in that. \hfill[2]

\textbf{Solution:} $\text{max}(P(A), P(B)) \leq P(A\cup B) \leq P(A) + P(B)$. Hence \\$P(A\cup B) \in [1/3, 7/12]$; both endpoints are attainable (when $B \subseteq A$ and when $A$, $B$ are disjoint), so this closed interval is of minimal length. \rh{[1+1]}
\item Let $X_1 \thicksim N(1, 2^2)$ and $X_2 \thicksim N(2, 1^2)$ be two independent random variables. Then write the moment generating function of $X_1 + X_2$. \hfill[2]

\textbf{Solution:} Since $X_1 \thicksim N(1, 2^2)$ and $X_2 \thicksim N(2, 1^2)$ are independent, $X_1 + X_2 \thicksim N(3, (\sqrt{5})^2)$. \rh{[1]}\\
So the moment generating function of $X_1 + X_2$ is $e^{3t+\frac{5t^2}{2}}$. \rh{[1]}
\item Let $X$ and $Y$ be two independent random variables with variance $3$ and $5$, respectively. Find the variance of $2X+3Y$. \hfill[2] 

\textbf{Solution:} $\text{Var}(2X+3Y) = 4\text{Var}(X) + 9\text{Var}(Y) = 57$. \rh{[2]}
\item If the sample mean and standard error of a normally distributed population is $22$ and $5$ respectively, then find the $95\%$ confidence interval of the population mean.\hfill[2]

\textbf{Solution:} The $95\%$ confidence interval of the population mean is $(22-(5\times 1.96), 22+ (5\times 1.96)) = (12.2, 31.8)$. \rh{[2]}
\end{enumerate}
\item Alice is taking a probability class and at the end of each week she can be either up-to-date or she may have fallen behind. If she is up-to-date in a given week, the probability that she will be up-to-date (or behind) in the next week is $0.8$ (or $0.2$, respectively). If she is behind in a given week, the probability that she will be up-to-date (or behind) in the next week is $0.4$ (or $0.6$, respectively). Alice is (by default) up-to-date when she starts the class. What is the probability that she is up-to-date after three weeks? \hfill[5]

\textbf{Solution:} Let $U_i$ and $B_i$ be the events that Alice is up-to-date or behind respectively, after $i$ weeks for $i= 1, 2, 3$. By the total probability theorem, $P(U_3) = P(U_2)P(U_3|U_2)+ P(B_2)P(U_3|B_2) = 0.8P(U_2) + 0.4P(B_2)$. \rh{[1]}

Similarly, $P(U_2) = 0.8P(U_1) + 0.4P(B_1)$ and $P(B_2) = 0.2P(U_1) + 0.6P(B_1)$ . \rh{[1+1]}\\ Finally, since Alice starts her class up-to-date, we have $P(U_1) = 0.8$ and $P(B_1) = 0.2$. Hence $P(U_2) = 0.72$ and $P(B_2) = 0.28$. \rh{[1]}
\\ Therefore $P(U_3)= 0.688$. \rh{[1]}

\item Let $X$ be a random variable with probability mass function \hfill[7]
\begin{align*}
p_{X}(x) = \left\{\begin{array}{ll} 
                     \frac{1}{9} & ~\text{if}~ x ~\text{is an integer in the range}~ [-4, 4]\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Find the probability mass function of $Y = |X|$ and variance of $X$.

\textbf{Solution:} The p.m.f of $Y$ is \begin{align*}
p_{Y}(y) = \left\{\begin{array}{ll} 
                     \frac{1}{9} & ~\text{if}~ y = 0~~~~ \rh{[1]}\\ \frac{2}{9} & ~\text{if}~ y \in \{1, 2, 3, 4\}~~~~~\rh{[1+1+1+1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Clearly $E(X) = 0$.\rh{[1]}\\ So $\text{Var}(X) = E(X^2) = \frac{20}{3}$.\rh{[1]}
\item Let $X \sim U(0, 1)$ and $Y = -\ln (1-X) $. Find the cumulative distribution function and probability density function of $Y$. \hfill[4]

\textbf{Solution:} The c.d.f of $Y$ is $F_Y (y) = P(Y \leq y) = P(-\ln (1-X) \leq y) = P(\frac{1}{1-X} \leq e^y) = P(X \leq 1-e^{-y})$. \rh{[1]}\\Since $X$ is uniform, the c.d.f of $Y$ is 
\begin{align*}
F_{Y}(y) = \left\{\begin{array}{ll} 
                    1-e^{-y} & ~\text{if} ~ y> 0 ~~~ \rh{[1+1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Also p.d.f of $Y$ is \begin{align*}
f_{Y}(y) = \left\{\begin{array}{ll}  
                    e^{-y} & ~\text{if} ~ y> 0 ~~~ \rh{[1]}\\ 
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
\item Engineers designing the next generation of space shuttles plan to include two fuel pumps one active, the other in reserve. If the primary pump malfunctions, the second is automatically brought on line. Suppose a typical mission is expected to require that fuel be pumped for at most 50 hours. According to the manufacturer's specifications, pumps are expected to fail once every 100 hours. What are the chances that such a fuel pump system would not remain functioning for the full 50 hours? \hfill[6]

\textbf{Solution:} Let $Y$ = the time elapsed until the 2nd pump breaks down (waiting time). We know that the waiting time of a Poisson process before the $r$-th event follows a Gamma distribution. Assume the failures follow the Poisson process $P(\lambda)$. We have $\frac{1}{\lambda} = 100 \Longrightarrow \lambda = 1/100$. \rh{[1]}

Hence the p.d.f of $Y$ is 
\begin{align*}
f_{Y}(y) = \left\{\begin{array}{ll} 
                     \frac{\lambda^2 e^{-\lambda y}y}{\Gamma(2)} & \text{if } y>0\\
                    0 & \text{otherwise}
            \end{array}\right. = \left\{\begin{array}{ll} 
                     \frac{1}{10^4}y e^{-y/100} & \text{if } y>0~~~\rh{[2]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Therefore the probability that the system fails to last for the $50$ hours is \\ $P(Y < 50) = \int_{0}^{50}\frac{1}{10^4}y e^{-y/100}\, dy$ \rh{[2]}\\ $\phantom{P(Y < 50)} = 1- \frac{3}{2}e^{-1/2}$. \rh{[1]}

\item Let $X$ be a random variable with probability density function \hfill[10]
\begin{align*}
f_{X}(x) = \left\{\begin{array}{ll} 
                     4x(1-x^2) & \text{if } 0< x < 1\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
For a fixed $x \in (0, 1)$, the conditional probability density function of $Y$ given $X =x$ is \begin{align*}
f_{Y|X}(y|x) = \left\{\begin{array}{ll} 
                     \frac{2y}{1-x^2} & \text{if } x< y < 1\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
\begin{enumerate}
\item Find the conditional probability density function of $X$ given $Y = y$ for appropriate values of $y$

\textbf{Solution:} The joint p.d.f of $X$ and $Y$ is \begin{align*}
f(x, y) = f_{Y|X}(y|x) f_{X}(x)= \left\{\begin{array}{ll} 
                     8xy & \text{if } 0<x< y < 1 ~~~~\rh{[1+1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
 Thus the marginal p.d.f of $Y$ is \begin{align*}
f_{Y}(y)= \int_{-\infty}^{\infty}f(x, y) dx = \left\{\begin{array}{ll} 
                     4y^3 & \text{if } 0< y < 1~~~~ \rh{[1+1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Thus for $0< y < 1$, the conditional p.d.f of $X$ given $Y = y$ is \begin{align*}
f_{X|Y}(x|y) = \frac{f(x, y)}{f_{Y}(y)} = \frac{8xy}{4y^3} = \left\{\begin{array}{ll} 
                     \frac{2x}{y^2} & \text{if } 0<x< y~~~~\rh{[1+1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
\item Find $E(X|Y =0.5)$ and $\text{Var}(X|Y =0.5)$.

\textbf{Solution:} $E(X|Y =0.5) = \int_{-\infty}^{\infty}xf_{X|Y}(x|y=\frac{1}{2}) dx = \frac{1}{3}$.\rh{[1]}\\ Similarly $E(X^2|Y =0.5) = \int_{-\infty}^{\infty}x^2f_{X|Y}(x|y=\frac{1}{2}) dx = \frac{1}{8}$.\rh{[1]}\\ So $\text{Var}(X|Y =0.5)= E(X^2|Y =0.5) - (E(X|Y =0.5))^2 = \frac{1}{72}$. \rh{[2]}

\end{enumerate}
 
\item Consider $n$ independent tosses of a biased coin with probability of a head equal to $p$. Let $X$ and $Y$ be the numbers of heads and of tails, respectively. Find the correlation coefficient of $X$ and $Y$. \hfill[5]

\textbf{Solution:} Clearly $Y = n- X$. Hence $Y - E(Y) = -(X- E(X))$. \rh{[1]}\\ Thus $\text{Var}(Y) = \text{Var}(X)$. \rh{[1]} \\Also $\text{Cov}(X, Y)= -\text{Var}(X)$. \rh{[1]}\\
Now the correlation coefficient $\rho(X, Y) = \frac{\text{Cov}(X, Y)}{\sqrt{\text{Var}(X)}\sqrt{\text{Var}(Y)}}$. \rh{[1]}\\
Hence $\rho(X, Y) = -1$. \rh{[1]}
\item Let $X_1 \thicksim P(\lambda_1)$ and $X_2 \thicksim P(\lambda_2)$ be two independent Poisson random variables. Then using moment generating function technique, find the probability mass function of $X_1 + X_2$. \hfill[5]

\textbf{Solution:} Let $Y = X_1 + X_2$. Then the m.g.f of $Y$ is $\phi_{Y} (t) = E(e^{tY}) = E(e^{tX_1}e^{tX_2})$.\rh{[1]}\\
Since $X_1$ and $X_2$ are independent, $\phi_{Y} (t) = E(e^{tX_1})E(e^{tX_2})$.\rh{[1]}\\Since $X_1 \thicksim P(\lambda_1)$ and $X_2 \thicksim P(\lambda_2)$, the m.g.f of  $X_1$ and $X_2$ are $e^{\lambda_1(e^t -1)}$  and $e^{\lambda_2(e^t -1)}$, respectively.\rh{[1]}\\
Hence $\phi_{Y} (t) = e^{(\lambda_1 + \lambda_2)(e^t -1)}$. Hence $Y \thicksim P(\lambda_1+ \lambda_2)$. \rh{[1]}\\Therefore the p.m.f of $Y$ is \rh{[1]}
\begin{align*}
p_{Y}(y) = \left\{\begin{array}{ll} 
                     e^{-(\lambda_1 + \lambda_2)}\frac{(\lambda_1 + \lambda_2)^y}{y!} & ~\text{if}~ y \in \{0, 1, 2, \cdots\}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
\item Let $X_1$ and $X_2$ be two independent exponential random variables i.e., $X_i \thicksim E(\lambda)$ , for $i = 1, 2$. Then using transformation of variables technique, find the probability density function of $\frac{X_1}{X_1 + X_2}$. \hfill[10]

\textbf{Solution:} The joint p.d.f of $X_1$ and $X_2$ is \rh{[2]}\begin{align*}
f(x_1, x_2) = \left\{\begin{array}{ll} 
                     \lambda^2 e^{-\lambda(x_1 + x_2)} & \text{if } x_1 >0, x_2 > 0\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Let $y_1 = \frac{x_1}{x_1 + x_2}$ and $y_2 = x_1 + x_2$. Then $x_1 = y_1 y_2$ and $x_2 = y_2 (1-y_1)$.\rh{[1]}\\Also $J =
\left| \begin{array}{ccc}
\displaystyle\frac{\partial x_1}{\partial y_1} & \displaystyle\frac{\partial x_1}{\partial y_2} \\[4mm]
\displaystyle\frac{\partial x_2}{\partial y_1} & \displaystyle\frac{\partial x_2}{\partial y_2}
\end{array} \right|
=
\left| \begin{array}{ccc}
\displaystyle y_2 & y_1 \\[4mm]
\displaystyle -y_2 & \displaystyle 1-y_1
\end{array} \right|
= y_2 \ne 0$. \rh{[1]}

Therefore the joint p.d.f of $Y_1$ and $Y_2$ is given by $f_{Y}(y_1, y_2)= f(y_1 y_2, y_2 (1-y_1))|J|$ \rh{[1]}\begin{align*}
f_{Y}(y_1, y_2)= \left\{\begin{array}{ll} 
                     \lambda^2 y_2 e^{-\lambda y_2} & \text{if } y_1 y_2 >0, y_2 (1-y_1) > 0 ~~\rh{[1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
\end{align*}
Hence \begin{align*}
f_{Y}(y_1, y_2)= \left\{\begin{array}{ll} 
                     \lambda^2 y_2 e^{-\lambda y_2} & \text{if } 0< y_1< 1, ~ y_2 >0 ~~\rh{[1]} \\
                    0 & \text{otherwise}
            \end{array}\right.
            \end{align*}
So the marginal p.d.f of $Y_1$ is $f_{Y_1}(y_1) = \int_{-\infty}^{\infty}f_{Y}(y_1, y_2) dy_2$. \rh{[1]}\\ Hence \begin{align*}
f_{Y_1}(y_1)= \left\{\begin{array}{ll} 
                     \lambda^2 \int_{0}^{\infty}y_2 e^{-\lambda y_2} dy_2  & \text{if } 0< y_1< 1 ~~~\rh{[1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
            \end{align*} 
Therefore \begin{align*}
f_{Y_1}(y_1)= \left\{\begin{array}{ll} 
                    1  & \text{if } 0< y_1< 1 ~~~~\rh{[1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
            \end{align*}
\item Consider a sequence $(X_n)_{n \geq 1}$ of independent random variables, that are uniformly distributed over $(0,1)$ and let $Y_n = \text{min}\{X_1, X_2, \cdots, X_n\}$. Find a real number $a$ to which the sequence $(Y_n)$ converges in probability. \hfill[5]

\textbf{Solution:} Let $\epsilon > 0$. Then $P(|Y_n - 0| \geq \epsilon) = P(X_1 \geq \epsilon, \cdots, X_n \geq \epsilon) = P(X_1 \geq \epsilon) \cdots P(X_n \geq \epsilon)$ (as sequence $(X_n)_{n \geq 1}$ is of independent random variables).\rh{[1]}

For $\epsilon \geq 1$, $P(X_1 \geq \epsilon) = 1- P(X_1 < \epsilon) = 0$. Hence $P(|Y_n - 0| \geq \epsilon) = 0$ \rh{[1]}

For $0 <\epsilon < 1$, $P(X_1 \geq \epsilon) = 1- P(X_1 < \epsilon) = 1- \epsilon$.\rh{[1]}\\ Hence $P(|Y_n - 0| \geq \epsilon) = (1- \epsilon)^n$. \rh{[1]}\\ $\displaystyle{\lim_{n \longrightarrow \infty}} P(|Y_n - 0| \geq \epsilon) = \displaystyle{\lim_{n \longrightarrow \infty}}(1- \epsilon)^n = 0$. Hence $(Y_n)$ converges to $0$ in probability.\rh{[1]}
\item A die is rolled $420$ times. What is the probability that the sum of the rolls lies between $1400$ and $1550$? [$\Phi(2.3) = 0.9893 ~ \&~ \Phi(2.01) = 0.9778$]
 \hfill[10]
 
\textbf{Solution:} Let $X_i =$ upper face of die in $i$-th roll. 
\\Then the sum $X = X_1 + X_2 + \cdots + X_{420}$ is a random variable. The p.m.f of $X_i$ is \begin{align*}
f_{X_i}(x)= \left\{\begin{array}{ll} 
                    1/6  & \text{if } x \in \{1, 2, \cdots, 6\} ~~~~\rh{[1]}\\
                    0 & \text{otherwise}
            \end{array}\right.
            \end{align*}, for every $1 \leq i \leq 420$
            
So $E(X_i) = 7/2$, $E({X_i}^2) = 91/6$ and $\text{Var}(X_i) = 35/12$, for every $1 \leq i \leq 420$. \rh{[1+1+1]} Therefore $E(X) = 420 \times 7/2 = 1470$ and $\sqrt{\text{Var}(X)} = \sqrt{420 \times 35/12} = 35$ . \rh{[1+1]}

Hence $P(1400 \leq X \leq 1550)\simeq P(1399.5 \leq X \leq 1550.5)$ (by continuity correction and $X \thicksim N (1470, 35^2)$).\rh{[1]}

Hence \\$P(1400 \leq X \leq 1550)\simeq P(\frac{1399.5 - 1470}{35} \leq \frac{X - 1470}{35} \leq \frac{1550.5 - 1470}{35})= P(-2.01 \leq Z \leq 2.3)$ \rh{[1]}\\ $= \Phi(2.3) + \Phi(2.01)- 1 = 0.9893 + 0.9778 -1 = 0.9671$. \rh{[1+1]}
\end{enumerate}


\end{document}