\documentclass[methods-collated.tex]{subfiles}

\begin{document}

\section{Statistics}

\subsection*{Probability}

\begin{align*}
  \Pr(A \cup B) &= \Pr(A) + \Pr(B) - \Pr(A \cap B) \\
  \Pr(A \cap B) &= \Pr(A|B) \times \Pr(B) \\
  \Pr(A|B) &= \frac{\Pr(A \cap B)}{\Pr(B)} \\
  \Pr(A) &= \Pr(A|B) \cdot \Pr(B) + \Pr(A|B^{\prime}) \cdot \Pr(B^{\prime})
\end{align*}

Mutually exclusive \(\implies \Pr(A \cap B) = 0\) \\

Independent events:
\begin{flalign*}
  \quad \Pr(A \cap B) &= \Pr(A) \times \Pr(B)& \\
  \Pr(A|B) &= \Pr(A) \\
  \Pr(B|A) &= \Pr(B)
\end{flalign*}
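
For example, with assumed values \(\Pr(A) = 0.5\) and \(\Pr(B) = 0.4\), where \(A\) and \(B\) are independent:
\begin{align*}
  \Pr(A \cap B) &= 0.5 \times 0.4 = 0.2 \\
  \Pr(A \cup B) &= 0.5 + 0.4 - 0.2 = 0.7 \\
  \Pr(A|B) &= \frac{0.2}{0.4} = 0.5 = \Pr(A)
\end{align*}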

\subsection*{Combinatorics}

\begin{itemize}
  \item Arrangements (permutations) \({}^{n}P_{k} = \frac{n!}{(n-k)!}\)
  \item \colorbox{important}{Combinations} \({n \choose k} = \frac{n!}{k!(n-k)!}\) (see the worked example below)
  \item Note \({n \choose k} = {n \choose n-k}\)
\end{itemize}
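
For example, choosing 2 items from 5 (assumed values):
\[ {5 \choose 2} = \frac{5!}{2!\,3!} = 10 \qquad {}^{5}P_{2} = \frac{5!}{3!} = 20 \]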

\subsection*{Distributions}

\subsubsection*{Mean \(\mu\)}

\textbf{Mean} \(\mu\) or \textbf{expected value} \(E(X)\)

\begin{align*}
  E(X) &= \frac{\Sigma \left[ x \cdot f(x) \right]}{\Sigma f} \tag{\(f =\) absolute frequency} \\
  &= \sum_{i=1}^n \left[ x_i \cdot \Pr(X=x_i) \right] \tag{discrete}\\
  &= \int_\textbf{X} (x \cdot f(x)) \> dx \tag{continuous}
\end{align*}
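
For example, for an assumed discrete distribution with \(\Pr(X=1) = 0.2\), \(\Pr(X=2) = 0.5\) and \(\Pr(X=3) = 0.3\):
\[ E(X) = 1(0.2) + 2(0.5) + 3(0.3) = 2.1 \]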

\subsubsection*{Mode}

The most probable value of \(X\) (the value with the highest probability). Multiple modes exist if more than one value of \(X\) has the equal-highest probability. The mode must be a value that actually occurs in the distribution.

\subsubsection*{Median}

To find the median \(m\) of a discrete distribution, add the probabilities of the values of \(X\) from smallest to largest until the cumulative sum reaches 0.5. If the sum first exceeds 0.5 at a value of \(X\), that value is the median; if the sum equals exactly 0.5, then \(m\) is halfway between that value and the next. For a continuous distribution:

\[ m = X \> \text{such that} \> \int_{-\infty}^{m} f(x) \> dx = 0.5 \]
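
For example, with the assumed distribution above (\(\Pr(X=1) = 0.2\), \(\Pr(X=2) = 0.5\), \(\Pr(X=3) = 0.3\)), the cumulative sum first exceeds 0.5 at \(X = 2\):
\[ 0.2 < 0.5, \qquad 0.2 + 0.5 = 0.7 > 0.5 \implies m = 2 \]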

\subsubsection*{Variance \(\sigma^2\)}

\begin{align*}
  \operatorname{Var}(X) &= \sum_{i=1}^n p_i (x_i-\mu)^2 \\
  &= \sum (x-\mu)^2 \times \Pr(X=x) \\
  &= \sum x^2 \times p(x) - \mu^2 \\
  &= \operatorname{E}(X^2) - [\operatorname{E}(X)]^2 \\
  &= E\left[(X-\mu)^2\right]
\end{align*}

\subsubsection*{Standard deviation \(\sigma\)}

\begin{align*}
  \sigma &= \operatorname{sd}(X) \\
  &= \sqrt{\operatorname{Var}(X)}
\end{align*}
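
Continuing the assumed distribution above (\(\mu = 2.1\)):
\begin{align*}
  E(X^2) &= 1^2(0.2) + 2^2(0.5) + 3^2(0.3) = 4.9 \\
  \operatorname{Var}(X) &= 4.9 - 2.1^2 = 0.49 \\
  \sigma &= \sqrt{0.49} = 0.7
\end{align*}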

\subsection*{Binomial distributions}

Conditions for a \textit{binomial distribution}:
\begin{enumerate}
  \item Two possible outcomes: \textbf{success} or \textbf{failure}
  \item \(\Pr(\text{success}) = p\) is constant across trials
  \item Finite number \(n\) of independent trials
\end{enumerate}

\subsubsection*{Properties of \(X \sim \operatorname{Bi}(n,p)\)}

\begin{align*}
  \mu(X) &= np \\
  \operatorname{Var}(X) &= np(1-p) \\
  \sigma(X) &= \sqrt{np(1-p)} \\
  \Pr(X=x) &= {n \choose x} \cdot p^x \cdot (1-p)^{n-x}
\end{align*}
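
For example, with assumed parameters \(X \sim \operatorname{Bi}(10, 0.3)\):
\begin{align*}
  \mu(X) &= 10 \times 0.3 = 3 \\
  \operatorname{Var}(X) &= 10 \times 0.3 \times 0.7 = 2.1 \\
  \Pr(X=2) &= {10 \choose 2} (0.3)^2 (0.7)^{8} \approx 0.233
\end{align*}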

\begin{cas}
  Interactive \(\rightarrow\) Distribution \(\rightarrow\) \verb;binomialPdf;
  \begin{description}[nosep, style=multiline, labelindent=0.5cm, leftmargin=3cm, font=\normalfont]
    \item [x:] no. of successes
    \item [numtrial:] no. of trials
    \item [pos:] probability of success
  \end{description}
\end{cas}

\subsection*{Continuous random variables}

A continuous random variable \(X\) has a pdf \(f\) such that:

\begin{enumerate}
  \item \(f(x) \ge 0 \ \forall \ x\)
  \item \(\int^\infty_{-\infty} f(x) \> dx = 1\)
\end{enumerate}

\begin{align*}
  E(X) &= \int_\textbf{X} (x \cdot f(x)) \> dx \\
  \operatorname{Var}(X) &= E\left[(X-\mu)^2\right]
\end{align*}

\[ \Pr(X \le c) = \int^c_{-\infty} f(x) \> dx \]
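
For example, with an assumed pdf \(f(x) = 2x\) for \(0 \le x \le 1\) (and 0 elsewhere):
\begin{align*}
  \int_{0}^{1} 2x \> dx &= \left[ x^2 \right]_{0}^{1} = 1 \\
  E(X) &= \int_{0}^{1} x \cdot 2x \> dx = \frac{2}{3} \\
  \Pr(X \le 0.5) &= \int_{0}^{0.5} 2x \> dx = 0.25
\end{align*}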

\begin{cas}
  Define piecewise functions: \\
  Math3 \(\rightarrow\)
  % TODO: finish this section
\end{cas}

\subsection*{Two random variables \(X, Y\)}

If \(X\) and \(Y\) are independent:
\begin{align*}
  \operatorname{E}(aX+bY) &= a\operatorname{E}(X)+b\operatorname{E}(Y) \\
  \operatorname{Var}(aX \pm bY \pm c) &= a^2 \operatorname{Var}(X) + b^2 \operatorname{Var}(Y)
\end{align*}
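
For example, with assumed independent variables where \(\operatorname{E}(X) = 2\), \(\operatorname{Var}(X) = 1\), \(\operatorname{E}(Y) = 5\) and \(\operatorname{Var}(Y) = 4\):
\begin{align*}
  \operatorname{E}(3X + 2Y) &= 3(2) + 2(5) = 16 \\
  \operatorname{Var}(3X - 2Y) &= 3^2(1) + 2^2(4) = 25
\end{align*}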

\subsection*{Linear functions \(X \rightarrow aX+b\)}

\begin{align*}
  \Pr(Y \le y) &= \Pr(aX+b \le y) \\
  &= \Pr\left(X \le \dfrac{y-b}{a}\right) \\
  &= \int^{\frac{y-b}{a}}_{-\infty} f(x) \> dx \tag{for \(a > 0\)}
\end{align*}

\begin{align*}
  \textbf{Mean:} && \operatorname{E}(aX+b) &= a\operatorname{E}(X)+b \\
  \textbf{Variance:} && \operatorname{Var}(aX+b) &= a^2 \operatorname{Var}(X)
\end{align*}
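
For example, with the assumed values \(\operatorname{E}(X) = 2\) and \(\operatorname{Var}(X) = 1\), for \(Y = 3X + 4\):
\begin{align*}
  \operatorname{E}(Y) &= 3(2) + 4 = 10 \\
  \operatorname{Var}(Y) &= 3^2(1) = 9
\end{align*}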

\subsection*{Expectation theorems}

For a non-linear function \(g\), the expected value \(E(g(X))\) is in general not equal to \(g(E(X))\).

\begin{align*}
  E(X^2) &= \operatorname{Var}(X) + \left[E(X)\right]^2 \\
  E(X^n) &= \Sigma x^n \cdot p(x) \tag{non-linear} \\
  &\ne [E(X)]^n \\
  E(aX \pm b) &= aE(X) \pm b \tag{linear} \\
  E(b) &= b \tag{\(\forall b \in \mathbb{R}\)}\\
  E(X+Y) &= E(X) + E(Y) \tag{two variables}
\end{align*}
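
As a check with the assumed distribution from earlier (\(E(X) = 2.1\), \(\operatorname{Var}(X) = 0.49\)):
\[ E(X^2) = 0.49 + 2.1^2 = 4.9, \qquad [E(X)]^2 = 4.41 \ne E(X^2) \]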

\begin{figure*}[hb]
  \centering
  \input{../spec/normal-dist-graph}
\end{figure*}

\subsection*{Sample mean}

An approximation of the \textbf{population mean}, determined experimentally from a sample.

\[ \overline{x} = \dfrac{\Sigma x}{n} \]

where
\begin{description}[nosep, labelindent=0.5cm]
  \item \(n\) is the size of the sample (number of sample points)
  \item \(x\) is the value of a sample point
\end{description}

\begin{cas}
  \begin{enumerate}[leftmargin=3mm]
    \item Spreadsheet
    \item In cell A1:\\ \path{mean(randNorm(sd, mean, sample size))}
    \item Edit \(\rightarrow\) Fill \(\rightarrow\) Fill Range
    \item Input range as A1:An where \(n\) is the number of samples
    \item Graph \(\rightarrow\) Histogram
  \end{enumerate}
\end{cas}

\subsubsection*{Sample of size \(n\)}

\[ \overline{X} = \sum_{i=1}^n \frac{x_i}{n} = \dfrac{\sum x}{n} \]

The sample mean \(\overline{X}\) is distributed with mean \(\mu\) and sd \(\frac{\sigma}{\sqrt{n}}\), and its distribution approaches a normal distribution as the sample size \(n\) increases.

For the new distribution \(X^\prime\) of the mean of \(n\) trials, \(\operatorname{E}(X^\prime) = \operatorname{E}(X), \quad \operatorname{sd}(X^\prime) = \dfrac{\operatorname{sd}(X)}{\sqrt{n}}\)
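
For example, with an assumed population sd \(\sigma = 12\) and sample size \(n = 36\):
\[ \operatorname{sd}(\overline{X}) = \frac{12}{\sqrt{36}} = 2 \]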

\begin{cas}

  \begin{itemize}
    \item Spreadsheet \(\rightarrow\) Catalog \(\rightarrow\) \verb;randNorm(sd, mean, n); where \verb;n; is the number of samples. Show histogram with Histogram key in top left
    \item To calculate parameters of a dataset: Calc \(\rightarrow\) One-variable
  \end{itemize}

\end{cas}

\subsection*{Normal distributions}

\[ Z = \frac{X - \mu}{\sigma} \]

Normal distributions must have area (total prob.) of 1 \(\implies \int^\infty_{-\infty} f(x) \> dx = 1\) \\
\(\text{mean} = \text{mode} = \text{median}\)
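
For example, with an assumed \(X \sim \operatorname{N}(100, 15^2)\) and observed value \(x = 130\):
\[ z = \frac{130 - 100}{15} = 2 \]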

\begin{warning}
  Always express \(z\) as +ve. Express confidence \textit{interval} as ordered pair.
\end{warning}

\subsection*{Confidence intervals}

\begin{itemize}
  \item \textbf{Point estimate:} single-valued estimate of the population mean from the value of the sample mean \(\overline{x}\)
  \item \textbf{Interval estimate:} confidence interval for the population mean \(\mu\)
  \item \(C\)\% confidence interval \(\implies\) \(C\)\% of such intervals (one per sample) will contain the population mean \(\mu\)
\end{itemize}

\subsubsection*{95\% confidence interval}

For a 95\% c.i. of the population mean \(\mu\):

\[ \mu \in \left(\overline{x} \pm 1.96 \dfrac{\sigma}{\sqrt{n}} \right)\]

where:
\begin{description}[nosep, labelindent=0.5cm]
  \item \(\overline{x}\) is the sample mean
  \item \(\sigma\) is the population sd
  \item \(n\) is the sample size from which \(\overline{x}\) was calculated
\end{description}
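
For example, with an assumed sample mean \(\overline{x} = 50\), population sd \(\sigma = 10\) and sample size \(n = 25\):
\[ \mu \in \left( 50 \pm 1.96 \times \frac{10}{\sqrt{25}} \right) = (46.08,\ 53.92) \]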

\begin{cas}
  Menu \(\rightarrow\) Stats \(\rightarrow\) Calc \(\rightarrow\) Interval \\
  Set \textit{Type = One-Sample Z Int} \\ \-\hspace{1em} and select \textit{Variable}
\end{cas}

\subsection*{Margin of error}

For 95\% confidence interval of \(\mu\):
\begin{align*}
  M &= 1.96 \times \dfrac{\sigma}{\sqrt{n}} \\
  &= \dfrac{1}{2} \times \text{width of c.i.} \\
  \implies n &= \left( \dfrac{1.96 \sigma}{M} \right)^2
\end{align*}

Always round \(n\) up to a whole number of samples.
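
For example, to achieve an assumed margin of error \(M = 2\) with population sd \(\sigma = 10\):
\[ n = \left( \frac{1.96 \times 10}{2} \right)^2 = 96.04 \implies n = 97 \]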

\subsection*{General case}

For a \(C\)\% c.i. of the population mean \(\mu\):

\[ \mu \in \left( \overline{x} \pm k \dfrac{\sigma}{\sqrt{n}} \right) \]
\hfill where \(k\) is such that \(\Pr(-k < Z < k) = \frac{C}{100}\)
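
For example, for a 90\% confidence interval, \(k \approx 1.645\); with the assumed values above (\(\overline{x} = 50\), \(\sigma = 10\), \(n = 25\)):
\[ \mu \in \left( 50 \pm 1.645 \times \frac{10}{\sqrt{25}} \right) = (46.71,\ 53.29) \]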

\begin{cas}
  Menu \(\rightarrow\) Stats \(\rightarrow\) Calc \(\rightarrow\) Interval \\
  Set \textit{Type = One-\colorbox{important}{Prop} Z Int} \\
  Input x \(= \hat{p} * n\)
\end{cas}

\subsection*{Confidence interval for multiple trials}

For a set of \(n\) independent 95\% confidence intervals (one per sample), the probability that all \(n\) intervals contain the population mean \(\mu\) is \(0.95^n\).
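
For example, for an assumed \(n = 3\) independent samples:
\[ \Pr(\text{all 3 intervals contain } \mu) = 0.95^3 \approx 0.857 \]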

\end{document}