\documentclass[spec-collated.tex]{subfiles}
\begin{document}

  \section{Statistics}

  \subsection*{Continuous random variables}

  A continuous random variable \(X\) has a pdf \(f\) such that:

  \begin{enumerate}
    \item \(f(x) \ge 0 \quad \forall x\)
    \item \(\int^\infty_{-\infty} f(x) \> dx = 1\)
  \end{enumerate}

  \begin{align*}
    E(X) &= \int^\infty_{-\infty} x \cdot f(x) \> dx \\
    \operatorname{Var}(X) &= E\left[(X-\mu)^2\right]
  \end{align*}

  \[ \Pr(X \le c) = \int^c_{-\infty} f(x) \> dx \]

  \subsection*{Two random variables \(X, Y\)}

  If \(X\) and \(Y\) are independent:
  \begin{align*}
    \operatorname{E}(aX+bY) & = a\operatorname{E}(X)+b\operatorname{E}(Y) \\
    \operatorname{Var}(aX \pm bY \pm c) &= a^2 \operatorname{Var}(X) + b^2 \operatorname{Var}(Y)
  \end{align*}
  (the expectation rule holds even if \(X\) and \(Y\) are not independent; the variance rule requires independence)

  \subsection*{Linear functions \(X \rightarrow aX+b\)}

  \begin{align*}
    \Pr(Y \le y) &= \Pr(aX+b \le y) \\
    &= \Pr\left(X \le \dfrac{y-b}{a}\right) \\
    &= \int^{\frac{y-b}{a}}_{-\infty} f(x) \> dx
  \end{align*}
  \hfill assuming \(a > 0\); if \(a < 0\), the inequality direction reverses

  \begin{align*}
    \textbf{Mean:} && \operatorname{E}(aX+b) & = a\operatorname{E}(X)+b \\
    \textbf{Variance:} && \operatorname{Var}(aX+b) &= a^2 \operatorname{Var}(X)
  \end{align*}
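
  For example, with illustrative values \(\operatorname{E}(X) = 2\) and \(\operatorname{Var}(X) = 3\) (not from any particular dataset):
  \[ \operatorname{E}(3X+1) = 3(2)+1 = 7, \qquad \operatorname{Var}(3X+1) = 3^2(3) = 27 \]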

  \subsection*{Expectation theorems}

  For a non-linear function \(g\), the expected value \(E(g(X))\) is not, in general, equal to \(g(E(X))\).

  \begin{align*}
    E(X^2) &= \operatorname{Var}(X) + \left[E(X)\right]^2 \\
    E(X^n) &= \sum x^n \cdot p(x) \tag{non-linear} \\
    &\ne [E(X)]^n \\
    E(aX \pm b) &= aE(X) \pm b \tag{linear} \\
    E(b) &= b \tag{\(\forall b \in \mathbb{R}\)}\\
    E(X+Y) &= E(X) + E(Y) \tag{two variables}
  \end{align*}
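
  For example, with illustrative values \(\operatorname{Var}(X) = 4\) and \(E(X) = 3\): \(E(X^2) = 4 + 3^2 = 13\), whereas \([E(X)]^2 = 9\), so \(E(X^2) \ne [E(X)]^2\).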

  \subsection*{Sample mean}

  Approximation of the \textbf{population mean} determined experimentally.

  \[ \overline{x} = \dfrac{\sum x}{n} \]

  where
  \begin{description}[nosep, labelindent=0.5cm]
    \item \(n\) is the size of the sample (number of sample points)
    \item \(x\) is the value of a sample point
  \end{description}
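
  For example, for an illustrative sample \(\{2, 4, 6, 8\}\) with \(n = 4\): \(\overline{x} = \frac{2+4+6+8}{4} = 5\).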

\begin{cas}
  \begin{enumerate}[leftmargin=3mm]
    \item Spreadsheet
    \item In cell A1:\\ \path{mean(randNorm(sd, mean, sample size))}
    \item Edit \(\rightarrow\) Fill \(\rightarrow\) Fill Range
    \item Input range as A1:An where \(n\) is the number of samples
    \item Graph \(\rightarrow\) Histogram
  \end{enumerate}
  \end{cas}

  \subsubsection*{Sample size of \(n\)}

  \[ \overline{X} = \sum_{i=1}^n \frac{x_i}{n} = \dfrac{\sum x}{n} \]

  The sample mean \(\overline{X}\) is distributed with mean \(\mu\) and sd \(\frac{\sigma}{\sqrt{n}}\) (the approximation improves as the sample size \(n\) increases).

  For the new distribution \(X^\prime\) of the mean of \(n\) trials, \(\operatorname{E}(X^\prime) = \operatorname{E}(X), \quad \operatorname{sd}(X^\prime) = \dfrac{\operatorname{sd}(X)}{\sqrt{n}}\)

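  For example, with illustrative population values \(\mu = 50\) and \(\sigma = 12\), and samples of size \(n = 36\): \(\operatorname{E}(\overline{X}) = 50\) and \(\operatorname{sd}(\overline{X}) = \frac{12}{\sqrt{36}} = 2\).
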
  \begin{cas}

    \hspace{1em} Spreadsheet \(\rightarrow\) Catalog \(\rightarrow\) \verb;randNorm(sd, mean, n); \\
    where \verb;n; is the number of samples. Show histogram with Histogram key in top left.

    To calculate parameters of a dataset: \\
    \-\hspace{1em}Calc \(\rightarrow\) One-variable

  \end{cas}

  \subsection*{Normal distributions}

  \[ Z = \frac{X - \mu}{\sigma} \]

  Normal distributions must have area (total prob.) of 1 \(\implies \int^\infty_{-\infty} f(x) \> dx = 1\) \\
  \(\text{mean} = \text{mode} = \text{median}\)

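  For example, with illustrative values \(\mu = 100\), \(\sigma = 15\) and observation \(x = 130\): \(z = \frac{130 - 100}{15} = 2\), i.e.\ the observation lies two standard deviations above the mean.
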
  \begin{warning}
    Always express \(z\) as positive. Express a confidence \textit{interval} as an ordered pair.
  \end{warning}

  \begin{figure*}[hb]
    \centering
    \include{normal-dist-graph}
  \end{figure*}

  \subsection*{Central limit theorem}

  \begin{theorembox}{}
    If \(X\) is a random variable with mean \(\mu\) and sd \(\sigma\), then for a sufficiently large sample size \(n\) the distribution of the sample mean \(\overline{X}\) is approximately normal, with mean \(\operatorname{E}(\overline{X}) = \mu\) and \(\operatorname{sd}(\overline{X}) = \frac{\sigma}{\sqrt{n}}\).
  \end{theorembox}

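  As an illustration with made-up numbers: sampling \(n = 100\) values from any population with \(\mu = 50\) and \(\sigma = 10\), the sample mean \(\overline{X}\) is approximately normal with mean \(50\) and sd \(\frac{10}{\sqrt{100}} = 1\).
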
  \subsection*{Confidence intervals}

  \begin{itemize}
    \item \textbf{Point estimate:} single-valued estimate of the population mean from the value of the sample mean \(\overline{x}\)
    \item \textbf{Interval estimate:} confidence interval for the population mean \(\mu\)
    \item \(C\)\% confidence interval \(\implies\) over repeated samples, \(C\)\% of such intervals will contain the population mean \(\mu\)
  \end{itemize}

  \subsubsection*{95\% confidence interval}

  For a 95\% c.i. of the population mean \(\mu\):

  \[ \mu \in \left(\overline{x} \pm 1.96 \dfrac{\sigma}{\sqrt{n}} \right)\]

  where:
  \begin{description}[nosep, labelindent=0.5cm]
    \item \(\overline{x}\) is the sample mean
    \item \(\sigma\) is the population sd
    \item \(n\) is the sample size from which \(\overline{x}\) was calculated
  \end{description}

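  For example, with illustrative values \(\overline{x} = 30\), \(\sigma = 5\) and \(n = 100\):
  \[ \mu \in \left(30 \pm 1.96 \times \dfrac{5}{\sqrt{100}}\right) = (29.02,\ 30.98) \]
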
  \begin{cas}
    Menu \(\rightarrow\) Stats \(\rightarrow\) Calc \(\rightarrow\) Interval \\
    Set \textit{Type = One-Sample Z Int} \\ \-\hspace{1em} and select \textit{Variable}
  \end{cas}

  \subsection*{Margin of error}

  For a 95\% confidence interval of \(\mu\):
  \begin{align*}
    M &= 1.96 \times \dfrac{\sigma}{\sqrt{n}} \\
    \implies n &= \left( \dfrac{1.96 \sigma}{M} \right)^2
  \end{align*}

  Always round \(n\) up to a whole number of samples.

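  For example, with an illustrative \(\sigma = 5\) and a required margin of error \(M = 1\) at the 95\% level:
  \[ n = \left(\dfrac{1.96 \times 5}{1}\right)^2 = 96.04 \implies n = 97 \]
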
  \subsection*{General case}

  For a \(C\)\% c.i. of the population mean \(\mu\):

  \[ \mu \in \left( \overline{x} \pm k \dfrac{\sigma}{\sqrt{n}} \right) \]
  \hfill where \(k\) is such that \(\Pr(-k < Z < k) = \frac{C}{100}\)

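  For example, for a 90\% confidence interval, \(k\) satisfies \(\Pr(-k < Z < k) = 0.9\), giving \(k \approx 1.645\) (via inverse normal on CAS).
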
  \subsection*{Confidence interval for multiple trials}

  For a set of \(n\) 95\% confidence intervals (one per sample), there is a \(0.95^n\) chance that all \(n\) intervals contain the population mean \(\mu\).

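  For example, across \(n = 3\) independent samples each giving a 95\% confidence interval, the chance that all three intervals contain \(\mu\) is \(0.95^3 \approx 0.857\).
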
  \section{Hypothesis testing}

  \begin{warning}
    Hypotheses are always expressed in terms of population parameters.
  \end{warning}

  \subsection*{Null hypothesis \(\textbf{H}_0\)}

  The sample drawn from the population has the same mean as the control population; any difference can be explained by sampling variation.

  \subsection*{Alternative hypothesis \(\textbf{H}_1\)}

  The variation from the control is significant, and cannot be explained by standard sampling variation alone.

  \subsection*{\(p\)-value}

  The probability of observing a value of the sample statistic at least as extreme as the one observed, assuming the null hypothesis is true.

  For one-tail tests:
  \begin{align*}
    p\text{-value} &= \Pr\left( \> \overline{X} \lessgtr \mu(\textbf{H}_1) \> \given \> \mu = \mu(\textbf{H}_0)\> \right) \\
    &= \Pr\left( Z \lessgtr \dfrac{\left( \mu(\textbf{H}_1) - \mu(\textbf{H}_0) \right) \cdot \sqrt{n} }{\operatorname{sd}(X)} \right) \\
    &\text{then use \texttt{normCdf} with std. norm.}
  \end{align*}

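  For example, with illustrative values \(\textbf{H}_0: \mu = 100\), \(\textbf{H}_1: \mu > 100\), \(\sigma = 15\), \(n = 36\) and observed sample mean \(\overline{x} = 105\):
  \begin{align*}
    p\text{-value} &= \Pr\left( Z \ge \dfrac{(105 - 100)\sqrt{36}}{15} \right) \\
    &= \Pr(Z \ge 2) \approx 0.023
  \end{align*}
  giving good evidence against \(\textbf{H}_0\) at the 5\% level.
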
  \vspace{0.5em}
  \begin{tabularx}{23em}{|l|X|}
    \hline
    \rowcolor{cas}
    \(\boldsymbol{p}\) & \textbf{Conclusion} \\
    \hline
    \(> 0.05\) & insufficient evidence against \(\textbf{H}_0\) \\
    \(< 0.05\) (5\%) & good evidence against \(\textbf{H}_0\) \\
    \(< 0.01\) (1\%) & strong evidence against \(\textbf{H}_0\) \\
    \(< 0.001\) (0.1\%) & very strong evidence against \(\textbf{H}_0\) \\
    \hline
  \end{tabularx}

  \subsubsection*{Finding \(n\) for a given \(p\)-value}

  Find \(c\) such that \(\Pr(Z \lessgtr c) = \alpha\) (use \texttt{invNormCdf} on CAS), then solve \(\dfrac{(\overline{x} - \mu)\sqrt{n}}{\sigma} = c\) for \(n\).

  \subsection*{Significance level \(\alpha\)}

  The condition for rejecting the null hypothesis.

  \-\hspace{1em} If \(p<\alpha\), the null hypothesis is \textbf{rejected} \\
  \-\hspace{1em} If \(p>\alpha\), the null hypothesis is \textbf{not rejected}

  \subsection*{\(z\)-test}

  Hypothesis test for a mean of a sample drawn from a normally distributed population with a known standard deviation.

  \begin{cas}
  Menu \(\rightarrow\) Statistics \(\rightarrow\) Calc \(\rightarrow\) Test. \\
  Select \textit{One-Sample Z-Test} and \textit{Variable}, then input:
    \begin{description}[nosep, style=multiline, labelindent=0.5cm, leftmargin=2cm, font=\normalfont]
    \item[\(\mu\) cond:] same operator as \(\textbf{H}_1\)
    \item[\(\mu_0\):] expected sample mean (null hypothesis)
    \item[\(\sigma\):] standard deviation (null hypothesis)
    \item[\(\overline{x}\):] sample mean
    \item[\(n\):] sample size
  \end{description}
  \end{cas}

  \subsection*{One-tail and two-tail tests}

  \[ p\text{-value (two-tail)} = 2 \times p\text{-value (one-tail)} \]

  \subsubsection*{One tail}

  \begin{itemize}
    \item \(\mu\) has changed in one direction
    \item State ``\(\textbf{H}_1: \mu \lessgtr \) known population mean''
  \end{itemize}

  \subsubsection*{Two tail}

  \begin{itemize}
    \item Direction of \(\Delta \mu\) is ambiguous
    \item State ``\(\textbf{H}_1: \mu \ne\) known population mean''
  \end{itemize}

  \begin{align*}
    p\text{-value} &= \Pr(|\overline{X} - \mu| \ge |\overline{x}_0 - \mu|) \\
    &= \Pr\left( |Z| \ge \left|\dfrac{\overline{x}_0 - \mu}{\sigma / \sqrt{n}} \right| \right)
  \end{align*}

  where
  \begin{description}[nosep, labelindent=0.5cm]
    \item [\(\mu\)] is the population mean under \(\textbf{H}_0\)
    \item [\(\overline{x}_0\)] is the observed sample mean
    \item [\(\sigma\)] is the population s.d.
    \item [\(n\)] is the sample size
  \end{description}

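  For example, with illustrative values \(\mu = 100\) under \(\textbf{H}_0\), \(\sigma = 15\), \(n = 36\), observed \(\overline{x}_0 = 105\) and \(\textbf{H}_1: \mu \ne 100\):
  \[ p\text{-value} = \Pr(|Z| \ge 2) = 2 \times \Pr(Z \ge 2) \approx 0.046 \]
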
  \subsection*{Modulus notation for two tail}

  \(\Pr(|\overline{X} - \mu| \ge a) \implies\) ``the probability that the distance between \(\overline{X}\) and \(\mu\) is \(\ge a\)''

  \subsection*{Inverse normal}

  \begin{cas}
    \verb;invNormCdf("L", ;\(\alpha\)\verb;, ;\(\dfrac{\sigma}{\sqrt{n}}\)\verb;, ;\(\mu\)\verb;);
  \end{cas}

  \subsection*{Errors}

  \begin{description}[labelwidth=2.5cm, labelindent=0.5cm]
    \item [Type I error] \(\textbf{H}_0\) is rejected when it is \textbf{true}
    \item [Type II error] \(\textbf{H}_0\) is \textbf{not} rejected when it is \textbf{false}
  \end{description}

  \begin{tabularx}{\columnwidth}{|X|l|l|}
    \rowcolor{cas}\hline
    \cellcolor{white}&\multicolumn{2}{c|}{\textbf{Actual result}} \\
    \hline
    \cellcolor{cas}\(\boldsymbol{z}\)\textbf{-test} & \cellcolor{light-gray}\(\textbf{H}_0\) true & \cellcolor{light-gray}\(\textbf{H}_0\) false \\
    \hline
    \cellcolor{light-gray}Reject \(\textbf{H}_0\) & Type I error & Correct \\
    \hline
    \cellcolor{light-gray}Do not reject \(\textbf{H}_0\) & Correct & Type II error \\
    \hline
  \end{tabularx}

% \subsection*{Using c.i. to find \(p\)}
% need more here

\end{document}