\begin{align*}
\sigma^2=\operatorname{Var}(x) &= \sum_{i=1}^n p_i (x_i-\mu)^2 \\
&= \sum (x-\mu)^2 \times \Pr(X=x) \\
&= \sum x^2 \times p(x) - \mu^2 \\
&= \operatorname{E}(X^2) - [\operatorname{E}(X)]^2
\end{align*}
\item \textbf{Standard deviation $\sigma$} --- measure of spread in the original magnitude of the data. Found by taking the square root of the variance:
\begin{align*}
\operatorname{E}(X+Y) &= \operatorname{E}(X) + \operatorname{E}(Y) \tag{for two random variables}
\end{align*}
\subsubsection*{Variance theorems}

\[ \operatorname{Var}(aX \pm bY \pm c) = a^2 \operatorname{Var}(X) + b^2 \operatorname{Var}(Y) \]
\section{Binomial Theorem}
\[ \Pr(a \le X \le b) = \int^b_a f(x) \> dx \]
\colorbox{cas}{On CAS:} Interactive \(\rightarrow\) Distribution \(\rightarrow\) \verb;normCdf;.

For a function in domain \(a \le x \le b\):

\[ \operatorname{E}(X) = \int^b_a x f(x) \> dx \]

\[ \operatorname{sd}(X) = \sqrt{\operatorname{Var}(X)} = \sqrt{\operatorname{E}(X^2)-[\operatorname{E}(X)]^2} \]

\end{document}