\begin{align*}
\sigma^2=\operatorname{Var}(x) &= \sum_{i=1}^n p_i (x_i-\mu)^2 \\
&= \sum (x-\mu)^2 \times \Pr(X=x) \\
&= \sum x^2 \times p(x) - \mu^2 \\
&= \operatorname{E}(X^2) - [\operatorname{E}(X)]^2
\end{align*}
\item \textbf{Standard deviation $\sigma$} --- measure of spread in the original magnitude of the data. Found by taking the square root of the variance:
\begin{align*}
\operatorname{E}(X+Y) &= \operatorname{E}(X) + \operatorname{E}(Y) \tag{for two random variables}
\end{align*}
\subsubsection*{Variance theorems}

\[ \operatorname{Var}(aX \pm bY \pm c) = a^2 \operatorname{Var}(X) + b^2 \operatorname{Var}(Y) \]
\section{Binomial Theorem}