A normal distribution, denoted as \(\mathcal{N}(X=x|\mu, \sigma^2)\), is a continuous probability distribution.
The probability density function of a normal distribution is given by:
\begin{align*}
\mathcal{N}(X=x|\mu, \sigma^2) = \frac{1}{\sqrt{2 \pi \sigma^2}} \exp\left(-\frac{(x - \mu)^2}{2 \sigma^2}\right)
\end{align*}
where \(x\) and \(\mu\) are real numbers and \(\sigma > 0\).
A normal distribution can be derived from a binomial distribution.
Consider the binomial probability mass function,
\begin{align*}
\mathrm{Bin}(X=x|n, p) &= \binom{n}{x} p^x (1-p)^{n-x} \\
&= \frac{n!}{x! (n-x)!} p^x (1-p)^{n-x}
\end{align*}
where \(n\) and \(x\) are both large (\(n, x \gg 1\)).
Let
\begin{align*}
g(x) &\equiv \ln \mathrm{Bin}(X=x|n, p) \\
&= \ln n! - \ln x! - \ln (n-x)! + x \ln p + (n-x) \ln (1 - p) \\
g'(x) &= - \ln x + \ln (n-x) + \ln p - \ln (1-p) \\
&= \ln \frac{p}{1-p} \frac{n-x}{x} \\
g''(x) &= - \frac{1}{x} - \frac{1}{n - x} \\
&= \frac{-n}{x(n-x)}
\end{align*}
Here, I used Stirling's approximation, which gives \((\ln x!)' \approx \ln x\) for large \(x\).
When g'(x) = 0,
\begin{align*}
g'(x) &= \ln \frac{p}{1-p} \frac{n-x}{x} = 0 \\
&\iff \frac{p}{1-p} \frac{n-x}{x} = 1 \\
&\iff x = np \equiv \mu
\end{align*}
and
\begin{align*}
g''(\mu) &= \frac{-n}{np(n-np)} \\
&= - \frac{1}{np(1-p)} = - \frac{1}{\sigma^2}
\end{align*}
The Taylor series of g(x) at \mu is given by,
\begin{align*}
g(x) &= g(\mu) + \frac{g'(\mu)}{1!}(x - \mu) + \frac{g''(\mu)}{2!}(x - \mu)^2 + ... \\
&\fallingdotseq g(\mu) - \frac{1}{2\sigma^2}(x - \mu)^2
\end{align*}
Therefore,
\begin{align*}
\ln \mathrm{Bin}(X=x |n, p) &\fallingdotseq \ln \mathrm{Bin}(X=\mu |n, p) - \frac{1}{2\sigma^2}(x - \mu)^2 \\
&= \ln c + \ln \exp \left( -\frac{1}{2\sigma^2}(x - \mu)^2 \right)\\
&= \ln c \exp \left( - \frac{1}{2\sigma^2}(x - \mu)^2 \right) \\
&\iff \mathrm{Bin}(X=x|n, p) \fallingdotseq c \exp \left( - \frac{1}{2\sigma^2}(x - \mu)^2 \right) \propto \mathcal{N}(X=x | \mu, \sigma^2)
\end{align*}
where \(x, n \gg 1\) and \(c \equiv \mathrm{Bin}(X=\mu|n, p)\) is a constant independent of \(x\).
The moment generating function of the normal distribution is
\begin{align*}
M(\theta) &= E[e^{\theta X}] \\
&= \int e^{\theta x} \frac{1}{\sqrt{2 \pi \sigma^2}} \exp\left\{ -\frac{(x - \mu)^2}{2 \sigma^2} \right\} dx \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}} \int \exp\left\{
-\frac{(x - \mu)^2}{2 \sigma^2}
+ \theta x
\right\} dx \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}} \int \exp\left\{
-\frac{1}{2 \sigma^2} \left(
x^2
- 2 (\mu + \sigma^2 \theta)x
+ \mu^2
\right)
\right\} dx \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}} \int \exp\left\{
-\frac{1}{2 \sigma^2} \left(
(x - (\mu +\sigma^2 \theta))^2
- 2 \sigma^2 \mu \theta
- \sigma^4 \theta^2
\right)
\right\} dx \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}}
\exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right)
\int \exp\left\{
-\frac{(x - (\mu +\sigma^2 \theta))^2}{2 \sigma^2}
\right\} dx \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}}
\exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right)
\int \exp\left\{
-\frac{t^2}{2}
\right\} \sigma \, dt
\qquad \left( t \equiv \frac{x - (\mu + \sigma^2 \theta)}{\sigma} \right) \\
&= \frac{1}{\sqrt{2 \pi \sigma^2}}
\exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right)
\sqrt{2 \pi} \sigma \\
&= \exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right)
\end{align*}
Here,
\begin{align*}
M'(\theta) &= \exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right) (\mu + \sigma^2 \theta)\\
M''(\theta) &= \exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right) (\mu + \sigma^2 \theta)^2
+ \exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right) \sigma^2 \\
&= (\sigma^2 + (\mu + \sigma^2 \theta)^2) \exp\left(
\mu \theta
+ \frac{\sigma^2 \theta^2}{2}
\right) \\
M'(0) &= \mu \\
M''(0) &= \sigma^2 + \mu^2
\end{align*}
Therefore, the mean and variance of a normal distribution are
\begin{align*}
E[X] &= \mu \\
V[X] &= E[X^2] - E[X]^2 = M''(0) - M'(0)^2 \\
&= (\sigma^2 + \mu^2) - \mu^2 = \sigma^2
\end{align*}
Discussion