Add FAIKR3 joint distribution inference

2023-10-19 20:45:49 +02:00
parent 4820aebed6
commit 6e133a9f79
3 changed files with 204 additions and 85 deletions


@@ -45,89 +45,4 @@
Defined as:
\[ \text{Decision theory} = \text{Utility theory} + \text{Probability theory} \]
where utility theory encodes the agent's preferences.
\end{description}
\subsection{Probability}
\begin{description}
\item[Sample space] \marginnote{Sample space}
Set $\Omega$ of all possible worlds.
\begin{descriptionlist}
\item[Event] \marginnote{Event}
Subset $A \subseteq \Omega$.
\item[Sample point/Possible world/Atomic event] \marginnote{Sample point}
Element $\omega \in \Omega$.
\end{descriptionlist}
\item[Probability space] \marginnote{Probability space}
A probability space/model assigns to a sample space $\Omega$ a function $\prob{\cdot}: \Omega \rightarrow [0, 1]$ such that:
\begin{itemize}
\item $0 \leq \prob{\omega} \leq 1$
\item $\sum_{\omega \in \Omega} \prob{\omega} = 1$
\item $\prob{A} = \sum_{\omega \in A} \prob{\omega}$
\end{itemize}
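For example, the axioms can be checked on a fair die:
\begin{example}
Rolling a fair die: $\Omega = \{ 1, \dots, 6 \}$ with $\prob{\omega} = \frac{1}{6}$ for each $\omega \in \Omega$.
For the event $A = \{ 2, 4, 6 \}$ (``the outcome is even''):
\[ \prob{A} = \prob{2} + \prob{4} + \prob{6} = \frac{1}{2} \]
\end{example}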
\item[Random variable] \marginnote{Random variable}
A function from sample points to some range (e.g. reals, booleans, \dots).
\item[Probability distribution] \marginnote{Probability distribution}
For any random variable $X$:
\[ \prob{X = x_i} = \sum_{\omega \text{ s.t. } X(\omega) = x_i} \prob{\omega} \]
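For example, with the fair die above and the random variable $\texttt{Odd}(\omega) = (\omega \bmod 2 = 1)$:
\[ \prob{\texttt{Odd} = \texttt{true}} = \prob{1} + \prob{3} + \prob{5} = \frac{1}{2} \]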
\item[Proposition] \marginnote{Proposition}
Event where a random variable has a certain value.
\[ a = \{ \omega \,\vert\, A(\omega) = \texttt{true} \} \]
\[ \lnot a = \{ \omega \,\vert\, A(\omega) = \texttt{false} \} \]
\[ (\texttt{Weather} = \texttt{rain}) = \{ \omega \,\vert\, \texttt{Weather}(\omega) = \texttt{rain} \} \]
\item[Prior probability] \marginnote{Prior probability}
Prior/unconditional probability of a proposition, i.e. the degree of belief before any (new) evidence is observed.
\item[Probability distribution (all)] \marginnote{Probability distribution (all)}
Gives the probability of each possible value of a random variable.
\[ \textbf{P}(A) = \langle \prob{A=a_1}, \dots, \prob{A=a_n} \rangle \]
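For example, using the \texttt{Cavity} entries of the joint distribution shown below:
\[ \textbf{P}(\texttt{Cavity}) = \langle 0.2, 0.8 \rangle \]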
\item[Joint probability distribution] \marginnote{Joint probability distribution}
The joint probability distribution of a set of random variables gives
the probability of all the different combinations of their atomic events.
Note: every query about a domain can, in theory, be answered using the joint distribution, as shown below.
In practice, this is hard to apply, since the table grows exponentially with the number of variables.
\begin{example}
$\textbf{P}(\texttt{Weather}, \texttt{Cavity}) = $
\begin{center}
\small
\begin{tabular}{c | cccc}
& \texttt{Weather=sunny} & \texttt{Weather=rain} & \texttt{Weather=cloudy} & \texttt{Weather=snow} \\
\hline
\texttt{Cavity=true} & 0.144 & 0.02 & 0.016 & 0.02 \\
\texttt{Cavity=false} & 0.576 & 0.08 & 0.064 & 0.08
\end{tabular}
\end{center}
\end{example}
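Such queries are answered by summing out (marginalising) the irrelevant variables. For instance, from the table above:
\[ \prob{\texttt{Cavity} = \texttt{true}} = 0.144 + 0.02 + 0.016 + 0.02 = 0.2 \]
\[ \textbf{P}(\texttt{Weather}) = \langle 0.72, 0.1, 0.08, 0.1 \rangle \]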
\item[Probability density function] \marginnote{Probability density function}
The probability density function (PDF) of a continuous random variable $X$ is a non-negative function $p: \mathbb{R} \rightarrow \mathbb{R}$
such that:
\[ \int_{\mathcal{T}_X} p(x) \,dx = 1 \]
\begin{descriptionlist}
\item[Uniform distribution] \marginnote{Uniform distribution}
\[
p(x) = \text{Unif}[a, b](x) =
\begin{cases}
\frac{1}{b-a} & a \leq x \leq b \\
0 & \text{otherwise}
\end{cases}
\]
\item[Gaussian (normal) distribution] \marginnote{Gaussian (normal) distribution}
\[ \mathcal{N}(\mu, \sigma^2)(x) = \frac{1}{\sigma\sqrt{2\pi}} e^{-\frac{(x-\mu)^2}{2\sigma^2}} \]
$\mathcal{N}(0, 1)$ is the standard Gaussian (written out after this list).
\end{descriptionlist}
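As a quick check, the uniform density integrates to $1$, and the standard Gaussian written out explicitly is:
\[ \int_a^b \frac{1}{b-a} \,dx = 1 \qquad\qquad \mathcal{N}(0, 1)(x) = \frac{1}{\sqrt{2\pi}} e^{-\frac{x^2}{2}} \]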
\item[Conditional probability] \marginnote{Conditional probability}
Probability of a proposition given some (new) evidence:
\[ \prob{a \vert b} = \frac{\prob{a \land b}}{\prob{b}} \]
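For example, using the joint distribution of \texttt{Weather} and \texttt{Cavity} above:
\[ \prob{\texttt{Cavity}=\texttt{true} \,\vert\, \texttt{Weather}=\texttt{sunny}} = \frac{\prob{\texttt{Cavity}=\texttt{true} \land \texttt{Weather}=\texttt{sunny}}}{\prob{\texttt{Weather}=\texttt{sunny}}} = \frac{0.144}{0.72} = 0.2 \]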
\end{description}