From f38189a714668b3dbe5255b4e75024548cc60d0b Mon Sep 17 00:00:00 2001
From: NotXia <35894453+NotXia@users.noreply.github.com>
Date: Sun, 4 Feb 2024 17:10:41 +0100
Subject: [PATCH] Fix typos

---
 .../module3/sections/_approx_inference.tex                | 6 +++---
 .../module3/sections/_bayesian_net.tex                    | 4 ++--
 .../module3/sections/_exact_inference.tex                 | 3 +--
 src/fundamentals-of-ai-and-kr/module3/sections/_intro.tex | 4 ++--
 4 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/src/fundamentals-of-ai-and-kr/module3/sections/_approx_inference.tex b/src/fundamentals-of-ai-and-kr/module3/sections/_approx_inference.tex
index 0d26ece..35fa96b 100644
--- a/src/fundamentals-of-ai-and-kr/module3/sections/_approx_inference.tex
+++ b/src/fundamentals-of-ai-and-kr/module3/sections/_approx_inference.tex
@@ -113,7 +113,7 @@ The probability $\mathcal{S}$ of sampling a specific event $\matr{Z}$ and eviden
 probability of the single events in $\matr{Z}$ knowing their parents:
 \[ \mathcal{S}(\matr{Z}, \matr{E}) = \prod_{z_i \in \matr{Z}} \prob{z_i | \texttt{parents}(z_i)} \]
 
-The weights of a sample $(\matr{Z}, \matr{E})$ is given by the
+The weight of a sample $(\matr{Z}, \matr{E})$ is given by the
 probability of the single events in $\matr{E}$ knowing their parents:
 \[ \text{w}(\matr{Z}, \matr{E}) = \prod_{e_i \in \matr{E}} \prob{e_i | \texttt{parents}(e_i)} \]
@@ -129,7 +129,7 @@ probability of the single events in $\matr{E}$ knowing their parents:
             &= \prob{\matr{Z}, \matr{E}}
         \end{split} \]
-    This is consequence of the global semantics of Bayesian networks.
+    This is a consequence of the global semantics of Bayesian networks.
     \end{proof}
 \end{theorem}
@@ -148,7 +148,7 @@ probability of the single events in $\matr{E}$ knowing their parents:
     \[ \langle C=\texttt{true}, S=\texttt{true}, \prob{R | C=\texttt{true}}, W=\texttt{false} \rangle \]
     \[ \langle C=\texttt{true}, S=\texttt{true}, R=\texttt{true}, W=\texttt{false} \rangle \]
-    The weight associated to the sample is given by the probabilities of the evidence:
+    The weight associated to the sample is given by the probability of the evidence:
     \[ \begin{split}
         \text{w} &= \prob{S=\texttt{true} | C=\texttt{true}} \cdot \prob{W=\texttt{false} | S=\texttt{true}, R=\texttt{true}} \\
diff --git a/src/fundamentals-of-ai-and-kr/module3/sections/_bayesian_net.tex b/src/fundamentals-of-ai-and-kr/module3/sections/_bayesian_net.tex
index a9b58d3..c5f65a3 100644
--- a/src/fundamentals-of-ai-and-kr/module3/sections/_bayesian_net.tex
+++ b/src/fundamentals-of-ai-and-kr/module3/sections/_bayesian_net.tex
@@ -158,7 +158,7 @@
     \begin{theorem}
         Two d-separated nodes are independent.
-        In other words, two nodes are independent if there is no active trail between them.
+        In other words, two nodes are independent if there are no active trails between them.
     \end{theorem}
 
     \item[Independence algorithm] \phantom{}
@@ -226,7 +226,7 @@
     \item[Markov blanket]
-        Each node is conditionally independent of all other nodes
+        Each node is conditionally independent of all the other nodes
         if its Markov blanket (parents, children, children's parents) is in the evidence.
 
         \begin{figure}[h]
            \centering
diff --git a/src/fundamentals-of-ai-and-kr/module3/sections/_exact_inference.tex b/src/fundamentals-of-ai-and-kr/module3/sections/_exact_inference.tex
index 4fd6d04..fd3e857 100644
--- a/src/fundamentals-of-ai-and-kr/module3/sections/_exact_inference.tex
+++ b/src/fundamentals-of-ai-and-kr/module3/sections/_exact_inference.tex
@@ -115,5 +115,4 @@ A variable $X$ is irrelevant if summing over it results in a probability of $1$.
 
 \marginnote{Clustering algorithm}
 Method that joins individual nodes to form clusters.
-Allows to estimate the posterior probabilities for $n$ variables with complexity $O(n)$
-(in contrast, variable elimination is $O(n^2)$).
\ No newline at end of file
+Allows to estimate the posterior probabilities for $n$ variables with complexity $O(n)$.
\ No newline at end of file
diff --git a/src/fundamentals-of-ai-and-kr/module3/sections/_intro.tex b/src/fundamentals-of-ai-and-kr/module3/sections/_intro.tex
index 34faa2e..87c480b 100644
--- a/src/fundamentals-of-ai-and-kr/module3/sections/_intro.tex
+++ b/src/fundamentals-of-ai-and-kr/module3/sections/_intro.tex
@@ -22,9 +22,9 @@ \subsection{Handling uncertainty}
 
 \begin{descriptionlist}
-    \item[Default/nonmonotonic logic] \marginnote{Default/nonmonotonic logic}
+    \item[Default/non-monotonic logic] \marginnote{Default/non-monotonic logic}
         Works on assumptions.
-        An assumption can be contradicted by an evidence.
+        An assumption can be contradicted by the evidence.
 
     \item[Rule-based systems with fudge factors] \marginnote{Rule-based systems with fudge factors}
         Formulated as premise $\rightarrow_\text{prob.}$ effect.
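
Note (not part of the patch): the _approx_inference.tex hunks above touch the likelihood-weighting
material, where each sample gets the weight w(Z, E) = prod_i P(e_i | parents(e_i)). Below is a
minimal Python sketch of that computation for a sprinkler-style network
(Cloudy -> Sprinkler/Rain -> WetGrass). The CPT numbers are the usual textbook placeholders rather
than values taken from these notes, and the function name weighted_sample is chosen here purely for
illustration.

    import random

    # P(var = True | parent assignment); inner keys are tuples of parent values.
    # Placeholder CPTs, not taken from the notes.
    CPT = {
        "Cloudy":    {(): 0.5},
        "Sprinkler": {(True,): 0.1, (False,): 0.5},               # parent: Cloudy
        "Rain":      {(True,): 0.8, (False,): 0.2},               # parent: Cloudy
        "WetGrass":  {(True, True): 0.99, (True, False): 0.90,
                      (False, True): 0.90, (False, False): 0.0},  # parents: Sprinkler, Rain
    }
    PARENTS = {"Cloudy": (), "Sprinkler": ("Cloudy",), "Rain": ("Cloudy",),
               "WetGrass": ("Sprinkler", "Rain")}
    ORDER = ["Cloudy", "Sprinkler", "Rain", "WetGrass"]  # topological order

    def weighted_sample(evidence):
        """Sample non-evidence variables top-down; the weight is the product
        of P(e_i | parents(e_i)) over the evidence variables."""
        sample, weight = {}, 1.0
        for var in ORDER:
            p_true = CPT[var][tuple(sample[p] for p in PARENTS[var])]
            if var in evidence:
                # Evidence variable: keep its observed value and update the weight.
                sample[var] = evidence[var]
                weight *= p_true if evidence[var] else 1.0 - p_true
            else:
                # Non-evidence variable: sample it given its parents' values.
                sample[var] = random.random() < p_true
        return sample, weight

    # With evidence S=true, W=false and Cloudy/Rain both sampled true, the weight is
    # P(S=true | C=true) * P(W=false | S=true, R=true) = 0.1 * 0.01, matching the
    # structure of the worked example in the third hunk of _approx_inference.tex.
    print(weighted_sample({"Sprinkler": True, "WetGrass": False}))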