diff --git a/src/year2/natural-language-processing/sections/_rnn.tex b/src/year2/natural-language-processing/sections/_rnn.tex
index 8e540c5..7c19dcd 100644
--- a/src/year2/natural-language-processing/sections/_rnn.tex
+++ b/src/year2/natural-language-processing/sections/_rnn.tex
@@ -33,7 +33,7 @@
 \end{figure}
 
 \begin{remark}
-    RNN-LMs allows to generate the output autoregressively.
+    RNN-LMs allow generating the output autoregressively.
 \end{remark}
 
 \begin{description}
@@ -98,7 +98,7 @@
 \end{description}
 
 \begin{remark}
-    LSTM makes it easier to preserve information over time, but it might still be affected by the vanishing gradient problem.
+    LSTMs make it easier to preserve information over time, but they might still be affected by the vanishing gradient problem.
 \end{remark}
 
 \begin{figure}[H]
@@ -112,7 +112,7 @@
 
 \begin{description}
     \item[Gated recurrent units (GRU)] \marginnote{Gated recurrent units (GRU)}
-        Architecture simpler than LSTMs with fewer gates and without the cell state.
+        A simpler architecture than the LSTM, with fewer gates and no cell state.
 
     \begin{description}
         \item[Gates] \phantom{}
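
The remark fixed in the first hunk could also spell out what autoregressive generation means. A minimal sketch in the notes' own remark style (the symbols $w_t$ for tokens and $h_t$ for the hidden state are assumptions, not taken from the file):

\begin{remark}
    An RNN-LM factorizes the joint probability of a sequence as
    \[
        P(w_1, \dots, w_T) = \prod_{t=1}^{T} P(w_t \mid w_1, \dots, w_{t-1}),
    \]
    where each factor is computed from the hidden state $h_{t-1}$; at
    generation time, the token sampled at step $t$ is fed back as the
    input at step $t+1$.
\end{remark}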
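
Similarly, the second hunk's remark asserts both that LSTMs preserve information more easily and that vanishing gradients can persist; the standard cell-state update makes both halves visible. A sketch under the usual notation ($f_t$ forget gate, $i_t$ input gate, $\tilde{c}_t$ candidate cell, $\odot$ element-wise product; biases and gate definitions omitted):

\[
    c_t = f_t \odot c_{t-1} + i_t \odot \tilde{c}_t
\]

When $f_t \approx 1$, the additive path lets gradients flow through $c_t$ nearly unchanged, which is what "easier to preserve information" refers to; when the forget gate stays near $0$ over many steps, gradients along that path still shrink, so the vanishing gradient problem is mitigated rather than eliminated.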
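
Finally, the GRU line changed in the third hunk ("fewer gates and no cell state") could be grounded by the standard update equations. A sketch with assumed notation ($z_t$ update gate, $r_t$ reset gate, biases omitted; sign conventions for $z_t$ vary across references):

\begin{align*}
    z_t &= \sigma(W_z x_t + U_z h_{t-1}) \\
    r_t &= \sigma(W_r x_t + U_r h_{t-1}) \\
    \tilde{h}_t &= \tanh\!\left(W x_t + U (r_t \odot h_{t-1})\right) \\
    h_t &= (1 - z_t) \odot h_{t-1} + z_t \odot \tilde{h}_t
\end{align*}

Two gates instead of the LSTM's three, and the hidden state $h_t$ itself plays the role of the memory, so no separate cell state $c_t$ is needed.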