Add DAS gradient tracking optimality
@@ -731,4 +731,25 @@
% \[
%     \z_i^{k+1} = \sum_{j=1}^N a_{ij} \z_j^k - (N\alpha) \frac{1}{N} \sum_{j=1}^N \nabla l_j(\z_j^k)
% \]
\end{description}
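
\begin{remark}
    Denoting by $s_i^k$ the gradient-tracking variable of agent $i$ (the symbol $s_i^k$ is chosen here for illustration), double stochasticity of $\matr{A}$ together with the initialization $s_i^0 = \nabla l_i(\z_i^0)$ gives the standard conservation property
    \[
        \frac{1}{N} \sum_{i=1}^N s_i^k = \frac{1}{N} \sum_{i=1}^N \nabla l_i(\z_i^k)
        \quad \forall k \in \mathbb{N}
    \]
    which is what makes the averaged dynamics in the commented equation above behave like a centralized gradient step.
\end{remark}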
\begin{theorem}[Gradient tracking algorithm optimality] \marginnote{Gradient tracking algorithm optimality}
If:
\begin{itemize}
\item $\matr{A}$ is the adjacency matrix of an undirected and connected communication graph $G$; it is doubly stochastic with $a_{ij} > 0$ for each edge $(i, j)$ of $G$.
\item Each cost function $l_i$ is $\mu$-strongly convex and its gradient is $L$-Lipschitz continuous.
\end{itemize}
Then, there exists $\alpha^* > 0$ such that, for any choice of the step size $\alpha \in (0, \alpha^*)$, the sequence of local solutions $\{ \z_i^k \}_{k \in \mathbb{N}}$ of each agent generated by the gradient tracking algorithm asymptotically converges to a consensual optimal solution $\z^*$:
\[ \lim_{k \rightarrow \infty} \Vert \z_i^k - \z^* \Vert = 0 \]

Moreover, the convergence rate is linear and stability is exponential:
\[
    \exists \rho \in (0, 1):
    \Vert \z_i^{k+1} - \z^* \Vert \leq \rho \Vert \z_i^k - \z^* \Vert
    \,\,\land\,\,
    \Vert \z_i^k - \z^* \Vert \leq \rho^k \Vert \z_i^0 - \z^* \Vert
\]
\end{theorem}
\end{description}
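
As a sanity check of the theorem, the following is a minimal numerical sketch (not part of the notes: the ring graph, the uniform $1/3$ weights, the quadratic costs, and all variable names are illustrative assumptions). $N$ agents run gradient tracking on scalar strongly convex quadratics and reach consensus on the centralized optimum:

\begin{verbatim}
import numpy as np

rng = np.random.default_rng(0)
N = 10

# Local costs l_i(z) = 0.5*q_i*z^2 + r_i*z (strongly convex, Lipschitz
# gradients); the optimum of sum_i l_i is z* = -sum(r)/sum(q).
q = rng.uniform(1.0, 2.0, N)
r = rng.uniform(-1.0, 1.0, N)
z_star = -r.sum() / q.sum()

# Doubly stochastic weights on an undirected ring: each agent mixes
# uniformly with itself and its two neighbours.
A = np.zeros((N, N))
for i in range(N):
    for j in (i - 1, i, i + 1):
        A[i, j % N] = 1.0 / 3.0

grad = lambda z: q * z + r  # all local gradients, evaluated entrywise

alpha = 0.05                # step size, assumed below the critical alpha*
z = rng.standard_normal(N)  # local solutions z_i^0
s = grad(z)                 # trackers initialized at the local gradients

for k in range(301):
    z_new = A @ z - alpha * s          # consensus step + tracked gradient
    s = A @ s + grad(z_new) - grad(z)  # track the average gradient
    z = z_new
    if k % 50 == 0:
        print(k, np.abs(z - z_star).max())
\end{verbatim}

The printed worst-case error decreases geometrically, consistent with the linear rate $\rho$ of the theorem.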
\begin{remark}
It can be shown that gradient tracking also works for non-convex optimization and, under suitable assumptions, converges to a stationary point.
\end{remark}