From 81963813194adf949a7235beb17a55347c0c8e9c Mon Sep 17 00:00:00 2001
From: Andreas Tsouchlos
Date: Sat, 8 Apr 2023 21:09:28 +0200
Subject: [PATCH] Reorganized proximal analysis results and continued writing

---
 latex/thesis/chapters/proximal_decoding.tex | 186 +++++++++++++++++---
 1 file changed, 159 insertions(+), 27 deletions(-)

diff --git a/latex/thesis/chapters/proximal_decoding.tex b/latex/thesis/chapters/proximal_decoding.tex
index 2b77dea..ae09966 100644
--- a/latex/thesis/chapters/proximal_decoding.tex
+++ b/latex/thesis/chapters/proximal_decoding.tex
@@ -322,8 +322,21 @@ $[-\eta, \eta]$ individually.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

-\section{Simulation Results}%
-\label{sec:prox:Simulation Results}
+\section{Analysis and Simulation Results}%
+\label{sec:prox:Analysis and Simulation Results}
+
+In this section, the general behaviour of the proximal decoding algorithm is
+analyzed.
+The impact of the parameters $\gamma$, $\omega$, $K$ and $\eta$ is examined.
+The decoding performance is assessed on the basis of the \ac{BER} and the
+\ac{FER}, as well as the \textit{decoding failure rate}, i.e., the rate at
+which the algorithm fails to converge to a valid codeword.
+The convergence properties are reviewed and related to the decoding
+performance.
+Finally, the computational performance is examined both theoretically and on
+the basis of the implementation completed in the context of this work.

All simulation results presented hereafter are based on Monte Carlo
simulations.
@@ -333,9 +346,18 @@ stated.
\todo{Mention number of datapoints from which each graph was created for non
ber and fer curves}

+\subsection{Choice of Parameters}
+
+First, the effect of the parameter $\gamma$ is investigated.
Figure \ref{fig:prox:results} shows a comparison of the decoding performance
of the proximal decoding algorithm as presented by Wadayama et al. in
\cite{proximal_paper} and the implementation realized for this work.
+\noindent The \ac{BER} curves for three different choices of the parameter
+$\gamma$ are shown, as well as the \ac{BER} curve resulting from decoding
+using \ac{BP} as a reference.
+The results from Wadayama et al. are shown with solid lines,
+while the newly generated ones are shown with dashed lines.

\begin{figure}[H]
    \centering

    \begin{tikzpicture}
        \begin{axis}[grid=both,
@@ -344,9 +366,9 @@ of the proximal decoding algorithm as presented by Wadayama et al. in
            grid style={line width=.1pt},
            xlabel={$E_b / N_0$ (dB)}, ylabel={BER},
            ymode=log,
-            legend style={at={(0.5,-0.55)},anchor=south},
-            width=0.75\textwidth,
-            height=0.5625\textwidth,
+            legend style={at={(0.5,-0.7)},anchor=south},
+            width=0.6\textwidth,
+            height=0.45\textwidth,
            ymax=1.2, ymin=0.8e-4,
            xtick={1, 2, ..., 5},
            xmin=0.9, xmax=5.6,
@@ -388,24 +410,37 @@ of the proximal decoding algorithm as presented by Wadayama et al. in
        \end{axis}
    \end{tikzpicture}

-    \caption{Simulation results\protect\footnotemark{} for $\omega = 0.05, K=100$}
+    \caption{Comparison of datapoints from Wadayama et al.
+      with own simulation results%
+      \protect\footnotemark{}}
    \label{fig:prox:results}
\end{figure}
%
-\footnotetext{(3,6) regular LDPC code with n = 204, k = 102 \cite[204.33.484]{mackay_enc}}%
+\footnotetext{(3,6) regular LDPC code with $n = 204$, $k = 102$
+  \cite[\text{204.33.484}]{mackay_enc}; $\omega = 0.05, K=200, \eta=1.5$
+}%
%
-
-Looking at the graph in figure \ref{fig:prox:results} one might notice that for
-a moderately chosen value of $\gamma$ ($\gamma = 0.05$) the decoding
-performance is better than for low ($\gamma = 0.01$) or high
-($\gamma = 0.15$) values.
+\noindent It is noticeable that for a moderately chosen value of $\gamma$
+($\gamma = 0.05$) the decoding performance is better than for low
+($\gamma = 0.01$) or high ($\gamma = 0.15$) values.
The question arises if there is some optimal value maximizing the decoding
performance, especially since the decoding performance seems to dramatically
depend on $\gamma$.
To better understand how $\gamma$ and the decoding performance are related,
figure \ref{fig:prox:results} was recreated, but with a considerably
-larger selection of values for $\gamma$ (figure \ref{fig:prox:results_3d}).%
-%
+larger selection of values for $\gamma$.
+In this new graph, shown in figure \ref{fig:prox:results_3d}, instead of
+stacking the \ac{BER} curves on top of one another in the same plot, the
+visualization is extended to three dimensions.
+The previously shown results are highlighted.
+
+Evidently, while the decoding performance does depend on the value of
+$\gamma$, there is no single value offering optimal performance, but rather
+a certain interval in which the performance stays largely unchanged.
+When examining a number of different codes (figure
+\ref{fig:prox:results_3d_multiple}), it is apparent that while the exact
+landscape of the graph depends on the code, the general behaviour is the same
+in each case.
+
\begin{figure}[H]
    \centering
@@ -416,8 +451,8 @@ larger selection of values for $\gamma$ (figure \ref{fig:prox:results_3d}).%
            xlabel={$E_b / N_0$ (dB)},
            ylabel={$\gamma$},
            zlabel={BER},
-            legend pos=outer north east,
-            %legend style={at={(0.5,-0.55)},anchor=south},
+            %legend pos=outer north east,
+            legend style={at={(0.5,-0.7)},anchor=south},
            ytick={0, 0.05, 0.1, 0.15},
            width=0.6\textwidth,
            height=0.45\textwidth,]
@@ -446,19 +481,21 @@ larger selection of values for $\gamma$ (figure \ref{fig:prox:results_3d}).%
        \end{axis}
    \end{tikzpicture}

-    \caption{BER\protect\footnotemark{} for $\omega = 0.05, K=100$}
+    \caption{Visualization of the relationship between the decoding
+      performance\protect\footnotemark{} and the parameter $\gamma$}
    \label{fig:prox:results_3d}
\end{figure}%
%
-\footnotetext{(3,6) regular LDPC code with n = 204, k = 102 \cite[\text{204.33.484}]{mackay_enc}}%
+\footnotetext{(3,6) regular LDPC code with $n = 204$, $k = 102$
+  \cite[\text{204.33.484}]{mackay_enc};
+  $\omega = 0.05, K=200, \eta=1.5$
+}%
%
-\noindent Evidently, while the performance does depend on the value of
-$\gamma$, there is no single optimal value offering optimal performance, but
-rather a certain interval in which the performance stays largely the same.
-When examining a number of different codes (figure
-\ref{fig:prox:results_3d_multiple}), it is apparent that while the exact
-landscape of the graph depends on the code, the general behaviour is the same
-in each case.
+\noindent This indicates \todo{This is a result fit for the conclusion}
+that while the choice of the parameter $\gamma$ significantly affects the
+decoding performance, there is little benefit in undertaking an extensive
+search for an exact optimum.
+Rather, a preliminary examination providing a rough window for $\gamma$ may
+be sufficient.
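
As an illustration of how such a preliminary examination could be carried out,
the following sketch sweeps a coarse grid of values for $\gamma$ at a single
$E_b / N_0$ point and keeps the range whose estimated \ac{BER} lies close to
the best observed value.
It is only a sketch: the Monte Carlo helper \texttt{estimate\_ber} is an
assumed placeholder interface and not part of the implementation realized for
this work.

\begin{verbatim}
import numpy as np

def coarse_gamma_window(estimate_ber, snr_db,
                        gammas=np.linspace(0.01, 0.15, 8), tolerance=2.0):
    """Return a rough interval of gamma values whose estimated BER lies
    within `tolerance` times the best BER observed at the given SNR.

    `estimate_ber(gamma, snr_db)` is an assumed Monte Carlo helper that
    runs the proximal decoder and returns an estimated bit error rate.
    """
    bers = np.array([estimate_ber(g, snr_db) for g in gammas])
    good = gammas[bers <= tolerance * bers.min()]  # values close to the best
    return good.min(), good.max()  # rough window for a finer search

# Hypothetical usage:
# lo, hi = coarse_gamma_window(estimate_ber, snr_db=3.0)
\end{verbatim}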

\begin{figure}[H]
    \centering
@@ -688,12 +725,105 @@ in each case.
A similar analysis was performed to determine the optimal values for the
other parameters, $\omega$, $K$ and $\eta$.

-TODO
+Changing the parameter $\eta$ does not appear to have a significant effect on
+the decoding performance, which seems sensible considering that its only
+purpose is ensuring numerical stability.
+
+\subsection{Decoding Performance}
+
+\begin{figure}[H]
+    \centering
+
+    \begin{tikzpicture}
+        \begin{axis}[grid=both,
+            xlabel={$E_b / N_0$ (dB)}, ylabel={BER},
+            ymode=log,
+            width=0.48\textwidth,
+            height=0.36\textwidth,
+            legend style={at={(0.05,0.05)},anchor=south west},
+            ymax=1.5, ymin=3e-7,]
+
+            \addplot [ForestGreen, mark=*]
+                table [x=SNR, y=BER, col sep=comma, discard if not={gamma}{0.15}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.15$}
+            \addplot [NavyBlue, mark=*]
+                table [x=SNR, y=BER, col sep=comma, discard if not={gamma}{0.01}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.01$}
+            \addplot [RedOrange, mark=*]
+                table [x=SNR, y=BER, col sep=comma, discard if not={gamma}{0.05}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.05$}
+        \end{axis}
+    \end{tikzpicture}%
+    \hfill%
+    \begin{tikzpicture}
+        \begin{axis}[grid=both,
+            xlabel={$E_b / N_0$ (dB)}, ylabel={FER},
+            ymode=log,
+            width=0.48\textwidth,
+            height=0.36\textwidth,
+            legend style={at={(0.05,0.05)},anchor=south west},
+            ymax=1.5, ymin=3e-7,]
+
+            \addplot [ForestGreen, mark=*]
+                table [x=SNR, y=FER, col sep=comma, discard if not={gamma}{0.15}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.15$}
+            \addplot [NavyBlue, mark=*]
+                table [x=SNR, y=FER, col sep=comma, discard if not={gamma}{0.01}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.01$}
+            \addplot [RedOrange, mark=*]
+                table [x=SNR, y=FER, col sep=comma, discard if not={gamma}{0.05}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.05$}
+        \end{axis}
+    \end{tikzpicture}\\[1em]
+    \begin{tikzpicture}
+        \begin{axis}[grid=both,
+            xlabel={$E_b / N_0$ (dB)}, ylabel={Decoding Failure Rate},
+            ymode=log,
+            width=0.48\textwidth,
+            height=0.36\textwidth,
+            legend style={at={(0.05,0.05)},anchor=south west},
+            ymax=1.5, ymin=3e-7,]
+
+            \addplot [ForestGreen, mark=*]
+                table [x=SNR, y=DFR, col sep=comma, discard if not={gamma}{0.15}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.15$}
+            \addplot [NavyBlue, mark=*]
+                table [x=SNR, y=DFR, col sep=comma, discard if not={gamma}{0.01}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.01$}
+            \addplot [RedOrange, mark=*]
+                table [x=SNR, y=DFR, col sep=comma, discard if not={gamma}{0.05}]
+                {res/proximal/2d_ber_fer_dfr_20433484.csv};
+            \addlegendentry{$\gamma = 0.05$}
+        \end{axis}
+    \end{tikzpicture}

+    \caption{Comparison\protect\footnotemark{} of \ac{FER}, \ac{BER} and
+      decoding failure rate; $\omega = 0.05, K=100$}
+    \label{fig:prox:ber_fer_dfr}
+\end{figure}%
%
+\footnotetext{(3,6) regular LDPC code with $n = 204$, $k = 102$
+  \cite[\text{204.33.484}]{mackay_enc}}%
%
Until now, only
the \ac{BER} has been considered to assess the decoding performance.
The \ac{FER}, however, shows considerably worse performance, as can be seen in
-figure \ref{TODO}.
+figure \ref{fig:prox:ber_fer_dfr}.
+Besides the \ac{BER} and \ac{FER} curves, the figure also shows the
+\textit{decoding failure rate}.
+This is the rate at which the iterative process fails, i.e., the stopping
+criterion (line 6 of algorithm \ref{TODO}) is never satisfied and the maximum
+number of iterations $K$ is reached without converging to a valid codeword.
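
To make the distinction between these three quantities concrete, the following
sketch outlines how the \ac{BER}, the \ac{FER} and the decoding failure rate
could be tallied in a Monte Carlo run.
It is purely illustrative: \texttt{encode} and \texttt{prox\_decode} are
assumed placeholder interfaces and do not reproduce the implementation
realized for this work.

\begin{verbatim}
import numpy as np

def monte_carlo_rates(encode, prox_decode, n, k, snr_db, num_frames,
                      gamma, omega, K, eta, seed=0):
    """Estimate BER, FER and decoding failure rate (DFR).

    `encode(bits)` maps k information bits to an n-bit codeword;
    `prox_decode(y, ...)` returns a hard-decision estimate and a flag
    indicating whether a valid codeword was reached within K iterations.
    Both are assumed interfaces, used here only for illustration.
    """
    rng = np.random.default_rng(seed)
    # AWGN noise standard deviation for BPSK at the given Eb/N0 (dB)
    sigma = np.sqrt(1.0 / (2.0 * (k / n) * 10.0 ** (snr_db / 10.0)))
    bit_errors = frame_errors = failures = 0

    for _ in range(num_frames):
        c = encode(rng.integers(0, 2, k))                  # codeword bits in {0, 1}
        y = (1.0 - 2.0 * c) + sigma * rng.normal(size=n)   # BPSK mapping plus noise
        c_hat, converged = prox_decode(y, gamma=gamma, omega=omega, K=K, eta=eta)

        errors = np.count_nonzero(c_hat != c)
        bit_errors += errors              # contributes to the BER
        frame_errors += errors > 0        # any wrong bit makes the frame wrong
        failures += not converged         # K reached without a valid codeword

    return (bit_errors / (num_frames * n),   # BER (per codeword bit)
            frame_errors / num_frames,       # FER
            failures / num_frames)           # decoding failure rate
\end{verbatim}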
One possible explanation might be found in the structure of the proximal
decoding algorithm \ref{TODO} itself.
As it comprises two separate steps, one responsible for addressing the
@@ -743,6 +873,8 @@ This course of thought will be picked up in section
    \end{itemize}
\end{itemize}

+\subsection{Convergence Properties}
+\subsection{Computational Performance}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%