Add whole vs. windowed plot for a constant total number of iterations

This commit is contained in:
2026-04-01 17:51:24 +02:00
parent 5480f2ed7b
commit 2246915be9

View File

@@ -65,6 +65,10 @@
\maketitle
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Soft vs Hard Information Decoding}
\label{sec:Soft vs Hard Information Decoding}
\begin{figure}[H]
\centering
@@ -305,5 +309,135 @@
$n_\text{rounds}=12,p_\text{phys} = 0.0025,W=5$}
\end{figure}
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Whole vs Windowed Decoding with Constant Total Number of Iterations}
\label{sec:Whole vs Windowed Decoding with Constant Total Number of Iterations}
\begin{itemize}
\item Sliding-window decoding
\begin{itemize}
\item $n_\text{iter,BP} = 32$
\item $W=5, F=2 \Rightarrow n_\text{windows} = 8$
\item $n_\text{iter,BP}^\text{total} = n_\text{windows}
\cdot n_\text{iter,BP} = 256$
\end{itemize}
\item Whole decoding
\begin{itemize}
\item $n_\text{iter,BP} = 256$
\end{itemize}
\end{itemize}
\begin{figure}[H]
\centering
\begin{subfigure}{0.4\textwidth}
\centering
\begin{tikzpicture}
\begin{axis}[
width=\figwidth,
height=\figheight,
ymode=log,
enlargelimits=false,
ymin=5e-5, ymax=2.5e-1,
grid=both,
xticklabel style={/pgf/number format/fixed},
xticklabel style={/pgf/number format/precision=4},
scaled x ticks=false,
grid=both,
xtick={0.001,0.0015,...,0.004},
xlabel={Physical Error Rate},
ylabel={Per-round-LER},
]
\addplot+[
mark=o, line width=1pt, densely dashed, black,
]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_whole})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=minimum_sum,+experiment=whole_more_iterations,+system.F=1,+system.W=3/2026-04-01_15-41-23/LERs.csv};
\addplot+[mark=o, line width=1pt, densely dashed, scol0]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_hard})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=minimum_sum,+experiment=soft_vs_hard_param_exploration,+system.F=1,+system.W=5/2026-03-30_00-06-26/LERs.csv};
\addplot+[mark=*, line width=1pt, scol0]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_soft})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=minimum_sum,+experiment=soft_vs_hard_param_exploration,+system.F=1,+system.W=5/2026-03-30_00-06-26/LERs.csv};
\end{axis}
\end{tikzpicture}
\caption{Min-Sum}
\end{subfigure}%
\begin{subfigure}{0.6\textwidth}
\centering
\begin{tikzpicture}
\begin{axis}[
width=\figwidth,
height=\figheight,
ymode=log,
legend style={
cells={anchor=west},
cells={align=left},
},
enlargelimits=false,
ymin=5e-5, ymax=2.5e-1,
grid=both,
legend pos = outer north east,
xticklabel style={/pgf/number format/fixed},
xticklabel style={/pgf/number format/precision=4},
scaled x ticks=false,
grid=both,
xtick={0.001,0.0015,...,0.004},
xlabel={Physical Error Rate},
ylabel={},
yticklabels = {},
]
\addplot+[mark=o, line width=1pt, densely dashed, scol0]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_hard})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=product_sum,+experiment=soft_vs_hard_param_exploration,+system.F=1,+system.W=5/2026-03-30_00-06-26/LERs.csv};
\addlegendentry{Hard}
\addplot+[mark=*, line width=1pt, scol0]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_soft})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=product_sum,+experiment=soft_vs_hard_param_exploration,+system.F=1,+system.W=5/2026-03-30_00-06-26/LERs.csv};
\addlegendentry{Soft}
\addplot+[
mark=o, line width=1pt, densely dashed, black,
]
table[
col sep=comma, x=physical_p,
y expr={1 - (1-\thisrow{LER_whole})^(1/12)}
]
{/home/andreas/workspace/private/ma-sw-results/outputs/+decoder.bp_method=product_sum,+experiment=whole_more_iterations,+system.F=1,+system.W=3/2026-04-01_15-41-24/LERs.csv};
\addlegendentryexpanded{Whole}
\end{axis}
\end{tikzpicture}
\caption{SPA}
\end{subfigure}%
\caption{BP simulations for the [[144,12,12]]-BB code with
$n_\text{rounds}=12, W=5, F=1$}
\end{figure}
\end{document}