Implemented naive soft decision decoder
parent 04eaea92a1
commit 78fd8bf95c
sw/decoders/naive_soft_decision.py (new file, 54 additions)
@@ -0,0 +1,54 @@
import numpy as np
import itertools


class SoftDecisionDecoder:
    """This class naively implements a soft-decision decoder: it calculates the
    posterior probability of each codeword and chooses the one with the largest
    probability.
    """

    def __init__(self, G: np.array, H: np.array):
        """Construct a new SoftDecisionDecoder object.

        :param G: Generator matrix
        :param H: Parity check matrix
        """
        self._G = G
        self._H = H
        self._datawords, self._codewords = self._gen_codewords()
        self._codewords_bpsk = self._codewords * 2 - 1  # The codewords, but mapped to [-1, 1]^n

    def _gen_codewords(self) -> tuple:
        """Generate all possible datawords and their corresponding codewords.

        :return: Tuple of numpy arrays (datawords, codewords), each of the form
                 [[word_1], [word_2], ...]
        """
        k, n = self._G.shape

        # Generate a list of all possible data words
        u_lst = [list(i) for i in itertools.product([0, 1], repeat=k)]
        u_lst = np.array(u_lst)

        # Map each data word onto a codeword
        c_lst = np.dot(u_lst, self._G) % 2

        return u_lst, c_lst

    def decode(self, y: np.array) -> np.array:
        """Decode a received signal.

        This function assumes a BPSK-like modulated signal ([-1, 1]^n instead of [0, 1]^n)
        and an AWGN channel.

        :param y: Vector of received values (y = x + n, where 'x' is an element of [-1, 1]^n
                  and 'n' is noise)
        :return: Dataword corresponding to the most probably transmitted codeword
        """
        # TODO: Is there a nice numpy way to implement this for loop?
        correlations = []
        for c in self._codewords_bpsk:
            correlations.append(np.dot(y, c))

        return self._datawords[np.argmax(correlations)]
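The TODO in decode() asks for a vectorized form of the correlation loop. Since all BPSK-mapped codewords have the same energy, maximizing the posterior probability over an AWGN channel is equivalent to maximizing the correlation between y and each codeword, so the search collapses into one matrix-vector product followed by an argmax. A minimal sketch of that variant (not part of this commit; the standalone function and its argument names are hypothetical):

import numpy as np

def decode_vectorized(codewords_bpsk: np.ndarray, datawords: np.ndarray, y: np.ndarray) -> np.ndarray:
    # codewords_bpsk: shape (2**k, n) with entries in {-1, 1}; y: received vector of length n
    correlations = codewords_bpsk @ y            # correlation with every codeword, shape (2**k,)
    return datawords[np.argmax(correlations)]    # dataword of the best-matching codeword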
@@ -1,5 +1,4 @@
import numpy as np
from tqdm import tqdm


class ProximalDecoder:
@@ -44,6 +44,7 @@ def count_bit_errors(d: np.array, d_hat: np.array) -> int:


def test_decoder(decoder: typing.Any,
                 d: np.array,
                 c: np.array,
                 SNRs: typing.Sequence[float] = np.linspace(1, 4, 7),
                 target_bit_errors=100,
@@ -54,6 +55,7 @@ def test_decoder(decoder: typing.Any,
    This function prints its progress to stdout.

    :param decoder: Instance of the decoder to be tested
    :param d: Dataword (element of [0, 1]^k)
    :param c: Codeword whose transmission is to be simulated (element of [0, 1]^n)
    :param SNRs: List of SNRs for which the BER should be calculated
    :param target_bit_errors: Number of bit errors after which to stop the simulation
@@ -79,7 +81,7 @@ def test_decoder(decoder: typing.Any,
        y = add_awgn(x, SNR, signal_amp=np.sqrt(2))
        y_hat = decoder.decode(y)

        total_bit_errors += count_bit_errors(c, y_hat)
        total_bit_errors += count_bit_errors(d, y_hat)
        total_bits += c.size

        if total_bit_errors >= target_bit_errors:
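The switch from count_bit_errors(c, y_hat) to count_bit_errors(d, y_hat) reflects that SoftDecisionDecoder.decode() returns the dataword rather than the codeword, so errors are now counted against d. For orientation, a helper with the signature shown above would typically just count differing positions; a sketch (the actual body in the utility module is not shown in this diff):

import numpy as np

def count_bit_errors(d: np.array, d_hat: np.array) -> int:
    # Number of bit positions in which the decoded word differs from the reference
    return int(np.sum(d != d_hat))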
@@ -4,6 +4,7 @@ import seaborn as sns
import pandas as pd

from decoders import proximal
from decoders import naive_soft_decision
from decoders import utility
@@ -21,13 +22,14 @@ def main():

    # Test decoder

    d = np.array([0, 1, 1, 1])
    d = np.array([0, 0, 0, 0])
    c = np.dot(G.transpose(), d) % 2

    print(f"Simulating with c = {c}")

    decoder = proximal.ProximalDecoder(H, K=100, gamma=0.01)
    SNRs, BERs = utility.test_decoder(decoder, c, SNRs=np.linspace(1, 5.5, 7), target_bit_errors=200, N_max=15000)
    # decoder = proximal.ProximalDecoder(H, K=100, gamma=0.01)
    decoder = naive_soft_decision.SoftDecisionDecoder(G, H)
    SNRs, BERs = utility.test_decoder(decoder, d, c, SNRs=np.linspace(1, 7, 9), target_bit_errors=500, N_max=10000)

    data = pd.DataFrame({"SNR": SNRs, "BER": BERs})
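main() now collects the SNR/BER pairs into a pandas DataFrame; the plotting code itself lies outside this hunk. Given the seaborn import visible in the hunk header above, a typical way to turn data into a BER waterfall plot would be the following sketch (not the repository's actual plotting code):

import matplotlib.pyplot as plt
import seaborn as sns

ax = sns.lineplot(data=data, x="SNR", y="BER", marker="o")
ax.set_yscale("log")      # BER curves are conventionally drawn on a logarithmic scale
plt.show()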