Changed target_bit_errors and N_max for simulation
parent 01d4cf22aa
commit 04eaea92a1
@@ -27,7 +27,7 @@ def main():
     print(f"Simulating with c = {c}")

     decoder = proximal.ProximalDecoder(H, K=100, gamma=0.01)
-    SNRs, BERs = utility.test_decoder(decoder, c, SNRs=[1, 3, 5, 7], N_max=10000)
+    SNRs, BERs = utility.test_decoder(decoder, c, SNRs=np.linspace(1, 5.5, 7), target_bit_errors=200, N_max=15000)

     data = pd.DataFrame({"SNR": SNRs, "BER": BERs})
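The new target_bit_errors argument suggests an early-stopping Monte Carlo loop: each SNR point runs until a fixed number of bit errors has been observed or the frame budget N_max is exhausted. A minimal sketch of that stopping rule, assuming BPSK over an AWGN channel and an all-zero transmitted codeword; simulate_ber, decode, n_bits, and the noise convention are illustrative stand-ins, not the actual utility.test_decoder implementation:

    import numpy as np

    def simulate_ber(decode, snr_db, n_bits, target_bit_errors=200, N_max=15000):
        """BER estimate with early stopping: send frames until
        target_bit_errors bit errors are seen or N_max frames are used.
        All names here are hypothetical sketches of the real utility code."""
        rng = np.random.default_rng()
        # AWGN noise std for unit-energy BPSK (assumed Eb/N0 convention, code rate omitted)
        sigma = np.sqrt(1.0 / (2.0 * 10.0 ** (snr_db / 10.0)))
        bit_errors, frames = 0, 0
        while bit_errors < target_bit_errors and frames < N_max:
            tx = np.zeros(n_bits)                                  # all-zero codeword (valid for linear codes)
            rx = 1.0 - 2.0 * tx + sigma * rng.standard_normal(n_bits)  # BPSK symbols plus noise
            est = decode(rx)                                       # decoder returns hard bit estimates
            bit_errors += np.count_nonzero(est != tx)
            frames += 1
        return bit_errors / (frames * n_bits)

    # Example with an uncoded hard-decision "decoder" as a stand-in:
    ber = simulate_ber(lambda rx: (rx < 0).astype(int), snr_db=3.0, n_bits=204)

Fixing target_bit_errors keeps the relative accuracy of each BER point roughly uniform across the sweep, while the raised N_max=15000 bounds runtime at high SNR where errors are rare; np.linspace(1, 5.5, 7) trades the four coarse points for a denser sweep and a smoother waterfall curve.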