Hamming Code

image.png

image-2.png

1. Hard Decision Decoder

image.png
  • Hamming distance (\(d_H\)): the number of positions in which two equal-length binary words differ.

2. Maximum Likelihood Decoder

image.png

3. Experiments

import numpy as np

# Systematic generator matrix G = [I_4 | P] of the (7,4) Hamming code.
P = np.array([[1, 0, 1],
              [1, 1, 1],
              [1, 1, 0],
              [0, 1, 1]])
G = np.concatenate([np.eye(4), P], axis=1)
print(f"Generator matrix:\n{G}")

# All 16 four-bit messages, as lists of bits (decimal -> binary).
messages = [[int(bit) for bit in format(m, '04b')] for m in range(16)]
print(f"Messages:\n{messages}")

# Encode every message: c = m G (mod 2) gives the full codebook.
codewords = np.array(messages) @ G % 2
print(f"Codewords:\n{codewords}")
Generator matrix:
[[1. 0. 0. 0. 1. 0. 1.]
 [0. 1. 0. 0. 1. 1. 1.]
 [0. 0. 1. 0. 1. 1. 0.]
 [0. 0. 0. 1. 0. 1. 1.]]
Messages:
[[0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 0, 1, 1], [0, 1, 0, 0], [0, 1, 0, 1], [0, 1, 1, 0], [0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 1], [1, 0, 1, 0], [1, 0, 1, 1], [1, 1, 0, 0], [1, 1, 0, 1], [1, 1, 1, 0], [1, 1, 1, 1]]
Codewords:
[[0. 0. 0. 0. 0. 0. 0.]
 [0. 0. 0. 1. 0. 1. 1.]
 [0. 0. 1. 0. 1. 1. 0.]
 [0. 0. 1. 1. 1. 0. 1.]
 [0. 1. 0. 0. 1. 1. 1.]
 [0. 1. 0. 1. 1. 0. 0.]
 [0. 1. 1. 0. 0. 0. 1.]
 [0. 1. 1. 1. 0. 1. 0.]
 [1. 0. 0. 0. 1. 0. 1.]
 [1. 0. 0. 1. 1. 1. 0.]
 [1. 0. 1. 0. 0. 1. 1.]
 [1. 0. 1. 1. 0. 0. 0.]
 [1. 1. 0. 0. 0. 1. 0.]
 [1. 1. 0. 1. 0. 0. 1.]
 [1. 1. 1. 0. 1. 0. 0.]
 [1. 1. 1. 1. 1. 1. 1.]]
k = 4 # message (information) bits per block
n = 7 # codeword (transmitted channel) bits per block
R = k/n  # code rate of the (7,4) Hamming code
Nblocks = 100000  # number of simulated codewords per Eb/No point

3.1. Hard Decision

def get_BER(EbNodB):
    """Estimate the bit error rate of the (7,4) Hamming code over an
    AWGN channel with BPSK, using hard-decision minimum-Hamming-distance
    decoding.

    Parameters
    ----------
    EbNodB : float
        Energy per *information* bit over noise spectral density, in dB.

    Returns
    -------
    float
        Estimated post-decoding bit error rate (errors per codeword bit).
    """
    EbNo = 10**(EbNodB / 10)
    # Each transmitted symbol carries only R = k/n of an information
    # bit's energy, so the noise std for a unit-energy symbol is
    # sqrt(1 / (2 * R * Eb/No)).  (Original omitted the rate factor R.)
    sigma = (1 / (2 * R * EbNo))**0.5
    Nerrs = 0
    for _ in range(Nblocks):
        msg = messages[np.random.randint(0, 16)]
        cword = G.T @ msg % 2
        s = 1 - 2 * cword                    # BPSK map: 0 -> +1, 1 -> -1
        r = s + sigma * np.random.randn(n)
        # Hard decision on each received sample.
        b = (r < 0).astype(int)
        # Decode: choose the codeword at minimum Hamming distance from b.
        # (Original counted raw channel errors without decoding.)
        dH = np.sum(b != codewords, axis=1)
        chat = codewords[np.argmin(dH), :]
        Nerrs += np.sum(chat != cword)
    # BER = total bit errors / total transmitted bits.
    # (Original divided by an extra factor of k, which made the 6 dB
    # result exactly 1/k of the true uncoded BPSK BER.)
    return Nerrs / (Nblocks * n)

# Spot check the hard-decision decoder at Eb/No = 6 dB.
get_BER(6)
0.0005839285714285714
import matplotlib.pyplot as plt

# Sweep Eb/No from 0 to 8 dB in 2 dB steps and plot the hard-decision
# BER curve on a log scale.
snr_points = np.arange(0, 10, 2)
ber_points = []
for snr in snr_points:
    ber_points.append(get_BER(snr))

plt.semilogy(snr_points, ber_points)
plt.xlabel('Eb/No (dB)')
plt.ylabel('BER')
plt.ylim([1e-10, 1])
plt.grid()
plt.show()

3.2. Soft Decision Decoding

def get_BER_soft(EbNodB):
    """Estimate the bit error rate of the (7,4) Hamming code over an
    AWGN channel with BPSK, using soft-decision maximum-likelihood
    decoding (minimum Euclidean distance to a BPSK-mapped codeword).

    Parameters
    ----------
    EbNodB : float
        Energy per *information* bit over noise spectral density, in dB.

    Returns
    -------
    float
        Estimated post-decoding bit error rate (errors per codeword bit).
    """
    EbNo = 10**(EbNodB / 10)
    # Rate-adjusted noise std: each symbol carries R = k/n of an
    # information bit's energy.  (Original omitted the rate factor R.)
    sigma = (1 / (2 * R * EbNo))**0.5
    Nerrs = 0
    for _ in range(Nblocks):
        msg = messages[np.random.randint(0, 16)]
        cword = G.T @ msg % 2
        s = 1 - 2 * cword                    # BPSK map: 0 -> +1, 1 -> -1
        r = s + sigma * np.random.randn(n)
        # ML on AWGN = nearest BPSK-mapped codeword in Euclidean distance.
        distance = ((r.reshape(1, -1) - (1 - 2 * codewords))**2).sum(axis=1)
        b = codewords[np.argmin(distance), :]
        Nerrs += np.sum(b != cword)
    # BER = total bit errors / total transmitted bits.
    # (Original divided by a spurious extra factor of k.)
    return Nerrs / (Nblocks * n)

# Spot check the soft-decision decoder at Eb/No = 6 dB.
# (Bug fix: the original re-ran get_BER(6) here — a copy/paste slip —
# so the printed value duplicated the hard-decision result.)
get_BER_soft(6)
0.0005925
import matplotlib.pyplot as plt

# Soft-decision BER curve over the same 0-8 dB Eb/No sweep.
EbNodB = np.arange(0, 10, 2)
BER = list(map(get_BER_soft, EbNodB))

plt.semilogy(EbNodB, BER)
plt.xlabel('Eb/No (dB)')
plt.ylabel('BER')
plt.ylim([1e-10, 1])
plt.grid()
plt.show()