@@ -99,8 +99,8 @@ class Trellis:
9999 [2 1]]
100100 References
101101 ----------
102- [1] S. Benedetto, R. Garello, et G. Montorsi, "A search for good convolutional codes to be used in the
103- construction of turbo codes", IEEE Transactions on Communications, vol. 46, nᵒ 9, p. 1101‑1105, sept . 1998.
102+ [1] S. Benedetto, R. Garello et G. Montorsi, "A search for good convolutional codes to be used in the
103+ construction of turbo codes", IEEE Transactions on Communications, vol. 46, n. 9, p. 1101-1105, sept. 1998.
104104 """
105105 def __init__ (self , memory , g_matrix , feedback = None , code_type = 'default' ):
106106
@@ -389,7 +389,7 @@ def conv_encode(message_bits, trellis, termination = 'term', puncture_matrix=Non
389389
390390 outbits = np .zeros (number_outbits , 'int' )
391391 if puncture_matrix is not None :
392- p_outbits = np .zeros (number_outbits )
392+ p_outbits = np .zeros (number_outbits , 'int' )
393393 else :
394394 p_outbits = np .zeros (int (number_outbits *
395395 puncture_matrix [0 :].sum ()/ np .size (puncture_matrix , 1 )), 'int' )
@@ -477,7 +477,9 @@ def _acs_traceback(r_codeword, trellis, decoding_type,
477477 if decoding_type == 'hard' :
478478 branch_metric = hamming_dist (r_codeword .astype (int ), i_codeword_array .astype (int ))
479479 elif decoding_type == 'soft' :
480- pass
480+ neg_LL_0 = np .log (np .exp (r_codeword ) + 1 ) # negative log-likelihood to have received a 0
481+ neg_LL_1 = neg_LL_0 - r_codeword # negative log-likelihood to have received a 1
482+ branch_metric = np .where (i_codeword_array , neg_LL_1 , neg_LL_0 ).sum ()
481483 elif decoding_type == 'unquantized' :
482484 i_codeword_array = 2 * i_codeword_array - 1
483485 branch_metric = euclid_dist (r_codeword , i_codeword_array )
@@ -531,7 +533,7 @@ def viterbi_decode(coded_bits, trellis, tb_depth=None, decoding_type='hard'):
531533 decoding_type : str {'hard', 'soft', 'unquantized'}
532534 The type of decoding to be used.
533535 'hard' option is used for hard inputs (bits) to the decoder, e.g., BSC channel.
534- 'soft' option is used for soft inputs (LLRs) to the decoder.
536+ 'soft' option is used for soft inputs (LLRs) to the decoder. LLRs are clipped in [-500, 500].
535537 'unquantized' option is used for soft inputs (real numbers) to the decoder, e.g., BAWGN channel.
536538 Returns
537539 -------
@@ -561,26 +563,29 @@ def viterbi_decode(coded_bits, trellis, tb_depth=None, decoding_type='hard'):
561563
562564 path_metrics = np .full ((trellis .number_states , 2 ), np .inf )
563565 path_metrics [0 ][0 ] = 0
564- paths = np .full ((trellis .number_states , tb_depth ), np . iinfo ( int ). max , 'int' )
566+ paths = np .empty ((trellis .number_states , tb_depth ), 'int' )
565567 paths [0 ][0 ] = 0
566568
567569 decoded_symbols = np .zeros ([trellis .number_states , tb_depth ], 'int' )
568- decoded_bits = np .empty (math .ceil (L / k ) * k + tb_depth , 'int' )
570+ decoded_bits = np .empty (int ( math .ceil (( L + tb_depth ) / k ) * k ) , 'int' )
569571 r_codeword = np .zeros (n , 'int' )
570572
571573 tb_count = 1
572574 count = 0
573575 current_number_states = trellis .number_states
574576
577+ coded_bits = coded_bits .clip (- 500 , 500 )
578+
575579 for t in range (1 , int ((L + total_memory )/ k )):
576580 # Get the received codeword corresponding to t
577581 if t <= L // k :
578582 r_codeword = coded_bits [(t - 1 )* n :t * n ]
583+ # Pad with '0'
579584 else :
580585 if decoding_type == 'hard' :
581586 r_codeword [:] = 0
582587 elif decoding_type == 'soft' :
583- pass
588+ r_codeword [:] = np . iinfo ( int ). min
584589 elif decoding_type == 'unquantized' :
585590 r_codeword [:] = - 1
586591 else :
0 commit comments