diff --git a/Project_Model/Libs/Transformer/Utils/post_tokenization.py b/Project_Model/Libs/Transformer/Utils/post_tokenization.py
index fc68363..23d5e4d 100644
--- a/Project_Model/Libs/Transformer/Utils/post_tokenization.py
+++ b/Project_Model/Libs/Transformer/Utils/post_tokenization.py
@@ -47,7 +47,7 @@ def normalize_sequence(
 ) -> tuple[list[int], list[bool]]:
     new_sequence = pad_sequence(sequence, max_length, pad_token)
-    new_sequence = truncate_sequence(sequence, max_length, end_token)
-    PADDING_MASK = create_padding_mask(sequence, pad_token)
+    new_sequence = truncate_sequence(new_sequence, max_length, end_token)
+    PADDING_MASK = create_padding_mask(new_sequence, pad_token)
     return (new_sequence, PADDING_MASK)
 
 