added padding_mask entry to decoder and encoder
@@ -31,12 +31,12 @@ class Encoder(
         self.__dropout = nn.Dropout(0.1)  # ...
         pass

-    def forward(self, x):
+    def forward(self, x, padding_mask=None):
         # -> ATTENTION -> dropout -> add and normalize -> FF -> dropout -> add and normalize ->
         # Attention with Residual Connection [input + self-attention]

         # 1) Multi Head Attention
-        ATTENTION = self.__attention(x, x, x)
+        ATTENTION = self.__attention(x, x, x, key_padding_mask=padding_mask)

         # 2) Dropout
         DROPPED_ATTENTION = self.__dropout(ATTENTION)
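For context, here is a minimal runnable sketch of how this encoder layer and its new padding mask might fit together, assuming `self.__attention` is a `torch.nn.MultiheadAttention` module (the `key_padding_mask` keyword matches that API). The class name, dimensions, `batch_first=True`, and the layer-norm step are illustrative assumptions, not taken from the repository:

```python
import torch
import torch.nn as nn

class EncoderLayer(nn.Module):
    """Illustrative stand-in for the Encoder class touched by this commit."""

    def __init__(self, embed_dim: int = 512, num_heads: int = 8):
        super().__init__()
        # batch_first=True -> inputs are (batch, seq_len, embed_dim); an assumption here
        self.__attention = nn.MultiheadAttention(embed_dim, num_heads, batch_first=True)
        self.__dropout = nn.Dropout(0.1)
        self.__norm = nn.LayerNorm(embed_dim)

    def forward(self, x, padding_mask=None):
        # -> ATTENTION -> dropout -> add and normalize -> ...
        # nn.MultiheadAttention returns (output, attention_weights), so unpack;
        # key_padding_mask is (batch, seq_len), True marks positions to ignore.
        attention, _ = self.__attention(x, x, x, key_padding_mask=padding_mask)
        dropped = self.__dropout(attention)
        return self.__norm(x + dropped)  # residual connection + layer norm

# Usage: mask the padded tail of each sequence.
batch, seq_len, embed_dim = 2, 5, 512
x = torch.randn(batch, seq_len, embed_dim)
padding_mask = torch.tensor([
    [False, False, False, True, True],    # last 2 tokens are padding
    [False, False, False, False, False],  # no padding
])
out = EncoderLayer()(x, padding_mask=padding_mask)
print(out.shape)  # torch.Size([2, 5, 512])
```

One detail worth noting: if `self.__attention` is indeed `nn.MultiheadAttention`, its forward call returns an `(output, weights)` tuple, which is why the sketch unpacks the result before applying dropout.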