Activated Dropout to avoid overfitting
@@ -43,12 +43,12 @@ class Encoder(
         ATTENTION = self.__attention(x, x, x, key_padding_mask=padding_mask)
 
         # 2) Dropout
-        # DROPPED_ATTENTION = self.__dropout(ATTENTION)
-        # del ATTENTION
+        DROPPED_ATTENTION = self.__dropout(ATTENTION)
+        del ATTENTION
 
         # 3) Residual Connection
-        x = x + ATTENTION
-        del ATTENTION
+        x = x + DROPPED_ATTENTION
+        del DROPPED_ATTENTION
 
         # 4) Layer Normalization
         x = self.__layer_norm_1(x)
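For context, the pattern this hunk enables is the usual post-norm Transformer sub-layer: the attention output is regularized with dropout before being added back onto the residual stream. A minimal runnable sketch, assuming the class's private members wrap the standard torch.nn modules (nn.MultiheadAttention, nn.Dropout, nn.LayerNorm); every name below is illustrative, not taken from the repository:

import torch
import torch.nn as nn

# Illustrative stand-ins for the encoder's private modules (assumption).
attention = nn.MultiheadAttention(embed_dim=512, num_heads=8, batch_first=True)
dropout = nn.Dropout(p=0.1)
layer_norm = nn.LayerNorm(512)

x = torch.randn(2, 16, 512)                          # (batch, sequence, embedding)
padding_mask = torch.zeros(2, 16, dtype=torch.bool)  # True marks padded positions

# 1) Self-attention: query, key, and value are all x.
ATTENTION, _ = attention(x, x, x, key_padding_mask=padding_mask)
# 2) Dropout on the sub-layer output (what this commit activates).
DROPPED_ATTENTION = dropout(ATTENTION)
del ATTENTION
# 3) Residual connection adds the dropped output, not the raw one.
x = x + DROPPED_ATTENTION
del DROPPED_ATTENTION
# 4) Layer normalization after the residual (post-norm).
x = layer_norm(x)

The del calls mirror the diff's habit of freeing intermediates eagerly; they are optional, since the tensors would be reclaimed once out of scope anyway.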
@@ -57,12 +57,12 @@ class Encoder(
         FEED_FORWARD = self.__feed_forward(x)
 
         # 6) Dropout
-        # DROPPED_FEED_FORWARD = self.__dropout(FEED_FORWARD)
-        # del FEED_FORWARD
+        DROPPED_FEED_FORWARD = self.__dropout(FEED_FORWARD)
+        del FEED_FORWARD
 
         # 7) Residual Connection
-        x = x + FEED_FORWARD
-        del FEED_FORWARD
+        x = x + DROPPED_FEED_FORWARD
+        del DROPPED_FEED_FORWARD
 
         # 8) Layer Normalization
         x = self.__layer_norm_2(x)
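One point worth noting when verifying this change: nn.Dropout is a no-op in evaluation mode, so the old and new code paths only differ while the model is training. A quick check, assuming the same torch.nn modules as in the sketch above:

import torch
import torch.nn as nn

dropout = nn.Dropout(p=0.5)

dropout.train()              # training mode: dropout active
y = dropout(torch.ones(8))   # roughly half the entries zeroed, survivors scaled by 1 / (1 - p)

dropout.eval()               # evaluation mode: dropout disabled
z = dropout(torch.ones(8))   # identity; z is all ones
print(y)
print(z)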