Fixed a bug where the decoder was fed encoder embeddings rather than encoder output

Christian Risi 2025-10-09 11:37:21 +02:00
parent ba592c3480
commit 0158db2dce


@@ -47,7 +47,7 @@ class TrainingModel(torch.nn.Module):
         encoder_output, _ = self.__encoder((encoder_tensor, padding_tensor))
         decoder_output, _, _, _ = self.__decoder(
-            (decoder_tensor, encoder_tensor, encoder_tensor, None)
+            (decoder_tensor, encoder_output, encoder_output, None)
         )
         logits: torch.Tensor = self.__detokener(decoder_output)
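
Why the fix matters: the decoder's cross-attention keys and values must come from the encoder's contextualized output (encoder_output), not from the raw token embeddings (encoder_tensor). With the embeddings wired in, the encoder stack was effectively bypassed and contributed nothing to the decoder's attention. Below is a minimal sketch of the corrected data flow using stock torch.nn modules; it is an illustration under assumptions, not the repository's actual API: the tuple-based __encoder/__decoder call convention from the diff is replaced by standard module calls, and TrainingModelSketch, vocab_size, and d_model are hypothetical names and sizes.

import torch

class TrainingModelSketch(torch.nn.Module):
    def __init__(self, vocab_size: int = 1000, d_model: int = 64):
        super().__init__()
        self.embedding = torch.nn.Embedding(vocab_size, d_model)
        enc_layer = torch.nn.TransformerEncoderLayer(d_model, nhead=4, batch_first=True)
        dec_layer = torch.nn.TransformerDecoderLayer(d_model, nhead=4, batch_first=True)
        self.encoder = torch.nn.TransformerEncoder(enc_layer, num_layers=2)
        self.decoder = torch.nn.TransformerDecoder(dec_layer, num_layers=2)
        self.detokener = torch.nn.Linear(d_model, vocab_size)  # projects hidden states to vocabulary logits

    def forward(self, src_ids: torch.Tensor, tgt_ids: torch.Tensor) -> torch.Tensor:
        encoder_tensor = self.embedding(src_ids)       # raw embeddings: the buggy cross-attention input
        decoder_tensor = self.embedding(tgt_ids)
        encoder_output = self.encoder(encoder_tensor)  # contextualized states: the correct input
        # The fix in this commit: feed encoder_output, not encoder_tensor,
        # as the memory (keys/values) for the decoder's cross-attention.
        # (A causal tgt_mask is omitted here for brevity.)
        decoder_output = self.decoder(decoder_tensor, memory=encoder_output)
        return self.detokener(decoder_output)

A quick shape check of the sketch:

model = TrainingModelSketch()
logits = model(torch.randint(0, 1000, (2, 10)), torch.randint(0, 1000, (2, 8)))
assert logits.shape == (2, 8, 1000)  # (batch, target length, vocab size)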