Added support for batches
parent 14b810c451
commit 9b5bb6d5f8
@@ -1,19 +1,13 @@
 import torch
 from ..Utils import fixed_positional_encoding
 
 
+# WIP FOR BATCHING
 class NanoSocratesEmbedder(torch.nn.Module):
 
-    def __init__(
-        self,
-        vocabulary_size: int,
-        embedding_size: int
-    ) -> None:
+    def __init__(self, vocabulary_size: int, embedding_size: int) -> None:
         super().__init__()
-        self.__embedder = torch.nn.Embedding(
-            vocabulary_size,
-            embedding_size
-        )
+        self.__embedder = torch.nn.Embedding(vocabulary_size, embedding_size)
 
 
     def forward(self, tokenized_sentence: list[list[int]]) -> torch.Tensor:
@@ -24,11 +18,8 @@ class NanoSocratesEmbedder(torch.nn.Module):
         _, SENTENCE_LENGHT, EMBEDDING_SIZE = computed_embeddings.shape # for batching
 
         POSITIONAL_ENCODINGS = fixed_positional_encoding(
-            SENTENCE_LENGHT,
-            EMBEDDING_SIZE
+            SENTENCE_LENGHT, EMBEDDING_SIZE
         )
 
-        computed_embeddings = computed_embeddings + POSITIONAL_ENCODINGS.unsqueeze(0) # for batching
-
-
+        computed_embeddings = computed_embeddings + POSITIONAL_ENCODINGS # for batching
         return computed_embeddings
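Why the .unsqueeze(0) could be dropped: PyTorch broadcasting aligns shapes from the trailing dimensions, so adding a (SENTENCE_LENGHT, EMBEDDING_SIZE) encoding to a (BATCH, SENTENCE_LENGHT, EMBEDDING_SIZE) batch already broadcasts over the batch axis, and the explicit leading dimension was redundant. A minimal sketch of that equivalence; the sinusoidal fixed_positional_encoding below is only a stand-in for the ..Utils helper, which is not part of this diff:

import math
import torch

def fixed_positional_encoding(sentence_length: int, embedding_size: int) -> torch.Tensor:
    # Stand-in for the helper imported from ..Utils (not shown in this commit):
    # classic sinusoidal encodings with shape (sentence_length, embedding_size).
    position = torch.arange(sentence_length, dtype=torch.float32).unsqueeze(1)
    div_term = torch.exp(
        torch.arange(0, embedding_size, 2, dtype=torch.float32)
        * (-math.log(10000.0) / embedding_size)
    )
    encodings = torch.zeros(sentence_length, embedding_size)
    encodings[:, 0::2] = torch.sin(position * div_term)
    encodings[:, 1::2] = torch.cos(position * div_term)
    return encodings

embeddings = torch.randn(4, 7, 16)            # (batch, sentence, embedding)
encodings = fixed_positional_encoding(7, 16)  # (sentence, embedding)

explicit = embeddings + encodings.unsqueeze(0)  # old: explicit (1, 7, 16) batch axis
broadcast = embeddings + encodings              # new: trailing dims align, batch broadcasts
assert torch.equal(explicit, broadcast)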
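For completeness, a usage sketch of the embedder after this commit. The part of forward that turns tokenized_sentence into a tensor is not visible in the diff, so the torch.tensor conversion below is an assumption, and the batch must be rectangular (equal-length, e.g. pre-padded, sentences). It reuses the stand-in fixed_positional_encoding from the sketch above:

# Hypothetical reconstruction for illustration; only the lines visible
# in the diff above are confirmed by the commit.
class NanoSocratesEmbedder(torch.nn.Module):

    def __init__(self, vocabulary_size: int, embedding_size: int) -> None:
        super().__init__()
        self.__embedder = torch.nn.Embedding(vocabulary_size, embedding_size)

    def forward(self, tokenized_sentence: list[list[int]]) -> torch.Tensor:
        # Assumed conversion; the diff does not show this step.
        token_ids = torch.tensor(tokenized_sentence, dtype=torch.long)
        computed_embeddings = self.__embedder(token_ids)  # (batch, sentence, embedding)
        _, SENTENCE_LENGHT, EMBEDDING_SIZE = computed_embeddings.shape  # for batching
        POSITIONAL_ENCODINGS = fixed_positional_encoding(
            SENTENCE_LENGHT, EMBEDDING_SIZE
        )
        computed_embeddings = computed_embeddings + POSITIONAL_ENCODINGS  # for batching
        return computed_embeddings

embedder = NanoSocratesEmbedder(vocabulary_size=100, embedding_size=16)
batch = [[5, 1, 9, 2], [7, 3, 0, 0]]  # two padded sentences of length 4
print(embedder(batch).shape)          # torch.Size([2, 4, 16])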