NanoSocrates/Project_Model/Libs/Embedder/Classes/NanoSocratesEmbedder.py

import torch
from ..Utils import fixed_positional_encoding


class NanoSocratesEmbedder(torch.nn.Module):
    """Token embedder that adds fixed (non-learned) positional encodings."""

    def __init__(
        self,
        vocabulary_size: int,
        embedding_size: int
    ) -> None:
        super().__init__()
        # Learned lookup table: one embedding vector per vocabulary entry
        self.__embedder = torch.nn.Embedding(
            vocabulary_size,
            embedding_size
        )

    def forward(self, tokenized_sentence: list[int]) -> torch.Tensor:
        # Turn the token-id list into a LongTensor for the embedding lookup
        TOKENIZED_TENSOR = torch.tensor(tokenized_sentence, dtype=torch.long)
        computed_embeddings: torch.Tensor = self.__embedder(TOKENIZED_TENSOR)

        SENTENCE_LENGTH, EMBEDDING_SIZE = computed_embeddings.shape
        POSITIONAL_ENCODINGS = fixed_positional_encoding(
            SENTENCE_LENGTH,
            EMBEDDING_SIZE
        )

        # Add the fixed encodings element-wise to inject position information
        computed_embeddings = computed_embeddings + POSITIONAL_ENCODINGS
        return computed_embeddings
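

# --- Hedged sketch: assumed behavior of `fixed_positional_encoding` ---
# The ..Utils implementation is not shown in this file. The reference version
# below is an assumption: it computes the standard sinusoidal encoding from
# "Attention Is All You Need" (Vaswani et al., 2017). The name
# `_reference_fixed_positional_encoding` is hypothetical and only illustrative.
import math


def _reference_fixed_positional_encoding(
    sentence_length: int,
    embedding_size: int
) -> torch.Tensor:
    # One row per position, one column per embedding dimension
    # (assumes an even embedding_size, as most sinusoidal variants do)
    positions = torch.arange(sentence_length, dtype=torch.float32).unsqueeze(1)
    # 10000^(-2i / d_model) for each dimension pair (2i, 2i + 1)
    div_term = torch.exp(
        torch.arange(0, embedding_size, 2, dtype=torch.float32)
        * (-math.log(10000.0) / embedding_size)
    )
    encodings = torch.zeros(sentence_length, embedding_size)
    encodings[:, 0::2] = torch.sin(positions * div_term)  # even dimensions
    encodings[:, 1::2] = torch.cos(positions * div_term)  # odd dimensions
    return encodings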
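

# --- Usage sketch ---
# Illustrative only: the vocabulary and embedding sizes below are assumptions,
# not values taken from the NanoSocrates configuration.
if __name__ == "__main__":
    embedder = NanoSocratesEmbedder(vocabulary_size=32000, embedding_size=128)
    token_ids = [1, 42, 7, 2]  # a toy tokenized sentence
    embeddings = embedder(token_ids)
    print(embeddings.shape)  # torch.Size([4, 128])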