From f9545aca1d796bbb1df974abfe5e0eaf00f4f7ca Mon Sep 17 00:00:00 2001
From: Christian Risi <75698846+CnF-Gris@users.noreply.github.com>
Date: Tue, 7 Oct 2025 16:36:11 +0200
Subject: [PATCH] Deleted MultiHeadAttention

---
 .../Transformer/Classes/MultiHeadAttention.py | 24 -------------------
 1 file changed, 24 deletions(-)
 delete mode 100644 Project_Model/Libs/Transformer/Classes/MultiHeadAttention.py

diff --git a/Project_Model/Libs/Transformer/Classes/MultiHeadAttention.py b/Project_Model/Libs/Transformer/Classes/MultiHeadAttention.py
deleted file mode 100644
index 63c9a6f..0000000
--- a/Project_Model/Libs/Transformer/Classes/MultiHeadAttention.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# multi-head attention -> (then to) ff
-# attention: qkv -> score = qk -> divide -> softamx
-# multihead -> QKV diferent in each head ( built by : X*[WQ/QK/WV])
-# z = soft(Q*K'/sqr(d))*V
-# recombine Z: 1) concatenate. 2) [z01234] * W = Z
-# we expect later to have padding token
-########################
-# WIP
-########################
-
-import torch.nn as nn
-
-embed_dim = 256
-num_heads = 8
-multihead_attn = nn.MultiheadAttention(embed_dim, num_heads)
-
-
-class MultiheadAttention:
-
-    def __init__(
-        self,
-        num_heads=8,
-    ) -> None:
-        pass
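
For reference, the comments in the removed stub describe the standard multi-head
attention recipe: per-head Q, K, V built as X*[W_Q/W_K/W_V], scores
z = softmax(Q K' / sqrt(d)) V, then the heads concatenated and recombined by an
output matrix, with padding tokens to be masked later. A minimal sketch of that
recipe follows, assuming hypothetical names (ManualMultiHeadAttention, d_model,
w_o) and a boolean padding mask; the nn.MultiheadAttention(embed_dim, num_heads)
call the stub already instantiated is the built-in equivalent.

import math

import torch
import torch.nn as nn


class ManualMultiHeadAttention(nn.Module):
    """Per head: z = softmax(Q @ K^T / sqrt(d_head)) @ V; then concat + W_O."""

    def __init__(self, d_model: int = 256, num_heads: int = 8) -> None:
        super().__init__()
        assert d_model % num_heads == 0, "d_model must divide evenly across heads"
        self.num_heads = num_heads
        self.d_head = d_model // num_heads
        # X @ W_Q / W_K / W_V, fused across all heads into one matrix each
        self.w_q = nn.Linear(d_model, d_model)
        self.w_k = nn.Linear(d_model, d_model)
        self.w_v = nn.Linear(d_model, d_model)
        self.w_o = nn.Linear(d_model, d_model)  # recombines the concatenated heads

    def forward(
        self, x: torch.Tensor, padding_mask: torch.Tensor | None = None
    ) -> torch.Tensor:
        batch, seq_len, d_model = x.shape

        def split(t: torch.Tensor) -> torch.Tensor:
            # (batch, seq, d_model) -> (batch, heads, seq, d_head)
            return t.view(batch, seq_len, self.num_heads, self.d_head).transpose(1, 2)

        q, k, v = split(self.w_q(x)), split(self.w_k(x)), split(self.w_v(x))
        # score = Q @ K^T, divided by sqrt(d_head) before the softmax
        scores = q @ k.transpose(-2, -1) / math.sqrt(self.d_head)
        if padding_mask is not None:  # True marks padding positions to ignore
            scores = scores.masked_fill(padding_mask[:, None, None, :], float("-inf"))
        z = torch.softmax(scores, dim=-1) @ v
        # Concatenate heads back to (batch, seq, d_model), then project with W_O
        z = z.transpose(1, 2).contiguous().view(batch, seq_len, d_model)
        return self.w_o(z)


attn = ManualMultiHeadAttention(d_model=256, num_heads=8)
out = attn(torch.randn(2, 10, 256))  # -> shape (2, 10, 256)

Fusing the per-head projections into single d_model x d_model matrices is the
usual trick: it is equivalent to separate per-head W_Q/W_K/W_V blocks but runs
as one matmul, which is also how the built-in module organizes its weights.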