From fe62b1edd5ddabff405a958b628356bc0dfa342d Mon Sep 17 00:00:00 2001
From: Christian Risi <75698846+CnF-Gris@users.noreply.github.com>
Date: Thu, 16 Oct 2025 20:05:35 +0200
Subject: [PATCH] Fixed evaluation

---
 Playgrounds/evaluation.py                      | 8 ++++----
 Project_Model/Libs/TransformerUtils/metrics.py | 3 ++-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/Playgrounds/evaluation.py b/Playgrounds/evaluation.py
index 86d1d90..c2d6812 100644
--- a/Playgrounds/evaluation.py
+++ b/Playgrounds/evaluation.py
@@ -11,13 +11,13 @@ DEVICE = torch_shims.get_default_device()
 torch.set_default_device(DEVICE)
 
 # Get paths
-MODEL_DIR = "Assets/Model/curated"
-# MODEL_DIR= "Assets/Dataset/Tmp"
+# MODEL_DIR = "Assets/Model/curated"
+MODEL_DIR= "Assets/Dataset/Tmp"
 VOCABULARY_PATH = Path("Assets/Model/small/bpe-small-16.json")
 TRAIN_DATASET_PATH = Path("Assets/Dataset/1-hop/small/holdout/train.csv")
 VALIDATION_DATASET_PATH = Path("Assets/Dataset/1-hop/small/holdout/evaluation.csv")
 TEST_DATASET_PATH = Path("Assets/Dataset/1-hop/small/holdout/test.csv")
-# TEST_DATASET_PATH = Path("Assets/Dataset/1-hop/toy/rdf_text.csv")
+TEST_DATASET_PATH = Path("Assets/Dataset/1-hop/toy/rdf_text.csv")
 
 MODEL_PATH = Path(f"{MODEL_DIR}/NanoSocrates.zip")
 
@@ -162,7 +162,7 @@ with torch.no_grad():
 
             if tasktype == Batch.TaskType.TEXT2RDF:
                 ref = TUtils.remove_padding(exp_tokens, PAD_TOKEN, END_TOKEN)
-                pred = TUtils.remove_padding(tokens, PAD_TOKEN, END_TOKEN)
+                pred = TUtils.remove_padding(tokens[1:], PAD_TOKEN, END_TOKEN)
                 ref, pred = TUtils.balance_paddings(ref, pred, PAD_TOKEN)
 
                 precision, recall = TUtils.txt2rdf(ref, pred)
diff --git a/Project_Model/Libs/TransformerUtils/metrics.py b/Project_Model/Libs/TransformerUtils/metrics.py
index 597e7f5..9b28fdc 100644
--- a/Project_Model/Libs/TransformerUtils/metrics.py
+++ b/Project_Model/Libs/TransformerUtils/metrics.py
@@ -35,7 +35,8 @@ def rouge(ref: list[str], pred: list[str]):
 
 
 def f1(precision: float, recall: float):
-    return (2 * recall * precision) / (precision + recall)
+    divisor = max((precision + recall), 1E-5)
+    return (2 * recall * precision) / divisor
 
 
 def average(array: list[float]):
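
In short, the metrics.py hunk clamps the divisor of f1() so a precision/recall pair of (0, 0) no longer raises ZeroDivisionError, and the TEXT2RDF branch in evaluation.py strips the first generated token before padding removal (presumably the leading start-of-sequence token, so prediction and reference line up). A minimal sanity check of the patched f1(), with illustrative values rather than numbers from the repo's tests:

    def f1(precision: float, recall: float):
        # Clamp the divisor so precision == recall == 0 yields 0.0 instead of a crash.
        divisor = max((precision + recall), 1E-5)
        return (2 * recall * precision) / divisor

    print(f1(0.8, 0.5))  # ~0.615, unchanged in the normal case
    print(f1(0.0, 0.0))  # 0.0 instead of raising ZeroDivisionError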