From ee80eaca57b23644f29c4e2d878a34547fb6cd54 Mon Sep 17 00:00:00 2001
From: yjoonjang
Date: Thu, 31 Oct 2024 19:35:41 +0900
Subject: [PATCH] fix: fix bug when knowledge_distillation is disabled during
 embedder fine-tuning

---
 FlagEmbedding/abc/finetune/embedder/AbsDataset.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/FlagEmbedding/abc/finetune/embedder/AbsDataset.py b/FlagEmbedding/abc/finetune/embedder/AbsDataset.py
index efbfa2d..7b5861c 100644
--- a/FlagEmbedding/abc/finetune/embedder/AbsDataset.py
+++ b/FlagEmbedding/abc/finetune/embedder/AbsDataset.py
@@ -416,8 +416,9 @@ class AbsEmbedderSameDatasetTrainDataset(AbsEmbedderTrainDataset):
 
             passages.extend(tmp_passages)
 
-        if len(teacher_scores) > 0 and len(passages) > 0:
-            assert len(teacher_scores) == len(passages)
+        if teacher_scores is not None:
+            if len(teacher_scores) > 0 and len(passages) > 0:
+                assert len(teacher_scores) == len(passages)
 
         return queries, passages, teacher_scores
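
A minimal sketch (not part of the patch) illustrating the failure mode being fixed, assuming `teacher_scores` is `None` when knowledge distillation is disabled; the `collate` function below is a hypothetical stand-in that mirrors the guarded logic, not the library's actual code:

```python
def collate(queries, passages, teacher_scores):
    # Before the fix: with knowledge distillation disabled, teacher_scores
    # is None, and calling len(None) raises a TypeError.
    if teacher_scores is not None:  # the guard added by this patch
        if len(teacher_scores) > 0 and len(passages) > 0:
            assert len(teacher_scores) == len(passages)
    return queries, passages, teacher_scores

# Without the `is not None` guard, this call would crash with
# "TypeError: object of type 'NoneType' has no len()".
collate(["q1"], ["p1", "p2"], teacher_scores=None)
```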