Merge pull request #1171 from yjoonjang/master

fix: fix bug when not using knowledge_distillation in fine-tuning embedder
chaofan 2024-10-31 18:44:34 +08:00 committed by GitHub
commit b0233bf1dc

@@ -416,8 +416,9 @@ class AbsEmbedderSameDatasetTrainDataset(AbsEmbedderTrainDataset):
             passages.extend(tmp_passages)
-        if len(teacher_scores) > 0 and len(passages) > 0:
-            assert len(teacher_scores) == len(passages)
+        if teacher_scores is not None:
+            if len(teacher_scores) > 0 and len(passages) > 0:
+                assert len(teacher_scores) == len(passages)
         return queries, passages, teacher_scores
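
Why the extra guard is needed: when fine-tuning without knowledge distillation, no teacher scores are loaded and teacher_scores stays None, so the old unconditional len(teacher_scores) call raised TypeError: object of type 'NoneType' has no len(). A minimal, self-contained sketch of the failure mode and the fix (check_teacher_scores is a hypothetical helper for illustration, not part of FlagEmbedding):

def check_teacher_scores(teacher_scores, passages):
    # Old code called len(teacher_scores) unconditionally, which raises
    # TypeError when teacher_scores is None (no knowledge distillation).
    # The fix skips the consistency check entirely in that case.
    if teacher_scores is not None:
        if len(teacher_scores) > 0 and len(passages) > 0:
            assert len(teacher_scores) == len(passages)
    return teacher_scores

# No distillation: teacher_scores is None; with the fix this no longer crashes.
check_teacher_scores(None, ["passage 1", "passage 2"])

# With distillation: one teacher score per passage, so the assertion passes.
check_teacher_scores([0.9, 0.1], ["passage 1", "passage 2"])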