
* set fixture scope to "function"
* run FARMReader without multiprocessing
* dispose of ray after tests
* run most expensive tasks first in test files
* run expensive tests first
* run garbage collector between tests

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
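The cleanup steps above could be wired together in a pytest conftest.py roughly as follows; this is a minimal sketch under the assumption that ray is installed, not the actual Haystack test setup (the fixture name is hypothetical):

# conftest.py -- hypothetical sketch of the cleanup described in the commit message
import gc

import pytest
import ray


@pytest.fixture(scope="function", autouse=True)
def cleanup_between_tests():
    yield  # run the test body first
    ray.shutdown()  # dispose of any ray workers the test started (no-op if ray was never initialized)
    gc.collect()  # free model memory before the next test runs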
33 lines · 1.3 KiB · Python
from haystack.nodes import FARMReader

import torch


def test_distillation():
    # distil a small teacher model into a tiny student and verify that the student's weights get updated
    student = FARMReader(model_name_or_path="prajjwal1/bert-tiny", num_processes=0)
    teacher = FARMReader(model_name_or_path="prajjwal1/bert-small", num_processes=0)

    # create a checkpoint of weights before distillation
    student_weights = []
    for name, weight in student.inferencer.model.named_parameters():
        if "weight" in name and weight.requires_grad:
            student_weights.append(torch.clone(weight))

    assert len(student_weights) == 22

    student_weights.pop(-2)  # pooler is not updated due to different attention head

    student.distil_from(teacher, data_dir="samples/squad", train_filename="tiny.json")

    # create a new checkpoint of weights after distillation
    new_student_weights = []
    for name, weight in student.inferencer.model.named_parameters():
        if "weight" in name and weight.requires_grad:
            new_student_weights.append(weight)

    assert len(new_student_weights) == 22

    new_student_weights.pop(-2)  # pooler is not updated due to different attention head

    # check if weights have changed
    assert not any(
        torch.equal(old_weight, new_weight)
        for old_weight, new_weight in zip(student_weights, new_student_weights)
    )