haystack/test/test_distillation.py

import torch

from haystack.modeling.data_handler.processor import UnlabeledTextProcessor
from haystack.nodes import FARMReader

from conftest import SAMPLES_PATH
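

# All three tests follow the same pattern: snapshot the student's trainable
# weight tensors, run one distillation pass against a larger teacher, and
# assert that every remaining tensor changed. The two helpers below implement
# the snapshot and the comparison.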
def create_checkpoint(model):
    """Clone all trainable weight tensors of the reader's underlying model."""
    weights = []
    for name, weight in model.inferencer.model.named_parameters():
        if "weight" in name and weight.requires_grad:
            weights.append(torch.clone(weight))
    return weights


def assert_weight_change(weights, new_weights):
    """Assert that every weight tensor changed between the two checkpoints."""
    unchanged = [torch.equal(old_weight, new_weight) for old_weight, new_weight in zip(weights, new_weights)]
    print(unchanged)  # True marks a tensor that did not change; helps debugging on failure
    assert not any(unchanged)
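

# Prediction-layer distillation: the student learns from the teacher's output
# logits rather than from hard labels alone. bert-tiny (student) and
# bert-small (teacher) keep the test fast.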
def test_prediction_layer_distillation():
    student = FARMReader(model_name_or_path="prajjwal1/bert-tiny", num_processes=0)
    teacher = FARMReader(model_name_or_path="prajjwal1/bert-small", num_processes=0)

    # create a checkpoint of weights before distillation
    student_weights = create_checkpoint(student)
    assert len(student_weights) == 22
    student_weights.pop(-2)  # pooler is not updated due to different attention head

    student.distil_prediction_layer_from(teacher, data_dir=SAMPLES_PATH / "squad", train_filename="tiny.json")

    # create new checkpoint
    new_student_weights = create_checkpoint(student)
    assert len(new_student_weights) == 22
    new_student_weights.pop(-2)  # pooler is not updated due to different attention head

    # check if weights have changed
    assert_weight_change(student_weights, new_student_weights)
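

# Intermediate-layer (TinyBERT-style) distillation: the loss aligns the
# student's intermediate representations with the teacher's and does not
# touch the final prediction layer, which is why that layer is excluded from
# the weight checks below.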
def test_intermediate_layer_distillation():
    student = FARMReader(model_name_or_path="huawei-noah/TinyBERT_General_4L_312D")
    teacher = FARMReader(model_name_or_path="bert-base-uncased")

    # create a checkpoint of weights before distillation
    student_weights = create_checkpoint(student)
    assert len(student_weights) == 38
    student_weights.pop(-1)  # last layer is not affected by tinybert loss
    student_weights.pop(-1)  # pooler is not updated due to different attention head

    student.distil_intermediate_layers_from(
        teacher_model=teacher, data_dir=SAMPLES_PATH / "squad", train_filename="tiny.json"
    )

    # create new checkpoint
    new_student_weights = create_checkpoint(student)
    assert len(new_student_weights) == 38
    new_student_weights.pop(-1)  # last layer is not affected by tinybert loss
    new_student_weights.pop(-1)  # pooler is not updated due to different attention head

    # check if weights have changed
    assert_weight_change(student_weights, new_student_weights)
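

# Intermediate-layer distillation can also run without QA annotations: a
# custom UnlabeledTextProcessor is passed in, which builds training batches
# straight from a plain text file (doc_2.txt) instead of labeled SQuAD data.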
def test_intermediate_layer_distillation_from_scratch():
    student = FARMReader(model_name_or_path="huawei-noah/TinyBERT_General_4L_312D")
    teacher = FARMReader(model_name_or_path="bert-base-uncased")

    # create a checkpoint of weights before distillation
    student_weights = create_checkpoint(student)
    assert len(student_weights) == 38
    student_weights.pop(-1)  # last layer is not affected by tinybert loss
    student_weights.pop(-1)  # pooler is not updated due to different attention head

    processor = UnlabeledTextProcessor(
        tokenizer=teacher.inferencer.processor.tokenizer,
        max_seq_len=128,
        train_filename="doc_2.txt",
        data_dir=SAMPLES_PATH / "docs",
    )
    student.distil_intermediate_layers_from(
        teacher_model=teacher, data_dir=SAMPLES_PATH / "squad", train_filename="tiny.json", processor=processor
    )

    # create new checkpoint
    new_student_weights = create_checkpoint(student)
    assert len(new_student_weights) == 38
    new_student_weights.pop(-1)  # last layer is not affected by tinybert loss
    new_student_weights.pop(-1)  # pooler is not updated due to different attention head

    # check if weights have changed
    assert_weight_change(student_weights, new_student_weights)