haystack/test/test_modeling_processor_saving_loading.py

import logging
from pathlib import Path
from haystack.modeling.data_handler.processor import SquadProcessor
from haystack.modeling.model.tokenization import Tokenizer
from haystack.modeling.utils import set_all_seeds
import torch
from conftest import SAMPLES_PATH


def test_processor_saving_loading(caplog):
    """Saving and reloading a SquadProcessor must produce identical feature tensors."""
    if caplog is not None:
        caplog.set_level(logging.CRITICAL)

    set_all_seeds(seed=42)
    lang_model = "roberta-base"
    tokenizer = Tokenizer.load(pretrained_model_name_or_path=lang_model, do_lower_case=False)

    processor = SquadProcessor(
        tokenizer=tokenizer,
        max_seq_len=256,
        label_list=["start_token", "end_token"],
        train_filename="train-sample.json",
        dev_filename="dev-sample.json",
        test_filename=None,
        data_dir=SAMPLES_PATH / "qa",
    )

    # Convert the dev sample to a dataset with the original processor
    dicts = processor.file_to_dicts(file=SAMPLES_PATH / "qa" / "dev-sample.json")
    data, tensor_names, _ = processor.dataset_from_dicts(dicts=dicts, indices=[1])

    # Save the processor to disk and load it back
    save_dir = Path("testsave/processor")
    processor.save(save_dir)
    processor = processor.load_from_dir(save_dir)

    # Convert the same sample again with the reloaded processor
    dicts = processor.file_to_dicts(file=SAMPLES_PATH / "qa" / "dev-sample.json")
    data_loaded, tensor_names_loaded, _ = processor.dataset_from_dicts(dicts, indices=[1])

    # The reloaded processor must yield the same tensor names and values
    assert tensor_names == tensor_names_loaded
    for i in range(len(data.tensors)):
        assert torch.all(torch.eq(data.tensors[i], data_loaded.tensors[i]))


if __name__ == "__main__":
    test_processor_saving_loading(None)