Mirror of https://github.com/deepset-ai/haystack.git, synced 2025-12-29 07:59:27 +00:00
del HF token in tests (#8634)

parent 2a9a6401d2
commit f2b5f123b3
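
The change itself is mechanical: each affected test already removed HF_API_TOKEN via pytest's monkeypatch fixture, and this commit also removes HF_TOKEN (the variable huggingface_hub reads by default), so the tests can no longer pick up a real Hugging Face token from a developer's shell or from CI. A minimal sketch of the pattern outside Haystack (the test name and assertions below are illustrative, not taken from the commit):

import os


def test_runs_without_hf_credentials(monkeypatch):
    # delenv(..., raising=False) removes the variable if it is set and is a
    # no-op otherwise, so the test passes whether or not a token is configured.
    monkeypatch.delenv("HF_API_TOKEN", raising=False)
    monkeypatch.delenv("HF_TOKEN", raising=False)

    # Code under test that reads these variables now sees None and has to fall
    # back to anonymous access instead of silently using a real token.
    assert os.getenv("HF_API_TOKEN") is None
    assert os.getenv("HF_TOKEN") is None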
@@ -45,6 +45,7 @@ class TestTransformersZeroShotDocumentClassifier:

     def test_from_dict(self, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.classifiers.zero_shot_document_classifier.TransformersZeroShotDocumentClassifier",
             "init_parameters": {
@@ -73,6 +74,7 @@ class TestTransformersZeroShotDocumentClassifier:

     def test_from_dict_no_default_parameters(self, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.classifiers.zero_shot_document_classifier.TransformersZeroShotDocumentClassifier",
             "init_parameters": {"model": "cross-encoder/nli-deberta-v3-xsmall", "labels": ["positive", "negative"]},
@@ -166,6 +166,7 @@ class TestHuggingFaceLocalChatGenerator:
     @patch("haystack.components.generators.chat.hugging_face_local.pipeline")
     def test_warm_up(self, pipeline_mock, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         generator = HuggingFaceLocalChatGenerator(
             model="mistralai/Mistral-7B-Instruct-v0.2",
             task="text2text-generation",
@@ -18,6 +18,7 @@ class TestHuggingFaceLocalGenerator:
     @patch("haystack.utils.hf.model_info")
     def test_init_default(self, model_info_mock, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         model_info_mock.return_value.pipeline_tag = "text2text-generation"
         generator = HuggingFaceLocalGenerator()

@@ -273,7 +273,7 @@ class TestSentenceTransformersDiversityRanker:
         Test that ranker loads the SentenceTransformer model correctly during warm up.
         """
-        monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         mock_model_class = MagicMock()
         mock_model_instance = MagicMock()
         mock_model_class.return_value = mock_model_instance

@@ -313,6 +313,7 @@ class TestSimilarityRanker:
     @patch("haystack.components.rankers.transformers_similarity.AutoModelForSequenceClassification.from_pretrained")
     def test_device_map_dict(self, mocked_automodel, _mocked_autotokenizer, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         ranker = TransformersSimilarityRanker("model", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}})

         class MockedModel:
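
The hunks above share one more detail: the environment cleanup is paired with @patch and MagicMock so that the real model constructors never run, meaning no download and no authentication can occur during the test. The self-contained sketch below shows the same combination; TinyClassifier and its loader argument are hypothetical stand-ins, not Haystack code, and a plain MagicMock is injected instead of using @patch so the example runs on its own.

import os
from unittest.mock import MagicMock


class TinyClassifier:
    # Hypothetical stand-in for a component with a warm_up() step.
    def __init__(self, model_name, loader):
        self.model_name = model_name
        self.loader = loader  # injected so the test can pass in a mock
        self.model = None

    def warm_up(self):
        # The token, if any, comes from the environment.
        self.model = self.loader(self.model_name, token=os.getenv("HF_TOKEN"))


def test_warm_up_without_token(monkeypatch):
    monkeypatch.delenv("HF_API_TOKEN", raising=False)
    monkeypatch.delenv("HF_TOKEN", raising=False)

    mock_loader = MagicMock(return_value=MagicMock())
    component = TinyClassifier("cross-encoder/nli-deberta-v3-xsmall", mock_loader)
    component.warm_up()

    # With both variables removed, the loader must have been called anonymously.
    mock_loader.assert_called_once_with("cross-encoder/nli-deberta-v3-xsmall", token=None)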
@@ -519,6 +519,7 @@ def test_warm_up_use_hf_token(mocked_automodel, mocked_autotokenizer, initialize
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
 def test_device_map_auto(mocked_automodel, _mocked_autotokenizer, monkeypatch):
     monkeypatch.delenv("HF_API_TOKEN", raising=False)
+    monkeypatch.delenv("HF_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "auto"})
     auto_device = ComponentDevice.resolve_device(None)

@@ -537,6 +538,7 @@ def test_device_map_auto(mocked_automodel, _mocked_autotokenizer, monkeypatch):
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
 def test_device_map_str(mocked_automodel, _mocked_autotokenizer, monkeypatch):
     monkeypatch.delenv("HF_API_TOKEN", raising=False)
+    monkeypatch.delenv("HF_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "cpu:0"})

     class MockedModel:
@@ -554,6 +556,7 @@ def test_device_map_str(mocked_automodel, _mocked_autotokenizer, monkeypatch):
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
 def test_device_map_dict(mocked_automodel, _mocked_autotokenizer, monkeypatch):
     monkeypatch.delenv("HF_API_TOKEN", raising=False)
+    monkeypatch.delenv("HF_TOKEN", raising=False)
     reader = ExtractiveReader(
         "deepset/roberta-base-squad2", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}}
     )
@@ -54,6 +54,7 @@ class TestTransformersTextRouter:
     def test_from_dict(self, mock_auto_config_from_pretrained, monkeypatch):
         mock_auto_config_from_pretrained.return_value = MagicMock(label2id={"en": 0, "de": 1})
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.transformers_text_router.TransformersTextRouter",
             "init_parameters": {
@@ -84,6 +85,7 @@ class TestTransformersTextRouter:
     def test_from_dict_no_default_parameters(self, mock_auto_config_from_pretrained, monkeypatch):
         mock_auto_config_from_pretrained.return_value = MagicMock(label2id={"en": 0, "de": 1})
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.transformers_text_router.TransformersTextRouter",
             "init_parameters": {"model": "papluca/xlm-roberta-base-language-detection"},
@@ -105,6 +107,7 @@ class TestTransformersTextRouter:
     def test_from_dict_with_cpu_device(self, mock_auto_config_from_pretrained, monkeypatch):
         mock_auto_config_from_pretrained.return_value = MagicMock(label2id={"en": 0, "de": 1})
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.transformers_text_router.TransformersTextRouter",
             "init_parameters": {
@@ -28,6 +28,7 @@ class TestTransformersZeroShotTextRouter:

     def test_from_dict(self, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.zero_shot_text_router.TransformersZeroShotTextRouter",
             "init_parameters": {
@@ -56,6 +57,7 @@ class TestTransformersZeroShotTextRouter:

     def test_from_dict_no_default_parameters(self, monkeypatch):
         monkeypatch.delenv("HF_API_TOKEN", raising=False)
+        monkeypatch.delenv("HF_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.zero_shot_text_router.TransformersZeroShotTextRouter",
             "init_parameters": {"labels": ["query", "passage"]},
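
Since the same two delenv calls now repeat in every test touched here, one possible consolidation (an assumption, not something this commit does) would be an autouse fixture in a shared conftest.py that clears both variables before every test:

import pytest


@pytest.fixture(autouse=True)
def clear_hf_tokens(monkeypatch):
    # Hypothetical follow-up: runs automatically before each test and removes
    # both token variables, whether or not they are set.
    monkeypatch.delenv("HF_API_TOKEN", raising=False)
    monkeypatch.delenv("HF_TOKEN", raising=False)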