Mirror of https://github.com/deepset-ai/haystack.git (synced 2026-01-07 20:46:31 +00:00)
Fix unit tests failing if HF_API_TOKEN is set (#7491)
commit ff269db12d (parent 65705a8fdc)
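All five test files below get the same treatment: the test function gains pytest's built-in monkeypatch fixture and deletes HF_API_TOKEN from the environment before the component under test is constructed, so a token exported in a developer's shell or in CI can no longer change which arguments reach the mocked Hugging Face calls (raising=False makes the deletion a no-op when the variable is not set). A minimal sketch of the pattern, modelled on the first changed test below; the warm_up call and the final assertion are illustrative and not taken from the diff:

from unittest.mock import patch

from haystack.components.generators.chat import HuggingFaceLocalChatGenerator


@patch("haystack.components.generators.chat.hugging_face_local.pipeline")
def test_warm_up(pipeline_mock, monkeypatch):
    # Remove any ambient token so the test behaves the same locally and in CI.
    monkeypatch.delenv("HF_API_TOKEN", raising=False)
    generator = HuggingFaceLocalChatGenerator(model="mistralai/Mistral-7B-Instruct-v0.2", task="text2text-generation")
    generator.warm_up()
    pipeline_mock.assert_called_once()  # illustrative assertion, not part of the diff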
@@ -1,5 +1,4 @@
-from unittest.mock import patch, Mock
-from haystack.utils.auth import Secret
+from unittest.mock import Mock, patch
 
 import pytest
 from transformers import PreTrainedTokenizer
@@ -7,6 +6,7 @@ from transformers import PreTrainedTokenizer
 from haystack.components.generators.chat import HuggingFaceLocalChatGenerator
 from haystack.dataclasses import ChatMessage, ChatRole
 from haystack.utils import ComponentDevice
+from haystack.utils.auth import Secret
 
 
 # used to test serialization of streaming_callback
@@ -160,7 +160,8 @@ class TestHuggingFaceLocalChatGenerator:
         assert generator_2.streaming_callback is streaming_callback_handler
 
     @patch("haystack.components.generators.chat.hugging_face_local.pipeline")
-    def test_warm_up(self, pipeline_mock):
+    def test_warm_up(self, pipeline_mock, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         generator = HuggingFaceLocalChatGenerator(
             model="mistralai/Mistral-7B-Instruct-v0.2",
             task="text2text-generation",
@@ -198,10 +198,12 @@ class TestSentenceTransformersDiversityRanker:
         ranker.run(query="test query", documents=documents)
 
     @pytest.mark.parametrize("similarity", ["dot_product", "cosine"])
-    def test_warm_up(self, similarity):
+    def test_warm_up(self, similarity, monkeypatch):
         """
         Test that ranker loads the SentenceTransformer model correctly during warm up.
         """
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
+
         mock_model_class = MagicMock()
         mock_model_instance = MagicMock()
         mock_model_class.return_value = mock_model_instance
@@ -1,13 +1,13 @@
+import logging
 from unittest.mock import MagicMock, patch
-from haystack.utils.auth import Secret
 
 import pytest
-import logging
 import torch
 from transformers.modeling_outputs import SequenceClassifierOutput
 
 from haystack import ComponentError, Document
 from haystack.components.rankers.transformers_similarity import TransformersSimilarityRanker
+from haystack.utils.auth import Secret
 from haystack.utils.device import ComponentDevice, DeviceMap
 
 
@@ -272,7 +272,8 @@ class TestSimilarityRanker:
 
     @patch("haystack.components.rankers.transformers_similarity.AutoTokenizer.from_pretrained")
     @patch("haystack.components.rankers.transformers_similarity.AutoModelForSequenceClassification.from_pretrained")
-    def test_device_map_dict(self, mocked_automodel, mocked_autotokenizer):
+    def test_device_map_dict(self, mocked_automodel, _mocked_autotokenizer, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         ranker = TransformersSimilarityRanker("model", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}})
 
         class MockedModel:
@@ -410,7 +410,8 @@ def test_warm_up_use_hf_token(mocked_automodel, mocked_autotokenizer, initialize
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_auto(mocked_automodel, mocked_autotokenizer):
+def test_device_map_auto(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "auto"})
     auto_device = ComponentDevice.resolve_device(None)
 
@@ -427,7 +428,8 @@ def test_device_map_auto(mocked_automodel, mocked_autotokenizer):
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_str(mocked_automodel, mocked_autotokenizer):
+def test_device_map_str(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "cpu:0"})
 
     class MockedModel:
@@ -443,7 +445,8 @@ def test_device_map_str(mocked_automodel, mocked_autotokenizer):
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_dict(mocked_automodel, mocked_autotokenizer):
+def test_device_map_dict(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader(
         "deepset/roberta-base-squad2", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}}
     )
@@ -23,7 +23,8 @@ class TestTransformersZeroShotTextRouter:
             },
         }
 
-    def test_from_dict(self):
+    def test_from_dict(self, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.zero_shot_text_router.TransformersZeroShotTextRouter",
             "init_parameters": {