feat: upgrade canals to 0.10.1 (#6309)

* upgrade canals

* reno

* trigger preview e2e

* bump canals

* fix decorator

* fix test

* test factory

* tests inmemory

* tests writer

* test audio

* tests builders

* tests caching

* tests embedders

* tests converters

* tests generators

* tests rankers

* tests retrievers

* fix pipeline and telemetry tests

* remove trigger
This commit is contained in:
ZanSara 2023-11-17 13:46:23 +00:00 committed by GitHub
parent 21bcfe76fb
commit dfc1d452bb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 113 additions and 84 deletions

View File

@ -64,5 +64,4 @@ def test_extractive_qa_pipeline(tmp_path):
assert hasattr(answer, "document")
# the answer is extracted from the correct document
if answer.document is not None:
assert answer.document.content == doc.content
assert answer.document.id == doc.id

View File

@ -15,15 +15,16 @@ class _DocumentStore:
def _decorate(self, cls):
cls.__haystack_document_store__ = True
if cls.__name__ in self.registry:
classname = f"{cls.__module__}.{cls.__name__}"
if classname in self.registry:
logger.error(
"DocumentStore %s is already registered. Previous imported from '%s', new imported from '%s'",
cls.__name__,
self.registry[cls.__name__],
classname,
self.registry[classname],
cls,
)
self.registry[cls.__name__] = cls
self.registry[classname] = cls
logger.debug("Registered DocumentStore %s", cls)
return cls

View File

@ -86,7 +86,7 @@ dependencies = [
[project.optional-dependencies]
preview = [
"canals==0.9.0",
"canals==0.10.1",
"requests",
"pandas",
"rank_bm25",

View File

@ -0,0 +1,2 @@
preview:
- Upgrade Canals to 0.10.1

View File

@ -32,7 +32,7 @@ class TestLocalWhisperTranscriber:
transcriber = LocalWhisperTranscriber()
data = transcriber.to_dict()
assert data == {
"type": "LocalWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_local.LocalWhisperTranscriber",
"init_parameters": {"model_name_or_path": "large", "device": "cpu", "whisper_params": {}},
}
@ -45,7 +45,7 @@ class TestLocalWhisperTranscriber:
)
data = transcriber.to_dict()
assert data == {
"type": "LocalWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_local.LocalWhisperTranscriber",
"init_parameters": {
"model_name_or_path": "tiny",
"device": "cuda",

View File

@ -80,7 +80,7 @@ class TestRemoteWhisperTranscriber:
transcriber = RemoteWhisperTranscriber(api_key="test_api_key")
data = transcriber.to_dict()
assert data == {
"type": "RemoteWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_remote.RemoteWhisperTranscriber",
"init_parameters": {
"model_name": "whisper-1",
"api_base_url": "https://api.openai.com/v1",
@ -103,7 +103,7 @@ class TestRemoteWhisperTranscriber:
)
data = transcriber.to_dict()
assert data == {
"type": "RemoteWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_remote.RemoteWhisperTranscriber",
"init_parameters": {
"model_name": "whisper-1",
"organization": "test-org",
@ -119,7 +119,7 @@ class TestRemoteWhisperTranscriber:
monkeypatch.setenv("OPENAI_API_KEY", "test_api_key")
data = {
"type": "RemoteWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_remote.RemoteWhisperTranscriber",
"init_parameters": {
"model_name": "whisper-1",
"api_base_url": "https://api.openai.com/v1",
@ -140,7 +140,7 @@ class TestRemoteWhisperTranscriber:
monkeypatch.setenv("OPENAI_API_KEY", "test_api_key")
data = {
"type": "RemoteWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_remote.RemoteWhisperTranscriber",
"init_parameters": {
"model_name": "whisper-1",
"organization": "test-org",
@ -169,7 +169,7 @@ class TestRemoteWhisperTranscriber:
monkeypatch.delenv("OPENAI_API_KEY", raising=False)
data = {
"type": "RemoteWhisperTranscriber",
"type": "haystack.preview.components.audio.whisper_remote.RemoteWhisperTranscriber",
"init_parameters": {
"model_name": "whisper-1",
"api_base_url": "https://api.openai.com/v1",

View File

@ -15,7 +15,7 @@ def test_to_dict():
builder = PromptBuilder(template="This is a {{ variable }}")
res = builder.to_dict()
assert res == {
"type": "PromptBuilder",
"type": "haystack.preview.components.builders.prompt_builder.PromptBuilder",
"init_parameters": {"template": "This is a {{ variable }}", "template_variables": None},
}

View File

@ -13,9 +13,12 @@ class TestUrlCacheChecker:
component = UrlCacheChecker(document_store=mocked_docstore_class())
data = component.to_dict()
assert data == {
"type": "UrlCacheChecker",
"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"url_field": "url",
},
}
@ -26,9 +29,12 @@ class TestUrlCacheChecker:
component = UrlCacheChecker(document_store=mocked_docstore_class(), url_field="my_url_field")
data = component.to_dict()
assert data == {
"type": "UrlCacheChecker",
"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"url_field": "my_url_field",
},
}
@ -37,9 +43,12 @@ class TestUrlCacheChecker:
def test_from_dict(self):
mocked_docstore_class = document_store_class("MockedDocumentStore")
data = {
"type": "UrlCacheChecker",
"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"url_field": "my_url_field",
},
}
@ -49,20 +58,23 @@ class TestUrlCacheChecker:
@pytest.mark.unit
def test_from_dict_without_docstore(self):
data = {"type": "UrlCacheChecker", "init_parameters": {}}
data = {"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker", "init_parameters": {}}
with pytest.raises(DeserializationError, match="Missing 'document_store' in serialization data"):
UrlCacheChecker.from_dict(data)
@pytest.mark.unit
def test_from_dict_without_docstore_type(self):
data = {"type": "UrlCacheChecker", "init_parameters": {"document_store": {"init_parameters": {}}}}
data = {
"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker",
"init_parameters": {"document_store": {"init_parameters": {}}},
}
with pytest.raises(DeserializationError, match="Missing 'type' in document store's serialization data"):
UrlCacheChecker.from_dict(data)
@pytest.mark.unit
def test_from_dict_nonexisting_docstore(self):
data = {
"type": "UrlCacheChecker",
"type": "haystack.preview.components.caching.url_cache_checker.UrlCacheChecker",
"init_parameters": {"document_store": {"type": "NonexistingDocumentStore", "init_parameters": {}}},
}
with pytest.raises(DeserializationError, match="DocumentStore of type 'NonexistingDocumentStore' not found."):

View File

@ -79,7 +79,7 @@ class TestOpenAIDocumentEmbedder:
component = OpenAIDocumentEmbedder(api_key="fake-api-key")
data = component.to_dict()
assert data == {
"type": "OpenAIDocumentEmbedder",
"type": "haystack.preview.components.embedders.openai_document_embedder.OpenAIDocumentEmbedder",
"init_parameters": {
"model_name": "text-embedding-ada-002",
"organization": None,
@ -107,7 +107,7 @@ class TestOpenAIDocumentEmbedder:
)
data = component.to_dict()
assert data == {
"type": "OpenAIDocumentEmbedder",
"type": "haystack.preview.components.embedders.openai_document_embedder.OpenAIDocumentEmbedder",
"init_parameters": {
"model_name": "model",
"organization": "my-org",

View File

@ -59,7 +59,7 @@ class TestOpenAITextEmbedder:
component = OpenAITextEmbedder(api_key="fake-api-key")
data = component.to_dict()
assert data == {
"type": "OpenAITextEmbedder",
"type": "haystack.preview.components.embedders.openai_text_embedder.OpenAITextEmbedder",
"init_parameters": {
"model_name": "text-embedding-ada-002",
"organization": None,
@ -79,7 +79,7 @@ class TestOpenAITextEmbedder:
)
data = component.to_dict()
assert data == {
"type": "OpenAITextEmbedder",
"type": "haystack.preview.components.embedders.openai_text_embedder.OpenAITextEmbedder",
"init_parameters": {
"model_name": "model",
"organization": "fake-organization",

View File

@ -53,7 +53,7 @@ class TestSentenceTransformersDocumentEmbedder:
component = SentenceTransformersDocumentEmbedder(model_name_or_path="model")
data = component.to_dict()
assert data == {
"type": "SentenceTransformersDocumentEmbedder",
"type": "haystack.preview.components.embedders.sentence_transformers_document_embedder.SentenceTransformersDocumentEmbedder",
"init_parameters": {
"model_name_or_path": "model",
"device": "cpu",
@ -85,7 +85,7 @@ class TestSentenceTransformersDocumentEmbedder:
data = component.to_dict()
assert data == {
"type": "SentenceTransformersDocumentEmbedder",
"type": "haystack.preview.components.embedders.sentence_transformers_document_embedder.SentenceTransformersDocumentEmbedder",
"init_parameters": {
"model_name_or_path": "model",
"device": "cuda",

View File

@ -45,7 +45,7 @@ class TestSentenceTransformersTextEmbedder:
component = SentenceTransformersTextEmbedder(model_name_or_path="model")
data = component.to_dict()
assert data == {
"type": "SentenceTransformersTextEmbedder",
"type": "haystack.preview.components.embedders.sentence_transformers_text_embedder.SentenceTransformersTextEmbedder",
"init_parameters": {
"model_name_or_path": "model",
"device": "cpu",
@ -72,7 +72,7 @@ class TestSentenceTransformersTextEmbedder:
)
data = component.to_dict()
assert data == {
"type": "SentenceTransformersTextEmbedder",
"type": "haystack.preview.components.embedders.sentence_transformers_text_embedder.SentenceTransformersTextEmbedder",
"init_parameters": {
"model_name_or_path": "model",
"device": "cuda",
@ -90,7 +90,7 @@ class TestSentenceTransformersTextEmbedder:
component = SentenceTransformersTextEmbedder(model_name_or_path="model", token="awesome-token")
data = component.to_dict()
assert data == {
"type": "SentenceTransformersTextEmbedder",
"type": "haystack.preview.components.embedders.sentence_transformers_text_embedder.SentenceTransformersTextEmbedder",
"init_parameters": {
"model_name_or_path": "model",
"device": "cpu",

View File

@ -18,7 +18,7 @@ class TestAzureOCRDocumentConverter:
component = AzureOCRDocumentConverter(endpoint="test_endpoint", api_key="test_credential_key")
data = component.to_dict()
assert data == {
"type": "AzureOCRDocumentConverter",
"type": "haystack.preview.components.file_converters.azure.AzureOCRDocumentConverter",
"init_parameters": {"endpoint": "test_endpoint", "model_id": "prebuilt-read"},
}

View File

@ -105,7 +105,7 @@ class TestGPTChatGenerator:
component = GPTChatGenerator(api_key="test-api-key")
data = component.to_dict()
assert data == {
"type": "GPTChatGenerator",
"type": "haystack.preview.components.generators.chat.openai.GPTChatGenerator",
"init_parameters": {
"model_name": "gpt-3.5-turbo",
"streaming_callback": None,
@ -125,7 +125,7 @@ class TestGPTChatGenerator:
)
data = component.to_dict()
assert data == {
"type": "GPTChatGenerator",
"type": "haystack.preview.components.generators.chat.openai.GPTChatGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -147,7 +147,7 @@ class TestGPTChatGenerator:
)
data = component.to_dict()
assert data == {
"type": "GPTChatGenerator",
"type": "haystack.preview.components.generators.chat.openai.GPTChatGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -161,7 +161,7 @@ class TestGPTChatGenerator:
def test_from_dict(self, monkeypatch):
monkeypatch.setenv("OPENAI_API_KEY", "fake-api-key")
data = {
"type": "GPTChatGenerator",
"type": "haystack.preview.components.generators.chat.openai.GPTChatGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -181,7 +181,7 @@ class TestGPTChatGenerator:
openai.api_key = None
monkeypatch.delenv("OPENAI_API_KEY", raising=False)
data = {
"type": "GPTChatGenerator",
"type": "haystack.preview.components.generators.chat.openai.GPTChatGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,

View File

@ -145,7 +145,7 @@ class TestHuggingFaceLocalGenerator:
data = component.to_dict()
assert data == {
"type": "HuggingFaceLocalGenerator",
"type": "haystack.preview.components.generators.hugging_face_local.HuggingFaceLocalGenerator",
"init_parameters": {
"pipeline_kwargs": {"model": "google/flan-t5-base", "task": "text2text-generation", "token": None},
"generation_kwargs": {},
@ -166,7 +166,7 @@ class TestHuggingFaceLocalGenerator:
data = component.to_dict()
assert data == {
"type": "HuggingFaceLocalGenerator",
"type": "haystack.preview.components.generators.hugging_face_local.HuggingFaceLocalGenerator",
"init_parameters": {
"pipeline_kwargs": {
"model": "gpt2",

View File

@ -98,7 +98,7 @@ class TestGPTGenerator:
component = GPTGenerator(api_key="test-api-key")
data = component.to_dict()
assert data == {
"type": "GPTGenerator",
"type": "haystack.preview.components.generators.openai.GPTGenerator",
"init_parameters": {
"model_name": "gpt-3.5-turbo",
"streaming_callback": None,
@ -119,7 +119,7 @@ class TestGPTGenerator:
)
data = component.to_dict()
assert data == {
"type": "GPTGenerator",
"type": "haystack.preview.components.generators.openai.GPTGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -142,7 +142,7 @@ class TestGPTGenerator:
)
data = component.to_dict()
assert data == {
"type": "GPTGenerator",
"type": "haystack.preview.components.generators.openai.GPTGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -157,7 +157,7 @@ class TestGPTGenerator:
def test_from_dict(self, monkeypatch):
monkeypatch.setenv("OPENAI_API_KEY", "fake-api-key")
data = {
"type": "GPTGenerator",
"type": "haystack.preview.components.generators.openai.GPTGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,
@ -178,7 +178,7 @@ class TestGPTGenerator:
openai.api_key = None
monkeypatch.delenv("OPENAI_API_KEY", raising=False)
data = {
"type": "GPTGenerator",
"type": "haystack.preview.components.generators.openai.GPTGenerator",
"init_parameters": {
"model_name": "gpt-4",
"max_tokens": 10,

View File

@ -10,7 +10,7 @@ class TestMetaFieldRanker:
component = MetaFieldRanker(metadata_field="rating")
data = component.to_dict()
assert data == {
"type": "MetaFieldRanker",
"type": "haystack.preview.components.rankers.meta_field.MetaFieldRanker",
"init_parameters": {
"metadata_field": "rating",
"weight": 1.0,
@ -24,7 +24,7 @@ class TestMetaFieldRanker:
component = MetaFieldRanker(metadata_field="rating", weight=0.5, top_k=5, ranking_mode="linear_score")
data = component.to_dict()
assert data == {
"type": "MetaFieldRanker",
"type": "haystack.preview.components.rankers.meta_field.MetaFieldRanker",
"init_parameters": {"metadata_field": "rating", "weight": 0.5, "top_k": 5, "ranking_mode": "linear_score"},
}

View File

@ -10,7 +10,7 @@ class TestSimilarityRanker:
component = TransformersSimilarityRanker()
data = component.to_dict()
assert data == {
"type": "TransformersSimilarityRanker",
"type": "haystack.preview.components.rankers.transformers_similarity.TransformersSimilarityRanker",
"init_parameters": {
"device": "cpu",
"top_k": 10,
@ -26,7 +26,7 @@ class TestSimilarityRanker:
)
data = component.to_dict()
assert data == {
"type": "TransformersSimilarityRanker",
"type": "haystack.preview.components.rankers.transformers_similarity.TransformersSimilarityRanker",
"init_parameters": {
"device": "cuda",
"model_name_or_path": "my_model",

View File

@ -93,7 +93,7 @@ def test_to_dict():
data = component.to_dict()
assert data == {
"type": "ExtractiveReader",
"type": "haystack.preview.components.readers.extractive.ExtractiveReader",
"init_parameters": {
"model_name_or_path": "my-model",
"device": None,
@ -117,7 +117,7 @@ def test_to_dict_empty_model_kwargs():
data = component.to_dict()
assert data == {
"type": "ExtractiveReader",
"type": "haystack.preview.components.readers.extractive.ExtractiveReader",
"init_parameters": {
"model_name_or_path": "my-model",
"device": None,

View File

@ -51,7 +51,7 @@ class TestMemoryBM25Retriever:
data = component.to_dict()
assert data == {
"type": "InMemoryBM25Retriever",
"type": "haystack.preview.components.retrievers.in_memory_bm25_retriever.InMemoryBM25Retriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"filters": None,
@ -70,7 +70,7 @@ class TestMemoryBM25Retriever:
)
data = component.to_dict()
assert data == {
"type": "InMemoryBM25Retriever",
"type": "haystack.preview.components.retrievers.in_memory_bm25_retriever.InMemoryBM25Retriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"filters": {"name": "test.txt"},
@ -83,9 +83,9 @@ class TestMemoryBM25Retriever:
def test_from_dict(self):
document_store_class("MyFakeStore", bases=(InMemoryDocumentStore,))
data = {
"type": "InMemoryBM25Retriever",
"type": "haystack.preview.components.retrievers.in_memory_bm25_retriever.InMemoryBM25Retriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"document_store": {"type": "haystack.preview.testing.factory.MyFakeStore", "init_parameters": {}},
"filters": {"name": "test.txt"},
"top_k": 5,
},
@ -111,7 +111,7 @@ class TestMemoryBM25Retriever:
@pytest.mark.unit
def test_from_dict_nonexisting_docstore(self):
data = {
"type": "InMemoryBM25Retriever",
"type": "haystack.preview.components.retrievers.in_memory_bm25_retriever.InMemoryBM25Retriever",
"init_parameters": {"document_store": {"type": "NonexistingDocstore", "init_parameters": {}}},
}
with pytest.raises(DeserializationError, match="DocumentStore type 'NonexistingDocstore' not found"):

View File

@ -36,14 +36,14 @@ class TestMemoryEmbeddingRetriever:
def test_to_dict(self):
MyFakeStore = document_store_class("MyFakeStore", bases=(InMemoryDocumentStore,))
document_store = MyFakeStore()
document_store.to_dict = lambda: {"type": "MyFakeStore", "init_parameters": {}}
document_store.to_dict = lambda: {"type": "test_module.MyFakeStore", "init_parameters": {}}
component = InMemoryEmbeddingRetriever(document_store=document_store)
data = component.to_dict()
assert data == {
"type": "InMemoryEmbeddingRetriever",
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"document_store": {"type": "test_module.MyFakeStore", "init_parameters": {}},
"filters": None,
"top_k": 10,
"scale_score": False,
@ -55,7 +55,7 @@ class TestMemoryEmbeddingRetriever:
def test_to_dict_with_custom_init_parameters(self):
MyFakeStore = document_store_class("MyFakeStore", bases=(InMemoryDocumentStore,))
document_store = MyFakeStore()
document_store.to_dict = lambda: {"type": "MyFakeStore", "init_parameters": {}}
document_store.to_dict = lambda: {"type": "test_module.MyFakeStore", "init_parameters": {}}
component = InMemoryEmbeddingRetriever(
document_store=document_store,
filters={"name": "test.txt"},
@ -65,9 +65,9 @@ class TestMemoryEmbeddingRetriever:
)
data = component.to_dict()
assert data == {
"type": "InMemoryEmbeddingRetriever",
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"document_store": {"type": "test_module.MyFakeStore", "init_parameters": {}},
"filters": {"name": "test.txt"},
"top_k": 5,
"scale_score": True,
@ -79,9 +79,9 @@ class TestMemoryEmbeddingRetriever:
def test_from_dict(self):
document_store_class("MyFakeStore", bases=(InMemoryDocumentStore,))
data = {
"type": "InMemoryEmbeddingRetriever",
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {
"document_store": {"type": "MyFakeStore", "init_parameters": {}},
"document_store": {"type": "haystack.preview.testing.factory.MyFakeStore", "init_parameters": {}},
"filters": {"name": "test.txt"},
"top_k": 5,
},
@ -94,20 +94,26 @@ class TestMemoryEmbeddingRetriever:
@pytest.mark.unit
def test_from_dict_without_docstore(self):
data = {"type": "InMemoryEmbeddingRetriever", "init_parameters": {}}
data = {
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {},
}
with pytest.raises(DeserializationError, match="Missing 'document_store' in serialization data"):
InMemoryEmbeddingRetriever.from_dict(data)
@pytest.mark.unit
def test_from_dict_without_docstore_type(self):
data = {"type": "InMemoryEmbeddingRetriever", "init_parameters": {"document_store": {"init_parameters": {}}}}
data = {
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {"document_store": {"init_parameters": {}}},
}
with pytest.raises(DeserializationError, match="Missing 'type' in document store's serialization data"):
InMemoryEmbeddingRetriever.from_dict(data)
@pytest.mark.unit
def test_from_dict_nonexisting_docstore(self):
data = {
"type": "InMemoryEmbeddingRetriever",
"type": "haystack.preview.components.retrievers.in_memory_embedding_retriever.InMemoryEmbeddingRetriever",
"init_parameters": {"document_store": {"type": "NonexistingDocstore", "init_parameters": {}}},
}
with pytest.raises(DeserializationError, match="DocumentStore type 'NonexistingDocstore' not found"):

View File

@ -121,7 +121,7 @@ class TestSerperDevSearchAPI:
)
data = component.to_dict()
assert data == {
"type": "SerperDevWebSearch",
"type": "haystack.preview.components.websearch.serper_dev.SerperDevWebSearch",
"init_parameters": {"top_k": 10, "allowed_domains": ["test.com"], "search_params": {"param": "test"}},
}

View File

@ -15,9 +15,12 @@ class TestDocumentWriter:
component = DocumentWriter(document_store=mocked_docstore_class())
data = component.to_dict()
assert data == {
"type": "DocumentWriter",
"type": "haystack.preview.components.writers.document_writer.DocumentWriter",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"policy": "FAIL",
},
}
@ -28,9 +31,12 @@ class TestDocumentWriter:
component = DocumentWriter(document_store=mocked_docstore_class(), policy=DuplicatePolicy.SKIP)
data = component.to_dict()
assert data == {
"type": "DocumentWriter",
"type": "haystack.preview.components.writers.document_writer.DocumentWriter",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"policy": "SKIP",
},
}
@ -39,9 +45,12 @@ class TestDocumentWriter:
def test_from_dict(self):
mocked_docstore_class = document_store_class("MockedDocumentStore")
data = {
"type": "DocumentWriter",
"type": "haystack.preview.components.writers.document_writer.DocumentWriter",
"init_parameters": {
"document_store": {"type": "MockedDocumentStore", "init_parameters": {}},
"document_store": {
"type": "haystack.preview.testing.factory.MockedDocumentStore",
"init_parameters": {},
},
"policy": "SKIP",
},
}

View File

@ -25,7 +25,7 @@ class TestMemoryDocumentStore(DocumentStoreBaseTests): # pylint: disable=R0904
store = InMemoryDocumentStore()
data = store.to_dict()
assert data == {
"type": "InMemoryDocumentStore",
"type": "haystack.preview.document_stores.in_memory.document_store.InMemoryDocumentStore",
"init_parameters": {
"bm25_tokenization_regex": r"(?u)\b\w\w+\b",
"bm25_algorithm": "BM25Okapi",
@ -44,7 +44,7 @@ class TestMemoryDocumentStore(DocumentStoreBaseTests): # pylint: disable=R0904
)
data = store.to_dict()
assert data == {
"type": "InMemoryDocumentStore",
"type": "haystack.preview.document_stores.in_memory.document_store.InMemoryDocumentStore",
"init_parameters": {
"bm25_tokenization_regex": "custom_regex",
"bm25_algorithm": "BM25Plus",
@ -57,7 +57,7 @@ class TestMemoryDocumentStore(DocumentStoreBaseTests): # pylint: disable=R0904
@patch("haystack.preview.document_stores.in_memory.document_store.re")
def test_from_dict(self, mock_regex):
data = {
"type": "InMemoryDocumentStore",
"type": "haystack.preview.document_stores.in_memory.document_store.InMemoryDocumentStore",
"init_parameters": {
"bm25_tokenization_regex": "custom_regex",
"bm25_algorithm": "BM25Plus",

View File

@ -2,11 +2,11 @@ components:
Comp1:
init_parameters:
an_init_param: null
type: TestComponent
type: test_pipeline.TestComponent
Comp2:
init_parameters:
an_init_param: null
type: TestComponent
type: test_pipeline.TestComponent
connections:
- receiver: Comp2.input_
sender: Comp1.value

View File

@ -30,7 +30,7 @@ def test_pipeline_running(telemetry):
{
"pipeline_id": str(id(pipe)),
"runs": 1,
"components": {"Component": [{"name": "component", "key": "values"}]},
"components": {"test_telemetry.Component": [{"name": "component", "key": "values"}]},
},
)
@ -49,6 +49,6 @@ def test_pipeline_running(telemetry):
{
"pipeline_id": str(id(pipe)),
"runs": 3,
"components": {"Component": [{"name": "component", "key": "values"}]},
"components": {"test_telemetry.Component": [{"name": "component", "key": "values"}]},
},
)

View File

@ -13,21 +13,21 @@ def test_document_store_class_default():
assert store.filter_documents() == []
assert store.write_documents([]) is None
assert store.delete_documents([]) is None
assert store.to_dict() == {"type": "MyStore", "init_parameters": {}}
assert store.to_dict() == {"type": "haystack.preview.testing.factory.MyStore", "init_parameters": {}}
@pytest.mark.unit
def test_document_store_from_dict():
MyStore = document_store_class("MyStore")
store = MyStore.from_dict({"type": "MyStore", "init_parameters": {}})
store = MyStore.from_dict({"type": "haystack.preview.testing.factory.MyStore", "init_parameters": {}})
assert isinstance(store, MyStore)
@pytest.mark.unit
def test_document_store_class_is_registered():
MyStore = document_store_class("MyStore")
assert document_store.registry["MyStore"] == MyStore
assert document_store.registry["haystack.preview.testing.factory.MyStore"] == MyStore
@pytest.mark.unit