# SPDX-FileCopyrightText: 2022-present deepset GmbH <info@deepset.ai>
#
# SPDX-License-Identifier: Apache-2.0

import os
import random
from typing import List
from unittest.mock import Mock, patch

import pytest
from openai import APIError

from haystack import Document
from haystack.components.embedders.openai_document_embedder import OpenAIDocumentEmbedder
from haystack.utils.auth import Secret


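# Test helper: builds a dict shaped like an OpenAI embeddings API response, with one
# 1536-dimensional random vector per input string plus the model name and token usage.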
def mock_openai_response(input: List[str], model: str = "text-embedding-ada-002", **kwargs) -> dict:
    dict_response = {
        "object": "list",
        "data": [
            {"object": "embedding", "index": i, "embedding": [random.random() for _ in range(1536)]}
            for i in range(len(input))
        ],
        "model": model,
        "usage": {"prompt_tokens": 4, "total_tokens": 4},
    }

    return dict_response


class TestOpenAIDocumentEmbedder:
    def test_init_default(self, monkeypatch):
        monkeypatch.setenv("OPENAI_API_KEY", "fake-api-key")
        embedder = OpenAIDocumentEmbedder()
        assert embedder.api_key.resolve_value() == "fake-api-key"
        assert embedder.model == "text-embedding-ada-002"
        assert embedder.organization is None
        assert embedder.prefix == ""
        assert embedder.suffix == ""
        assert embedder.batch_size == 32
        assert embedder.progress_bar is True
        assert embedder.meta_fields_to_embed == []
        assert embedder.embedding_separator == "\n"
        assert embedder.client.max_retries == 5
        assert embedder.client.timeout == 30.0

    def test_init_with_parameters(self, monkeypatch):
        monkeypatch.setenv("OPENAI_TIMEOUT", "100")
        monkeypatch.setenv("OPENAI_MAX_RETRIES", "10")
        embedder = OpenAIDocumentEmbedder(
            api_key=Secret.from_token("fake-api-key-2"),
            model="model",
            organization="my-org",
            prefix="prefix",
            suffix="suffix",
            batch_size=64,
            progress_bar=False,
            meta_fields_to_embed=["test_field"],
            embedding_separator=" | ",
            timeout=40.0,
            max_retries=1,
        )
        assert embedder.api_key.resolve_value() == "fake-api-key-2"
        assert embedder.organization == "my-org"
        assert embedder.model == "model"
        assert embedder.prefix == "prefix"
        assert embedder.suffix == "suffix"
        assert embedder.batch_size == 64
        assert embedder.progress_bar is False
        assert embedder.meta_fields_to_embed == ["test_field"]
        assert embedder.embedding_separator == " | "
        assert embedder.client.max_retries == 1
        assert embedder.client.timeout == 40.0

    def test_init_with_parameters_and_env_vars(self, monkeypatch):
        monkeypatch.setenv("OPENAI_TIMEOUT", "100")
        monkeypatch.setenv("OPENAI_MAX_RETRIES", "10")
        embedder = OpenAIDocumentEmbedder(
            api_key=Secret.from_token("fake-api-key-2"),
            model="model",
            organization="my-org",
            prefix="prefix",
            suffix="suffix",
            batch_size=64,
            progress_bar=False,
            meta_fields_to_embed=["test_field"],
            embedding_separator=" | ",
        )
        assert embedder.api_key.resolve_value() == "fake-api-key-2"
        assert embedder.organization == "my-org"
        assert embedder.model == "model"
        assert embedder.prefix == "prefix"
        assert embedder.suffix == "suffix"
        assert embedder.batch_size == 64
        assert embedder.progress_bar is False
        assert embedder.meta_fields_to_embed == ["test_field"]
        assert embedder.embedding_separator == " | "
        assert embedder.client.max_retries == 10
        assert embedder.client.timeout == 100.0

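    # With no Secret passed in and OPENAI_API_KEY unset, construction is expected to fail.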
    def test_init_fail_wo_api_key(self, monkeypatch):
        monkeypatch.delenv("OPENAI_API_KEY", raising=False)
        with pytest.raises(ValueError, match="None of the .* environment variables are set"):
            OpenAIDocumentEmbedder()

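    # Serialization should record every init parameter, with the API key stored as an
    # environment-variable reference rather than as a literal value.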
    def test_to_dict(self, monkeypatch):
        monkeypatch.setenv("OPENAI_API_KEY", "fake-api-key")
        component = OpenAIDocumentEmbedder()
        data = component.to_dict()
        assert data == {
            "type": "haystack.components.embedders.openai_document_embedder.OpenAIDocumentEmbedder",
            "init_parameters": {
                "api_key": {"env_vars": ["OPENAI_API_KEY"], "strict": True, "type": "env_var"},
                "api_base_url": None,
                "model": "text-embedding-ada-002",
                "dimensions": None,
                "organization": None,
                "prefix": "",
                "suffix": "",
                "batch_size": 32,
                "progress_bar": True,
                "meta_fields_to_embed": [],
                "embedding_separator": "\n",
            },
        }

    def test_to_dict_with_custom_init_parameters(self, monkeypatch):
        monkeypatch.setenv("ENV_VAR", "fake-api-key")
        component = OpenAIDocumentEmbedder(
            api_key=Secret.from_env_var("ENV_VAR", strict=False),
            model="model",
            organization="my-org",
            prefix="prefix",
            suffix="suffix",
            batch_size=64,
            progress_bar=False,
            meta_fields_to_embed=["test_field"],
            embedding_separator=" | ",
        )
        data = component.to_dict()
        assert data == {
            "type": "haystack.components.embedders.openai_document_embedder.OpenAIDocumentEmbedder",
            "init_parameters": {
                "api_key": {"env_vars": ["ENV_VAR"], "strict": False, "type": "env_var"},
                "api_base_url": None,
                "model": "model",
                "dimensions": None,
                "organization": "my-org",
                "prefix": "prefix",
                "suffix": "suffix",
                "batch_size": 64,
                "progress_bar": False,
                "meta_fields_to_embed": ["test_field"],
                "embedding_separator": " | ",
            },
        }

    def test_prepare_texts_to_embed_w_metadata(self):
        documents = [
            Document(id=f"{i}", content=f"document number {i}:\ncontent", meta={"meta_field": f"meta_value {i}"})
            for i in range(5)
        ]

        embedder = OpenAIDocumentEmbedder(
            api_key=Secret.from_token("fake-api-key"), meta_fields_to_embed=["meta_field"], embedding_separator=" | "
        )

        prepared_texts = embedder._prepare_texts_to_embed(documents)

        # note that newline is replaced by space
        assert prepared_texts == {
            "0": "meta_value 0 | document number 0: content",
            "1": "meta_value 1 | document number 1: content",
            "2": "meta_value 2 | document number 2: content",
            "3": "meta_value 3 | document number 3: content",
            "4": "meta_value 4 | document number 4: content",
        }

    def test_prepare_texts_to_embed_w_suffix(self):
        documents = [Document(id=f"{i}", content=f"document number {i}") for i in range(5)]

        embedder = OpenAIDocumentEmbedder(
            api_key=Secret.from_token("fake-api-key"), prefix="my_prefix ", suffix=" my_suffix"
        )

        prepared_texts = embedder._prepare_texts_to_embed(documents)

        assert prepared_texts == {
            "0": "my_prefix document number 0 my_suffix",
            "1": "my_prefix document number 1 my_suffix",
            "2": "my_prefix document number 2 my_suffix",
            "3": "my_prefix document number 3 my_suffix",
            "4": "my_prefix document number 4 my_suffix",
        }

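    # run() accepts only a list of Documents; a plain string or a list of other types raises TypeError.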
    def test_run_wrong_input_format(self):
        embedder = OpenAIDocumentEmbedder(api_key=Secret.from_token("fake-api-key"))

        # wrong formats
        string_input = "text"
        list_integers_input = [1, 2, 3]

        with pytest.raises(TypeError, match="OpenAIDocumentEmbedder expects a list of Documents as input"):
            embedder.run(documents=string_input)

        with pytest.raises(TypeError, match="OpenAIDocumentEmbedder expects a list of Documents as input"):
            embedder.run(documents=list_integers_input)

    def test_run_on_empty_list(self):
        embedder = OpenAIDocumentEmbedder(api_key=Secret.from_token("fake-api-key"))

        empty_list_input = []
        result = embedder.run(documents=empty_list_input)

        assert result["documents"] is not None
        assert not result["documents"]  # empty list

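    # A failed OpenAI call inside _embed_batch is expected to be logged, not re-raised.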
    def test_embed_batch_handles_exceptions_gracefully(self, caplog):
        embedder = OpenAIDocumentEmbedder(api_key=Secret.from_token("fake_api_key"))
        fake_texts_to_embed = {"1": "text1", "2": "text2"}
        with patch.object(
            embedder.client.embeddings,
            "create",
            side_effect=APIError(message="Mocked error", request=Mock(), body=None),
        ):
            embedder._embed_batch(texts_to_embed=fake_texts_to_embed, batch_size=2)

        assert len(caplog.records) == 1
        assert "Failed embedding of documents 1, 2 caused by Mocked error" in caplog.records[0].msg

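    # The integration tests below call the live OpenAI API and run only when OPENAI_API_KEY is set.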
    @pytest.mark.skipif(os.environ.get("OPENAI_API_KEY", "") == "", reason="OPENAI_API_KEY is not set")
    @pytest.mark.integration
    def test_run(self):
        docs = [
            Document(content="I love cheese", meta={"topic": "Cuisine"}),
            Document(content="A transformer is a deep learning architecture", meta={"topic": "ML"}),
        ]

        model = "text-embedding-ada-002"

        embedder = OpenAIDocumentEmbedder(model=model, meta_fields_to_embed=["topic"], embedding_separator=" | ")

        result = embedder.run(documents=docs)
        documents_with_embeddings = result["documents"]

        assert isinstance(documents_with_embeddings, list)
        assert len(documents_with_embeddings) == len(docs)
        for doc in documents_with_embeddings:
            assert isinstance(doc, Document)
            assert isinstance(doc.embedding, list)
            assert len(doc.embedding) == 1536
            assert all(isinstance(x, float) for x in doc.embedding)

        assert "text" in result["meta"]["model"] and "ada" in result["meta"]["model"], (
            "The model name does not contain 'text' and 'ada'"
        )

        assert result["meta"]["usage"] == {"prompt_tokens": 15, "total_tokens": 15}, "Usage information does not match"

    @pytest.mark.skipif(os.environ.get("OPENAI_API_KEY", "") == "", reason="OPENAI_API_KEY is not set")
    @pytest.mark.integration
    @pytest.mark.asyncio
    async def test_run_async(self):
        docs = [
            Document(content="I love cheese", meta={"topic": "Cuisine"}),
            Document(content="A transformer is a deep learning architecture", meta={"topic": "ML"}),
        ]

        model = "text-embedding-ada-002"

        embedder = OpenAIDocumentEmbedder(model=model, meta_fields_to_embed=["topic"], embedding_separator=" | ")

        result = await embedder.run_async(documents=docs)
        documents_with_embeddings = result["documents"]

        assert isinstance(documents_with_embeddings, list)
        assert len(documents_with_embeddings) == len(docs)
        for doc in documents_with_embeddings:
            assert isinstance(doc, Document)
            assert isinstance(doc.embedding, list)
            assert len(doc.embedding) == 1536
            assert all(isinstance(x, float) for x in doc.embedding)

        assert "text" in result["meta"]["model"] and "ada" in result["meta"]["model"], (
            "The model name does not contain 'text' and 'ada'"
        )

        assert result["meta"]["usage"] == {"prompt_tokens": 15, "total_tokens": 15}, "Usage information does not match"