refact: Rename helper function (#6831)

* change function name

* add api docs

* release notes
This commit is contained in:
Massimiliano Pippi 2024-01-26 16:00:02 +01:00 committed by GitHub
parent fdf844f762
commit acf4cd502f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 37 additions and 29 deletions

View File

@ -10,7 +10,8 @@ loaders:
"chat/azure", "chat/azure",
"chat/hugging_face_local", "chat/hugging_face_local",
"chat/hugging_face_tgi", "chat/hugging_face_tgi",
"chat/openai" "chat/openai",
"utils",
] ]
ignore_when_discovered: ["__init__"] ignore_when_discovered: ["__init__"]
processors: processors:

View File

@ -6,7 +6,7 @@ from haystack import DeserializationError
from haystack.dataclasses import StreamingChunk from haystack.dataclasses import StreamingChunk
def default_streaming_callback(chunk: StreamingChunk) -> None: def print_streaming_chunk(chunk: StreamingChunk) -> None:
""" """
Default callback function for streaming responses. Default callback function for streaming responses.
Prints the tokens of the first completion to stdout as soon as they are received Prints the tokens of the first completion to stdout as soon as they are received

View File

@ -0,0 +1,7 @@
---
upgrade:
enhancements:
- |
The name `default_streaming_callback` was confusing: this function was the go-to helper
one would use to quickly print the generated tokens as they come, but it was not used by default.
The function was therefore renamed to `print_streaming_chunk`.

View File

@ -4,7 +4,7 @@ import pytest
from openai import OpenAIError from openai import OpenAIError
from haystack.components.generators.chat import AzureOpenAIChatGenerator from haystack.components.generators.chat import AzureOpenAIChatGenerator
from haystack.components.generators.utils import default_streaming_callback from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import ChatMessage from haystack.dataclasses import ChatMessage
@ -25,12 +25,12 @@ class TestOpenAIChatGenerator:
component = AzureOpenAIChatGenerator( component = AzureOpenAIChatGenerator(
azure_endpoint="some-non-existing-endpoint", azure_endpoint="some-non-existing-endpoint",
api_key="test-api-key", api_key="test-api-key",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
assert component.client.api_key == "test-api-key" assert component.client.api_key == "test-api-key"
assert component.azure_deployment == "gpt-35-turbo" assert component.azure_deployment == "gpt-35-turbo"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
def test_to_dict_default(self): def test_to_dict_default(self):

View File

@ -4,7 +4,7 @@ import pytest
from openai import OpenAIError from openai import OpenAIError
from haystack.components.generators.chat import OpenAIChatGenerator from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.components.generators.utils import default_streaming_callback from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import ChatMessage, StreamingChunk from haystack.dataclasses import ChatMessage, StreamingChunk
@ -33,13 +33,13 @@ class TestOpenAIChatGenerator:
component = OpenAIChatGenerator( component = OpenAIChatGenerator(
api_key="test-api-key", api_key="test-api-key",
model="gpt-4", model="gpt-4",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
api_base_url="test-base-url", api_base_url="test-base-url",
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
assert component.client.api_key == "test-api-key" assert component.client.api_key == "test-api-key"
assert component.model == "gpt-4" assert component.model == "gpt-4"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
def test_to_dict_default(self): def test_to_dict_default(self):
@ -60,7 +60,7 @@ class TestOpenAIChatGenerator:
component = OpenAIChatGenerator( component = OpenAIChatGenerator(
api_key="test-api-key", api_key="test-api-key",
model="gpt-4", model="gpt-4",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
api_base_url="test-base-url", api_base_url="test-base-url",
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
@ -71,7 +71,7 @@ class TestOpenAIChatGenerator:
"model": "gpt-4", "model": "gpt-4",
"organization": None, "organization": None,
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }
@ -102,13 +102,13 @@ class TestOpenAIChatGenerator:
"init_parameters": { "init_parameters": {
"model": "gpt-4", "model": "gpt-4",
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }
component = OpenAIChatGenerator.from_dict(data) component = OpenAIChatGenerator.from_dict(data)
assert component.model == "gpt-4" assert component.model == "gpt-4"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.api_base_url == "test-base-url" assert component.api_base_url == "test-base-url"
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
@ -120,7 +120,7 @@ class TestOpenAIChatGenerator:
"model": "gpt-4", "model": "gpt-4",
"organization": None, "organization": None,
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }

View File

@ -4,7 +4,7 @@ import pytest
from openai import OpenAIError from openai import OpenAIError
from haystack.components.generators import AzureOpenAIGenerator from haystack.components.generators import AzureOpenAIGenerator
from haystack.components.generators.utils import default_streaming_callback from haystack.components.generators.utils import print_streaming_chunk
class TestAzureOpenAIGenerator: class TestAzureOpenAIGenerator:
@ -25,12 +25,12 @@ class TestAzureOpenAIGenerator:
api_key="test-api-key", api_key="test-api-key",
azure_endpoint="some-non-existing-endpoint", azure_endpoint="some-non-existing-endpoint",
azure_deployment="gpt-35-turbo", azure_deployment="gpt-35-turbo",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
assert component.client.api_key == "test-api-key" assert component.client.api_key == "test-api-key"
assert component.azure_deployment == "gpt-35-turbo" assert component.azure_deployment == "gpt-35-turbo"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
def test_to_dict_default(self): def test_to_dict_default(self):

View File

@ -5,7 +5,7 @@ import pytest
from openai import OpenAIError from openai import OpenAIError
from haystack.components.generators import OpenAIGenerator from haystack.components.generators import OpenAIGenerator
from haystack.components.generators.utils import default_streaming_callback from haystack.components.generators.utils import print_streaming_chunk
from haystack.dataclasses import StreamingChunk, ChatMessage from haystack.dataclasses import StreamingChunk, ChatMessage
@ -26,13 +26,13 @@ class TestOpenAIGenerator:
component = OpenAIGenerator( component = OpenAIGenerator(
api_key="test-api-key", api_key="test-api-key",
model="gpt-4", model="gpt-4",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
api_base_url="test-base-url", api_base_url="test-base-url",
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
assert component.client.api_key == "test-api-key" assert component.client.api_key == "test-api-key"
assert component.model == "gpt-4" assert component.model == "gpt-4"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
def test_to_dict_default(self): def test_to_dict_default(self):
@ -53,7 +53,7 @@ class TestOpenAIGenerator:
component = OpenAIGenerator( component = OpenAIGenerator(
api_key="test-api-key", api_key="test-api-key",
model="gpt-4", model="gpt-4",
streaming_callback=default_streaming_callback, streaming_callback=print_streaming_chunk,
api_base_url="test-base-url", api_base_url="test-base-url",
generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"}, generation_kwargs={"max_tokens": 10, "some_test_param": "test-params"},
) )
@ -64,7 +64,7 @@ class TestOpenAIGenerator:
"model": "gpt-4", "model": "gpt-4",
"system_prompt": None, "system_prompt": None,
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }
@ -97,13 +97,13 @@ class TestOpenAIGenerator:
"model": "gpt-4", "model": "gpt-4",
"system_prompt": None, "system_prompt": None,
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }
component = OpenAIGenerator.from_dict(data) component = OpenAIGenerator.from_dict(data)
assert component.model == "gpt-4" assert component.model == "gpt-4"
assert component.streaming_callback is default_streaming_callback assert component.streaming_callback is print_streaming_chunk
assert component.api_base_url == "test-base-url" assert component.api_base_url == "test-base-url"
assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"} assert component.generation_kwargs == {"max_tokens": 10, "some_test_param": "test-params"}
@ -114,7 +114,7 @@ class TestOpenAIGenerator:
"init_parameters": { "init_parameters": {
"model": "gpt-4", "model": "gpt-4",
"api_base_url": "test-base-url", "api_base_url": "test-base-url",
"streaming_callback": "haystack.components.generators.utils.default_streaming_callback", "streaming_callback": "haystack.components.generators.utils.print_streaming_chunk",
"generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"}, "generation_kwargs": {"max_tokens": 10, "some_test_param": "test-params"},
}, },
} }

View File

@ -1,6 +1,6 @@
import pytest import pytest
from haystack.components.generators.utils import default_streaming_callback from haystack.components.generators.utils import print_streaming_chunk
from haystack.components.generators.utils import serialize_callback_handler, deserialize_callback_handler from haystack.components.generators.utils import serialize_callback_handler, deserialize_callback_handler
@ -15,8 +15,8 @@ def test_callback_handler_serialization():
def test_callback_handler_serialization_non_local(): def test_callback_handler_serialization_non_local():
result = serialize_callback_handler(default_streaming_callback) result = serialize_callback_handler(print_streaming_chunk)
assert result == "haystack.components.generators.utils.default_streaming_callback" assert result == "haystack.components.generators.utils.print_streaming_chunk"
def test_callback_handler_deserialization(): def test_callback_handler_deserialization():
@ -27,7 +27,7 @@ def test_callback_handler_deserialization():
def test_callback_handler_deserialization_non_local(): def test_callback_handler_deserialization_non_local():
result = serialize_callback_handler(default_streaming_callback) result = serialize_callback_handler(print_streaming_chunk)
fn = deserialize_callback_handler(result) fn = deserialize_callback_handler(result)
assert fn is default_streaming_callback assert fn is print_streaming_chunk