chore: Remove deprecated ChatMessage.to_openai_format (#8242)

* chore: Remove deprecated `ChatMessage.to_openai_format`

* lint
Madeesh Kannan 2024-08-16 10:34:44 +02:00 committed by GitHub
parent 9427d7aee6
commit cf5fd2a821
3 changed files with 14 additions and 25 deletions

haystack/dataclasses/chat_message.py

@@ -5,7 +5,6 @@
 from dataclasses import asdict, dataclass, field
 from enum import Enum
 from typing import Any, Dict, Optional
-from warnings import warn


 class ChatRole(str, Enum):
@@ -33,25 +32,6 @@ class ChatMessage:
     name: Optional[str]
     meta: Dict[str, Any] = field(default_factory=dict, hash=False)

-    def to_openai_format(self) -> Dict[str, Any]:
-        """
-        Convert the message to the format expected by OpenAI's Chat API.
-
-        See the [API reference](https://platform.openai.com/docs/api-reference/chat/create) for details.
-
-        :returns: A dictionary with the following key:
-            - `role`
-            - `content`
-            - `name` (optional)
-        """
-        warn("The `to_openai_format` method is deprecated and will be removed in Haystack 2.5.0.", DeprecationWarning)
-        msg = {"role": self.role.value, "content": self.content}
-        if self.name:
-            msg["name"] = self.name
-        return msg
-
     def is_from(self, role: ChatRole) -> bool:
         """
         Check if the message is from a specific role.
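For reference, the replacement helper `_convert_message_to_openai_format` in `haystack.components.generators.openai_utils` performs the same conversion the removed method did, minus the deprecation warning. A minimal sketch of the equivalent logic (an illustration with a hypothetical public name, not the verbatim library source), assuming Haystack 2.x:

from typing import Any, Dict

from haystack.dataclasses import ChatMessage


def convert_message_to_openai_format(message: ChatMessage) -> Dict[str, Any]:
    # Same mapping as the removed method: "role" and "content" are always set;
    # "name" is included only when present (e.g. for function messages).
    openai_msg: Dict[str, Any] = {"role": message.role.value, "content": message.content}
    if message.name:
        openai_msg["name"] = message.name
    return openai_msg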

releasenotes/notes/… (new release note)

@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    Remove `ChatMessage.to_openai_format` method. Use `haystack.components.generators.openai_utils._convert_message_to_openai_format` instead.
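Migrating caller code is a one-line change; a minimal usage sketch, assuming Haystack 2.x (the expected output matches the updated tests below):

from haystack.components.generators.openai_utils import _convert_message_to_openai_format
from haystack.dataclasses import ChatMessage

message = ChatMessage.from_user("I have a question")

# Before this commit: payload = message.to_openai_format()
# After:
payload = _convert_message_to_openai_format(message)
print(payload)  # {'role': 'user', 'content': 'I have a question'}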

test/dataclasses/test_chat_message.py

@@ -5,6 +5,7 @@
 import pytest
 from transformers import AutoTokenizer

 from haystack.dataclasses import ChatMessage, ChatRole
+from haystack.components.generators.openai_utils import _convert_message_to_openai_format


 def test_from_assistant_with_valid_content():
@@ -42,20 +43,24 @@ def test_from_function_with_empty_name():
 def test_to_openai_format():
     message = ChatMessage.from_system("You are good assistant")
-    assert message.to_openai_format() == {"role": "system", "content": "You are good assistant"}
+    assert _convert_message_to_openai_format(message) == {"role": "system", "content": "You are good assistant"}

     message = ChatMessage.from_user("I have a question")
-    assert message.to_openai_format() == {"role": "user", "content": "I have a question"}
+    assert _convert_message_to_openai_format(message) == {"role": "user", "content": "I have a question"}

     message = ChatMessage.from_function("Function call", "function_name")
-    assert message.to_openai_format() == {"role": "function", "content": "Function call", "name": "function_name"}
+    assert _convert_message_to_openai_format(message) == {
+        "role": "function",
+        "content": "Function call",
+        "name": "function_name",
+    }


 @pytest.mark.integration
 def test_apply_chat_templating_on_chat_message():
     messages = [ChatMessage.from_system("You are good assistant"), ChatMessage.from_user("I have a question")]
     tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")
-    formatted_messages = [m.to_openai_format() for m in messages]
+    formatted_messages = [_convert_message_to_openai_format(m) for m in messages]
     tokenized_messages = tokenizer.apply_chat_template(formatted_messages, tokenize=False)
     assert tokenized_messages == "<|system|>\nYou are good assistant</s>\n<|user|>\nI have a question</s>\n"
@@ -76,7 +81,7 @@ def test_apply_custom_chat_templating_on_chat_message():
     messages = [ChatMessage.from_system("You are good assistant"), ChatMessage.from_user("I have a question")]
     # could be any tokenizer, let's use the one we already likely have in cache
     tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")
-    formatted_messages = [m.to_openai_format() for m in messages]
+    formatted_messages = [_convert_message_to_openai_format(m) for m in messages]
     tokenized_messages = tokenizer.apply_chat_template(
         formatted_messages, chat_template=anthropic_template, tokenize=False
     )