replace all gpt-3.5-turbo with gpt-4o-mini (#9165)

Bilge Yücel 2025-04-04 13:07:55 +03:00 committed by GitHub
parent d1db061058
commit d977b262b6
4 changed files with 7 additions and 7 deletions


@@ -33,7 +33,7 @@ class OpenAIChatGenerator:
 """
 Completes chats using OpenAI's large language models (LLMs).
-It works with the gpt-4 and gpt-3.5-turbo models and supports streaming responses
+It works with the gpt-4 and o-series models and supports streaming responses
 from OpenAI API. It uses [ChatMessage](https://docs.haystack.deepset.ai/docs/chatmessage)
 format in input and output.
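
For context, a minimal usage sketch of the component this docstring describes (not part of the diff). It assumes Haystack 2.x import paths, an OPENAI_API_KEY in the environment, and the `replies` / `.text` output shape:

```python
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage

# The model name is illustrative; any supported OpenAI chat model
# (e.g. gpt-4o-mini) can be passed here.
generator = OpenAIChatGenerator(model="gpt-4o-mini")

# Input and output both use ChatMessage objects.
result = generator.run(messages=[ChatMessage.from_user("Explain RAG in one sentence.")])
print(result["replies"][0].text)
```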


@@ -21,7 +21,7 @@ class OpenAIGenerator:
 """
 Generates text using OpenAI's large language models (LLMs).
-It works with the gpt-4 and gpt-3.5-turbo models and supports streaming responses
+It works with the gpt-4 and o-series models and supports streaming responses
 from OpenAI API. It uses strings as input and output.
 You can customize how the text is generated by passing parameters to the
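
Likewise, a minimal sketch of OpenAIGenerator usage (not part of the diff; assumes Haystack 2.x and an OPENAI_API_KEY in the environment). Unlike the chat variant, it takes a plain string prompt and returns strings in `replies`:

```python
from haystack.components.generators import OpenAIGenerator

# The model name is illustrative; generation parameters can be
# customized via the constructor's generation_kwargs.
generator = OpenAIGenerator(model="gpt-4o-mini")

result = generator.run(prompt="Summarize what a vector database is in one sentence.")
print(result["replies"][0])  # replies is a list of strings
```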


@@ -66,18 +66,18 @@ class AnswerJoiner:
 pipe = Pipeline()
 pipe.add_component("gpt-4o", OpenAIChatGenerator(model="gpt-4o"))
-pipe.add_component("llama", OpenAIChatGenerator(model="gpt-3.5-turbo"))
+pipe.add_component("gpt-4o-mini", OpenAIChatGenerator(model="gpt-4o-mini"))
 pipe.add_component("aba", AnswerBuilder())
 pipe.add_component("abb", AnswerBuilder())
 pipe.add_component("joiner", AnswerJoiner())
 pipe.connect("gpt-4o.replies", "aba")
-pipe.connect("llama.replies", "abb")
+pipe.connect("gpt-4o-mini.replies", "abb")
 pipe.connect("aba.answers", "joiner")
 pipe.connect("abb.answers", "joiner")
 results = pipe.run(data={"gpt-4o": {"messages": messages},
-                         "llama": {"messages": messages},
+                         "gpt-4o-mini": {"messages": messages},
                          "aba": {"query": query},
                          "abb": {"query": query}})
 ```


@@ -31,14 +31,14 @@ class TestLLMMetadataExtractor:
 expected_keys=["key1", "key2"],
 raise_on_failure=True,
 generator_api=LLMProvider.OPENAI,
-generator_api_params={"model": "gpt-3.5-turbo", "generation_kwargs": {"temperature": 0.5}},
+generator_api_params={"model": "gpt-4o-mini", "generation_kwargs": {"temperature": 0.5}},
 page_range=["1-5"],
 )
 assert isinstance(extractor.builder, PromptBuilder)
 assert extractor.expected_keys == ["key1", "key2"]
 assert extractor.raise_on_failure is True
 assert isinstance(extractor._chat_generator, OpenAIChatGenerator)
-assert extractor._chat_generator.model == "gpt-3.5-turbo"
+assert extractor._chat_generator.model == "gpt-4o-mini"
 assert extractor._chat_generator.generation_kwargs == {"temperature": 0.5}
 assert extractor.expanded_range == [1, 2, 3, 4, 5]
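
For reference, the configuration asserted in this test corresponds roughly to the following standalone sketch. It is not part of the diff: the import locations, the required prompt template, the warm_up() call, and the output shape are assumptions based on the Haystack 2.x docs and may differ between versions.

```python
from haystack import Document
# Assumed import location for both names; adjust to your Haystack version.
from haystack.components.extractors.llm_metadata_extractor import LLMMetadataExtractor, LLMProvider

# The prompt is assumed to render the document content and request JSON
# containing the expected keys.
prompt = """
Extract a JSON object with the keys "key1" and "key2" from the text below.
Text: {{ document.content }}
"""

extractor = LLMMetadataExtractor(
    prompt=prompt,
    expected_keys=["key1", "key2"],
    raise_on_failure=True,
    generator_api=LLMProvider.OPENAI,
    generator_api_params={"model": "gpt-4o-mini", "generation_kwargs": {"temperature": 0.5}},
)
extractor.warm_up()

result = extractor.run(documents=[Document(content="Some text to extract metadata from.")])
# Extracted keys are written into each document's meta; documents that could
# not be processed are returned under "failed_documents".
print(result["documents"][0].meta)
```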