Update prompt builders examples (#6681)

This commit is contained in:
Vladimir Blagojevic 2024-01-04 16:54:26 +01:00 committed by GitHub
parent d61ac9e6ae
commit 1336456b4f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 106 additions and 20 deletions

View File

@ -26,13 +26,13 @@ class DynamicChatPromptBuilder:
```python
from haystack.components.builders import DynamicChatPromptBuilder
from haystack.components.generators.chat import GPTChatGenerator
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack import Pipeline
# no parameter init, we don't use any runtime template variables
prompt_builder = DynamicChatPromptBuilder()
llm = GPTChatGenerator(api_key="<your-api-key>", model_name="gpt-3.5-turbo")
llm = OpenAIChatGenerator(api_key="<your-api-key>", model_name="gpt-3.5-turbo")
pipe = Pipeline()
pipe.add_component("prompt_builder", prompt_builder)
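# --- Sketch (not part of the diff): the example is truncated between hunks. Based on the
# --- output shown below and the test added in this commit, the omitted middle most likely
# --- wires the generator in and runs the pipeline; the connection string is an assumption.
# pipe.add_component("llm", llm)
# pipe.connect("prompt_builder.prompt", "llm.messages")
# location = "Berlin"
# messages = [ChatMessage.from_system("You are a helpful assistant giving out valuable information to tourists."),
#             ChatMessage.from_user("Tell me about {{location}}")]
# pipe.run(data={"prompt_builder": {"template_variables": {"location": location},
#                                   "prompt_source": messages}})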
@ -66,7 +66,6 @@ class DynamicChatPromptBuilder:
closer to your visit.", role=<ChatRole.ASSISTANT: 'assistant'>, name=None, meta={'model': 'gpt-3.5-turbo-0613',
'index': 0, 'finish_reason': 'stop', 'usage': {'prompt_tokens': 37, 'completion_tokens': 201,
'total_tokens': 238}})]}}
```
The primary advantage of using DynamicChatPromptBuilder is showcased in the examples provided above.
@ -75,10 +74,9 @@ class DynamicChatPromptBuilder:
In the example above, the first query asks for general information about Berlin, and the second query requests
the weather forecast for Berlin in the next few days. DynamicChatPromptBuilder efficiently handles these distinct
prompt structures by adjusting the parameters passed to each pipeline run invocation, as opposed to a regular
PromptBuilder, which would require recreating or reloading the pipeline for each distinct type of query, leading to
inefficiency and potential service disruptions, especially in server environments where continuous service is vital.
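To make this concrete, here is a minimal sketch, mirroring the test added later in this commit, of one pipeline serving two structurally different prompts purely through different `run` parameters (the component name `prompt_builder` follows the example above):
```python
from haystack import Pipeline
from haystack.components.builders import DynamicChatPromptBuilder
from haystack.dataclasses import ChatMessage

pipe = Pipeline()
pipe.add_component("prompt_builder", DynamicChatPromptBuilder())

# First query: general information about a location.
messages = [ChatMessage.from_user("Tell me about {{location}}")]
pipe.run(data={"prompt_builder": {"template_variables": {"location": "Berlin"},
                                  "prompt_source": messages}})

# Second query: a differently shaped template, served by the same pipeline
# without recreating or reloading anything.
messages = [ChatMessage.from_user(
    "What's the weather forecast for {{location}} in the next {{day_count}} days?"
)]
pipe.run(data={"prompt_builder": {"template_variables": {"location": "Berlin", "day_count": "5"},
                                  "prompt_source": messages}})
```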
Note that the weather forecast in the example above is fictional, but it can be easily connected to a weather
API to provide real weather forecasts.
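As a rough illustration of such a hook-up, the sketch below fetches a forecast first and then passes it in as a template variable; the `get_forecast` helper, the endpoint, and the response shape are all hypothetical, and `pipe` is the pipeline built in the example above:
```python
import requests

def get_forecast(location: str, day_count: int) -> str:
    # Hypothetical weather API; URL, parameters, and response shape are assumptions.
    response = requests.get(
        "https://example-weather-api.test/forecast",
        params={"q": location, "days": day_count},
    )
    return response.json()["summary"]

forecast = get_forecast("Berlin", 5)
messages = [
    ChatMessage.from_user("Given this forecast: {{forecast}}, what should a visitor to {{location}} pack?")
]
pipe.run(data={"prompt_builder": {"template_variables": {"forecast": forecast, "location": "Berlin"},
                                  "prompt_source": messages}})
```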

View File

@ -17,22 +17,21 @@ class DynamicPromptBuilder:
The following example demonstrates how to use the DynamicPromptBuilder:
```python
from typing import List
from haystack.components.builders import DynamicPromptBuilder
from haystack.components.generators.chat import GPTChatGenerator
from haystack.dataclasses import ChatMessage
from haystack import Pipeline
from haystack.components.generators import OpenAIGenerator
from haystack import Pipeline, component, Document
prompt_builder = DynamicPromptBuilder(runtime_variables=["documents"])
llm = GPTGenerator(api_key="<your-api-key>", model_name="gpt-3.5-turbo")
llm = OpenAIGenerator(api_key="<your-api-key>", model_name="gpt-3.5-turbo")
@component
class DocumentProducer:
    @component.output_types(documents=List[Document])
    def run(self, doc_input: str):
        return {"documents": [Document(content=doc_input)]}
pipe = Pipeline()
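# --- Sketch (not part of the diff): the component wiring between these hunks is omitted.
# --- Judging from the connect call below and the test added in this commit, it most
# --- likely resembles the following; component names are assumptions.
# pipe.add_component("doc_producer", DocumentProducer())
# pipe.add_component("prompt_builder", prompt_builder)
# pipe.add_component("llm", llm)
# pipe.connect("doc_producer.documents", "prompt_builder.documents")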
@ -43,9 +42,16 @@ class DynamicPromptBuilder:
pipe.connect("prompt_builder.prompt", "llm.prompt")
template = "Here is the document: {{documents[0].content}} \\n Answer: {{query}}"
pipe.run(data={"doc_producer": {"doc_input": "Hello world, I live in Berlin"},
"prompt_builder": {"prompt_source": template,
"template_variables":{"query": "Where does the speaker live?"}}})
result = pipe.run(
    data={
        "doc_producer": {"doc_input": "Hello world, I live in Berlin"},
        "prompt_builder": {
            "prompt_source": template,
            "template_variables": {"query": "Where does the speaker live?"},
        },
    }
)
print(result)
>> {'llm': {'replies': ['The speaker lives in Berlin.'],
>> 'meta': [{'model': 'gpt-3.5-turbo-0613',
@ -118,8 +124,8 @@ class DynamicPromptBuilder:
"Please provide an appropriate template variable to enable prompt generation."
)
template = self._validate_template(prompt_source, set(template_variables.keys()))
result = template.render(template_variables)
template = self._validate_template(prompt_source, set(template_variables_combined.keys()))
result = template.render(template_variables_combined)
return {"prompt": result}
def _validate_template(self, template_text: str, provided_variables: Set[str]):
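For context on this change: the builder merges the variables arriving at run time (declared via `runtime_variables`) with any explicitly supplied `template_variables` before validating and rendering, and the rename makes the code operate on that merged dict. A rough sketch of the likely merge follows; the name `kwargs` for the runtime inputs and the precedence order are assumptions, not the verbatim implementation:
```python
# Runtime inputs (e.g. documents produced upstream) and the explicitly supplied
# template_variables are merged; in this sketch the explicit values take precedence.
template_variables_combined = {**(kwargs or {}), **(template_variables or {})}
template = self._validate_template(prompt_source, set(template_variables_combined.keys()))
result = template.render(template_variables_combined)
```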

View File

@ -2,6 +2,7 @@ from typing import List
import pytest
from haystack import Pipeline
from haystack.components.builders import DynamicChatPromptBuilder
from haystack.dataclasses import ChatMessage
@ -90,3 +91,50 @@ class TestDynamicChatPromptBuilder:
# provided variables are a superset of the required variables
prompt_builder._validate_template("Hello, I'm {{ name }}, and I live in {{ city }}.", {"name", "city", "age"})
def test_example_in_pipeline(self):
    # no parameter init, we don't use any runtime template variables
    prompt_builder = DynamicChatPromptBuilder()
    pipe = Pipeline()
    pipe.add_component("prompt_builder", prompt_builder)

    location = "Berlin"
    system_message = ChatMessage.from_system(
        "You are a helpful assistant giving out valuable information to tourists."
    )
    messages = [system_message, ChatMessage.from_user("Tell me about {{location}}")]

    res = pipe.run(
        data={"prompt_builder": {"template_variables": {"location": location}, "prompt_source": messages}}
    )
    assert res == {
        "prompt_builder": {
            "prompt": [
                ChatMessage.from_system("You are a helpful assistant giving out valuable information to tourists."),
                ChatMessage.from_user("Tell me about Berlin"),
            ]
        }
    }

    messages = [
        system_message,
        ChatMessage.from_user("What's the weather forecast for {{location}} in the next {{day_count}} days?"),
    ]

    res = pipe.run(
        data={
            "prompt_builder": {
                "template_variables": {"location": location, "day_count": "5"},
                "prompt_source": messages,
            }
        }
    )
    assert res == {
        "prompt_builder": {
            "prompt": [
                ChatMessage.from_system("You are a helpful assistant giving out valuable information to tourists."),
                ChatMessage.from_user("What's the weather forecast for Berlin in the next 5 days?"),
            ]
        }
    }

View File

@ -1,6 +1,9 @@
from typing import List
import pytest
from jinja2 import TemplateSyntaxError
from haystack import component, Document, Pipeline
from haystack.components.builders import DynamicPromptBuilder
@ -74,3 +77,34 @@ class TestDynamicPromptBuilder:
# provided variables are a superset of the required variables
prompt_builder._validate_template("Hello, I'm {{ name }}, and I live in {{ city }}.", {"name", "city", "age"})
def test_example_in_pipeline(self):
    prompt_builder = DynamicPromptBuilder(runtime_variables=["documents"])

    @component
    class DocumentProducer:
        @component.output_types(documents=List[Document])
        def run(self, doc_input: str):
            return {"documents": [Document(content=doc_input)]}

    pipe = Pipeline()
    pipe.add_component("doc_producer", DocumentProducer())
    pipe.add_component("prompt_builder", prompt_builder)
    pipe.connect("doc_producer.documents", "prompt_builder.documents")

    template = "Here is the document: {{documents[0].content}} \\n Answer: {{query}}"
    result = pipe.run(
        data={
            "doc_producer": {"doc_input": "Hello world, I live in Berlin"},
            "prompt_builder": {
                "prompt_source": template,
                "template_variables": {"query": "Where does the speaker live?"},
            },
        }
    )
    assert result == {
        "prompt_builder": {
            "prompt": "Here is the document: Hello world, I live in Berlin \\n Answer: Where does the speaker live?"
        }
    }