Mirror of https://github.com/deepset-ai/haystack.git (synced 2025-11-06 12:53:35 +00:00)
chore: Fixing PromptNode .prompt() docstring to include the PromptTemplate object as an option (#4135)
* fix to include the PromptTemplate object as an option
* small fix
This commit is contained in:
parent a4407f8f98
commit cdb05f0f9a
@@ -777,13 +777,11 @@ class PromptNode(BaseComponent):
     def prompt(self, prompt_template: Optional[Union[str, PromptTemplate]], *args, **kwargs) -> List[str]:
         """
         Prompts the model and represents the central API for the PromptNode. It takes a prompt template,
-        a list of non-keyword and keyword arguments, and returns a list of strings - the responses from
-        the underlying model.
+        a list of non-keyword and keyword arguments, and returns a list of strings - the responses from the underlying model.
 
-        If you specify the optional prompt_template parameter, it takes precedence over the default prompt
-        template for this PromptNode.
+        If you specify the optional prompt_template parameter, it takes precedence over the default PromptTemplate for this PromptNode.
 
-        :param prompt_template: The name of the optional prompt template to use.
+        :param prompt_template: The name or object of the optional PromptTemplate to use.
         :return: A list of strings as model responses.
         """
         results = []
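For reference, a minimal usage sketch of the two forms that prompt_template accepts as documented by this change: a registered template name passed as a string, or a PromptTemplate object passed directly. The model name, the built-in template name "question-answering" and its variables, and the $documents placeholder syntax are illustrative assumptions that depend on the Haystack version, not details taken from this diff.

    from haystack.nodes import PromptNode, PromptTemplate

    # Illustrative model choice; any model supported by PromptNode can be used.
    prompt_node = PromptNode(model_name_or_path="google/flan-t5-base")

    # Option 1: pass the name of a registered prompt template as a string.
    # The "question-answering" template and its variable names are assumptions
    # about the built-in templates shipped with this Haystack version.
    responses = prompt_node.prompt(
        "question-answering",
        documents=["Berlin is the capital of Germany."],
        questions=["What is the capital of Germany?"],
    )

    # Option 2: pass a PromptTemplate object directly -- the option this commit documents.
    # The $documents placeholder syntax is version-dependent.
    summary_template = PromptTemplate(
        name="my-summarization",
        prompt_text="Summarize this text: $documents Summary:",
    )
    summaries = prompt_node.prompt(
        summary_template,
        documents=["Haystack is an open source framework for building NLP pipelines."],
    )

    print(responses)   # a list of strings, one entry per model response
    print(summaries)

In both cases the return value is a list of strings, and a PromptTemplate passed here takes precedence over the node's default template, as the updated docstring states.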