Mirror of https://github.com/deepset-ai/haystack.git
Synced 2026-01-08 21:20:52 +00:00
fix: leading whitespace is missing in the generated text when using stop_words (#5511)
* bug fix * add release note * Update releasenotes/notes/fix-stop-words-strip-issue-22ce51306e7b91e4.yaml Co-authored-by: Stefano Fiorucci <44616784+anakin87@users.noreply.github.com> * Update releasenotes/notes/fix-stop-words-strip-issue-22ce51306e7b91e4.yaml Co-authored-by: Stefano Fiorucci <44616784+anakin87@users.noreply.github.com> --------- Co-authored-by: Stefano Fiorucci <44616784+anakin87@users.noreply.github.com>
This commit is contained in:
parent: abc6737e63
commit: 4496fc6afd
@ -280,7 +280,7 @@ class HFLocalInvocationLayer(PromptModelInvocationLayer):
|
||||
# We want to exclude it to be consistent with other invocation layers
|
||||
for idx, _ in enumerate(generated_texts):
|
||||
for stop_word in stop_words:
|
||||
generated_texts[idx] = generated_texts[idx].replace(stop_word, "").strip()
|
||||
generated_texts[idx] = generated_texts[idx].replace(stop_word, "").rstrip()
|
||||
return generated_texts
|
||||
|
||||
def _ensure_token_limit(self, prompt: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dict[str, str]]]:
|
||||
|
||||
@ -0,0 +1,4 @@
|
||||
---
|
||||
fixes:
|
||||
- |
|
||||
Ensure the leading whitespace in the generated text is preserved when using `stop_words` in the Hugging Face invocation layer of the PromptNode.
|
||||
Loading…
x
Reference in New Issue
Block a user