From 8d04f28e11c9383e7948cdef94ca36de6c1a97a7 Mon Sep 17 00:00:00 2001 From: Fanli Lin Date: Wed, 2 Aug 2023 22:34:33 +0800 Subject: [PATCH] fix: hf agent outputs the prompt text while the openai agent does not (#5461) * add skip prompt * fix formatting * add release note * add release note * Update releasenotes/notes/add-skip-prompt-for-hf-model-agent-89aef2838edb907c.yaml Co-authored-by: Daria Fokina * Update haystack/nodes/prompt/invocation_layer/handlers.py Co-authored-by: bogdankostic * Update haystack/nodes/prompt/invocation_layer/handlers.py Co-authored-by: bogdankostic * Update haystack/nodes/prompt/invocation_layer/hugging_face.py Co-authored-by: bogdankostic * add a unit test * add a unit test2 * add skip prompt * Revert "add skip prompt" This reverts commit b1ba938c94b67a4fd636d321945990aabd2c5b2a. * add unit test --------- Co-authored-by: Daria Fokina Co-authored-by: bogdankostic --- haystack/nodes/prompt/invocation_layer/handlers.py | 2 +- ...prompt-for-hf-model-agent-89aef2838edb907c.yaml | 4 ++++ test/prompt/invocation_layer/test_hugging_face.py | 14 ++++++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 releasenotes/notes/add-skip-prompt-for-hf-model-agent-89aef2838edb907c.yaml diff --git a/haystack/nodes/prompt/invocation_layer/handlers.py b/haystack/nodes/prompt/invocation_layer/handlers.py index e764100d7..446ddedf0 100644 --- a/haystack/nodes/prompt/invocation_layer/handlers.py +++ b/haystack/nodes/prompt/invocation_layer/handlers.py @@ -47,7 +47,7 @@ class HFTokenStreamingHandler(TextStreamer): # pylint: disable=useless-object-i stream_handler: "TokenStreamingHandler", ): transformers_import.check() - super().__init__(tokenizer=tokenizer) # type: ignore + super().__init__(tokenizer=tokenizer, skip_prompt=True) # type: ignore self.token_handler = stream_handler def on_finalized_text(self, token: str, stream_end: bool = False): diff --git a/releasenotes/notes/add-skip-prompt-for-hf-model-agent-89aef2838edb907c.yaml
b/releasenotes/notes/add-skip-prompt-for-hf-model-agent-89aef2838edb907c.yaml new file mode 100644 index 000000000..51760da92 --- /dev/null +++ b/releasenotes/notes/add-skip-prompt-for-hf-model-agent-89aef2838edb907c.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Fix the bug that the responses of Agents using local HF models contain the prompt text. diff --git a/test/prompt/invocation_layer/test_hugging_face.py b/test/prompt/invocation_layer/test_hugging_face.py index e2e721a7c..7c3c43a10 100644 --- a/test/prompt/invocation_layer/test_hugging_face.py +++ b/test/prompt/invocation_layer/test_hugging_face.py @@ -632,3 +632,17 @@ def test_tokenizer_loading_unsupported_model_with_tokenizer_class_in_config( invocation_layer = HFLocalInvocationLayer(model_name_or_path="unsupported_model", trust_remote_code=True) assert not mock_tokenizer.called assert not caplog.text + + +@pytest.mark.unit +def test_skip_prompt_is_set_in_hf_text_streamer(mock_pipeline, mock_get_task): + """ + Test that skip_prompt is set in HFTokenStreamingHandler. Otherwise, we will output prompt text. + """ + layer = HFLocalInvocationLayer(stream=True) + + layer.invoke(prompt="Tell me hello") + + _, kwargs = layer.pipe.call_args + assert "streamer" in kwargs and isinstance(kwargs["streamer"], HFTokenStreamingHandler) + assert kwargs["streamer"].skip_prompt