From f04b2f3cee056c6faa7230462a7d4ef2e4b1661c Mon Sep 17 00:00:00 2001
From: Sebastian
Date: Fri, 17 Mar 2023 09:43:23 +0100
Subject: [PATCH] Update test to reflect change in max token length (#4451)

---
 test/prompt/test_prompt_node.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/prompt/test_prompt_node.py b/test/prompt/test_prompt_node.py
index b23ec9cd7..a9b8fc6d7 100644
--- a/test/prompt/test_prompt_node.py
+++ b/test/prompt/test_prompt_node.py
@@ -898,7 +898,7 @@ class TestTokenLimit:
         with caplog.at_level(logging.WARNING):
             _ = prompt_node.prompt(tt, documents=["Berlin is an amazing city."])
         assert "The prompt has been truncated from" in caplog.text
-        assert "and answer length (2000 tokens) fits within the max token limit (2048 tokens)." in caplog.text
+        assert "and answer length (2000 tokens) fits within the max token limit (2049 tokens)." in caplog.text


 class TestRunBatch: