From 58c022ef86c4ce0411efa72d5ed96c160f5920a9 Mon Sep 17 00:00:00 2001
From: darionreyes
Date: Wed, 14 Jun 2023 10:24:04 -0400
Subject: [PATCH] fix: increase max token length for openai 16k models (#5145)

---
 haystack/utils/openai_utils.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/haystack/utils/openai_utils.py b/haystack/utils/openai_utils.py
index 82fb829b5..19181acf5 100644
--- a/haystack/utils/openai_utils.py
+++ b/haystack/utils/openai_utils.py
@@ -84,6 +84,9 @@ def _openai_text_completion_tokenization_details(model_name: str):
     if "text-davinci" in model_name:
         max_tokens_limit = 4097
         tokenizer_name = model_tokenizer
+    elif model_name.startswith("gpt-3.5-turbo-16k"):
+        max_tokens_limit = 16384
+        tokenizer_name = model_tokenizer
     elif model_name.startswith("gpt-3"):
         max_tokens_limit = 4096
         tokenizer_name = model_tokenizer
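
Note on branch ordering: `"gpt-3.5-turbo-16k"` also matches the broader `model_name.startswith("gpt-3")` check, so the new `elif` has to sit above the generic `gpt-3` branch; otherwise the 16k models would keep the 4096 limit. Below is a minimal, self-contained sketch of that prefix-ordering logic; the helper name `max_tokens_for` and the 2049 fallback are illustrative assumptions, not the actual Haystack function.

```python
def max_tokens_for(model_name: str) -> int:
    # The more specific prefix must be checked first: "gpt-3.5-turbo-16k..."
    # would otherwise fall into the generic "gpt-3" branch and get 4096.
    if model_name.startswith("gpt-3.5-turbo-16k"):
        return 16384  # 16k-context chat models
    elif model_name.startswith("gpt-3"):
        return 4096   # other gpt-3.x models
    return 2049       # assumed fallback for older completion models


assert max_tokens_for("gpt-3.5-turbo-16k-0613") == 16384
assert max_tokens_for("gpt-3.5-turbo") == 4096
```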