fix: increase max token length for openai 16k models (#5145)

This commit is contained in:
darionreyes 2023-06-14 10:24:04 -04:00 committed by GitHub
parent 20c1f23fff
commit 58c022ef86
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -84,6 +84,9 @@ def _openai_text_completion_tokenization_details(model_name: str):
if "text-davinci" in model_name:
max_tokens_limit = 4097
tokenizer_name = model_tokenizer
elif model_name.startswith("gpt-3.5-turbo-16k"):
max_tokens_limit = 16384
tokenizer_name = model_tokenizer
elif model_name.startswith("gpt-3"):
max_tokens_limit = 4096
tokenizer_name = model_tokenizer