Mirror of https://github.com/deepset-ai/haystack.git, synced 2025-09-22 06:33:43 +00:00.
fix: increase max token length for openai 16k models (#5145)
This commit is contained in:
parent
20c1f23fff
commit
58c022ef86
@@ -84,6 +84,9 @@ def _openai_text_completion_tokenization_details(model_name: str):
     if "text-davinci" in model_name:
         max_tokens_limit = 4097
         tokenizer_name = model_tokenizer
+    elif model_name.startswith("gpt-3.5-turbo-16k"):
+        max_tokens_limit = 16384
+        tokenizer_name = model_tokenizer
     elif model_name.startswith("gpt-3"):
         max_tokens_limit = 4096
         tokenizer_name = model_tokenizer
|
Loading…
x
Reference in New Issue
Block a user