The default max_tokens of 215 is too small; answers are often cut off. Raise it to 512 to address this issue. (#845)

### What problem does this PR solve?

The default `max_tokens` of 215 is too small, so answers are frequently truncated. This change raises the default to 512 in both the backend `llm_setting` default and the web UI model presets.

### Type of change

- [x] Refactoring
Author: dashi6174, 2024-05-20 17:25:19 +08:00 (committed by GitHub)
Parent: a7bd427116
Commit: 9a01d1b876
2 changed files with 3 additions and 3 deletions


```diff
@@ -759,7 +759,7 @@ class Dialog(DataBaseModel):
         help_text="English|Chinese")
     llm_id = CharField(max_length=128, null=False, help_text="default llm ID")
     llm_setting = JSONField(null=False, default={"temperature": 0.1, "top_p": 0.3, "frequency_penalty": 0.7,
-                                                 "presence_penalty": 0.4, "max_tokens": 215})
+                                                 "presence_penalty": 0.4, "max_tokens": 512})
     prompt_type = CharField(
         max_length=16,
         null=False,
```


```diff
@@ -31,14 +31,14 @@ export const settledModelVariableMap = {
     top_p: 0.3,
     frequency_penalty: 0.7,
     presence_penalty: 0.4,
-    max_tokens: 215,
+    max_tokens: 512,
   },
   [ModelVariableType.Balance]: {
     temperature: 0.5,
     top_p: 0.5,
     frequency_penalty: 0.7,
     presence_penalty: 0.4,
-    max_tokens: 215,
+    max_tokens: 512,
   },
 };
```
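
For context, a minimal sketch of how the updated default would flow into a completion request. The `DEFAULT_LLM_SETTING` dict mirrors the new backend default from this change; `build_chat_params` is a hypothetical helper (not part of this PR or RAGFlow's actual call path) that only illustrates that per-dialog overrides still take precedence over the stored defaults.

```python
from typing import Optional

# Mirrors the new default llm_setting stored on a Dialog after this change.
DEFAULT_LLM_SETTING = {
    "temperature": 0.1,
    "top_p": 0.3,
    "frequency_penalty": 0.7,
    "presence_penalty": 0.4,
    "max_tokens": 512,  # previously 215, which often truncated longer answers
}


def build_chat_params(overrides: Optional[dict] = None) -> dict:
    """Hypothetical helper: merge per-dialog overrides on top of the defaults."""
    params = dict(DEFAULT_LLM_SETTING)
    params.update(overrides or {})
    return params


if __name__ == "__main__":
    # With no overrides, completions may now run up to 512 tokens.
    print(build_chat_params())
    # An explicit per-dialog setting still wins over the default.
    print(build_chat_params({"max_tokens": 1024}))
```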