Fix: claude max tokens. (#6484)

### What problem does this PR solve?

#6458

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Kevin Hu 2025-03-25 10:41:55 +08:00 committed by GitHub
parent 542cf16292
commit 095fc84cf2

@@ -1443,7 +1443,7 @@ class AnthropicChat(Base):
             del gen_conf["presence_penalty"]
         if "frequency_penalty" in gen_conf:
             del gen_conf["frequency_penalty"]
-        gen_conf["max_tokens"] = 8196
+        gen_conf["max_tokens"] = 8192
         if "haiku" in self.model_name or "opus" in self.model_name:
             gen_conf["max_tokens"] = 4096
@@ -1477,7 +1477,7 @@ class AnthropicChat(Base):
             del gen_conf["presence_penalty"]
         if "frequency_penalty" in gen_conf:
             del gen_conf["frequency_penalty"]
-        gen_conf["max_tokens"] = 8196
+        gen_conf["max_tokens"] = 8192
         if "haiku" in self.model_name or "opus" in self.model_name:
             gen_conf["max_tokens"] = 4096