Docs: remove max tokens. (#6198)

### What problem does this PR solve?

#6178

### Type of change

- [x] Documentation Update
This commit is contained in:
Kevin Hu 2025-03-18 11:05:06 +08:00 committed by GitHub
parent 1b9f63f799
commit 5841aa8189
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -1577,7 +1577,6 @@ Success:
"language": "English",
"llm": {
"frequency_penalty": 0.7,
"max_tokens": 512,
"model_name": "qwen-plus@Tongyi-Qianwen",
"presence_penalty": 0.4,
"temperature": 0.1,
@@ -1825,7 +1824,6 @@ Success:
"language": "English",
"llm": {
"frequency_penalty": 0.7,
"max_tokens": 512,
"model_name": "qwen-plus@Tongyi-Qianwen",
"presence_penalty": 0.4,
"temperature": 0.1,
@@ -2807,7 +2805,6 @@ Success:
"cite": true,
"frequency_penalty": 0.7,
"llm_id": "gpt-4o___OpenAI-API@OpenAI-API-Compatible",
"max_tokens": 256,
"message_history_window_size": 12,
"parameters": [],
"presence_penalty": 0.4,
@@ -2854,7 +2851,6 @@ Success:
"frequency_penalty": 0.7,
"llm_id": "gpt-4o___OpenAI-API@OpenAI-API-Compatible",
"maxTokensEnabled": true,
"max_tokens": 256,
"message_history_window_size": 12,
"parameters": [],
"presencePenaltyEnabled": true,