fix template error (#2653)

### What problem does this PR solve?

#2478

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Kevin Hu authored on 2024-09-29 14:47:06 +08:00 · committed by GitHub
commit 25f07e8e29 · parent daa65199e8

@@ -370,8 +370,7 @@
 "params": {
     "cite": false,
     "frequency_penalty": 0.7,
-    "llm_id": "gpt-4@OpenAI",
-    "max_tokens": 256,
+    "llm_id": "deepseek-chat@DeepSeek",
     "message_history_window_size": 12,
     "output": null,
     "output_var_name": "output",
@@ -496,7 +495,7 @@
 "message_history_window_size": 22,
 "output": null,
 "output_var_name": "output",
-"time_period": "now",
+"time_period": "7d",
 "type": "weather",
 "user_type": "free",
 "web_apikey": "947e8994bc5f488f8857d618ebac1b19"
@@ -1271,7 +1270,7 @@
 "frequencyPenaltyEnabled": true,
 "frequency_penalty": 0.7,
 "llm_id": "deepseek-chat@DeepSeek",
-"maxTokensEnabled": true,
+"maxTokensEnabled": false,
 "max_tokens": 256,
 "message_history_window_size": 12,
 "parameter": "Precise",
@@ -1688,7 +1687,7 @@
 "data": {
     "form": {
         "lang": "en",
-        "time_period": "now",
+        "time_period": "7d",
         "type": "weather",
         "user_type": "free",
         "web_apikey": "947e8994bc5f488f8857d618ebac1b19"
@@ -1719,8 +1718,8 @@
 "cite": false,
 "frequencyPenaltyEnabled": true,
 "frequency_penalty": 0.7,
-"llm_id": "gpt-4@OpenAI",
-"maxTokensEnabled": true,
+"llm_id": "deepseek-chat@DeepSeek",
+"maxTokensEnabled": false,
 "max_tokens": 256,
 "message_history_window_size": 12,
 "parameter": "Precise",