From 9b247fccd48f97b0b5621af36afc73c8ec66f3a6 Mon Sep 17 00:00:00 2001
From: takatost
Date: Wed, 23 Aug 2023 22:24:50 +0800
Subject: [PATCH] feat: adjust hf max tokens (#979)

---
 api/core/model_providers/providers/huggingface_hub_provider.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/core/model_providers/providers/huggingface_hub_provider.py b/api/core/model_providers/providers/huggingface_hub_provider.py
index 0c638a4e6b..0105831136 100644
--- a/api/core/model_providers/providers/huggingface_hub_provider.py
+++ b/api/core/model_providers/providers/huggingface_hub_provider.py
@@ -51,7 +51,7 @@ class HuggingfaceHubProvider(BaseModelProvider):
             top_p=KwargRule[float](min=0.01, max=0.99, default=0.7),
             presence_penalty=KwargRule[float](enabled=False),
             frequency_penalty=KwargRule[float](enabled=False),
-            max_tokens=KwargRule[int](alias='max_new_tokens', min=10, max=1500, default=200),
+            max_tokens=KwargRule[int](alias='max_new_tokens', min=10, max=4000, default=200),
         )
 
     @classmethod
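
Note: the patch above raises the upper bound that the Hugging Face Hub provider allows for max_tokens (exposed to the model as max_new_tokens via the alias) from 1500 to 4000. The following is a minimal sketch of how such a rule could clamp a user-supplied value; SimpleKwargRule and apply_rule are hypothetical stand-ins for illustration only, not the actual KwargRule implementation in the codebase.

# Minimal sketch (assumed, simplified stand-in for KwargRule[int] in the diff above):
# shows how a bounded kwarg could be clamped and mapped to the aliased parameter name.
from dataclasses import dataclass
from typing import Optional


@dataclass
class SimpleKwargRule:
    """Hypothetical stand-in for the KwargRule[int] used in the patch."""
    alias: Optional[str] = None
    min: Optional[int] = None
    max: Optional[int] = None
    default: Optional[int] = None
    enabled: bool = True


def apply_rule(rule: SimpleKwargRule, value: Optional[int]) -> dict:
    """Clamp `value` into [rule.min, rule.max] and emit it under the aliased key."""
    if not rule.enabled:
        return {}
    if value is None:
        value = rule.default
    if rule.min is not None:
        value = max(value, rule.min)
    if rule.max is not None:
        value = min(value, rule.max)
    key = rule.alias or 'max_tokens'
    return {key: value}


# With the patched upper bound of 4000, an over-large request is clamped:
rule = SimpleKwargRule(alias='max_new_tokens', min=10, max=4000, default=200)
print(apply_rule(rule, 6000))  # {'max_new_tokens': 4000}
print(apply_rule(rule, None))  # {'max_new_tokens': 200} (falls back to the default)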