fix: max_tokens does not exist in generate summary when calculating rest tokens (#891)

This commit is contained in:
takatost 2023-08-17 16:33:32 +08:00 committed by GitHub
parent 4f5f9506ab
commit 2f7b234cc5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -51,6 +51,7 @@ class LLMGenerator:
prompt_with_empty_context = prompt.format(context='')
prompt_tokens = model_instance.get_num_tokens([PromptMessage(content=prompt_with_empty_context)])
max_context_token_length = model_instance.model_rules.max_tokens.max
max_context_token_length = max_context_token_length if max_context_token_length else 1500
rest_tokens = max_context_token_length - prompt_tokens - max_tokens - 1
context = ''