Fix: change ollama default num_ctx. (#6395)

### What problem does this PR solve?

#6163

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Kevin Hu 2025-03-21 16:22:03 +08:00 committed by GitHub
parent 85480f6292
commit a2a4bfe3e3
2 changed files with 19 additions and 14 deletions


@@ -50,26 +50,29 @@ class CategorizeParam(GenerateParam):
         for c, desc in self.category_description.items():
             if desc.get("description"):
                 descriptions.append(
-                    "--------------------\nCategory: {}\nDescription: {}\n".format(c, desc["description"]))
+                    "\nCategory: {}\nDescription: {}".format(c, desc["description"]))
 
         self.prompt = """
-You're a text classifier. You need to categorize the users questions into {} categories,
-namely: {}
-Here's description of each category:
-{}
+Role: You're a text classifier.
+Task: You need to categorize the users questions into {} categories, namely: {}
 
-You could learn from the following examples:
-{}
-You could learn from the above examples.
-Just mention the category names, no need for any additional words.
-
----- Real Data ----
-{}
+Here's description of each category:
+{}
+
+You could learn from the following examples:
+{}
+You could learn from the above examples.
+
+Requirements:
+- Just mention the category names, no need for any additional words.
+
+---- Real Data ----
+USER: {}\n
         """.format(
             len(self.category_description.keys()),
             "/".join(list(self.category_description.keys())),
             "\n".join(descriptions),
-            "- ".join(cate_lines),
+            "\n\n- ".join(cate_lines),
             chat_hist
         )
         return self.prompt
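For illustration, here is a minimal, self-contained sketch of how the restructured classifier prompt renders; the category names, descriptions, few-shot lines, and the final user question are made-up placeholders, not data from this PR:

```python
# Hypothetical inputs, only to show the shape of the rendered prompt.
category_description = {
    "product": {"description": "Questions about product features and plans."},
    "billing": {"description": "Questions about invoices and payments."},
}
# cate_lines stands in for the per-category few-shot examples the component builds elsewhere.
cate_lines = [
    "USER: What does the Pro plan include?\nCategory: product",
    "USER: Why was I charged twice?\nCategory: billing",
]
chat_hist = "How do I update my credit card?"

descriptions = []
for c, desc in category_description.items():
    if desc.get("description"):
        descriptions.append(
            "\nCategory: {}\nDescription: {}".format(c, desc["description"]))

prompt = """
Role: You're a text classifier.
Task: You need to categorize the users questions into {} categories, namely: {}

Here's description of each category:
{}

You could learn from the following examples:
{}
You could learn from the above examples.

Requirements:
- Just mention the category names, no need for any additional words.

---- Real Data ----
USER: {}
""".format(
    len(category_description.keys()),
    "/".join(list(category_description.keys())),
    "\n".join(descriptions),
    "\n\n- ".join(cate_lines),
    chat_hist,
)
print(prompt)
```

The switch from `"- ".join(cate_lines)` to `"\n\n- ".join(cate_lines)` is what separates the few-shot example blocks with a blank line and a leading "- " instead of gluing them together on a single line.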


@@ -441,7 +441,9 @@ class OllamaChat(Base):
         if "max_tokens" in gen_conf:
             del gen_conf["max_tokens"]
         try:
-            options = {}
+            options = {
+                "num_ctx": 32768
+            }
             if "temperature" in gen_conf:
                 options["temperature"] = gen_conf["temperature"]
             if "max_tokens" in gen_conf: