Feat/advanced prompt enhancement (#1340)
parent 3efaa713da
commit 695841a3cf
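In summary, the diff touches four spots: the console controller for /app/prompt-templates now calls AdvancedPromptTemplateService.get_prompt as a classmethod instead of instantiating the service; the advanced prompt template constants gain a top-level "stop" sequence; the service converts every method to a @classmethod, corrects its type annotations, and deep-copies the context constants before use; and AppModelConfigService gains a guard that rejects more than four stop sequences.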
@@ -20,7 +20,6 @@ class AdvancedPromptTemplateList(Resource):
         parser.add_argument('model_name', type=str, required=True, location='args')
         args = parser.parse_args()

-        service = AdvancedPromptTemplateService()
-        return service.get_prompt(args)
+        return AdvancedPromptTemplateService.get_prompt(args)


 api.add_resource(AdvancedPromptTemplateList, '/app/prompt-templates')
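Since get_prompt is now a classmethod, the resource no longer builds a throwaway service instance. For orientation, a client hits the registered route with the four query arguments; a rough sketch follows, where the base URL, the console API prefix, and the model name are assumptions, not taken from the diff:

import requests

# Hypothetical client call against the route registered above; base URL,
# console API prefix and auth are assumptions for illustration only.
resp = requests.get(
    "http://localhost:5001/console/api/app/prompt-templates",
    params={
        "app_mode": "chat",             # 'chat' or 'completion'
        "model_mode": "completion",     # 'completion' or 'chat'
        "model_name": "gpt-3.5-turbo",  # hypothetical model name
        "has_context": "true",          # string flag, see the service below
    },
)
prompt_template = resp.json()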
@@ -11,7 +11,8 @@ CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "Human",
             "assistant_prefix": "Assistant"
         }
-    }
+    },
+    "stop": ["Human:"]
 }

 CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -37,7 +38,8 @@ COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["Human:"]
 }

 BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
@@ -49,7 +51,8 @@ BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "用户",
             "assistant_prefix": "助手"
         }
-    }
+    },
+    "stop": ["用户:"]
 }

 BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -75,5 +78,6 @@ BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["用户:"]
 }
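All four template hunks make the same edit: each completion-style config gains a top-level "stop" list so generation is cut off before the model writes the next "Human:" turn itself (the Baichuan variants use "用户:", i.e. "User:", with "助手" meaning "Assistant"). Assembled, a config now has roughly this shape; the nesting under completion_prompt_config matches the key paths used by the service below, the "text" value is abbreviated, and the conversation_histories_role wrapper key is an assumption, since the hunks only show its contents:

CHAT_APP_COMPLETION_PROMPT_CONFIG = {
    "completion_prompt_config": {
        "prompt": {
            "text": "{{#pre_prompt#}}"  # abbreviated; the chat-app variant is longer
        },
        "conversation_histories_role": {  # wrapper key name assumed
            "user_prefix": "Human",
            "assistant_prefix": "Assistant"
        }
    },
    "stop": ["Human:"]  # new: halt before a fabricated next human turn
}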
@@ -6,51 +6,58 @@ from core.prompt.advanced_prompt_templates import CHAT_APP_COMPLETION_PROMPT_CON

 class AdvancedPromptTemplateService:

-    def get_prompt(self, args: dict) -> dict:
+    @classmethod
+    def get_prompt(cls, args: dict) -> dict:
         app_mode = args['app_mode']
         model_mode = args['model_mode']
         model_name = args['model_name']
         has_context = args['has_context']

         if 'baichuan' in model_name:
-            return self.get_baichuan_prompt(app_mode, model_mode, has_context)
+            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
         else:
-            return self.get_common_prompt(app_mode, model_mode, has_context)
+            return cls.get_common_prompt(app_mode, model_mode, has_context)

-    def get_common_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
+    @classmethod
+    def get_common_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        context_prompt = copy.deepcopy(CONTEXT)

         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)

-    def get_completion_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']

         return prompt_template

-    def get_chat_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']

         return prompt_template

-    def get_baichuan_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
+    @classmethod
+    def get_baichuan_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)

         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
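Two details are worth noting in this hunk. First, has_context is the raw query-string value, which is why the annotations move from bool to str and the comparisons test == 'true'. Second, the context constants are deep-copied before being prepended, so request handling can never mutate the module-level templates in place. A minimal usage sketch, with the import path assumed:

# Import path assumed for illustration.
from services.advanced_prompt_template_service import AdvancedPromptTemplateService

# Every method is now a classmethod, so no instance is required.
template = AdvancedPromptTemplateService.get_prompt({
    'app_mode': 'chat',
    'model_mode': 'completion',
    'model_name': 'claude-2',  # no 'baichuan' substring, so the common branch runs
    'has_context': 'true',     # string flag straight from the query string
})

# With has_context == 'true', the CONTEXT block has been prepended here.
print(template['completion_prompt_config']['prompt']['text'])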
@@ -57,6 +57,9 @@ class AppModelConfigService:
         elif not isinstance(cp["stop"], list):
             raise ValueError("stop in model.completion_params must be of list type")

+        if len(cp["stop"]) > 4:
+            raise ValueError("stop sequences must be less than 4")
+
         # Filter out extra parameters
         filtered_cp = {
             "max_tokens": cp["max_tokens"],
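The added guard caps completion_params stop lists at four entries, in line with the four-sequence limit common among providers such as OpenAI. A standalone sketch of the combined validation, with a hypothetical function name:

def validate_stop_param(cp: dict) -> None:
    # Hypothetical standalone version of the checks shown in the hunk.
    if "stop" not in cp:
        return
    if not isinstance(cp["stop"], list):
        raise ValueError("stop in model.completion_params must be of list type")
    if len(cp["stop"]) > 4:
        raise ValueError("stop sequences must be less than 4")

validate_stop_param({"stop": ["Human:"]})  # passes silently

try:
    validate_stop_param({"stop": ["a", "b", "c", "d", "e"]})
except ValueError as e:
    print(e)  # -> stop sequences must be less than 4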