Mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git (synced 2025-06-04 11:24:00 +08:00)

### What problem does this PR solve?

Hello, our use case requires the LLM agent to invoke some tools, so I made a simple implementation here. This PR does two things:

1. A simple plugin mechanism based on `pluginlib`. The mechanism lives in the `plugin` directory and, for now, only loads plugins from `plugin/embedded_plugins`. A sample plugin, `bad_calculator.py`, is placed in `plugin/embedded_plugins/llm_tools`; it accepts two numbers `a` and `b`, then returns the deliberately wrong result `a + b + 100`. In the future, the mechanism can load plugins from external locations with little code change. Plugins are divided into types; the only type supported in this PR is `llm_tools`, whose plugins must subclass the `LLMToolPlugin` class in `plugin/llm_tool_plugin.py`. More plugin types can be added later. A sketch of such a plugin follows this description.
2. A tool selector in the `Generate` component, used to select one or more tools for the LLM. With the `bad_calculator` tool enabled, the `qwen-max` model invokes it and returns the intentionally wrong sum.

### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [x] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):

Co-authored-by: Yingfeng <yingfeng.zhang@gmail.com>
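For concreteness, here is a minimal sketch of what a plugin like `bad_calculator.py` could look like under this mechanism. The class name, metadata strings, and import path are illustrative assumptions, not copied from the repository; only the `a + b + 100` behavior is taken from the description above.

```python
# plugin/embedded_plugins/llm_tools/bad_calculator.py (hypothetical sketch)
from plugin.llm_tool_plugin import LLMToolMetadata, LLMToolPlugin


class BadCalculatorPlugin(LLMToolPlugin):
    # pluginlib discovers and registers subclasses of the
    # @pluginlib.Parent class automatically.
    _version_ = "1.0.0"

    @classmethod
    def get_metadata(cls) -> LLMToolMetadata:
        return {
            "name": "bad_calculator",
            "displayName": "Bad Calculator",
            "description": "Adds two numbers but returns a wrong result.",
            "displayDescription": "Adds two numbers (incorrectly, for testing tool calls).",
            "parameters": {
                "a": {
                    "type": "number",
                    "description": "The first number",
                    "displayDescription": "The first number",
                    "required": True,
                },
                "b": {
                    "type": "number",
                    "description": "The second number",
                    "displayDescription": "The second number",
                    "required": True,
                },
            },
        }

    def invoke(self, a: int, b: int) -> str:
        # Deliberately off by 100, so it is obvious when the LLM used the tool.
        return str(a + b + 100)
```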
`plugin/llm_tool_plugin.py`:

```python
from typing import Any, TypedDict

import pluginlib

from .common import PLUGIN_TYPE_LLM_TOOLS


class LLMToolParameter(TypedDict):
    # Schema of a single tool parameter; the display* fields are
    # human-facing strings for the web UI.
    type: str
    description: str
    displayDescription: str
    required: bool


class LLMToolMetadata(TypedDict):
    # Full description of a tool: its identity, human-facing strings,
    # and its parameters keyed by parameter name.
    name: str
    displayName: str
    description: str
    displayDescription: str
    parameters: dict[str, LLMToolParameter]


@pluginlib.Parent(PLUGIN_TYPE_LLM_TOOLS)
class LLMToolPlugin:
    # Base class that every `llm_tools` plugin must subclass.
    @classmethod
    @pluginlib.abstractmethod
    def get_metadata(cls) -> LLMToolMetadata:
        pass

    def invoke(self, **kwargs) -> str:
        raise NotImplementedError


def llm_tool_metadata_to_openai_tool(llm_tool_metadata: LLMToolMetadata) -> dict[str, Any]:
    # Convert our metadata format into the OpenAI function-calling tool schema.
    return {
        "type": "function",
        "function": {
            "name": llm_tool_metadata["name"],
            "description": llm_tool_metadata["description"],
            "parameters": {
                "type": "object",
                "properties": {
                    k: {
                        "type": p["type"],
                        "description": p["description"],
                    }
                    for k, p in llm_tool_metadata["parameters"].items()
                },
                "required": [k for k, p in llm_tool_metadata["parameters"].items() if p["required"]],
            },
        },
    }
```
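A hypothetical usage sketch of the converter, reusing the `BadCalculatorPlugin` sketch above; the import paths are assumptions based on the module layout the PR describes:

```python
from plugin.llm_tool_plugin import llm_tool_metadata_to_openai_tool
from plugin.embedded_plugins.llm_tools.bad_calculator import BadCalculatorPlugin

# Convert the plugin's metadata into the OpenAI function-calling schema,
# ready to be passed as one entry of a chat-completion `tools` array.
openai_tool = llm_tool_metadata_to_openai_tool(BadCalculatorPlugin.get_metadata())
assert openai_tool["function"]["name"] == "bad_calculator"
assert openai_tool["function"]["parameters"]["required"] == ["a", "b"]

# When the model emits a tool call, dispatch it back to the plugin. Note that
# the converter drops the display* fields: they are for the UI, not the model.
result = BadCalculatorPlugin().invoke(a=1, b=2)
print(result)  # "103" — the deliberately wrong sum (a + b + 100)
```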