mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-19 22:09:11 +08:00)

feat: export dsl with dependencies

parent 21fd58caf9
commit f6136427a4
@@ -11,6 +11,7 @@ from controllers.console.wraps import (
     cloud_edition_billing_resource_check,
     setup_required,
 )
+from core.model_runtime.utils.encoders import jsonable_encoder
 from core.ops.ops_trace_manager import OpsTraceManager
 from fields.app_fields import (
     app_detail_fields,
@@ -90,6 +91,28 @@ class AppListApi(Resource):
         return app, 201


+class AppImportDependenciesCheckApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @cloud_edition_billing_resource_check("apps")
+    def post(self):
+        """Check dependencies"""
+        # The role of the current user in the ta table must be admin, owner, or editor
+        if not current_user.is_editor:
+            raise Forbidden()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("data", type=str, required=True, nullable=False, location="json")
+        args = parser.parse_args()
+
+        leaked_dependencies = AppDslService.check_dependencies(
+            tenant_id=current_user.current_tenant_id, data=args["data"], account=current_user
+        )
+
+        return jsonable_encoder({"leaked": leaked_dependencies}), 200
+
+
 class AppImportApi(Resource):
     @setup_required
     @login_required
@@ -365,6 +388,7 @@ class AppTraceApi(Resource):


 api.add_resource(AppListApi, "/apps")
+api.add_resource(AppImportDependenciesCheckApi, "/apps/import/dependencies/check")
 api.add_resource(AppImportApi, "/apps/import")
 api.add_resource(AppImportFromUrlApi, "/apps/import/url")
 api.add_resource(AppApi, "/apps/<uuid:app_id>")
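For orientation, a minimal client-side sketch of the new check endpoint follows; the /console/api prefix, the bearer token, and the base URL are assumptions about a typical deployment rather than part of this diff. It posts the raw DSL YAML as the "data" field and reads back the "leaked" list.

# Hypothetical usage sketch of POST /apps/import/dependencies/check on the console API.
# Base URL, token, and the YAML payload are placeholders.
import requests


def check_dsl_dependencies(base_url: str, token: str, dsl_yaml: str) -> list[dict]:
    resp = requests.post(
        f"{base_url}/console/api/apps/import/dependencies/check",
        json={"data": dsl_yaml},  # the endpoint expects the raw DSL YAML string in "data"
        headers={"Authorization": f"Bearer {token}"},
    )
    resp.raise_for_status()
    # "leaked" lists the declared dependencies that are not installed in the workspace
    return resp.json()["leaked"]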
@@ -1,4 +1,5 @@
 import datetime
+import re
 from collections.abc import Mapping
 from enum import Enum
 from typing import Any, Optional
@@ -95,21 +96,24 @@ class PluginDeclaration(BaseModel):
         return values


-class PluginEntity(BasePluginEntity):
-    name: str
-    plugin_id: str
-    plugin_unique_identifier: str
-    declaration: PluginDeclaration
-    installation_id: str
+class PluginInstallation(BasePluginEntity):
     tenant_id: str
     endpoints_setups: int
     endpoints_active: int
     runtime_type: str
+    source: PluginInstallationSource
+    meta: Mapping[str, Any]
+    plugin_id: str
+    plugin_unique_identifier: str
+
+
+class PluginEntity(PluginInstallation):
+    name: str
+    declaration: PluginDeclaration
+    installation_id: str
     version: str
     latest_version: Optional[str] = None
     latest_unique_identifier: Optional[str] = None
-    source: PluginInstallationSource
-    meta: Mapping[str, Any]

     @model_validator(mode="after")
     def set_plugin_id(self):
@@ -127,3 +131,60 @@ class GithubPackage(BaseModel):
 class GithubVersion(BaseModel):
     repo: str
     version: str
+
+
+class GenericProviderID:
+    organization: str
+    plugin_name: str
+    provider_name: str
+
+    def to_string(self) -> str:
+        return str(self)
+
+    def __str__(self) -> str:
+        return f"{self.organization}/{self.plugin_name}/{self.provider_name}"
+
+    def __init__(self, value: str) -> None:
+        # check if the value is a valid plugin id with format: $organization/$plugin_name/$provider_name
+        if not re.match(r"^[a-z0-9_-]+\/[a-z0-9_-]+\/[a-z0-9_-]+$", value):
+            # check if matches [a-z0-9_-]+, if yes, append with langgenius/$value/$value
+            if re.match(r"^[a-z0-9_-]+$", value):
+                value = f"langgenius/{value}/{value}"
+            else:
+                raise ValueError("Invalid plugin id")
+
+        self.organization, self.plugin_name, self.provider_name = value.split("/")
+
+    @property
+    def plugin_id(self) -> str:
+        return f"{self.organization}/{self.plugin_name}"
+
+
+class PluginDependency(BaseModel):
+    class Type(str, Enum):
+        Github = "github"
+        Marketplace = "marketplace"
+        Package = "package"
+
+    class Github(BaseModel):
+        repo: str
+        version: str
+        package: str
+        github_plugin_unique_identifier: str
+
+        @property
+        def plugin_unique_identifier(self) -> str:
+            return self.github_plugin_unique_identifier
+
+    class Marketplace(BaseModel):
+        marketplace_plugin_unique_identifier: str
+
+        @property
+        def plugin_unique_identifier(self) -> str:
+            return self.marketplace_plugin_unique_identifier
+
+    class Package(BaseModel):
+        plugin_unique_identifier: str
+
+    type: Type
+    value: Github | Marketplace | Package
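A short sketch of how the new GenericProviderID and PluginDependency models behave, based on the code above; the identifiers are invented for illustration.

# Illustrative only: exercises GenericProviderID normalization and a marketplace dependency.
from core.plugin.entities.plugin import GenericProviderID, PluginDependency

# A bare provider name is expanded to the langgenius/<name>/<name> form.
assert str(GenericProviderID("google")) == "langgenius/google/google"

# A fully qualified id keeps its organization and exposes the owning plugin id.
provider_id = GenericProviderID("langgenius/google/google")
assert provider_id.plugin_id == "langgenius/google"
assert provider_id.provider_name == "google"

# A marketplace dependency as it could appear in an exported DSL (identifier is made up).
dep = PluginDependency(
    type=PluginDependency.Type.Marketplace,
    value=PluginDependency.Marketplace(
        marketplace_plugin_unique_identifier="langgenius/google:0.0.1@deadbeef"
    ),
)
assert dep.value.plugin_unique_identifier == "langgenius/google:0.0.1@deadbeef"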
@@ -2,7 +2,12 @@ from collections.abc import Sequence

 from pydantic import BaseModel

-from core.plugin.entities.plugin import PluginDeclaration, PluginEntity, PluginInstallationSource
+from core.plugin.entities.plugin import (
+    PluginDeclaration,
+    PluginEntity,
+    PluginInstallation,
+    PluginInstallationSource,
+)
 from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginInstallTaskStartResponse, PluginUploadResponse
 from core.plugin.manager.base import BasePluginManager

@@ -128,6 +133,30 @@ class PluginInstallationManager(BasePluginManager):
             params={"plugin_unique_identifier": plugin_unique_identifier},
         ).declaration

+    def fetch_plugin_installation_by_ids(self, tenant_id: str, plugin_ids: Sequence[str]) -> list[PluginInstallation]:
+        """
+        Fetch plugin installations by ids.
+        """
+        return self._request_with_plugin_daemon_response(
+            "POST",
+            f"plugin/{tenant_id}/management/installation/fetch/batch",
+            list[PluginInstallation],
+            data={"plugin_ids": plugin_ids},
+            headers={"Content-Type": "application/json"},
+        )
+
+    def fetch_missing_dependencies(self, tenant_id: str, plugin_unique_identifiers: list[str]) -> list[str]:
+        """
+        Fetch missing dependencies
+        """
+        return self._request_with_plugin_daemon_response(
+            "POST",
+            f"plugin/{tenant_id}/management/installation/missing",
+            list[str],
+            data={"plugin_unique_identifiers": plugin_unique_identifiers},
+            headers={"Content-Type": "application/json"},
+        )
+
     def uninstall(self, tenant_id: str, plugin_installation_id: str) -> bool:
         """
         Uninstall a plugin.
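These two daemon calls are what the dependency check builds on; a hedged sketch of driving them directly (tenant id and identifiers are placeholders, and a reachable plugin daemon is assumed):

# Sketch only: assumes a configured plugin daemon and a valid tenant id.
from core.plugin.manager.plugin import PluginInstallationManager

manager = PluginInstallationManager()

# Which of these plugin versions are missing from the workspace?
missing = manager.fetch_missing_dependencies(
    "tenant-id-placeholder",
    ["langgenius/google:0.0.1@deadbeef", "langgenius/openai:0.0.2@cafebabe"],
)

# Resolve installed plugins (by plugin id) back into full installation records.
installations = manager.fetch_plugin_installation_by_ids(
    "tenant-id-placeholder", ["langgenius/google", "langgenius/openai"]
)
for installation in installations:
    print(installation.source, installation.plugin_unique_identifier)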
@@ -3,26 +3,13 @@ from typing import Any, Optional

 from pydantic import BaseModel

+from core.plugin.entities.plugin import GenericProviderID
 from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity
 from core.plugin.manager.base import BasePluginManager
 from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter


 class PluginToolManager(BasePluginManager):
-    def _split_provider(self, provider: str) -> tuple[str, str]:
-        """
-        split the provider to plugin_id and provider_name
-
-        provider follows format: plugin_id/provider_name
-        """
-        if "/" in provider:
-            parts = provider.split("/", -1)
-            if len(parts) >= 2:
-                return "/".join(parts[:-1]), parts[-1]
-            raise ValueError(f"invalid provider format: {provider}")
-
-        raise ValueError(f"invalid provider format: {provider}")
-
     def fetch_tool_providers(self, tenant_id: str) -> list[PluginToolProviderEntity]:
         """
         Fetch tool providers for the given tenant.
@@ -58,11 +45,11 @@ class PluginToolManager(BasePluginManager):
         """
         Fetch tool provider for the given tenant and plugin.
         """
-        plugin_id, provider_name = self._split_provider(provider)
+        tool_provider_id = GenericProviderID(provider)

         def transformer(json_response: dict[str, Any]) -> dict:
             for tool in json_response.get("data", {}).get("declaration", {}).get("tools", []):
-                tool["identity"]["provider"] = provider_name
+                tool["identity"]["provider"] = tool_provider_id.provider_name

             return json_response

@@ -70,7 +57,7 @@ class PluginToolManager(BasePluginManager):
             "GET",
             f"plugin/{tenant_id}/management/tool",
             PluginToolProviderEntity,
-            params={"provider": provider_name, "plugin_id": plugin_id},
+            params={"provider": tool_provider_id.provider_name, "plugin_id": tool_provider_id.plugin_id},
             transformer=transformer,
         )

@@ -98,7 +85,7 @@ class PluginToolManager(BasePluginManager):
         Invoke the tool with the given tenant, user, plugin, provider, name, credentials and parameters.
         """

-        plugin_id, provider_name = self._split_provider(tool_provider)
+        tool_provider_id = GenericProviderID(tool_provider)

         response = self._request_with_plugin_daemon_response_stream(
             "POST",
@@ -110,14 +97,14 @@ class PluginToolManager(BasePluginManager):
                 "app_id": app_id,
                 "message_id": message_id,
                 "data": {
-                    "provider": provider_name,
+                    "provider": tool_provider_id.provider_name,
                     "tool": tool_name,
                     "credentials": credentials,
                     "tool_parameters": tool_parameters,
                 },
             },
             headers={
-                "X-Plugin-ID": plugin_id,
+                "X-Plugin-ID": tool_provider_id.plugin_id,
                 "Content-Type": "application/json",
             },
         )
@@ -129,7 +116,7 @@ class PluginToolManager(BasePluginManager):
         """
        validate the credentials of the provider
         """
-        plugin_id, provider_name = self._split_provider(provider)
+        tool_provider_id = GenericProviderID(provider)

         response = self._request_with_plugin_daemon_response_stream(
             "POST",
@@ -138,12 +125,12 @@ class PluginToolManager(BasePluginManager):
             data={
                 "user_id": user_id,
                 "data": {
-                    "provider": provider_name,
+                    "provider": tool_provider_id.provider_name,
                     "credentials": credentials,
                 },
             },
             headers={
-                "X-Plugin-ID": plugin_id,
+                "X-Plugin-ID": tool_provider_id.plugin_id,
                 "Content-Type": "application/json",
             },
         )
@@ -167,7 +154,7 @@ class PluginToolManager(BasePluginManager):
         """
        get the runtime parameters of the tool
         """
-        plugin_id, provider_name = self._split_provider(provider)
+        tool_provider_id = GenericProviderID(provider)

         class RuntimeParametersResponse(BaseModel):
             parameters: list[ToolParameter]
@@ -182,13 +169,13 @@ class PluginToolManager(BasePluginManager):
                 "app_id": app_id,
                 "message_id": message_id,
                 "data": {
-                    "provider": provider_name,
+                    "provider": tool_provider_id.provider_name,
                     "tool": tool,
                     "credentials": credentials,
                 },
             },
             headers={
-                "X-Plugin-ID": plugin_id,
+                "X-Plugin-ID": tool_provider_id.plugin_id,
                 "Content-Type": "application/json",
             },
         )
@@ -1,5 +1,4 @@
 import base64
-import re
 from enum import Enum
 from typing import Any, Optional, Union

@@ -467,19 +466,3 @@ class ToolInvokeFrom(Enum):
     WORKFLOW = "workflow"
     AGENT = "agent"
     PLUGIN = "plugin"
-
-
-class ToolProviderID:
-    organization: str
-    plugin_name: str
-    provider_name: str
-
-    def __str__(self) -> str:
-        return f"{self.organization}/{self.plugin_name}/{self.provider_name}"
-
-    def __init__(self, value: str) -> None:
-        # check if the value is a valid plugin id with format: $organization/$plugin_name/$provider_name
-        if not re.match(r"^[a-z0-9_-]+\/[a-z0-9_-]+\/[a-z0-9_-]+$", value):
-            raise ValueError("Invalid plugin id")
-
-        self.organization, self.plugin_name, self.provider_name = value.split("/")
@@ -6,6 +6,7 @@ from os import listdir, path
 from threading import Lock
 from typing import TYPE_CHECKING, Any, Union, cast

+from core.plugin.entities.plugin import GenericProviderID
 from core.plugin.manager.tool import PluginToolManager
 from core.tools.__base.tool_runtime import ToolRuntime
 from core.tools.plugin_tool.provider import PluginToolProviderController
@@ -33,7 +34,6 @@ from core.tools.entities.tool_entities import (
     ApiProviderAuthType,
     ToolInvokeFrom,
     ToolParameter,
-    ToolProviderID,
     ToolProviderType,
 )
 from core.tools.errors import ToolProviderNotFoundError
@@ -164,7 +164,7 @@ class ToolManager:
             )

         if isinstance(provider_controller, PluginToolProviderController):
-            provider_id_entity = ToolProviderID(provider_id)
+            provider_id_entity = GenericProviderID(provider_id)
             # get credentials
             builtin_provider: BuiltinToolProvider | None = (
                 db.session.query(BuiltinToolProvider)
@@ -582,10 +582,8 @@ class ToolManager:

         # rewrite db_builtin_providers
         for db_provider in db_builtin_providers:
-            try:
-                ToolProviderID(db_provider.provider)
-            except Exception:
-                db_provider.provider = f"langgenius/{db_provider.provider}/{db_provider.provider}"
+            tool_provider_id = GenericProviderID(db_provider.provider)
+            db_provider.provider = tool_provider_id.to_string()

         find_db_builtin_provider = lambda provider: next(
             (x for x in db_builtin_providers if x.provider == provider), None
@@ -6,12 +6,21 @@ import yaml
 from packaging import version

 from core.helper import ssrf_proxy
+from core.model_runtime.utils.encoders import jsonable_encoder
+from core.plugin.entities.plugin import PluginDependency
+from core.workflow.nodes.enums import NodeType
+from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData
+from core.workflow.nodes.llm.entities import LLMNodeData
+from core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData
+from core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData
+from core.workflow.nodes.tool.entities import ToolNodeData
 from events.app_event import app_model_config_was_updated, app_was_created
 from extensions.ext_database import db
 from factories import variable_factory
 from models.account import Account
 from models.model import App, AppMode, AppModelConfig
 from models.workflow import Workflow
+from services.plugin.dependencies_analysis import DependenciesAnalysisService
 from services.workflow_service import WorkflowService

 from .exc import (
@@ -58,6 +67,22 @@ class AppDslService:

         return cls.import_and_create_new_app(tenant_id, data, args, account)

+    @classmethod
+    def check_dependencies(cls, tenant_id: str, data: str, account: Account) -> list[PluginDependency]:
+        """
+        Returns the leaked dependencies in current workspace
+        """
+        try:
+            import_data = yaml.safe_load(data) or {}
+        except yaml.YAMLError:
+            raise InvalidYAMLFormatError("Invalid YAML format in data argument.")
+
+        dependencies = [PluginDependency(**dep) for dep in import_data.get("dependencies", [])]
+        if not dependencies:
+            return []
+
+        return DependenciesAnalysisService.check_dependencies(tenant_id=tenant_id, dependencies=dependencies)
+
     @classmethod
     def import_and_create_new_app(cls, tenant_id: str, data: str, args: dict, account: Account) -> App:
         """
@@ -436,6 +461,13 @@ class AppDslService:
             raise ValueError("Missing draft workflow configuration, please check.")

         export_data["workflow"] = workflow.to_dict(include_secret=include_secret)
+        dependencies = cls._extract_dependencies_from_workflow(workflow)
+        export_data["dependencies"] = [
+            jsonable_encoder(d.model_dump())
+            for d in DependenciesAnalysisService.generate_dependencies(
+                tenant_id=app_model.tenant_id, dependencies=dependencies
+            )
+        ]

     @classmethod
     def _append_model_config_export_data(cls, export_data: dict, app_model: App) -> None:
@@ -449,6 +481,137 @@ class AppDslService:
             raise ValueError("Missing app configuration, please check.")

         export_data["model_config"] = app_model_config.to_dict()
+        dependencies = cls._extract_dependencies_from_model_config(app_model_config)
+        export_data["dependencies"] = [
+            jsonable_encoder(d.model_dump())
+            for d in DependenciesAnalysisService.generate_dependencies(
+                tenant_id=app_model.tenant_id, dependencies=dependencies
+            )
+        ]
+
+    @classmethod
+    def _extract_dependencies_from_workflow(cls, workflow: Workflow) -> list[str]:
+        """
+        Extract dependencies from workflow
+        :param workflow: Workflow instance
+        :return: dependencies list format like ["langgenius/google"]
+        """
+        graph = workflow.graph_dict
+        dependencies = []
+        for node in graph.get("nodes", []):
+            try:
+                typ = node.get("data", {}).get("type")
+                match typ:
+                    case NodeType.TOOL.value:
+                        tool_entity = ToolNodeData(**node["data"])
+                        dependencies.append(
+                            DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id),
+                        )
+                    case NodeType.LLM.value:
+                        llm_entity = LLMNodeData(**node["data"])
+                        dependencies.append(
+                            DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider),
+                        )
+                    case NodeType.QUESTION_CLASSIFIER.value:
+                        question_classifier_entity = QuestionClassifierNodeData(**node["data"])
+                        dependencies.append(
+                            DependenciesAnalysisService.analyze_model_provider_dependency(
+                                question_classifier_entity.model.provider
+                            ),
+                        )
+                    case NodeType.PARAMETER_EXTRACTOR.value:
+                        parameter_extractor_entity = ParameterExtractorNodeData(**node["data"])
+                        dependencies.append(
+                            DependenciesAnalysisService.analyze_model_provider_dependency(
+                                parameter_extractor_entity.model.provider
+                            ),
+                        )
+                    case NodeType.KNOWLEDGE_RETRIEVAL.value:
+                        knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"])
+                        if knowledge_retrieval_entity.retrieval_mode == "multiple":
+                            if knowledge_retrieval_entity.multiple_retrieval_config:
+                                if (
+                                    knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
+                                    == "reranking_model"
+                                ):
+                                    if knowledge_retrieval_entity.multiple_retrieval_config.reranking_model:
+                                        dependencies.append(
+                                            DependenciesAnalysisService.analyze_model_provider_dependency(
+                                                knowledge_retrieval_entity.multiple_retrieval_config.reranking_model.provider
+                                            ),
+                                        )
+                                elif (
+                                    knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
+                                    == "weighted_score"
+                                ):
+                                    if knowledge_retrieval_entity.multiple_retrieval_config.weights:
+                                        vector_setting = (
+                                            knowledge_retrieval_entity.multiple_retrieval_config.weights.vector_setting
+                                        )
+                                        dependencies.append(
+                                            DependenciesAnalysisService.analyze_model_provider_dependency(
+                                                vector_setting.embedding_provider_name
+                                            ),
+                                        )
+                        elif knowledge_retrieval_entity.retrieval_mode == "single":
+                            model_config = knowledge_retrieval_entity.single_retrieval_config
+                            if model_config:
+                                dependencies.append(
+                                    DependenciesAnalysisService.analyze_model_provider_dependency(
+                                        model_config.model.provider
+                                    ),
+                                )
+                    case _:
+                        # Handle default case or unknown node types
+                        pass
+            except Exception as e:
+                logger.exception("Error extracting node dependency", exc_info=e)

+        return dependencies
+
+    @classmethod
+    def _extract_dependencies_from_model_config(cls, model_config: AppModelConfig) -> list[str]:
+        """
+        Extract dependencies from model config
+        :param model_config: AppModelConfig instance
+        :return: dependencies list format like ["langgenius/google:1.0.0@abcdef1234567890"]
+        """
+        dependencies = []
+
+        try:
+            # completion model
+            model_dict = model_config.model_dict
+            if model_dict:
+                dependencies.append(
+                    DependenciesAnalysisService.analyze_model_provider_dependency(model_dict.get("provider"))
+                )
+
+            # reranking model
+            dataset_configs = model_config.dataset_configs_dict
+            if dataset_configs:
+                for dataset_config in dataset_configs:
+                    if dataset_config.get("reranking_model"):
+                        dependencies.append(
+                            DependenciesAnalysisService.analyze_model_provider_dependency(
+                                dataset_config.get("reranking_model", {})
+                                .get("reranking_provider_name", {})
+                                .get("provider")
+                            )
+                        )
+
+            # tools
+            agent_configs = model_config.agent_mode_dict
+            if agent_configs:
+                for agent_config in agent_configs:
+                    if agent_config.get("tools"):
+                        for tool in agent_config.get("tools", []):
+                            dependencies.append(
+                                DependenciesAnalysisService.analyze_tool_dependency(tool.get("provider_id"))
+                            )
+        except Exception as e:
+            logger.exception("Error extracting model config dependency", exc_info=e)
+
+        return dependencies


 def _check_or_fix_dsl(import_data: dict[str, Any]) -> Mapping[str, Any]:
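Export now embeds a dependencies list in the DSL and check_dependencies replays it before import; a minimal sketch of the import-side check, with an invented YAML snippet and placeholder ids:

# Hypothetical pre-import check of a DSL string; identifiers are invented.
from models.account import Account
from services.app_dsl_service import AppDslService

DSL = """\
app:
  name: demo
dependencies:
  - type: marketplace
    value:
      marketplace_plugin_unique_identifier: langgenius/google:0.0.1@deadbeef
"""


def missing_plugins_for_import(tenant_id: str, account: Account) -> list:
    # Returns the declared dependencies that are not installed in the workspace,
    # so a client could prompt the user to install them before importing.
    return AppDslService.check_dependencies(tenant_id=tenant_id, data=DSL, account=account)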
api/services/plugin/dependencies_analysis.py (new file, 100 lines)
@@ -0,0 +1,100 @@
from core.plugin.entities.plugin import GenericProviderID, PluginDependency, PluginInstallationSource
from core.plugin.manager.plugin import PluginInstallationManager


class DependenciesAnalysisService:
    @classmethod
    def analyze_tool_dependency(cls, tool_id: str) -> str:
        """
        Analyze the dependency of a tool.

        Convert the tool id to the plugin_id
        """
        try:
            tool_provider_id = GenericProviderID(tool_id)
            return tool_provider_id.plugin_id
        except Exception as e:
            raise e

    @classmethod
    def analyze_model_provider_dependency(cls, model_provider_id: str) -> str:
        """
        Analyze the dependency of a model provider.

        Convert the model provider id to the plugin_id
        """
        try:
            generic_provider_id = GenericProviderID(model_provider_id)
            return generic_provider_id.plugin_id
        except Exception as e:
            raise e

    @classmethod
    def check_dependencies(cls, tenant_id: str, dependencies: list[PluginDependency]) -> list[PluginDependency]:
        """
        Check dependencies, returns the leaked dependencies in current workspace
        """
        required_plugin_unique_identifiers = []
        for dependency in dependencies:
            required_plugin_unique_identifiers.append(dependency.value.plugin_unique_identifier)

        manager = PluginInstallationManager()
        missing_plugin_unique_identifiers = manager.fetch_missing_dependencies(
            tenant_id, required_plugin_unique_identifiers
        )

        leaked_dependencies = []
        for dependency in dependencies:
            unique_identifier = dependency.value.plugin_unique_identifier
            if unique_identifier in missing_plugin_unique_identifiers:
                leaked_dependencies.append(dependency)

        return leaked_dependencies

    @classmethod
    def generate_dependencies(cls, tenant_id: str, dependencies: list[str]) -> list[PluginDependency]:
        """
        Generate dependencies through the list of plugin ids
        """
        dependencies = list(set(dependencies))
        manager = PluginInstallationManager()
        plugins = manager.fetch_plugin_installation_by_ids(tenant_id, dependencies)
        result = []
        for plugin in plugins:
            if plugin.source == PluginInstallationSource.Github:
                result.append(
                    PluginDependency(
                        type=PluginDependency.Type.Github,
                        value=PluginDependency.Github(
                            repo=plugin.meta["repo"],
                            version=plugin.meta["version"],
                            package=plugin.meta["package"],
                            github_plugin_unique_identifier=plugin.plugin_unique_identifier,
                        ),
                    )
                )
            elif plugin.source == PluginInstallationSource.Marketplace:
                result.append(
                    PluginDependency(
                        type=PluginDependency.Type.Marketplace,
                        value=PluginDependency.Marketplace(
                            marketplace_plugin_unique_identifier=plugin.plugin_unique_identifier
                        ),
                    )
                )
            elif plugin.source == PluginInstallationSource.Package:
                result.append(
                    PluginDependency(
                        type=PluginDependency.Type.Package,
                        value=PluginDependency.Package(plugin_unique_identifier=plugin.plugin_unique_identifier),
                    )
                )
            elif plugin.source == PluginInstallationSource.Remote:
                raise ValueError(
                    f"You used a remote plugin: {plugin.plugin_unique_identifier} in the app, please remove it first"
                    " if you want to export the DSL."
                )
            else:
                raise ValueError(f"Unknown plugin source: {plugin.source}")

        return result
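As a rough check of the id normalization this service performs, the analyze_* helpers collapse tool and model-provider ids to the owning plugin id; a sketch with illustrative ids:

# Sketch of the normalization helpers above; ids are illustrative.
from services.plugin.dependencies_analysis import DependenciesAnalysisService

# A bare provider name and a fully qualified one both resolve to the owning plugin id.
assert DependenciesAnalysisService.analyze_tool_dependency("google") == "langgenius/google"
assert (
    DependenciesAnalysisService.analyze_tool_dependency("langgenius/google/google")
    == "langgenius/google"
)
assert DependenciesAnalysisService.analyze_model_provider_dependency("openai") == "langgenius/openai"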
@@ -5,9 +5,9 @@ from pathlib import Path
 from configs import dify_config
 from core.helper.position_helper import is_filtered
 from core.model_runtime.utils.encoders import jsonable_encoder
+from core.plugin.entities.plugin import GenericProviderID
 from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
 from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
-from core.tools.entities.tool_entities import ToolProviderID
 from core.tools.errors import ToolNotFoundError, ToolProviderCredentialValidationError, ToolProviderNotFoundError
 from core.tools.tool_label_manager import ToolLabelManager
 from core.tools.tool_manager import ToolManager
@@ -234,7 +234,7 @@ class BuiltinToolManageService:
         # rewrite db_providers
         for db_provider in db_providers:
             try:
-                ToolProviderID(db_provider.provider)
+                GenericProviderID(db_provider.provider)
             except Exception:
                 db_provider.provider = f"langgenius/{db_provider.provider}/{db_provider.provider}"

@@ -287,7 +287,7 @@ class BuiltinToolManageService:
     def _fetch_builtin_provider(provider_name: str, tenant_id: str) -> BuiltinToolProvider | None:
         try:
             full_provider_name = provider_name
-            provider_id_entity = ToolProviderID(provider_name)
+            provider_id_entity = GenericProviderID(provider_name)
             provider_name = provider_id_entity.provider_name
             if provider_id_entity.organization != "langgenius":
                 return None
@@ -305,11 +305,7 @@ class BuiltinToolManageService:
             if provider_obj is None:
                 return None

-            try:
-                ToolProviderID(provider_obj.provider)
-            except Exception:
-                provider_obj.provider = f"langgenius/{provider_obj.provider}/{provider_obj.provider}"
-
+            provider_obj.provider = GenericProviderID(provider_obj.provider).to_string()
             return provider_obj
         except Exception:
             # it's an old provider without organization