Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-14 05:55:52 +08:00)
Commit 32633a2bad: Merge branch 'chore/update-edu-version-text' into deploy/dev
@@ -74,7 +74,7 @@ class CodeExecutionSandboxConfig(BaseSettings):
CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
description="URL endpoint for the code execution service",
- default="http://sandbox:8194",
+ default=HttpUrl("http://sandbox:8194"),
)

CODE_EXECUTION_API_KEY: str = Field(

@@ -145,7 +145,7 @@ class PluginConfig(BaseSettings):
PLUGIN_DAEMON_URL: HttpUrl = Field(
description="Plugin API URL",
- default="http://localhost:5002",
+ default=HttpUrl("http://localhost:5002"),
)

PLUGIN_DAEMON_KEY: str = Field(

@@ -188,7 +188,7 @@ class MarketplaceConfig(BaseSettings):
MARKETPLACE_API_URL: HttpUrl = Field(
description="Marketplace API URL",
- default="https://marketplace.dify.ai",
+ default=HttpUrl("https://marketplace.dify.ai"),
)

@@ -173,7 +173,7 @@ class DatabaseConfig(BaseSettings):
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
description="Number of processes for the retrieval service, default to CPU cores.",
- default=os.cpu_count(),
+ default=os.cpu_count() or 1,
)

@computed_field
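Reviewer note (not part of the diff): the config hunks above replace plain-string defaults with HttpUrl(...) so the default value carries the type the annotation promises and is validated once at definition time. A minimal sketch of the pattern, with a hypothetical settings class and field name:

from pydantic import Field, HttpUrl
from pydantic_settings import BaseSettings


class SandboxSettings(BaseSettings):
    # Typed default: constructed (and validated) here instead of relying on coercion.
    SANDBOX_ENDPOINT: HttpUrl = Field(
        description="URL endpoint for the code execution service",
        default=HttpUrl("http://sandbox:8194"),
    )


print(SandboxSettings().SANDBOX_ENDPOINT.host)  # -> "sandbox"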
@@ -209,6 +209,7 @@ class ExternalKnowledgeHitTestingApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("query", type=str, location="json")
parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
+ parser.add_argument("metadata_filtering_conditions", type=dict, required=False, location="json")
args = parser.parse_args()

HitTestingService.hit_testing_args_check(args)

@@ -219,6 +220,7 @@ class ExternalKnowledgeHitTestingApi(Resource):
query=args["query"],
account=current_user,
external_retrieval_model=args["external_retrieval_model"],
+ metadata_filtering_conditions=args["metadata_filtering_conditions"],
)

return response
@@ -91,6 +91,8 @@ class BaseAgentRunner(AppRunner):
return_resource=app_config.additional_features.show_retrieve_source,
invoke_from=application_generate_entity.invoke_from,
hit_callback=hit_callback,
+ user_id=user_id,
+ inputs=cast(dict, application_generate_entity.inputs),
)
# get how many agent thoughts have been created
self.agent_thought_count = (
@@ -69,13 +69,6 @@ class CotAgentRunner(BaseAgentRunner, ABC):
tool_instances, prompt_messages_tools = self._init_prompt_tools()
self._prompt_messages_tools = prompt_messages_tools

- # fix metadata filter not work
- if app_config.dataset is not None:
- metadata_filtering_conditions = app_config.dataset.retrieve_config.metadata_filtering_conditions
- for key, dataset_retriever_tool in tool_instances.items():
- if hasattr(dataset_retriever_tool, "retrieval_tool"):
- dataset_retriever_tool.retrieval_tool.metadata_filtering_conditions = metadata_filtering_conditions

function_call_state = True
llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
final_answer = ""

@@ -87,6 +80,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
llm_usage = final_llm_usage_dict["usage"]
llm_usage.prompt_tokens += usage.prompt_tokens
llm_usage.completion_tokens += usage.completion_tokens
llm_usage.total_tokens += usage.total_tokens
llm_usage.prompt_price += usage.prompt_price
llm_usage.completion_price += usage.completion_price
llm_usage.total_price += usage.total_price
@@ -45,13 +45,6 @@ class FunctionCallAgentRunner(BaseAgentRunner):
# convert tools into ModelRuntime Tool format
tool_instances, prompt_messages_tools = self._init_prompt_tools()

- # fix metadata filter not work
- if app_config.dataset is not None:
- metadata_filtering_conditions = app_config.dataset.retrieve_config.metadata_filtering_conditions
- for key, dataset_retriever_tool in tool_instances.items():
- if hasattr(dataset_retriever_tool, "retrieval_tool"):
- dataset_retriever_tool.retrieval_tool.metadata_filtering_conditions = metadata_filtering_conditions

assert app_config.agent

iteration_step = 1

@@ -72,6 +65,7 @@ class FunctionCallAgentRunner(BaseAgentRunner):
llm_usage = final_llm_usage_dict["usage"]
llm_usage.prompt_tokens += usage.prompt_tokens
llm_usage.completion_tokens += usage.completion_tokens
llm_usage.total_tokens += usage.total_tokens
llm_usage.prompt_price += usage.prompt_price
llm_usage.completion_price += usage.completion_price
llm_usage.total_price += usage.total_price
@@ -24,7 +24,7 @@ class EndpointProviderDeclaration(BaseModel):
"""

settings: list[ProviderConfig] = Field(default_factory=list)
- endpoints: Optional[list[EndpointDeclaration]] = Field(default_factory=list)
+ endpoints: Optional[list[EndpointDeclaration]] = Field(default_factory=list[EndpointDeclaration])


class EndpointEntity(BasePluginEntity):
@@ -52,7 +52,7 @@ class PluginResourceRequirements(BaseModel):
model: Optional[Model] = Field(default=None)
node: Optional[Node] = Field(default=None)
endpoint: Optional[Endpoint] = Field(default=None)
- storage: Storage = Field(default=None)
+ storage: Optional[Storage] = Field(default=None)

permission: Optional[Permission] = Field(default=None)
@@ -66,9 +66,9 @@ class PluginCategory(enum.StrEnum):

class PluginDeclaration(BaseModel):
class Plugins(BaseModel):
- tools: Optional[list[str]] = Field(default_factory=list)
- models: Optional[list[str]] = Field(default_factory=list)
- endpoints: Optional[list[str]] = Field(default_factory=list)
+ tools: Optional[list[str]] = Field(default_factory=list[str])
+ models: Optional[list[str]] = Field(default_factory=list[str])
+ endpoints: Optional[list[str]] = Field(default_factory=list[str])

class Meta(BaseModel):
minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$")

@@ -84,6 +84,7 @@ class PluginDeclaration(BaseModel):
resource: PluginResourceRequirements
plugins: Plugins
tags: list[str] = Field(default_factory=list)
repo: Optional[str] = Field(default=None)
verified: bool = Field(default=False)
tool: Optional[ToolProviderEntity] = None
model: Optional[ProviderEntity] = None
@@ -55,8 +55,8 @@ class RequestInvokeLLM(BaseRequestInvokeModel):
mode: str
completion_params: dict[str, Any] = Field(default_factory=dict)
prompt_messages: list[PromptMessage] = Field(default_factory=list)
- tools: Optional[list[PromptMessageTool]] = Field(default_factory=list)
- stop: Optional[list[str]] = Field(default_factory=list)
+ tools: Optional[list[PromptMessageTool]] = Field(default_factory=list[PromptMessageTool])
+ stop: Optional[list[str]] = Field(default_factory=list[str])
stream: Optional[bool] = False

model_config = ConfigDict(protected_namespaces=())
@@ -10,6 +10,7 @@ from core.rag.data_post_processor.data_post_processor import DataPostProcessor
from core.rag.datasource.keyword.keyword_factory import Keyword
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.embedding.retrieval import RetrievalSegments
+ from core.rag.entities.metadata_entities import MetadataCondition
from core.rag.index_processor.constant.index_type import IndexType
from core.rag.models.document import Document
from core.rag.rerank.rerank_type import RerankMode

@@ -119,12 +120,25 @@ class RetrievalService:
return all_documents

@classmethod
- def external_retrieve(cls, dataset_id: str, query: str, external_retrieval_model: Optional[dict] = None):
+ def external_retrieve(
+ cls,
+ dataset_id: str,
+ query: str,
+ external_retrieval_model: Optional[dict] = None,
+ metadata_filtering_conditions: Optional[dict] = None,
+ ):
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if not dataset:
return []
+ metadata_condition = (
+ MetadataCondition(**metadata_filtering_conditions) if metadata_filtering_conditions else None
+ )
all_documents = ExternalDatasetService.fetch_external_knowledge_retrieval(
- dataset.tenant_id, dataset_id, query, external_retrieval_model or {}
+ dataset.tenant_id,
+ dataset_id,
+ query,
+ external_retrieval_model or {},
+ metadata_condition=metadata_condition,
)
return all_documents
@@ -76,8 +76,7 @@ class WordExtractor(BaseExtractor):
parsed = urlparse(url)
return bool(parsed.netloc) and bool(parsed.scheme)

- def _extract_images_from_docx(self, doc, image_folder):
-     os.makedirs(image_folder, exist_ok=True)
+ def _extract_images_from_docx(self, doc):
image_count = 0
image_map = {}

@@ -210,7 +209,7 @@ class WordExtractor(BaseExtractor):
content = []

- image_map = self._extract_images_from_docx(doc, image_folder)
+ image_map = self._extract_images_from_docx(doc)

hyperlinks_url = None
url_pattern = re.compile(r"http://[^\s+]+//|https://[^\s+]+")

@@ -225,7 +224,7 @@ class WordExtractor(BaseExtractor):
xml = ElementTree.XML(run.element.xml)
x_child = [c for c in xml.iter() if c is not None]
for x in x_child:
- if x_child is None:
+ if x is None:
continue
if x.tag.endswith("instrText"):
if x.text is None:
@@ -149,7 +149,7 @@ class DatasetRetrieval:
else:
inputs = {}
available_datasets_ids = [dataset.id for dataset in available_datasets]
- metadata_filter_document_ids, metadata_condition = self._get_metadata_filter_condition(
+ metadata_filter_document_ids, metadata_condition = self.get_metadata_filter_condition(
available_datasets_ids,
query,
tenant_id,

@@ -649,6 +649,8 @@ class DatasetRetrieval:
return_resource: bool,
invoke_from: InvokeFrom,
hit_callback: DatasetIndexToolCallbackHandler,
+ user_id: str,
+ inputs: dict,
) -> Optional[list[DatasetRetrieverBaseTool]]:
"""
A dataset tool is a tool that can be used to retrieve information from a dataset

@@ -706,6 +708,9 @@ class DatasetRetrieval:
hit_callbacks=[hit_callback],
return_resource=return_resource,
retriever_from=invoke_from.to_source(),
+ retrieve_config=retrieve_config,
+ user_id=user_id,
+ inputs=inputs,
)

tools.append(tool)

@@ -826,7 +831,7 @@ class DatasetRetrieval:
)
return filter_documents[:top_k] if top_k else filter_documents

- def _get_metadata_filter_condition(
+ def get_metadata_filter_condition(
self,
dataset_ids: list,
query: str,

@@ -876,20 +881,31 @@ class DatasetRetrieval:
)
elif metadata_filtering_mode == "manual":
if metadata_filtering_conditions:
- metadata_condition = MetadataCondition(**metadata_filtering_conditions.model_dump())
+ conditions = []
for sequence, condition in enumerate(metadata_filtering_conditions.conditions):  # type: ignore
metadata_name = condition.name
expected_value = condition.value
- if expected_value is not None or condition.comparison_operator in ("empty", "not empty"):
+ if expected_value is not None and condition.comparison_operator not in ("empty", "not empty"):
if isinstance(expected_value, str):
expected_value = self._replace_metadata_filter_value(expected_value, inputs)
- filters = self._process_metadata_filter_func(
- sequence,
- condition.comparison_operator,
- metadata_name,
- expected_value,
- filters,
+ conditions.append(
+ Condition(
+ name=metadata_name,
+ comparison_operator=condition.comparison_operator,
+ value=expected_value,
+ )
+ )
+ filters = self._process_metadata_filter_func(
+ sequence,
+ condition.comparison_operator,
+ metadata_name,
+ expected_value,
+ filters,
+ )
+ metadata_condition = MetadataCondition(
+ logical_operator=metadata_filtering_conditions.logical_operator,
+ conditions=conditions,
+ )
else:
raise ValueError("Invalid metadata filtering mode")
if filters:
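Reviewer note (not part of the diff): in manual filtering mode the rewritten loop now always records a Condition and only substitutes the value when the operator actually needs one, then wraps everything in a MetadataCondition. A rough standalone sketch of that shape, using stand-in dataclasses and a hypothetical substitution step:

from dataclasses import dataclass, field
from typing import Any, Optional


@dataclass
class Condition:  # stand-in for core.rag.entities.metadata_entities.Condition
    name: str
    comparison_operator: str
    value: Optional[Any] = None


@dataclass
class MetadataCondition:  # stand-in for the real pydantic model
    logical_operator: str = "and"
    conditions: list[Condition] = field(default_factory=list)


def build_manual_condition(raw_conditions: list[dict], inputs: dict) -> MetadataCondition:
    conditions = []
    for cond in raw_conditions:
        value = cond.get("value")
        # Only substitute template variables when the operator actually takes a value.
        if value is not None and cond["comparison_operator"] not in ("empty", "not empty"):
            if isinstance(value, str):
                value = value.format(**inputs)  # hypothetical substitution helper
        conditions.append(Condition(cond["name"], cond["comparison_operator"], value))
    return MetadataCondition(logical_operator="and", conditions=conditions)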
@@ -1,11 +1,12 @@
- from typing import Any
+ from typing import Any, Optional, cast

from pydantic import BaseModel, Field

from core.app.app_config.entities import DatasetRetrieveConfigEntity, ModelConfig
from core.rag.datasource.retrieval_service import RetrievalService
from core.rag.entities.context_entities import DocumentContext
from core.rag.entities.metadata_entities import MetadataCondition
from core.rag.models.document import Document as RetrievalDocument
from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool
from extensions.ext_database import db

@@ -34,7 +35,9 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
args_schema: type[BaseModel] = DatasetRetrieverToolInput
description: str = "use this to retrieve a dataset. "
dataset_id: str
- metadata_filtering_conditions: MetadataCondition
+ user_id: Optional[str] = None
+ retrieve_config: DatasetRetrieveConfigEntity
+ inputs: dict

@classmethod
def from_dataset(cls, dataset: Dataset, **kwargs):

@@ -48,7 +51,6 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
tenant_id=dataset.tenant_id,
dataset_id=dataset.id,
description=description,
- metadata_filtering_conditions=MetadataCondition(),
**kwargs,
)

@@ -61,6 +63,21 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
return ""
for hit_callback in self.hit_callbacks:
hit_callback.on_query(query, dataset.id)
+ dataset_retrieval = DatasetRetrieval()
+ metadata_filter_document_ids, metadata_condition = dataset_retrieval.get_metadata_filter_condition(
+ [dataset.id],
+ query,
+ self.tenant_id,
+ self.user_id or "unknown",
+ cast(str, self.retrieve_config.metadata_filtering_mode),
+ cast(ModelConfig, self.retrieve_config.metadata_model_config),
+ self.retrieve_config.metadata_filtering_conditions,
+ self.inputs,
+ )
+ if metadata_filter_document_ids:
+ document_ids_filter = metadata_filter_document_ids.get(dataset.id, [])
+ else:
+ document_ids_filter = None
if dataset.provider == "external":
results = []
external_documents = ExternalDatasetService.fetch_external_knowledge_retrieval(

@@ -68,7 +85,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
dataset_id=dataset.id,
query=query,
external_retrieval_parameters=dataset.retrieval_model,
- metadata_condition=self.metadata_filtering_conditions,
+ metadata_condition=metadata_condition,
)
for external_document in external_documents:
document = RetrievalDocument(

@@ -104,12 +121,18 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):

return str("\n".join([item.page_content for item in results]))
else:
+ if metadata_condition and not document_ids_filter:
+ return ""
# get retrieval model , if the model is not setting , using default
retrieval_model: dict[str, Any] = dataset.retrieval_model or default_retrieval_model
if dataset.indexing_technique == "economy":
# use keyword table query
documents = RetrievalService.retrieve(
- retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=self.top_k
+ retrieval_method="keyword_search",
+ dataset_id=dataset.id,
+ query=query,
+ top_k=self.top_k,
+ document_ids_filter=document_ids_filter,
)
return str("\n".join([document.page_content for document in documents]))
else:

@@ -128,6 +151,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
else None,
reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model",
weights=retrieval_model.get("weights"),
+ document_ids_filter=document_ids_filter,
)
else:
documents = []
@@ -34,6 +34,8 @@ class DatasetRetrieverTool(Tool):
return_resource: bool,
invoke_from: InvokeFrom,
hit_callback: DatasetIndexToolCallbackHandler,
+ user_id: str,
+ inputs: dict,
) -> list["DatasetRetrieverTool"]:
"""
get dataset tool

@@ -57,6 +59,8 @@ class DatasetRetrieverTool(Tool):
return_resource=return_resource,
invoke_from=invoke_from,
hit_callback=hit_callback,
+ user_id=user_id,
+ inputs=inputs,
)
if retrieval_tools is None or len(retrieval_tools) == 0:
return []
@@ -30,7 +30,7 @@ class Variable(Segment):
"""

id: str = Field(
- default=lambda _: str(uuid4()),
+ default_factory=lambda: str(uuid4()),
description="Unique identity for variable.",
)
name: str
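Reviewer note (not part of the diff): Field(default=lambda _: ...) stores the lambda object itself as the default value, so every instance would share a callable instead of getting an id; default_factory is the hook pydantic calls per instance. A tiny sketch:

from uuid import uuid4

from pydantic import BaseModel, Field


class Variable(BaseModel):
    # Called once per instance, so every Variable gets a fresh id string.
    id: str = Field(default_factory=lambda: str(uuid4()))
    name: str


a, b = Variable(name="a"), Variable(name="b")
assert a.id != b.id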
@@ -36,7 +36,7 @@ class Graph(BaseModel):
root_node_id: str = Field(..., description="root node id of the graph")
node_ids: list[str] = Field(default_factory=list, description="graph node ids")
node_id_config_mapping: dict[str, dict] = Field(
- default_factory=list, description="node configs mapping (node id: node config)"
+ default_factory=dict, description="node configs mapping (node id: node config)"
)
edge_mapping: dict[str, list[GraphEdge]] = Field(
default_factory=dict, description="graph edge mapping (source node id: edges)"
@@ -289,7 +289,7 @@ class KnowledgeRetrievalNode(LLMNode):
"dataset_name": dataset.name,
"document_id": document.id,
"document_name": document.name,
- "document_data_source_type": document.data_source_type,
+ "data_source_type": document.data_source_type,
"segment_id": segment.id,
"retriever_from": "workflow",
"score": record.score or 0.0,

@@ -356,12 +356,12 @@ class KnowledgeRetrievalNode(LLMNode):
)
elif node_data.metadata_filtering_mode == "manual":
- if node_data.metadata_filtering_conditions:
- metadata_condition = MetadataCondition(**node_data.metadata_filtering_conditions.model_dump())
+ conditions = []
+ if node_data.metadata_filtering_conditions:
for sequence, condition in enumerate(node_data.metadata_filtering_conditions.conditions):  # type: ignore
metadata_name = condition.name
expected_value = condition.value
- if expected_value is not None or condition.comparison_operator in ("empty", "not empty"):
+ if expected_value is not None and condition.comparison_operator not in ("empty", "not empty"):
if isinstance(expected_value, str):
expected_value = self.graph_runtime_state.variable_pool.convert_template(
expected_value

@@ -372,13 +372,24 @@ class KnowledgeRetrievalNode(LLMNode):
expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip()  # type: ignore
else:
raise ValueError("Invalid expected metadata value type")
- filters = self._process_metadata_filter_func(
- sequence,
- condition.comparison_operator,
- metadata_name,
- expected_value,
- filters,
+ conditions.append(
+ Condition(
+ name=metadata_name,
+ comparison_operator=condition.comparison_operator,
+ value=expected_value,
+ )
+ )
+ filters = self._process_metadata_filter_func(
+ sequence,
+ condition.comparison_operator,
+ metadata_name,
+ expected_value,
+ filters,
+ )
+ metadata_condition = MetadataCondition(
+ logical_operator=node_data.metadata_filtering_conditions.logical_operator,
+ conditions=conditions,
+ )
else:
raise ValueError("Invalid metadata filtering mode")
if filters:
@@ -506,7 +506,7 @@ class LLMNode(BaseNode[LLMNodeData]):
"dataset_name": metadata.get("dataset_name"),
"document_id": metadata.get("document_id"),
"document_name": metadata.get("document_name"),
- "data_source_type": metadata.get("document_data_source_type"),
+ "data_source_type": metadata.get("data_source_type"),
"segment_id": metadata.get("segment_id"),
"retriever_from": metadata.get("retriever_from"),
"score": metadata.get("score"),
@@ -26,7 +26,7 @@ class LoopNodeData(BaseLoopNodeData):
loop_count: int  # Maximum number of loops
break_conditions: list[Condition]  # Conditions to break the loop
logical_operator: Literal["and", "or"]
- loop_variables: Optional[list[LoopVariableData]] = Field(default_factory=list)
+ loop_variables: Optional[list[LoopVariableData]] = Field(default_factory=list[LoopVariableData])
outputs: Optional[Mapping[str, Any]] = None
@@ -39,7 +39,7 @@ class SubCondition(BaseModel):

class SubVariableCondition(BaseModel):
logical_operator: Literal["and", "or"]
- conditions: list[SubCondition] = Field(default=list)
+ conditions: list[SubCondition] = Field(default_factory=list)


class Condition(BaseModel):
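Reviewer note (not part of the diff): Field(default=list) makes the default the list type object itself, not an empty list, and pydantic v2 does not validate defaults unless asked to; default_factory=list builds a fresh empty list per instance. A short sketch with hypothetical class names:

from pydantic import BaseModel, Field


class Fixed(BaseModel):
    conditions: list[str] = Field(default_factory=list)  # fresh [] per instance


class Broken(BaseModel):
    conditions: list[str] = Field(default=list)  # stores the `list` type object itself


print(Fixed().conditions)   # []
print(Broken().conditions)  # <class 'list'> -- defaults are not validated by default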
@@ -6,6 +6,7 @@ import socket
import sys
from typing import Union

+ import flask
from celery.signals import worker_init  # type: ignore
from flask_login import user_loaded_from_request, user_logged_in  # type: ignore

@@ -27,6 +28,8 @@ def on_user_loaded(_sender, user):


def init_app(app: DifyApp):
+ from opentelemetry.semconv.trace import SpanAttributes

def is_celery_worker():
return "celery" in sys.argv[0].lower()

@@ -37,7 +40,9 @@ def init_app(app: DifyApp):
def init_flask_instrumentor(app: DifyApp):
meter = get_meter("http_metrics", version=dify_config.CURRENT_VERSION)
_http_response_counter = meter.create_counter(
- "http.server.response.count", description="Total number of HTTP responses by status code", unit="{response}"
+ "http.server.response.count",
+ description="Total number of HTTP responses by status code, method and target",
+ unit="{response}",
)

def response_hook(span: Span, status: str, response_headers: list):

@@ -50,7 +55,13 @@ def init_app(app: DifyApp):
status = status.split(" ")[0]
status_code = int(status)
status_class = f"{status_code // 100}xx"
- _http_response_counter.add(1, {"status_code": status_code, "status_class": status_class})
+ attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
+ request = flask.request
+ if request and request.url_rule:
+ attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
+ if request and request.method:
+ attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
+ _http_response_counter.add(1, attributes)

instrumentor = FlaskInstrumentor()
if dify_config.DEBUG:
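Reviewer note (not part of the diff): the response hook now attaches the HTTP method and the matched route to each counter data point, so the metric can be broken down per endpoint. A minimal sketch with the OpenTelemetry metrics API only (no Flask; attribute keys written out literally):

from opentelemetry import metrics

meter = metrics.get_meter("http_metrics")
response_counter = meter.create_counter(
    "http.server.response.count",
    description="Total number of HTTP responses by status code, method and target",
    unit="{response}",
)

# One data point per response; the attributes become metric dimensions.
response_counter.add(
    1,
    {
        "status_code": 200,
        "status_class": "2xx",
        "http.method": "GET",        # SpanAttributes.HTTP_METHOD in the real hook
        "http.target": "/api/ping",  # SpanAttributes.HTTP_TARGET in the real hook
    },
)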
@@ -1,5 +1,6 @@
import enum
import json
+ from typing import cast

from flask_login import UserMixin  # type: ignore
from sqlalchemy import func

@@ -46,7 +47,6 @@ class Account(UserMixin, Base):

@property
def current_tenant(self):
- # FIXME: fix the type error later, because the type is important maybe cause some bugs
return self._current_tenant  # type: ignore

@current_tenant.setter

@@ -64,25 +64,23 @@ class Account(UserMixin, Base):
def current_tenant_id(self) -> str | None:
return self._current_tenant.id if self._current_tenant else None

- @current_tenant_id.setter
- def current_tenant_id(self, value: str):
- try:
- tenant_account_join = (
+ def set_tenant_id(self, tenant_id: str):
+ tenant_account_join = cast(
+ tuple[Tenant, TenantAccountJoin],
+ (
db.session.query(Tenant, TenantAccountJoin)
- .filter(Tenant.id == value)
+ .filter(Tenant.id == tenant_id)
.filter(TenantAccountJoin.tenant_id == Tenant.id)
.filter(TenantAccountJoin.account_id == self.id)
.one_or_none()
- )
+ ),
+ )

- if tenant_account_join:
- tenant, ta = tenant_account_join
- tenant.current_role = ta.role
- else:
- tenant = None
- except Exception:
- tenant = None
+ if not tenant_account_join:
+ return
+
+ tenant, join = tenant_account_join
+ tenant.current_role = join.role
self._current_tenant = tenant

@property

@@ -191,7 +189,7 @@ class TenantAccountRole(enum.StrEnum):
}


- class Tenant(db.Model):  # type: ignore[name-defined]
+ class Tenant(Base):
__tablename__ = "tenants"
__table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),)

@@ -220,7 +218,7 @@ class Tenant(db.Model):  # type: ignore[name-defined]
self.custom_config = json.dumps(value)


- class TenantAccountJoin(db.Model):  # type: ignore[name-defined]
+ class TenantAccountJoin(Base):
__tablename__ = "tenant_account_joins"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="tenant_account_join_pkey"),

@@ -239,7 +237,7 @@ class TenantAccountJoin(db.Model):  # type: ignore[name-defined]
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class AccountIntegrate(db.Model):  # type: ignore[name-defined]
+ class AccountIntegrate(Base):
__tablename__ = "account_integrates"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="account_integrate_pkey"),

@@ -256,7 +254,7 @@ class AccountIntegrate(db.Model):  # type: ignore[name-defined]
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class InvitationCode(db.Model):  # type: ignore[name-defined]
+ class InvitationCode(Base):
__tablename__ = "invitation_codes"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="invitation_code_pkey"),
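Reviewer note (not part of the diff): the model classes above move from the legacy db.Model base (which needed "# type: ignore[name-defined]") onto the project's typed declarative Base, and the current_tenant_id setter becomes an explicit set_tenant_id() method. A schematic sketch of the declarative-base side only, with placeholder table and column names:

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    """Typed declarative base, comparable in spirit to models/base.py in the repo."""


class Tenant(Base):
    __tablename__ = "tenants"

    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)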
@@ -2,6 +2,7 @@ import enum

from sqlalchemy import func

+ from .base import Base
from .engine import db
from .types import StringUUID

@@ -13,7 +14,7 @@ class APIBasedExtensionPoint(enum.Enum):
APP_MODERATION_OUTPUT = "app.moderation.output"


- class APIBasedExtension(db.Model):  # type: ignore[name-defined]
+ class APIBasedExtension(Base):
__tablename__ = "api_based_extensions"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="api_based_extension_pkey"),
@@ -22,6 +22,7 @@ from extensions.ext_storage import storage
from services.entities.knowledge_entities.knowledge_entities import ParentMode, Rule

from .account import Account
+ from .base import Base
from .engine import db
from .model import App, Tag, TagBinding, UploadFile
from .types import StringUUID

@@ -33,7 +34,7 @@ class DatasetPermissionEnum(enum.StrEnum):
PARTIAL_TEAM = "partial_members"


- class Dataset(db.Model):  # type: ignore[name-defined]
+ class Dataset(Base):
__tablename__ = "datasets"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_pkey"),

@@ -255,7 +256,7 @@ class Dataset(db.Model):  # type: ignore[name-defined]
return f"Vector_index_{normalized_dataset_id}_Node"


- class DatasetProcessRule(db.Model):  # type: ignore[name-defined]
+ class DatasetProcessRule(Base):
__tablename__ = "dataset_process_rules"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_process_rule_pkey"),

@@ -295,7 +296,7 @@ class DatasetProcessRule(db.Model):  # type: ignore[name-defined]
return None


- class Document(db.Model):  # type: ignore[name-defined]
+ class Document(Base):
__tablename__ = "documents"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="document_pkey"),

@@ -635,7 +636,7 @@ class Document(db.Model):  # type: ignore[name-defined]
)


- class DocumentSegment(db.Model):  # type: ignore[name-defined]
+ class DocumentSegment(Base):
__tablename__ = "document_segments"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="document_segment_pkey"),

@@ -786,7 +787,7 @@ class DocumentSegment(db.Model):  # type: ignore[name-defined]
return text


- class ChildChunk(db.Model):  # type: ignore[name-defined]
+ class ChildChunk(Base):
__tablename__ = "child_chunks"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="child_chunk_pkey"),

@@ -829,7 +830,7 @@ class ChildChunk(db.Model):  # type: ignore[name-defined]
return db.session.query(DocumentSegment).filter(DocumentSegment.id == self.segment_id).first()


- class AppDatasetJoin(db.Model):  # type: ignore[name-defined]
+ class AppDatasetJoin(Base):
__tablename__ = "app_dataset_joins"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="app_dataset_join_pkey"),

@@ -846,7 +847,7 @@ class AppDatasetJoin(db.Model):  # type: ignore[name-defined]
return db.session.get(App, self.app_id)


- class DatasetQuery(db.Model):  # type: ignore[name-defined]
+ class DatasetQuery(Base):
__tablename__ = "dataset_queries"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_query_pkey"),

@@ -863,7 +864,7 @@ class DatasetQuery(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())


- class DatasetKeywordTable(db.Model):  # type: ignore[name-defined]
+ class DatasetKeywordTable(Base):
__tablename__ = "dataset_keyword_tables"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_keyword_table_pkey"),

@@ -908,7 +909,7 @@ class DatasetKeywordTable(db.Model):  # type: ignore[name-defined]
return None


- class Embedding(db.Model):  # type: ignore[name-defined]
+ class Embedding(Base):
__tablename__ = "embeddings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="embedding_pkey"),

@@ -932,7 +933,7 @@ class Embedding(db.Model):  # type: ignore[name-defined]
return cast(list[float], pickle.loads(self.embedding))  # noqa: S301


- class DatasetCollectionBinding(db.Model):  # type: ignore[name-defined]
+ class DatasetCollectionBinding(Base):
__tablename__ = "dataset_collection_bindings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_collection_bindings_pkey"),

@@ -947,7 +948,7 @@ class DatasetCollectionBinding(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class TidbAuthBinding(db.Model):  # type: ignore[name-defined]
+ class TidbAuthBinding(Base):
__tablename__ = "tidb_auth_bindings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="tidb_auth_bindings_pkey"),

@@ -967,7 +968,7 @@ class TidbAuthBinding(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class Whitelist(db.Model):  # type: ignore[name-defined]
+ class Whitelist(Base):
__tablename__ = "whitelists"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="whitelists_pkey"),

@@ -979,7 +980,7 @@ class Whitelist(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class DatasetPermission(db.Model):  # type: ignore[name-defined]
+ class DatasetPermission(Base):
__tablename__ = "dataset_permissions"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_permission_pkey"),

@@ -996,7 +997,7 @@ class DatasetPermission(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class ExternalKnowledgeApis(db.Model):  # type: ignore[name-defined]
+ class ExternalKnowledgeApis(Base):
__tablename__ = "external_knowledge_apis"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="external_knowledge_apis_pkey"),

@@ -1049,7 +1050,7 @@ class ExternalKnowledgeApis(db.Model):  # type: ignore[name-defined]
return dataset_bindings


- class ExternalKnowledgeBindings(db.Model):  # type: ignore[name-defined]
+ class ExternalKnowledgeBindings(Base):
__tablename__ = "external_knowledge_bindings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="external_knowledge_bindings_pkey"),

@@ -1070,7 +1071,7 @@ class ExternalKnowledgeBindings(db.Model):  # type: ignore[name-defined]
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class DatasetAutoDisableLog(db.Model):  # type: ignore[name-defined]
+ class DatasetAutoDisableLog(Base):
__tablename__ = "dataset_auto_disable_logs"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_auto_disable_log_pkey"),

@@ -1087,7 +1088,7 @@ class DatasetAutoDisableLog(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))


- class RateLimitLog(db.Model):  # type: ignore[name-defined]
+ class RateLimitLog(Base):
__tablename__ = "rate_limit_logs"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="rate_limit_log_pkey"),

@@ -1102,7 +1103,7 @@ class RateLimitLog(db.Model):  # type: ignore[name-defined]
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))


- class DatasetMetadata(db.Model):  # type: ignore[name-defined]
+ class DatasetMetadata(Base):
__tablename__ = "dataset_metadatas"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_metadata_pkey"),

@@ -1121,7 +1122,7 @@ class DatasetMetadata(db.Model):  # type: ignore[name-defined]
updated_by = db.Column(StringUUID, nullable=True)


- class DatasetMetadataBinding(db.Model):  # type: ignore[name-defined]
+ class DatasetMetadataBinding(Base):
__tablename__ = "dataset_metadata_bindings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="dataset_metadata_binding_pkey"),
@@ -16,7 +16,7 @@ if TYPE_CHECKING:

import sqlalchemy as sa
from flask import request
- from flask_login import UserMixin  # type: ignore
+ from flask_login import UserMixin
from sqlalchemy import Float, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.orm import Mapped, Session, mapped_column

@@ -25,13 +25,13 @@ from constants import DEFAULT_FILE_NUMBER_LIMITS
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
from core.file import helpers as file_helpers
from libs.helper import generate_string
- from models.base import Base
- from models.enums import CreatedByRole
- from models.workflow import WorkflowRunStatus

from .account import Account, Tenant
+ from .base import Base
from .engine import db
+ from .enums import CreatedByRole
from .types import StringUUID
+ from .workflow import WorkflowRunStatus

if TYPE_CHECKING:
from .workflow import Workflow

@@ -602,7 +602,7 @@ class InstalledApp(Base):
return tenant


- class Conversation(db.Model):  # type: ignore[name-defined]
+ class Conversation(Base):
__tablename__ = "conversations"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="conversation_pkey"),

@@ -794,7 +794,7 @@ class Conversation(db.Model):  # type: ignore[name-defined]

for message in messages:
if message.workflow_run:
- status_counts[message.workflow_run.status] += 1
+ status_counts[WorkflowRunStatus(message.workflow_run.status)] += 1

return (
{

@@ -864,7 +864,7 @@ class Conversation(db.Model):  # type: ignore[name-defined]
}


- class Message(db.Model):  # type: ignore[name-defined]
+ class Message(Base):
__tablename__ = "messages"
__table_args__ = (
PrimaryKeyConstraint("id", name="message_pkey"),

@@ -1211,7 +1211,7 @@ class Message(db.Model):  # type: ignore[name-defined]
)


- class MessageFeedback(db.Model):  # type: ignore[name-defined]
+ class MessageFeedback(Base):
__tablename__ = "message_feedbacks"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="message_feedback_pkey"),

@@ -1238,7 +1238,7 @@ class MessageFeedback(db.Model):  # type: ignore[name-defined]
return account


- class MessageFile(db.Model):  # type: ignore[name-defined]
+ class MessageFile(Base):
__tablename__ = "message_files"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="message_file_pkey"),

@@ -1279,7 +1279,7 @@ class MessageFile(db.Model):  # type: ignore[name-defined]
created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


- class MessageAnnotation(db.Model):  # type: ignore[name-defined]
+ class MessageAnnotation(Base):
__tablename__ = "message_annotations"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="message_annotation_pkey"),

@@ -1310,7 +1310,7 @@ class MessageAnnotation(db.Model):  # type: ignore[name-defined]
return account


- class AppAnnotationHitHistory(db.Model):  # type: ignore[name-defined]
+ class AppAnnotationHitHistory(Base):
__tablename__ = "app_annotation_hit_histories"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="app_annotation_hit_histories_pkey"),

@@ -1322,7 +1322,7 @@ class AppAnnotationHitHistory(db.Model):  # type: ignore[name-defined]

id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
- annotation_id = db.Column(StringUUID, nullable=False)
+ annotation_id: Mapped[str] = db.Column(StringUUID, nullable=False)
source = db.Column(db.Text, nullable=False)
question = db.Column(db.Text, nullable=False)
account_id = db.Column(StringUUID, nullable=False)

@@ -1348,7 +1348,7 @@ class AppAnnotationHitHistory(db.Model):  # type: ignore[name-defined]
return account


- class AppAnnotationSetting(db.Model):  # type: ignore[name-defined]
+ class AppAnnotationSetting(Base):
__tablename__ = "app_annotation_settings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="app_annotation_settings_pkey"),

@@ -1364,26 +1364,6 @@ class AppAnnotationSetting(db.Model):  # type: ignore[name-defined]
updated_user_id = db.Column(StringUUID, nullable=False)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())

- @property
- def created_account(self):
- account = (
- db.session.query(Account)
- .join(AppAnnotationSetting, AppAnnotationSetting.created_user_id == Account.id)
- .filter(AppAnnotationSetting.id == self.annotation_id)
- .first()
- )
- return account
-
- @property
- def updated_account(self):
- account = (
- db.session.query(Account)
- .join(AppAnnotationSetting, AppAnnotationSetting.updated_user_id == Account.id)
- .filter(AppAnnotationSetting.id == self.annotation_id)
- .first()
- )
- return account
-
@property
def collection_binding_detail(self):
from .dataset import DatasetCollectionBinding
@@ -2,8 +2,7 @@ from enum import Enum

from sqlalchemy import func

- from models.base import Base
-
+ from .base import Base
from .engine import db
from .types import StringUUID
@@ -9,7 +9,7 @@ from .engine import db
from .types import StringUUID


- class DataSourceOauthBinding(db.Model):  # type: ignore[name-defined]
+ class DataSourceOauthBinding(Base):
__tablename__ = "data_source_oauth_bindings"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="source_binding_pkey"),
@@ -9,7 +9,7 @@ if TYPE_CHECKING:
from models.model import AppMode

import sqlalchemy as sa
- from sqlalchemy import Index, PrimaryKeyConstraint, func
+ from sqlalchemy import func
from sqlalchemy.orm import Mapped, mapped_column

import contexts

@@ -18,11 +18,11 @@ from core.helper import encrypter
from core.variables import SecretVariable, Variable
from factories import variable_factory
from libs import helper
- from models.base import Base
- from models.enums import CreatedByRole

from .account import Account
+ from .base import Base
from .engine import db
+ from .enums import CreatedByRole
from .types import StringUUID

if TYPE_CHECKING:

@@ -736,8 +736,7 @@ class WorkflowAppLog(Base):
__tablename__ = "workflow_app_logs"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="workflow_app_log_pkey"),
- db.Index("workflow_app_log_app_idx", "tenant_id", "app_id", "created_at"),
- db.Index("workflow_app_log_workflow_run_idx", "workflow_run_id"),
+ db.Index("workflow_app_log_app_idx", "tenant_id", "app_id"),
)

id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))

@@ -769,17 +768,12 @@ class WorkflowAppLog(Base):

class ConversationVariable(Base):
__tablename__ = "workflow_conversation_variables"
- __table_args__ = (
- PrimaryKeyConstraint("id", "conversation_id", name="workflow_conversation_variables_pkey"),
- Index("workflow__conversation_variables_app_id_idx", "app_id"),
- Index("workflow__conversation_variables_created_at_idx", "created_at"),
- )

id: Mapped[str] = mapped_column(StringUUID, primary_key=True)
conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True)
- app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
+ app_id: Mapped[str] = mapped_column(StringUUID, nullable=False, index=True)
data = mapped_column(db.Text, nullable=False)
- created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+ created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True)
updated_at = mapped_column(
db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
)
@@ -63,11 +63,11 @@ dependencies = [
"psycogreen~=1.0.2",
"psycopg2-binary~=2.9.6",
"pycryptodome==3.19.1",
- "pydantic~=2.9.2",
- "pydantic-extra-types~=2.9.0",
- "pydantic-settings~=2.6.0",
+ "pydantic~=2.11.4",
+ "pydantic-extra-types~=2.10.3",
+ "pydantic-settings~=2.9.1",
"pyjwt~=2.8.0",
- "pypdfium2~=4.30.0",
+ "pypdfium2==4.30.0",
"python-docx~=1.1.0",
"python-dotenv==1.0.1",
"pyyaml~=6.0.1",
@@ -108,17 +108,20 @@ class AccountService:
if account.status == AccountStatus.BANNED.value:
raise Unauthorized("Account is banned.")

- current_tenant = TenantAccountJoin.query.filter_by(account_id=account.id, current=True).first()
+ current_tenant = db.session.query(TenantAccountJoin).filter_by(account_id=account.id, current=True).first()
if current_tenant:
- account.current_tenant_id = current_tenant.tenant_id
+ account.set_tenant_id(current_tenant.tenant_id)
else:
available_ta = (
- TenantAccountJoin.query.filter_by(account_id=account.id).order_by(TenantAccountJoin.id.asc()).first()
+ db.session.query(TenantAccountJoin)
+ .filter_by(account_id=account.id)
+ .order_by(TenantAccountJoin.id.asc())
+ .first()
)
if not available_ta:
return None

- account.current_tenant_id = available_ta.tenant_id
+ account.set_tenant_id(available_ta.tenant_id)
available_ta.current = True
db.session.commit()

@@ -700,7 +703,7 @@ class TenantService:
).update({"current": False})
tenant_account_join.current = True
# Set the current tenant for the account
- account.current_tenant_id = tenant_account_join.tenant_id
+ account.set_tenant_id(tenant_account_join.tenant_id)
db.session.commit()

@staticmethod

@@ -69,6 +69,7 @@ class HitTestingService:
query: str,
account: Account,
external_retrieval_model: dict,
+ metadata_filtering_conditions: dict,
) -> dict:
if dataset.provider != "external":
return {

@@ -82,6 +83,7 @@ class HitTestingService:
dataset_id=dataset.id,
query=cls.escape_query_for_search(query),
external_retrieval_model=external_retrieval_model,
+ metadata_filtering_conditions=metadata_filtering_conditions,
)

end = time.perf_counter()
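Reviewer note (not part of the diff): AccountService drops the Flask-SQLAlchemy Model.query shortcut in favour of explicit db.session.query(...), which keeps working once the models no longer inherit from db.Model. A self-contained sketch of the session-based call shape, with a minimal stand-in model and an in-memory database:

from sqlalchemy import Boolean, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class TenantAccountJoin(Base):  # minimal stand-in for the real model
    __tablename__ = "tenant_account_joins"
    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    account_id: Mapped[str] = mapped_column(String(36))
    current: Mapped[bool] = mapped_column(Boolean, default=False)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Session-based query, equivalent to the removed `TenantAccountJoin.query...` call:
    current_join = (
        session.query(TenantAccountJoin)
        .filter_by(account_id="acc-1", current=True)
        .first()
    )
    print(current_join)  # None in this empty sketch database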
@@ -2,9 +2,10 @@ import os

import pytest
from _pytest.monkeypatch import MonkeyPatch
- from api.core.rag.datasource.vdb.field import Field
from elasticsearch import Elasticsearch

+ from core.rag.datasource.vdb.field import Field


class MockIndicesClient:
def __init__(self):
@@ -102,7 +102,7 @@ def test_flask_configs(example_env_file):
assert str(URL(str(config["CODE_EXECUTION_ENDPOINT"])) / "v1") == "http://sandbox:8194/v1"


- def test_inner_api_config_exist():
-     config = DifyConfig()
+ def test_inner_api_config_exist(example_env_file):
+     config = DifyConfig(_env_file=example_env_file)
assert config.INNER_API is False
assert config.INNER_API_KEY is None
153 api/uv.lock (generated)
@@ -1384,11 +1384,11 @@ requires-dist = [
{ name = "psycogreen", specifier = "~=1.0.2" },
{ name = "psycopg2-binary", specifier = "~=2.9.6" },
{ name = "pycryptodome", specifier = "==3.19.1" },
- { name = "pydantic", specifier = "~=2.9.2" },
- { name = "pydantic-extra-types", specifier = "~=2.9.0" },
- { name = "pydantic-settings", specifier = "~=2.6.0" },
+ { name = "pydantic", specifier = "~=2.11.4" },
+ { name = "pydantic-extra-types", specifier = "~=2.10.3" },
+ { name = "pydantic-settings", specifier = "~=2.9.1" },
{ name = "pyjwt", specifier = "~=2.8.0" },
- { name = "pypdfium2", specifier = "~=4.30.0" },
+ { name = "pypdfium2", specifier = "==4.30.0" },
{ name = "python-docx", specifier = "~=1.1.0" },
{ name = "python-dotenv", specifier = "==1.0.1" },
{ name = "pyyaml", specifier = "~=6.0.1" },
@@ -2621,15 +2621,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/10/5c/abd7495c934d9af5c263c2245ae30cfaa716c3c0cf027b2b8fa686ee7bd4/json_repair-0.41.1-py3-none-any.whl", hash = "sha256:0e181fd43a696887881fe19fed23422a54b3e4c558b6ff27a86a8c3ddde9ae79", size = 21578, upload-time = "2025-04-14T07:01:46.815Z" },
]

[[package]]
name = "jsonpath-python"
version = "1.0.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/49/e582e50b0c54c1b47e714241c4a4767bf28758bf90212248aea8e1ce8516/jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666", size = 18121, upload-time = "2022-03-14T02:35:01.877Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/16/8a/d63959f4eff03893a00e6e63592e3a9f15b9266ed8e0275ab77f8c7dbc94/jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575", size = 7552, upload-time = "2022-03-14T02:34:59.754Z" },
]

[[package]]
name = "jsonschema"
version = "4.23.0"
@@ -4184,76 +4175,92 @@ wheels = [

[[package]]
name = "pydantic"
- version = "2.9.2"
+ version = "2.11.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
{ name = "pydantic-core" },
{ name = "typing-extensions" },
+ { name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", size = 769917, upload-time = "2024-09-17T15:59:54.273Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540, upload-time = "2025-04-29T20:38:55.02Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928, upload-time = "2024-09-17T15:59:51.827Z" },
{ url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900, upload-time = "2025-04-29T20:38:52.724Z" },
]

[[package]]
name = "pydantic-core"
- version = "2.23.4"
+ version = "2.33.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156, upload-time = "2024-09-16T16:06:44.786Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/30/890a583cd3f2be27ecf32b479d5d615710bb926d92da03e3f7838ff3e58b/pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8", size = 1865160, upload-time = "2024-09-16T16:04:18.628Z" },
{ url = "https://files.pythonhosted.org/packages/1d/9a/b634442e1253bc6889c87afe8bb59447f106ee042140bd57680b3b113ec7/pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d", size = 1776777, upload-time = "2024-09-16T16:04:20.038Z" },
{ url = "https://files.pythonhosted.org/packages/75/9a/7816295124a6b08c24c96f9ce73085032d8bcbaf7e5a781cd41aa910c891/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e", size = 1799244, upload-time = "2024-09-16T16:04:21.799Z" },
{ url = "https://files.pythonhosted.org/packages/a9/8f/89c1405176903e567c5f99ec53387449e62f1121894aa9fc2c4fdc51a59b/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607", size = 1805307, upload-time = "2024-09-16T16:04:23.324Z" },
{ url = "https://files.pythonhosted.org/packages/d5/a5/1a194447d0da1ef492e3470680c66048fef56fc1f1a25cafbea4bc1d1c48/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd", size = 2000663, upload-time = "2024-09-16T16:04:25.203Z" },
{ url = "https://files.pythonhosted.org/packages/13/a5/1df8541651de4455e7d587cf556201b4f7997191e110bca3b589218745a5/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea", size = 2655941, upload-time = "2024-09-16T16:04:27.211Z" },
{ url = "https://files.pythonhosted.org/packages/44/31/a3899b5ce02c4316865e390107f145089876dff7e1dfc770a231d836aed8/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e", size = 2052105, upload-time = "2024-09-16T16:04:28.611Z" },
{ url = "https://files.pythonhosted.org/packages/1b/aa/98e190f8745d5ec831f6d5449344c48c0627ac5fed4e5340a44b74878f8e/pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b", size = 1919967, upload-time = "2024-09-16T16:04:30.045Z" },
{ url = "https://files.pythonhosted.org/packages/ae/35/b6e00b6abb2acfee3e8f85558c02a0822e9a8b2f2d812ea8b9079b118ba0/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0", size = 1964291, upload-time = "2024-09-16T16:04:32.376Z" },
{ url = "https://files.pythonhosted.org/packages/13/46/7bee6d32b69191cd649bbbd2361af79c472d72cb29bb2024f0b6e350ba06/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64", size = 2109666, upload-time = "2024-09-16T16:04:33.923Z" },
{ url = "https://files.pythonhosted.org/packages/39/ef/7b34f1b122a81b68ed0a7d0e564da9ccdc9a2924c8d6c6b5b11fa3a56970/pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f", size = 1732940, upload-time = "2024-09-16T16:04:35.467Z" },
{ url = "https://files.pythonhosted.org/packages/2f/76/37b7e76c645843ff46c1d73e046207311ef298d3f7b2f7d8f6ac60113071/pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3", size = 1916804, upload-time = "2024-09-16T16:04:37.06Z" },
{ url = "https://files.pythonhosted.org/packages/74/7b/8e315f80666194b354966ec84b7d567da77ad927ed6323db4006cf915f3f/pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231", size = 1856459, upload-time = "2024-09-16T16:04:38.438Z" },
{ url = "https://files.pythonhosted.org/packages/14/de/866bdce10ed808323d437612aca1ec9971b981e1c52e5e42ad9b8e17a6f6/pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee", size = 1770007, upload-time = "2024-09-16T16:04:40.229Z" },
{ url = "https://files.pythonhosted.org/packages/dc/69/8edd5c3cd48bb833a3f7ef9b81d7666ccddd3c9a635225214e044b6e8281/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87", size = 1790245, upload-time = "2024-09-16T16:04:41.794Z" },
{ url = "https://files.pythonhosted.org/packages/80/33/9c24334e3af796ce80d2274940aae38dd4e5676298b4398eff103a79e02d/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8", size = 1801260, upload-time = "2024-09-16T16:04:43.991Z" },
{ url = "https://files.pythonhosted.org/packages/a5/6f/e9567fd90104b79b101ca9d120219644d3314962caa7948dd8b965e9f83e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327", size = 1996872, upload-time = "2024-09-16T16:04:45.593Z" },
{ url = "https://files.pythonhosted.org/packages/2d/ad/b5f0fe9e6cfee915dd144edbd10b6e9c9c9c9d7a56b69256d124b8ac682e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2", size = 2661617, upload-time = "2024-09-16T16:04:47.3Z" },
{ url = "https://files.pythonhosted.org/packages/06/c8/7d4b708f8d05a5cbfda3243aad468052c6e99de7d0937c9146c24d9f12e9/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36", size = 2071831, upload-time = "2024-09-16T16:04:48.893Z" },
{ url = "https://files.pythonhosted.org/packages/89/4d/3079d00c47f22c9a9a8220db088b309ad6e600a73d7a69473e3a8e5e3ea3/pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126", size = 1917453, upload-time = "2024-09-16T16:04:51.099Z" },
{ url = "https://files.pythonhosted.org/packages/e9/88/9df5b7ce880a4703fcc2d76c8c2d8eb9f861f79d0c56f4b8f5f2607ccec8/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e", size = 1968793, upload-time = "2024-09-16T16:04:52.604Z" },
{ url = "https://files.pythonhosted.org/packages/e3/b9/41f7efe80f6ce2ed3ee3c2dcfe10ab7adc1172f778cc9659509a79518c43/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24", size = 2116872, upload-time = "2024-09-16T16:04:54.41Z" },
{ url = "https://files.pythonhosted.org/packages/63/08/b59b7a92e03dd25554b0436554bf23e7c29abae7cce4b1c459cd92746811/pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84", size = 1738535, upload-time = "2024-09-16T16:04:55.828Z" },
{ url = "https://files.pythonhosted.org/packages/88/8d/479293e4d39ab409747926eec4329de5b7129beaedc3786eca070605d07f/pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9", size = 1917992, upload-time = "2024-09-16T16:04:57.395Z" },
{ url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" },
{ url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" },
{ url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" },
{ url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" },
{ url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" },
{ url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" },
{ url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" },
{ url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" },
{ url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" },
{ url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" },
{ url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" },
{ url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" },
{ url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" },
{ url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" },
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
{ url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" },
{ url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" },
{ url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" },
{ url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" },
{ url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" },
{ url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" },
{ url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" },
{ url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" },
{ url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" },
]

[[package]]
name = "pydantic-extra-types"
version = "2.9.0"
version = "2.10.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fb/95/d61dcadd933cb34461adc271c13bbe14a7080b9922b9e0dc3c1d18b421cb/pydantic_extra_types-2.9.0.tar.gz", hash = "sha256:e061c01636188743bb69f368dcd391f327b8cfbfede2fe1cbb1211b06601ba3b", size = 39578, upload-time = "2024-07-03T17:19:47.519Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d9/33/0cde418479949cd6aa1ac669deffcd1c37d8d9cead99ddb48f344e75f2e3/pydantic_extra_types-2.10.4.tar.gz", hash = "sha256:bf8236a63d061eb3ecb1b2afa78ba0f97e3f67aa11dbbff56ec90491e8772edc", size = 95269, upload-time = "2025-04-28T08:18:34.869Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/37/2f/efc4877d1a1536ec76ca0114c3e9dee7d0a10a262c53d384d50163f5684c/pydantic_extra_types-2.9.0-py3-none-any.whl", hash = "sha256:f0bb975508572ba7bf3390b7337807588463b7248587e69f43b1ad7c797530d0", size = 30544, upload-time = "2024-07-03T17:19:46.208Z" },
{ url = "https://files.pythonhosted.org/packages/df/ac/bee195ee49256385fad460ce420aeb42703a648dba487c20b6fd107e42ea/pydantic_extra_types-2.10.4-py3-none-any.whl", hash = "sha256:ce064595af3cab05e39ae062752432dcd0362ff80f7e695b61a3493a4d842db7", size = 37276, upload-time = "2025-04-28T08:18:31.617Z" },
]

[[package]]
name = "pydantic-settings"
version = "2.6.1"
version = "2.9.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646, upload-time = "2024-11-01T11:00:05.17Z" }
sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595, upload-time = "2024-11-01T11:00:02.64Z" },
{ url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" },
]

[[package]]
@ -4376,22 +4383,22 @@ wheels = [

[[package]]
name = "pypdfium2"
version = "4.30.1"
version = "4.30.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/55/d4/905e621c62598a08168c272b42fc00136c8861cfce97afb2a1ecbd99487a/pypdfium2-4.30.1.tar.gz", hash = "sha256:5f5c7c6d03598e107d974f66b220a49436aceb191da34cda5f692be098a814ce", size = 164854, upload-time = "2024-12-19T19:28:11.459Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239, upload-time = "2024-05-09T18:33:17.552Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/30/8e/3ce0856b3af0f058dd3655ce57d31d1dbde4d4bd0e172022ffbf1b58a4b9/pypdfium2-4.30.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:e07c47633732cc18d890bb7e965ad28a9c5a932e548acb928596f86be2e5ae37", size = 2889836, upload-time = "2024-12-19T19:27:39.531Z" },
{ url = "https://files.pythonhosted.org/packages/c2/6a/f6995b21f9c6c155487ce7df70632a2df1ba49efcb291b9943ea45f28b15/pypdfium2-4.30.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ea2d44e96d361123b67b00f527017aa9c847c871b5714e013c01c3eb36a79fe", size = 2769232, upload-time = "2024-12-19T19:27:43.227Z" },
{ url = "https://files.pythonhosted.org/packages/53/91/79060923148e6d380b8a299b32bba46d70aac5fe1cd4f04320bcbd1a48d3/pypdfium2-4.30.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de7a3a36803171b3f66911131046d65a732f9e7834438191cb58235e6163c4e", size = 2847531, upload-time = "2024-12-19T19:27:46.372Z" },
{ url = "https://files.pythonhosted.org/packages/a8/6c/93507f87c159e747eaab54352c0fccbaec3f1b3749d0bb9085a47899f898/pypdfium2-4.30.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8a4231efb13170354f568c722d6540b8d5b476b08825586d48ef70c40d16e03", size = 2636266, upload-time = "2024-12-19T19:27:49.767Z" },
{ url = "https://files.pythonhosted.org/packages/24/dc/d56f74a092f2091e328d6485f16562e2fc51cffb0ad6d5c616d80c1eb53c/pypdfium2-4.30.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f434a4934e8244aa95343ffcf24e9ad9f120dbb4785f631bb40a88c39292493", size = 2919296, upload-time = "2024-12-19T19:27:51.767Z" },
{ url = "https://files.pythonhosted.org/packages/be/d9/a2f1ee03d47fbeb48bcfde47ed7155772739622cfadf7135a84ba6a97824/pypdfium2-4.30.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f454032a0bc7681900170f67d8711b3942824531e765f91c2f5ce7937f999794", size = 2866119, upload-time = "2024-12-19T19:27:53.561Z" },
{ url = "https://files.pythonhosted.org/packages/01/47/6aa019c32aa39d3f33347c458c0c5887e84096cbe444456402bc97e66704/pypdfium2-4.30.1-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:bbf9130a72370ee9d602e39949b902db669a2a1c24746a91e5586eb829055d9f", size = 6228684, upload-time = "2024-12-19T19:27:56.781Z" },
{ url = "https://files.pythonhosted.org/packages/4c/07/2954c15b3f7c85ceb80cad36757fd41b3aba0dd14e68f4bed9ce3f2e7e74/pypdfium2-4.30.1-py3-none-musllinux_1_1_i686.whl", hash = "sha256:5cb52884b1583b96e94fd78542c63bb42e06df5e8f9e52f8f31f5ad5a1e53367", size = 6231815, upload-time = "2024-12-19T19:28:00.351Z" },
{ url = "https://files.pythonhosted.org/packages/b4/9b/b4667e95754624f4af5a912001abba90c046e1c80d4a4e887f0af664ffec/pypdfium2-4.30.1-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1a9e372bd4867ff223cc8c338e33fe11055dad12f22885950fc27646cc8d9122", size = 6313429, upload-time = "2024-12-19T19:28:02.536Z" },
{ url = "https://files.pythonhosted.org/packages/43/38/f9e77cf55ba5546a39fa659404b78b97de2ca344848271e7731efb0954cd/pypdfium2-4.30.1-py3-none-win32.whl", hash = "sha256:421f1cf205e213e07c1f2934905779547f4f4a2ff2f59dde29da3d511d3fc806", size = 2834989, upload-time = "2024-12-19T19:28:04.657Z" },
{ url = "https://files.pythonhosted.org/packages/a4/f3/8d3a350efb4286b5ebdabcf6736f51d8e3b10dbe68804c6930b00f5cf329/pypdfium2-4.30.1-py3-none-win_amd64.whl", hash = "sha256:598a7f20264ab5113853cba6d86c4566e4356cad037d7d1f849c8c9021007e05", size = 2960157, upload-time = "2024-12-19T19:28:07.772Z" },
{ url = "https://files.pythonhosted.org/packages/e1/6b/2706497c86e8d69fb76afe5ea857fe1794621aa0f3b1d863feb953fe0f22/pypdfium2-4.30.1-py3-none-win_arm64.whl", hash = "sha256:c2b6d63f6d425d9416c08d2511822b54b8e3ac38e639fc41164b1d75584b3a8c", size = 2814810, upload-time = "2024-12-19T19:28:09.857Z" },
{ url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254, upload-time = "2024-05-09T18:32:48.653Z" },
{ url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624, upload-time = "2024-05-09T18:32:51.458Z" },
{ url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126, upload-time = "2024-05-09T18:32:53.581Z" },
{ url = "https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077, upload-time = "2024-05-09T18:32:55.99Z" },
{ url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431, upload-time = "2024-05-09T18:32:57.911Z" },
{ url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008, upload-time = "2024-05-09T18:32:59.886Z" },
{ url = "https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543, upload-time = "2024-05-09T18:33:02.597Z" },
{ url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911, upload-time = "2024-05-09T18:33:05.376Z" },
{ url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430, upload-time = "2024-05-09T18:33:08.067Z" },
{ url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951, upload-time = "2024-05-09T18:33:10.567Z" },
{ url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = "sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098, upload-time = "2024-05-09T18:33:13.107Z" },
{ url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118, upload-time = "2024-05-09T18:33:15.489Z" },
]

[[package]]
@ -5885,6 +5892,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" },
]

[[package]]
name = "typing-inspection"
version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" },
]

[[package]]
name = "tzdata"
version = "2025.2"
@ -5973,24 +5992,22 @@ pptx = [

[[package]]
name = "unstructured-client"
version = "0.28.1"
version = "0.34.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiofiles" },
{ name = "cryptography" },
{ name = "eval-type-backport" },
{ name = "httpx" },
{ name = "jsonpath-python" },
{ name = "nest-asyncio" },
{ name = "pydantic" },
{ name = "pypdf" },
{ name = "python-dateutil" },
{ name = "requests-toolbelt" },
{ name = "typing-inspect" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c3/a1/8b0bf11e8c092aeb704b579f5855b5cfb0d5278a6c542312cddad5a8097e/unstructured_client-0.28.1.tar.gz", hash = "sha256:aac11fe5dd6b8dfdbc15aad3205fe791a3834dac29bb9f499fd515643554f709", size = 48607, upload-time = "2024-11-26T21:26:29.01Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ac/32/9e819deaa5a59b57d97055b6c2cb9a83494e2f9c0fb07f56b3030bd1490f/unstructured_client-0.34.0.tar.gz", hash = "sha256:bc1c34edc622545993f1061127996da2576fc602fefd23e5cd8454e04c421e1f", size = 81006, upload-time = "2025-04-22T21:00:10.531Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/09/69/38e51f6ce07f2a454f53a364f6ef9acdbfc73841fe736d9c7cd152525048/unstructured_client-0.28.1-py3-none-any.whl", hash = "sha256:0112688908f544681a67abf314e0d2023dfa120c8e5d9fa6d31390b914a06d72", size = 62865, upload-time = "2024-11-26T21:26:27.811Z" },
{ url = "https://files.pythonhosted.org/packages/a9/e3/d1c2d02d953555d2830af3013d5ce76351507441f148f81469ae751bec7c/unstructured_client-0.34.0-py3-none-any.whl", hash = "sha256:3180d2030695fe6279e7f6f3a1fb92b4038f26c5706e6f9dfe063f816893b734", size = 189417, upload-time = "2025-04-22T21:00:08.679Z" },
]

[[package]]
@ -395,7 +395,7 @@ SUPABASE_URL=your-server-url
# ------------------------------

# The type of vector store to use.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`.
VECTOR_STORE=weaviate

# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
@ -865,7 +865,7 @@ CHROMA_IS_PERSISTENT=TRUE

# ------------------------------
# Environment Variables for Oracle Service
# (only used when VECTOR_STORE is Oracle)
# (only used when VECTOR_STORE is oracle)
# ------------------------------
ORACLE_PWD=Dify123456
ORACLE_CHARACTERSET=AL32UTF8
@ -22,7 +22,7 @@ const MarkdownButton = ({ node }: any) => {
return <Button
variant={variant}
size={size}
className={cn('!h-8 select-none !px-3')}
className={cn('!h-auto min-h-8 select-none whitespace-normal !px-3')}
onClick={() => {
if (is_valid_url(link)) {
window.open(link, '_blank')
@ -128,13 +128,19 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
const language = match?.[1]
const languageShowName = getCorrectCapitalizationLanguageName(language || '')
const chartData = useMemo(() => {
const str = String(children).replace(/\n$/, '')
if (language === 'echarts') {
try {
return JSON.parse(String(children).replace(/\n$/, ''))
return JSON.parse(str)
}
catch { }
try {
// eslint-disable-next-line no-new-func, sonarjs/code-eval
return new Function(`return ${str}`)()
}
catch { }
}
return JSON.parse('{"title":{"text":"ECharts error - Wrong JSON format."}}')
return JSON.parse('{"title":{"text":"ECharts error - Wrong option."}}')
}, [language, children])

const renderCodeContent = useMemo(() => {
@ -149,7 +149,7 @@ const EndpointCard = ({
</ActionButton>
</div>
</div>
{data.declaration.endpoints.map((endpoint, index) => (
{data.declaration.endpoints.filter(endpoint => !endpoint.hidden).map((endpoint, index) => (
<div key={index} className='flex h-6 items-center'>
<div className='system-xs-regular w-12 shrink-0 text-text-tertiary'>{endpoint.method}</div>
<div className='group/item system-xs-regular flex grow items-center truncate text-text-secondary'>
@ -36,6 +36,7 @@ export type PluginEndpointDeclaration = {
export type EndpointItem = {
path: string
method: string
hidden?: boolean
}

export type EndpointListItem = {
@ -67,6 +67,9 @@ export const useWorkflowNodeStarted = () => {

incomeEdges.forEach((edge) => {
const incomeNode = nodes.find(node => node.id === edge.source)!
if (!incomeNode || !('data' in incomeNode))
return

if (
(!incomeNode.data._runningBranchId && edge.sourceHandle === 'source')
|| (incomeNode.data._runningBranchId && edge.sourceHandle === incomeNode.data._runningBranchId)
@ -66,6 +66,7 @@ export const AgentStrategy = memo((props: AgentStrategyProps) => {
case FormTypeEnum.textInput: {
const def = schema as CredentialFormSchemaTextInput
const value = props.value[schema.variable] || schema.default
const instanceId = schema.variable
const onChange = (value: string) => {
props.onChange({ ...props.value, [schema.variable]: value })
}
@ -77,6 +78,8 @@ export const AgentStrategy = memo((props: AgentStrategyProps) => {
value={value}
onChange={onChange}
onGenerated={handleGenerated}
instanceId={instanceId}
key={instanceId}
title={renderI18nObject(schema.label)}
headerClassName='bg-transparent px-0 text-text-secondary system-sm-semibold-uppercase'
containerBackgroundClassName='bg-transparent'
@ -102,7 +102,6 @@ const AgentPanel: FC<NodePanelProps<AgentNodeType>> = (props) => {
agent_strategy_label: strategy?.agent_strategy_label,
output_schema: strategy!.agent_output_schema,
plugin_unique_identifier: strategy!.plugin_unique_identifier,
agent_parameters: {},
})
resetEditor(Date.now())
}}
@ -85,12 +85,13 @@ const useConfig = (id: string, payload: AgentNodeType) => {
enabled: Boolean(pluginId),
})
const formData = useMemo(() => {
const paramNameList = (currentStrategy?.parameters || []).map(item => item.name)
return Object.fromEntries(
Object.entries(inputs.agent_parameters || {}).map(([key, value]) => {
Object.entries(inputs.agent_parameters || {}).filter(([name]) => paramNameList.includes(name)).map(([key, value]) => {
return [key, value.value]
}),
)
}, [inputs.agent_parameters])
}, [inputs.agent_parameters, currentStrategy?.parameters])
const onFormChange = (value: Record<string, any>) => {
const res: ToolVarInputs = {}
Object.entries(value).forEach(([key, val]) => {
@ -191,7 +191,7 @@ const VersionHistoryPanel = () => {
}, [t, updateWorkflow, resetWorkflowVersionHistory])

return (
<div className='flex w-[268px] flex-col rounded-l-2xl border-y-[0.5px] border-l-[0.5px] border-components-panel-border bg-components-panel-bg shadow-xl shadow-shadow-shadow-5'>
<div className='flex h-full w-[268px] flex-col rounded-l-2xl border-y-[0.5px] border-l-[0.5px] border-components-panel-border bg-components-panel-bg shadow-xl shadow-shadow-shadow-5'>
<div className='flex items-center gap-x-2 px-4 pt-3'>
<div className='system-xl-semibold flex-1 py-1 text-text-primary'>{t('workflow.versionHistory.title')}</div>
<Filter
@ -208,50 +208,51 @@ const VersionHistoryPanel = () => {
<RiCloseLine className='h-4 w-4 text-text-tertiary' />
</div>
</div>
<div className="relative flex-1 overflow-y-auto px-3 py-2">
{(isFetching && !versionHistory?.pages?.length)
? (
<Loading />
)
: (
<>
{versionHistory?.pages?.map((page, pageNumber) => (
page.items?.map((item, idx) => {
const isLast = pageNumber === versionHistory.pages.length - 1 && idx === page.items.length - 1
return <VersionHistoryItem
key={item.id}
item={item}
currentVersion={currentVersion}
latestVersionId={appDetail!.workflow!.id}
onClick={handleVersionClick}
handleClickMenuItem={handleClickMenuItem.bind(null, item)}
isLast={isLast}
/>
})
))}
{hasNextPage && (
<div className='absolute bottom-2 left-2 flex p-2'>
<div
className='flex cursor-pointer items-center gap-x-1'
onClick={handleNextPage}
>
<div className='item-center flex justify-center p-0.5'>
{
isFetching
? <RiLoader2Line className='h-3.5 w-3.5 animate-spin text-text-accent' />
: <RiArrowDownDoubleLine className='h-3.5 w-3.5 text-text-accent' />}
</div>
<div className='system-xs-medium-uppercase py-[1px] text-text-accent'>
{t('workflow.common.loadMore')}
</div>
</div>
</div>
)}
{!isFetching && (!versionHistory?.pages?.length || !versionHistory.pages[0].items.length) && (
<Empty onResetFilter={handleResetFilter} />
)}
</>
)}
<div className="flex h-0 flex-1 flex-col">
<div className="flex-1 overflow-y-auto px-3 py-2">
{(isFetching && !versionHistory?.pages?.length)
? (
<Loading />
)
: (
<>
{versionHistory?.pages?.map((page, pageNumber) => (
page.items?.map((item, idx) => {
const isLast = pageNumber === versionHistory.pages.length - 1 && idx === page.items.length - 1
return <VersionHistoryItem
key={item.id}
item={item}
currentVersion={currentVersion}
latestVersionId={appDetail!.workflow!.id}
onClick={handleVersionClick}
handleClickMenuItem={handleClickMenuItem.bind(null, item)}
isLast={isLast}
/>
})
))}
{!isFetching && (!versionHistory?.pages?.length || !versionHistory.pages[0].items.length) && (
<Empty onResetFilter={handleResetFilter} />
)}
</>
)}
</div>
{hasNextPage && (
<div className='p-2'>
<div
className='flex cursor-pointer items-center gap-x-1'
onClick={handleNextPage}
>
<div className='item-center flex justify-center p-0.5'>
{isFetching
? <RiLoader2Line className='h-3.5 w-3.5 animate-spin text-text-accent' />
: <RiArrowDownDoubleLine className='h-3.5 w-3.5 text-text-accent' />}
</div>
<div className='system-xs-medium-uppercase py-[1px] text-text-accent'>
{t('workflow.common.loadMore')}
</div>
</div>
</div>
)}
</div>
{restoreConfirmOpen && (<RestoreConfirmModal
isOpen={restoreConfirmOpen}
@ -109,8 +109,10 @@ const IterationResultPanel: FC<Props> = ({
className="h-px grow bg-divider-subtle"
></div>}
<div className={cn(
'overflow-hidden transition-all duration-200',
expandedIterations[index] ? 'max-h-[1000px] opacity-100' : 'max-h-0 opacity-0',
'transition-all duration-200',
expandedIterations[index]
? 'opacity-100'
: 'max-h-0 opacity-0 overflow-hidden',
)}>
<TracingPanel
list={iteration}
@ -115,8 +115,10 @@ const LoopResultPanel: FC<Props> = ({
className="h-px grow bg-divider-subtle"
></div>}
<div className={cn(
'overflow-hidden transition-all duration-200',
expandedLoops[index] ? 'max-h-[1000px] opacity-100' : 'max-h-0 opacity-0',
'transition-all duration-200',
expandedLoops[index]
? 'opacity-100'
: 'max-h-0 opacity-0 overflow-hidden',
)}>
{
loopVariableMap?.[index] && (
@ -82,8 +82,10 @@ const LoopResultPanel: FC<Props> = ({
className="h-px grow bg-divider-subtle"
></div>}
<div className={cn(
'overflow-hidden transition-all duration-200',
expandedLoops[index] ? 'max-h-[1000px] opacity-100' : 'max-h-0 opacity-0',
'transition-all duration-200',
expandedLoops[index]
? 'opacity-100'
: 'max-h-0 opacity-0 overflow-hidden',
)}>
<TracingPanel
list={loop}
@ -2,7 +2,7 @@ const translation = {
toVerified: 'Get Education Verified',
toVerifiedTip: {
front: 'You are now eligible for Education Verified status. Please enter your education information below to complete the process and receive an',
coupon: 'exclusive 50% coupon',
coupon: 'exclusive 100% coupon',
end: 'for the Dify Professional Plan.',
},
currentSigned: 'CURRENTLY SIGNED IN AS',
@ -38,9 +38,9 @@ const translation = {
submitError: 'Form submission failed. Please try again later.',
learn: 'Learn how to get education verified',
successTitle: 'You Have Got Dify Education Verified',
successContent: 'We have issued a 50% discount coupon for the Dify Professional plan to your account. The coupon is valid for one year, please use it within the validity period.',
successContent: 'We have issued a 100% discount coupon for the Dify Professional plan to your account. The coupon is valid for one year, please use it within the validity period.',
rejectTitle: 'Your Dify Educational Verification Has Been Rejected',
rejectContent: 'Unfortunately, you are not eligible for Education Verified status and therefore cannot receive the exclusive 50% coupon for the Dify Professional Plan if you use this email address.',
rejectContent: 'Unfortunately, you are not eligible for Education Verified status and therefore cannot receive the exclusive 100% coupon for the Dify Professional Plan if you use this email address.',
emailLabel: 'Your current email',
}

@ -2,7 +2,7 @@ const translation = {
toVerified: '获取教育版认证',
toVerifiedTip: {
front: '您现在符合教育版认证的资格。请在下方输入您的教育信息,以完成认证流程,并领取 Dify Professional 版的',
coupon: '50% 独家优惠券',
coupon: '100% 独家优惠券',
end: '。',
},
currentSigned: '您当前登录的账户是',
@ -38,9 +38,9 @@ const translation = {
submitError: '提交表单失败,请稍后重新提交问卷。',
learn: '了解如何获取教育版认证',
successTitle: '您已成功获得 Dify 教育版认证!',
successContent: '我们已向您的账户发放 Dify Professional 版 50% 折扣优惠券。该优惠券有效期为一年,请在有效期内使用。',
successContent: '我们已向您的账户发放 Dify Professional 版 100% 折扣优惠券。该优惠券有效期为一年,请在有效期内使用。',
rejectTitle: '您的 Dify 教育版认证已被拒绝',
rejectContent: '非常遗憾,您无法使用此电子邮件以获得教育版认证资格,也无法领取 Dify Professional 版的 50% 独家优惠券。',
rejectContent: '非常遗憾,您无法使用此电子邮件以获得教育版认证资格,也无法领取 Dify Professional 版的 100% 独家优惠券。',
emailLabel: '您当前的邮箱',

}
@ -274,7 +274,10 @@
// Add click event listener to toggle chatbot
containerDiv.addEventListener("click", handleClick);
// Add touch event listener
containerDiv.addEventListener("touchend", handleClick);
containerDiv.addEventListener("touchend", (event) => {
event.preventDefault();
handleClick();
}, { passive: false });

function handleClick() {
if (isDragging) return;
web/public/embed.min.js (vendored)
@ -39,4 +39,4 @@
<svg id="closeIcon" style="display:none" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M18 18L6 6M6 18L18 6" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
`,n.appendChild(e),document.body.appendChild(n),n.addEventListener("click",t),n.addEventListener("touchend",t),y.draggable){var a=n;var l=y.dragAxis||"both";let s,d,t,r;function o(e){u=!1,r=("touchstart"===e.type?(s=e.touches[0].clientX-a.offsetLeft,d=e.touches[0].clientY-a.offsetTop,t=e.touches[0].clientX,e.touches[0]):(s=e.clientX-a.offsetLeft,d=e.clientY-a.offsetTop,t=e.clientX,e)).clientY,document.addEventListener("mousemove",i),document.addEventListener("touchmove",i,{passive:!1}),document.addEventListener("mouseup",c),document.addEventListener("touchend",c),e.preventDefault()}function i(n){var o="touchmove"===n.type?n.touches[0]:n,i=o.clientX-t,o=o.clientY-r;if(u=8<Math.abs(i)||8<Math.abs(o)?!0:u){a.style.transition="none",a.style.cursor="grabbing";i=document.getElementById(m);i&&(i.style.display="none",p("open"));let e,t;t="touchmove"===n.type?(e=n.touches[0].clientX-s,window.innerHeight-n.touches[0].clientY-d):(e=n.clientX-s,window.innerHeight-n.clientY-d);o=a.getBoundingClientRect(),i=window.innerWidth-o.width,n=window.innerHeight-o.height;"x"!==l&&"both"!==l||a.style.setProperty(`--${h}-left`,Math.max(0,Math.min(e,i))+"px"),"y"!==l&&"both"!==l||a.style.setProperty(`--${h}-bottom`,Math.max(0,Math.min(t,n))+"px")}}function c(){setTimeout(()=>{u=!1},0),a.style.transition="",a.style.cursor="pointer",document.removeEventListener("mousemove",i),document.removeEventListener("touchmove",i),document.removeEventListener("mouseup",c),document.removeEventListener("touchend",c)}a.addEventListener("mousedown",o),a.addEventListener("touchstart",o)}}n.style.display="none",document.body.appendChild(n),2048<t.length&&console.error("The URL is too long, please reduce the number of inputs to prevent the bot from failing to load"),window.addEventListener("message",e=>{var t,n;e.origin===o&&(t=document.getElementById(m))&&e.source===t.contentWindow&&("dify-chatbot-iframe-ready"===e.data.type&&t.contentWindow?.postMessage({type:"dify-chatbot-config",payload:{isToggledByButton:!0,isDraggable:!!y.draggable}},o),"dify-chatbot-expand-change"===e.data.type)&&(a=!a,n=document.getElementById(m))&&(a?n.style.cssText="\n position: absolute;\n display: flex;\n flex-direction: column;\n justify-content: space-between;\n top: unset;\n right: var(--dify-chatbot-bubble-button-right, 1rem); /* Align with dify-chatbot-bubble-button. */\n bottom: var(--dify-chatbot-bubble-button-bottom, 1rem); /* Align with dify-chatbot-bubble-button. */\n left: unset;\n min-width: 24rem;\n width: 48%;\n max-width: 40rem; /* Match mobile breakpoint*/\n min-height: 43.75rem;\n height: 88%;\n max-height: calc(100vh - 6rem);\n border: none;\n z-index: 2147483640;\n overflow: hidden;\n user-select: none;\n transition-property: width, height;\n transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);\n transition-duration: 150ms;\n ":n.style.cssText=l,d())}),document.getElementById(h)||r()}else console.error(t+" is empty or token is not provided")}function p(e="open"){"open"===e?(document.getElementById("openIcon").style.display="block",document.getElementById("closeIcon").style.display="none"):(document.getElementById("openIcon").style.display="none",document.getElementById("closeIcon").style.display="block")}function b(e){"Escape"===e.key&&(e=document.getElementById(m))&&"none"!==e.style.display&&(e.style.display="none",p("open"))}h,h,document.addEventListener("keydown",b),y?.dynamicScript?e():document.body.onload=e})();
`,n.appendChild(e),document.body.appendChild(n),n.addEventListener("click",t),n.addEventListener("touchend",e=>{e.preventDefault(),t()},{passive:!1}),y.draggable){var a=n;var l=y.dragAxis||"both";let s,d,t,r;function o(e){u=!1,r=("touchstart"===e.type?(s=e.touches[0].clientX-a.offsetLeft,d=e.touches[0].clientY-a.offsetTop,t=e.touches[0].clientX,e.touches[0]):(s=e.clientX-a.offsetLeft,d=e.clientY-a.offsetTop,t=e.clientX,e)).clientY,document.addEventListener("mousemove",i),document.addEventListener("touchmove",i,{passive:!1}),document.addEventListener("mouseup",c),document.addEventListener("touchend",c),e.preventDefault()}function i(n){var o="touchmove"===n.type?n.touches[0]:n,i=o.clientX-t,o=o.clientY-r;if(u=8<Math.abs(i)||8<Math.abs(o)?!0:u){a.style.transition="none",a.style.cursor="grabbing";i=document.getElementById(m);i&&(i.style.display="none",p("open"));let e,t;t="touchmove"===n.type?(e=n.touches[0].clientX-s,window.innerHeight-n.touches[0].clientY-d):(e=n.clientX-s,window.innerHeight-n.clientY-d);o=a.getBoundingClientRect(),i=window.innerWidth-o.width,n=window.innerHeight-o.height;"x"!==l&&"both"!==l||a.style.setProperty(`--${h}-left`,Math.max(0,Math.min(e,i))+"px"),"y"!==l&&"both"!==l||a.style.setProperty(`--${h}-bottom`,Math.max(0,Math.min(t,n))+"px")}}function c(){setTimeout(()=>{u=!1},0),a.style.transition="",a.style.cursor="pointer",document.removeEventListener("mousemove",i),document.removeEventListener("touchmove",i),document.removeEventListener("mouseup",c),document.removeEventListener("touchend",c)}a.addEventListener("mousedown",o),a.addEventListener("touchstart",o)}}n.style.display="none",document.body.appendChild(n),2048<t.length&&console.error("The URL is too long, please reduce the number of inputs to prevent the bot from failing to load"),window.addEventListener("message",e=>{var t,n;e.origin===o&&(t=document.getElementById(m))&&e.source===t.contentWindow&&("dify-chatbot-iframe-ready"===e.data.type&&t.contentWindow?.postMessage({type:"dify-chatbot-config",payload:{isToggledByButton:!0,isDraggable:!!y.draggable}},o),"dify-chatbot-expand-change"===e.data.type)&&(a=!a,n=document.getElementById(m))&&(a?n.style.cssText="\n position: absolute;\n display: flex;\n flex-direction: column;\n justify-content: space-between;\n top: unset;\n right: var(--dify-chatbot-bubble-button-right, 1rem); /* Align with dify-chatbot-bubble-button. */\n bottom: var(--dify-chatbot-bubble-button-bottom, 1rem); /* Align with dify-chatbot-bubble-button. */\n left: unset;\n min-width: 24rem;\n width: 48%;\n max-width: 40rem; /* Match mobile breakpoint*/\n min-height: 43.75rem;\n height: 88%;\n max-height: calc(100vh - 6rem);\n border: none;\n z-index: 2147483640;\n overflow: hidden;\n user-select: none;\n transition-property: width, height;\n transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);\n transition-duration: 150ms;\n ":n.style.cssText=l,d())}),document.getElementById(h)||r()}else console.error(t+" is empty or token is not provided")}function p(e="open"){"open"===e?(document.getElementById("openIcon").style.display="block",document.getElementById("closeIcon").style.display="none"):(document.getElementById("openIcon").style.display="none",document.getElementById("closeIcon").style.display="block")}function b(e){"Escape"===e.key&&(e=document.getElementById(m))&&"none"!==e.style.display&&(e.style.display="none",p("open"))}h,h,document.addEventListener("keydown",b),y?.dynamicScript?e():document.body.onload=e})();