Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-17 02:35:56 +08:00)

Commit 3e4b342f38: Merge branch 'main' into feat/structured-output
@@ -464,3 +464,16 @@ CREATE_TIDB_SERVICE_JOB_ENABLED=false
MAX_SUBMIT_COUNT=100
# Lockout duration in seconds
LOGIN_LOCKOUT_DURATION=86400

# Enable OpenTelemetry
ENABLE_OTEL=false
OTLP_BASE_ENDPOINT=http://localhost:4318
OTLP_API_KEY=
OTEL_EXPORTER_TYPE=otlp
OTEL_SAMPLING_RATE=0.1
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
OTEL_MAX_QUEUE_SIZE=2048
OTEL_MAX_EXPORT_BATCH_SIZE=512
OTEL_METRIC_EXPORT_INTERVAL=60000
OTEL_BATCH_EXPORT_TIMEOUT=10000
OTEL_METRIC_EXPORT_TIMEOUT=30000
@@ -51,6 +51,7 @@ def initialize_extensions(app: DifyApp):
        ext_login,
        ext_mail,
        ext_migrate,
        ext_otel,
        ext_proxy_fix,
        ext_redis,
        ext_sentry,
@@ -81,6 +82,7 @@ def initialize_extensions(app: DifyApp):
        ext_proxy_fix,
        ext_blueprints,
        ext_commands,
        ext_otel,
    ]
    for ext in extensions:
        short_name = ext.__name__.split(".")[-1]
@@ -9,6 +9,7 @@ from .enterprise import EnterpriseFeatureConfig
from .extra import ExtraServiceConfig
from .feature import FeatureConfig
from .middleware import MiddlewareConfig
from .observability import ObservabilityConfig
from .packaging import PackagingInfo
from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
from .remote_settings_sources.apollo import ApolloSettingsSource
@@ -59,6 +60,8 @@ class DifyConfig(
    MiddlewareConfig,
    # Extra service configs
    ExtraServiceConfig,
    # Observability configs
    ObservabilityConfig,
    # Remote source configs
    RemoteSettingsSourceConfig,
    # Enterprise feature configs
api/configs/observability/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from configs.observability.otel.otel_config import OTelConfig


class ObservabilityConfig(OTelConfig):
    """
    Observability configuration settings
    """

    pass

api/configs/observability/otel/otel_config.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OTelConfig(BaseSettings):
    """
    OpenTelemetry configuration settings
    """

    ENABLE_OTEL: bool = Field(
        description="Whether to enable OpenTelemetry",
        default=False,
    )

    OTLP_BASE_ENDPOINT: str = Field(
        description="OTLP base endpoint",
        default="http://localhost:4318",
    )

    OTLP_API_KEY: str = Field(
        description="OTLP API key",
        default="",
    )

    OTEL_EXPORTER_TYPE: str = Field(
        description="OTEL exporter type",
        default="otlp",
    )

    OTEL_SAMPLING_RATE: float = Field(default=0.1, description="Sampling rate for traces (0.0 to 1.0)")

    OTEL_BATCH_EXPORT_SCHEDULE_DELAY: int = Field(
        default=5000, description="Batch export schedule delay in milliseconds"
    )

    OTEL_MAX_QUEUE_SIZE: int = Field(default=2048, description="Maximum queue size for the batch span processor")

    OTEL_MAX_EXPORT_BATCH_SIZE: int = Field(default=512, description="Maximum export batch size")

    OTEL_METRIC_EXPORT_INTERVAL: int = Field(default=60000, description="Metric export interval in milliseconds")

    OTEL_BATCH_EXPORT_TIMEOUT: int = Field(default=10000, description="Batch export timeout in milliseconds")

    OTEL_METRIC_EXPORT_TIMEOUT: int = Field(default=30000, description="Metric export timeout in milliseconds")
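Note (not part of the diff): a minimal sketch of how these fields are resolved, assuming only the OTelConfig class above and standard pydantic-settings behavior; each field is filled from a matching environment variable (such as the OTEL_* entries added to .env in this commit) and otherwise keeps its declared default.

import os

from configs.observability.otel.otel_config import OTelConfig

os.environ["OTEL_SAMPLING_RATE"] = "0.25"  # hypothetical override, purely for illustration

config = OTelConfig()
assert config.OTEL_SAMPLING_RATE == 0.25                     # read from the environment
assert config.OTLP_BASE_ENDPOINT == "http://localhost:4318"  # class default
assert config.ENABLE_OTEL is False                           # class default (assumes ENABLE_OTEL is unset)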
@@ -270,7 +270,7 @@ class ApolloClient:
        while not self._stopping:
            for namespace in self._notification_map:
                self._do_heart_beat(namespace)
            time.sleep(60 * 10)  # 10分钟
            time.sleep(60 * 10)  # 10 minutes

    def _do_heart_beat(self, namespace):
        url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
@@ -3,6 +3,8 @@ from configs import dify_config
HIDDEN_VALUE = "[__HIDDEN__]"
UUID_NIL = "00000000-0000-0000-0000-000000000000"

DEFAULT_FILE_NUMBER_LIMITS = 3

IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
@ -10,6 +10,8 @@ from uuid import uuid4
|
||||
|
||||
import httpx
|
||||
|
||||
from constants import DEFAULT_FILE_NUMBER_LIMITS
|
||||
|
||||
try:
|
||||
import magic
|
||||
except ImportError:
|
||||
@ -108,7 +110,7 @@ def get_parameters_from_feature_dict(*, features_dict: Mapping[str, Any], user_i
|
||||
{
|
||||
"image": {
|
||||
"enabled": False,
|
||||
"number_limits": 3,
|
||||
"number_limits": DEFAULT_FILE_NUMBER_LIMITS,
|
||||
"detail": "high",
|
||||
"transfer_methods": ["remote_url", "local_file"],
|
||||
}
|
||||
|
@ -21,12 +21,6 @@ def _validate_name(name):
|
||||
return name
|
||||
|
||||
|
||||
def _validate_description_length(description):
|
||||
if description and len(description) > 400:
|
||||
raise ValueError("Description cannot exceed 400 characters.")
|
||||
return description
|
||||
|
||||
|
||||
class ExternalApiTemplateListApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
|
@ -14,18 +14,6 @@ from services.entities.knowledge_entities.knowledge_entities import (
|
||||
from services.metadata_service import MetadataService
|
||||
|
||||
|
||||
def _validate_name(name):
|
||||
if not name or len(name) < 1 or len(name) > 40:
|
||||
raise ValueError("Name must be between 1 to 40 characters.")
|
||||
return name
|
||||
|
||||
|
||||
def _validate_description_length(description):
|
||||
if len(description) > 400:
|
||||
raise ValueError("Description cannot exceed 400 characters.")
|
||||
return description
|
||||
|
||||
|
||||
class DatasetMetadataCreateApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
|
@ -286,8 +286,6 @@ class AccountDeleteApi(Resource):
|
||||
class AccountDeleteUpdateFeedbackApi(Resource):
|
||||
@setup_required
|
||||
def post(self):
|
||||
account = current_user
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("email", type=str, required=True, location="json")
|
||||
parser.add_argument("feedback", type=str, required=True, location="json")
|
||||
|
@ -13,18 +13,6 @@ from services.entities.knowledge_entities.knowledge_entities import (
|
||||
from services.metadata_service import MetadataService
|
||||
|
||||
|
||||
def _validate_name(name):
|
||||
if not name or len(name) < 1 or len(name) > 40:
|
||||
raise ValueError("Name must be between 1 to 40 characters.")
|
||||
return name
|
||||
|
||||
|
||||
def _validate_description_length(description):
|
||||
if len(description) > 400:
|
||||
raise ValueError("Description cannot exceed 400 characters.")
|
||||
return description
|
||||
|
||||
|
||||
class DatasetMetadataCreateServiceApi(DatasetApiResource):
|
||||
def post(self, tenant_id, dataset_id):
|
||||
parser = reqparse.RequestParser()
|
||||
|
@ -117,9 +117,6 @@ class SegmentApi(DatasetApiResource):
|
||||
parser.add_argument("keyword", type=str, default=None, location="args")
|
||||
args = parser.parse_args()
|
||||
|
||||
status_list = args["status"]
|
||||
keyword = args["keyword"]
|
||||
|
||||
segments, total = SegmentService.get_segments(
|
||||
document_id=document_id,
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
|
@ -1,6 +1,7 @@
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from constants import DEFAULT_FILE_NUMBER_LIMITS
|
||||
from core.file import FileUploadConfig
|
||||
|
||||
|
||||
@ -18,7 +19,7 @@ class FileUploadConfigManager:
|
||||
if file_upload_dict.get("enabled"):
|
||||
transform_methods = file_upload_dict.get("allowed_file_upload_methods", [])
|
||||
file_upload_dict["image_config"] = {
|
||||
"number_limits": file_upload_dict.get("number_limits", 1),
|
||||
"number_limits": file_upload_dict.get("number_limits", DEFAULT_FILE_NUMBER_LIMITS),
|
||||
"transfer_methods": transform_methods,
|
||||
}
|
||||
|
||||
|
@ -177,7 +177,7 @@ class ModelInstance:
|
||||
)
|
||||
|
||||
def get_llm_num_tokens(
|
||||
self, prompt_messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None
|
||||
self, prompt_messages: Sequence[PromptMessage], tools: Optional[Sequence[PromptMessageTool]] = None
|
||||
) -> int:
|
||||
"""
|
||||
Get number of tokens for llm
|
||||
|
@ -58,7 +58,7 @@ class Callback(ABC):
|
||||
chunk: LLMResultChunk,
|
||||
model: str,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
@ -88,7 +88,7 @@ class Callback(ABC):
|
||||
result: LLMResult,
|
||||
model: str,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
|
@ -74,7 +74,7 @@ class LoggingCallback(Callback):
|
||||
chunk: LLMResultChunk,
|
||||
model: str,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
@ -104,7 +104,7 @@ class LoggingCallback(Callback):
|
||||
result: LLMResult,
|
||||
model: str,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
|
@@ -367,7 +367,7 @@ Inherit the `__base.text2speech_model.Text2SpeechModel` base class and implement

- Returns:

  Text converted speech stream。
  Text converted speech stream.

### Moderation
@ -1,8 +1,9 @@
|
||||
from collections.abc import Sequence
|
||||
from decimal import Decimal
|
||||
from enum import StrEnum
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage
|
||||
from core.model_runtime.entities.model_entities import ModelUsage, PriceInfo
|
||||
@ -107,7 +108,7 @@ class LLMResult(BaseModel):
|
||||
|
||||
id: Optional[str] = None
|
||||
model: str
|
||||
prompt_messages: list[PromptMessage]
|
||||
prompt_messages: Sequence[PromptMessage] = Field(default_factory=list)
|
||||
message: AssistantPromptMessage
|
||||
usage: LLMUsage
|
||||
system_fingerprint: Optional[str] = None
|
||||
@ -130,7 +131,7 @@ class LLMResultChunk(BaseModel):
|
||||
"""
|
||||
|
||||
model: str
|
||||
prompt_messages: list[PromptMessage]
|
||||
prompt_messages: Sequence[PromptMessage] = Field(default_factory=list)
|
||||
system_fingerprint: Optional[str] = None
|
||||
delta: LLMResultChunkDelta
|
||||
|
||||
|
@ -45,7 +45,7 @@ class LargeLanguageModel(AIModel):
|
||||
stream: bool = True,
|
||||
user: Optional[str] = None,
|
||||
callbacks: Optional[list[Callback]] = None,
|
||||
) -> Union[LLMResult, Generator]:
|
||||
) -> Union[LLMResult, Generator[LLMResultChunk, None, None]]:
|
||||
"""
|
||||
Invoke large language model
|
||||
|
||||
@ -205,22 +205,26 @@ class LargeLanguageModel(AIModel):
|
||||
user=user,
|
||||
callbacks=callbacks,
|
||||
)
|
||||
|
||||
# Following https://github.com/langgenius/dify/issues/17799,
|
||||
# we removed the prompt_messages from the chunk on the plugin daemon side.
|
||||
# To ensure compatibility, we add the prompt_messages back here.
|
||||
result.prompt_messages = prompt_messages
|
||||
return result
|
||||
raise NotImplementedError("unsupported invoke result type", type(result))
|
||||
|
||||
def _invoke_result_generator(
|
||||
self,
|
||||
model: str,
|
||||
result: Generator,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
stream: bool = True,
|
||||
user: Optional[str] = None,
|
||||
callbacks: Optional[list[Callback]] = None,
|
||||
) -> Generator:
|
||||
) -> Generator[LLMResultChunk, None, None]:
|
||||
"""
|
||||
Invoke result generator
|
||||
|
||||
@ -235,6 +239,10 @@ class LargeLanguageModel(AIModel):
|
||||
|
||||
try:
|
||||
for chunk in result:
|
||||
# Following https://github.com/langgenius/dify/issues/17799,
|
||||
# we removed the prompt_messages from the chunk on the plugin daemon side.
|
||||
# To ensure compatibility, we add the prompt_messages back here.
|
||||
chunk.prompt_messages = prompt_messages
|
||||
yield chunk
|
||||
|
||||
self._trigger_new_chunk_callbacks(
|
||||
@ -403,7 +411,7 @@ class LargeLanguageModel(AIModel):
|
||||
chunk: LLMResultChunk,
|
||||
model: str,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
@ -450,7 +458,7 @@ class LargeLanguageModel(AIModel):
|
||||
model: str,
|
||||
result: LLMResult,
|
||||
credentials: dict,
|
||||
prompt_messages: list[PromptMessage],
|
||||
prompt_messages: Sequence[PromptMessage],
|
||||
model_parameters: dict,
|
||||
tools: Optional[list[PromptMessageTool]] = None,
|
||||
stop: Optional[Sequence[str]] = None,
|
||||
|
@ -168,16 +168,18 @@ class BasePluginManager:
|
||||
Make a stream request to the plugin daemon inner API and yield the response as a model.
|
||||
"""
|
||||
for line in self._stream_request(method, path, params, headers, data, files):
|
||||
line_data = None
|
||||
try:
|
||||
rep = PluginDaemonBasicResponse[type].model_validate_json(line) # type: ignore
|
||||
except (ValueError, TypeError):
|
||||
# TODO modify this when line_data has code and message
|
||||
try:
|
||||
line_data = json.loads(line)
|
||||
rep = PluginDaemonBasicResponse[type](**line_data) # type: ignore
|
||||
except Exception:
|
||||
# TODO modify this when line_data has code and message
|
||||
if line_data and "error" in line_data:
|
||||
raise ValueError(line_data["error"])
|
||||
else:
|
||||
except (ValueError, TypeError):
|
||||
raise ValueError(line)
|
||||
# If the dictionary contains the `error` key, use its value as the argument
|
||||
# for `ValueError`.
|
||||
# Otherwise, use the `line` to provide better contextual information about the error.
|
||||
raise ValueError(line_data.get("error", line))
|
||||
|
||||
if rep.code != 0:
|
||||
if rep.code == -500:
|
||||
|
@ -28,7 +28,7 @@ BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
|
||||
},
|
||||
"conversation_histories_role": {"user_prefix": "用户", "assistant_prefix": "助手"},
|
||||
},
|
||||
"stop": ["用户:"],
|
||||
"stop": ["用户:"],
|
||||
}
|
||||
|
||||
BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG = {
|
||||
@ -41,5 +41,5 @@ BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG = {
|
||||
|
||||
BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
|
||||
"completion_prompt_config": {"prompt": {"text": "{{#pre_prompt#}}"}},
|
||||
"stop": ["用户:"],
|
||||
"stop": ["用户:"],
|
||||
}
|
||||
|
@ -228,7 +228,7 @@ class OracleVector(BaseVector):
|
||||
|
||||
top_k = kwargs.get("top_k", 5)
|
||||
# just not implement fetch by score_threshold now, may be later
|
||||
score_threshold = float(kwargs.get("score_threshold") or 0.0)
|
||||
# score_threshold = float(kwargs.get("score_threshold") or 0.0)
|
||||
if len(query) > 0:
|
||||
# Check which language the query is in
|
||||
zh_pattern = re.compile("[\u4e00-\u9fa5]+")
|
||||
@ -239,7 +239,7 @@ class OracleVector(BaseVector):
|
||||
words = pseg.cut(query)
|
||||
current_entity = ""
|
||||
for word, pos in words:
|
||||
if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}: # nr: 人名, ns: 地名, nt: 机构名
|
||||
if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}: # nr: 人名,ns: 地名,nt: 机构名
|
||||
current_entity += word
|
||||
else:
|
||||
if current_entity:
|
||||
|
@ -65,8 +65,6 @@ class RelytVector(BaseVector):
|
||||
return VectorType.RELYT
|
||||
|
||||
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs) -> None:
|
||||
index_params: dict[str, Any] = {}
|
||||
metadatas = [d.metadata for d in texts]
|
||||
self.create_collection(len(embeddings[0]))
|
||||
self.embedding_dimension = len(embeddings[0])
|
||||
self.add_texts(texts, embeddings)
|
||||
|
@ -187,7 +187,6 @@ class TiDBVector(BaseVector):
|
||||
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
|
||||
top_k = kwargs.get("top_k", 4)
|
||||
score_threshold = float(kwargs.get("score_threshold") or 0.0)
|
||||
filter = kwargs.get("filter")
|
||||
distance = 1 - score_threshold
|
||||
|
||||
query_vector_str = ", ".join(format(x) for x in query_vector)
|
||||
|
@ -206,6 +206,7 @@ class DatasetRetrieval:
|
||||
source = {
|
||||
"dataset_id": item.metadata.get("dataset_id"),
|
||||
"dataset_name": item.metadata.get("dataset_name"),
|
||||
"document_id": item.metadata.get("document_id") or item.metadata.get("title"),
|
||||
"document_name": item.metadata.get("title"),
|
||||
"data_source_type": "external",
|
||||
"retriever_from": invoke_from.to_source(),
|
||||
|
@ -19,7 +19,7 @@ parameters:
|
||||
zh_Hans: 本地时间
|
||||
human_description:
|
||||
en_US: localtime, such as 2024-1-1 0:0:0
|
||||
zh_Hans: 本地时间, 比如2024-1-1 0:0:0
|
||||
zh_Hans: 本地时间,比如 2024-1-1 0:0:0
|
||||
- name: timezone
|
||||
type: string
|
||||
required: false
|
||||
@ -29,5 +29,5 @@ parameters:
|
||||
zh_Hans: 时区
|
||||
human_description:
|
||||
en_US: Timezone, such as Asia/Shanghai
|
||||
zh_Hans: 时区, 比如Asia/Shanghai
|
||||
zh_Hans: 时区,比如 Asia/Shanghai
|
||||
default: Asia/Shanghai
|
||||
|
@ -29,5 +29,5 @@ parameters:
|
||||
zh_Hans: 时区
|
||||
human_description:
|
||||
en_US: Timezone, such as Asia/Shanghai
|
||||
zh_Hans: 时区, 比如Asia/Shanghai
|
||||
zh_Hans: 时区,比如 Asia/Shanghai
|
||||
default: Asia/Shanghai
|
||||
|
@ -19,7 +19,7 @@ parameters:
|
||||
zh_Hans: 当前时间
|
||||
human_description:
|
||||
en_US: current time, such as 2024-1-1 0:0:0
|
||||
zh_Hans: 当前时间, 比如2024-1-1 0:0:0
|
||||
zh_Hans: 当前时间,比如 2024-1-1 0:0:0
|
||||
- name: current_timezone
|
||||
type: string
|
||||
required: true
|
||||
@ -29,7 +29,7 @@ parameters:
|
||||
zh_Hans: 当前时区
|
||||
human_description:
|
||||
en_US: Current Timezone, such as Asia/Shanghai
|
||||
zh_Hans: 当前时区, 比如Asia/Shanghai
|
||||
zh_Hans: 当前时区,比如 Asia/Shanghai
|
||||
default: Asia/Shanghai
|
||||
- name: target_timezone
|
||||
type: string
|
||||
@ -40,5 +40,5 @@ parameters:
|
||||
zh_Hans: 目标时区
|
||||
human_description:
|
||||
en_US: Target Timezone, such as Asia/Tokyo
|
||||
zh_Hans: 目标时区, 比如Asia/Tokyo
|
||||
zh_Hans: 目标时区,比如 Asia/Tokyo
|
||||
default: Asia/Tokyo
|
||||
|
@ -86,6 +86,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
|
||||
"position": position,
|
||||
"dataset_id": item.metadata.get("dataset_id"),
|
||||
"dataset_name": item.metadata.get("dataset_name"),
|
||||
"document_id": item.metadata.get("document_id") or item.metadata.get("title"),
|
||||
"document_name": item.metadata.get("title"),
|
||||
"data_source_type": "external",
|
||||
"retriever_from": self.retriever_from,
|
||||
|
@ -1,13 +0,0 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from core.workflow.graph_engine.entities.graph import GraphParallel
|
||||
|
||||
|
||||
class NextGraphNode(BaseModel):
|
||||
node_id: str
|
||||
"""next node id"""
|
||||
|
||||
parallel: Optional[GraphParallel] = None
|
||||
"""parallel"""
|
@ -259,6 +259,7 @@ class KnowledgeRetrievalNode(LLMNode):
|
||||
"_source": "knowledge",
|
||||
"dataset_id": item.metadata.get("dataset_id"),
|
||||
"dataset_name": item.metadata.get("dataset_name"),
|
||||
"document_id": item.metadata.get("document_id") or item.metadata.get("title"),
|
||||
"document_name": item.metadata.get("title"),
|
||||
"data_source_type": "external",
|
||||
"retriever_from": "workflow",
|
||||
@ -596,7 +597,6 @@ class KnowledgeRetrievalNode(LLMNode):
|
||||
def _get_prompt_template(self, node_data: KnowledgeRetrievalNodeData, metadata_fields: list, query: str):
|
||||
model_mode = ModelMode.value_of(node_data.metadata_model_config.mode) # type: ignore
|
||||
input_text = query
|
||||
memory_str = ""
|
||||
|
||||
prompt_messages: list[LLMNodeChatModelMessage] = []
|
||||
if model_mode == ModelMode.CHAT:
|
||||
|
@ -8,7 +8,6 @@ from models.provider import Provider
|
||||
|
||||
@message_was_created.connect
|
||||
def handle(sender, **kwargs):
|
||||
message = sender
|
||||
application_generate_entity = kwargs.get("application_generate_entity")
|
||||
|
||||
if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity):
|
||||
|
api/extensions/ext_otel.py (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
import atexit
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
from typing import Union
|
||||
|
||||
from flask_login import user_loaded_from_request, user_logged_in # type: ignore
|
||||
from opentelemetry import trace
|
||||
from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
|
||||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
|
||||
from opentelemetry.instrumentation.flask import FlaskInstrumentor
|
||||
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
|
||||
from opentelemetry.metrics import set_meter_provider
|
||||
from opentelemetry.propagate import set_global_textmap
|
||||
from opentelemetry.propagators.b3 import B3Format
|
||||
from opentelemetry.propagators.composite import CompositePropagator
|
||||
from opentelemetry.sdk.metrics import MeterProvider
|
||||
from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
from opentelemetry.sdk.trace import TracerProvider
|
||||
from opentelemetry.sdk.trace.export import (
|
||||
BatchSpanProcessor,
|
||||
ConsoleSpanExporter,
|
||||
)
|
||||
from opentelemetry.sdk.trace.sampling import ParentBasedTraceIdRatio
|
||||
from opentelemetry.semconv.resource import ResourceAttributes
|
||||
from opentelemetry.trace import Span, get_current_span, set_tracer_provider
|
||||
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
|
||||
from opentelemetry.trace.status import StatusCode
|
||||
|
||||
from configs import dify_config
|
||||
from dify_app import DifyApp
|
||||
|
||||
|
||||
@user_logged_in.connect
|
||||
@user_loaded_from_request.connect
|
||||
def on_user_loaded(_sender, user):
|
||||
if user:
|
||||
current_span = get_current_span()
|
||||
if current_span:
|
||||
current_span.set_attribute("service.tenant.id", user.current_tenant_id)
|
||||
current_span.set_attribute("service.user.id", user.id)
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
if dify_config.ENABLE_OTEL:
|
||||
setup_context_propagation()
|
||||
# Initialize OpenTelemetry
|
||||
# Follow Semantic Conventions 1.32.0 to define resource attributes
|
||||
resource = Resource(
|
||||
attributes={
|
||||
ResourceAttributes.SERVICE_NAME: dify_config.APPLICATION_NAME,
|
||||
ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
|
||||
ResourceAttributes.PROCESS_PID: os.getpid(),
|
||||
ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
|
||||
ResourceAttributes.HOST_NAME: socket.gethostname(),
|
||||
ResourceAttributes.HOST_ARCH: platform.machine(),
|
||||
"custom.deployment.git_commit": dify_config.COMMIT_SHA,
|
||||
ResourceAttributes.HOST_ID: platform.node(),
|
||||
ResourceAttributes.OS_TYPE: platform.system().lower(),
|
||||
ResourceAttributes.OS_DESCRIPTION: platform.platform(),
|
||||
ResourceAttributes.OS_VERSION: platform.version(),
|
||||
}
|
||||
)
|
||||
sampler = ParentBasedTraceIdRatio(dify_config.OTEL_SAMPLING_RATE)
|
||||
provider = TracerProvider(resource=resource, sampler=sampler)
|
||||
set_tracer_provider(provider)
|
||||
exporter: Union[OTLPSpanExporter, ConsoleSpanExporter]
|
||||
metric_exporter: Union[OTLPMetricExporter, ConsoleMetricExporter]
|
||||
if dify_config.OTEL_EXPORTER_TYPE == "otlp":
|
||||
exporter = OTLPSpanExporter(
|
||||
endpoint=dify_config.OTLP_BASE_ENDPOINT + "/v1/traces",
|
||||
headers={"Authorization": f"Bearer {dify_config.OTLP_API_KEY}"},
|
||||
)
|
||||
metric_exporter = OTLPMetricExporter(
|
||||
endpoint=dify_config.OTLP_BASE_ENDPOINT + "/v1/metrics",
|
||||
headers={"Authorization": f"Bearer {dify_config.OTLP_API_KEY}"},
|
||||
)
|
||||
else:
|
||||
# Fallback to console exporter
|
||||
exporter = ConsoleSpanExporter()
|
||||
metric_exporter = ConsoleMetricExporter()
|
||||
|
||||
provider.add_span_processor(
|
||||
BatchSpanProcessor(
|
||||
exporter,
|
||||
max_queue_size=dify_config.OTEL_MAX_QUEUE_SIZE,
|
||||
schedule_delay_millis=dify_config.OTEL_BATCH_EXPORT_SCHEDULE_DELAY,
|
||||
max_export_batch_size=dify_config.OTEL_MAX_EXPORT_BATCH_SIZE,
|
||||
export_timeout_millis=dify_config.OTEL_BATCH_EXPORT_TIMEOUT,
|
||||
)
|
||||
)
|
||||
reader = PeriodicExportingMetricReader(
|
||||
metric_exporter,
|
||||
export_interval_millis=dify_config.OTEL_METRIC_EXPORT_INTERVAL,
|
||||
export_timeout_millis=dify_config.OTEL_METRIC_EXPORT_TIMEOUT,
|
||||
)
|
||||
set_meter_provider(MeterProvider(resource=resource, metric_readers=[reader]))
|
||||
|
||||
def response_hook(span: Span, status: str, response_headers: list):
|
||||
if span and span.is_recording():
|
||||
if status.startswith("2"):
|
||||
span.set_status(StatusCode.OK)
|
||||
else:
|
||||
span.set_status(StatusCode.ERROR, status)
|
||||
|
||||
instrumentor = FlaskInstrumentor()
|
||||
instrumentor.instrument_app(app, response_hook=response_hook)
|
||||
with app.app_context():
|
||||
engines = list(app.extensions["sqlalchemy"].engines.values())
|
||||
SQLAlchemyInstrumentor().instrument(enable_commenter=True, engines=engines)
|
||||
atexit.register(shutdown_tracer)
|
||||
|
||||
|
||||
def setup_context_propagation():
|
||||
# Configure propagators
|
||||
set_global_textmap(
|
||||
CompositePropagator(
|
||||
[
|
||||
TraceContextTextMapPropagator(), # W3C trace context
|
||||
B3Format(), # B3 propagation (used by many systems)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def shutdown_tracer():
|
||||
provider = trace.get_tracer_provider()
|
||||
if hasattr(provider, "force_flush"):
|
||||
provider.force_flush()
|
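Note (not part of the diff): a short usage sketch, using only the standard OpenTelemetry API, of how application code could emit a custom span once init_app above has installed the tracer provider. The attribute name mirrors the one set in on_user_loaded; the span name and attribute value are hypothetical.

from opentelemetry import trace

tracer = trace.get_tracer(__name__)


def do_work():
    # Runs through the sampled, batched pipeline configured by init_app;
    # Flask and SQLAlchemy spans are created automatically by the instrumentors.
    with tracer.start_as_current_span("dify.custom.work") as span:
        span.set_attribute("service.tenant.id", "example-tenant-id")  # hypothetical value
        ...  # actual work here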
@ -23,6 +23,7 @@ from sqlalchemy import Float, Index, PrimaryKeyConstraint, func, text
|
||||
from sqlalchemy.orm import Mapped, Session, mapped_column
|
||||
|
||||
from configs import dify_config
|
||||
from constants import DEFAULT_FILE_NUMBER_LIMITS
|
||||
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
|
||||
from core.file import helpers as file_helpers
|
||||
from core.file.tool_file_parser import ToolFileParser
|
||||
@ -442,7 +443,7 @@ class AppModelConfig(Base):
|
||||
else {
|
||||
"image": {
|
||||
"enabled": False,
|
||||
"number_limits": 3,
|
||||
"number_limits": DEFAULT_FILE_NUMBER_LIMITS,
|
||||
"detail": "high",
|
||||
"transfer_methods": ["remote_url", "local_file"],
|
||||
}
|
||||
|
@ -15,7 +15,7 @@ from sqlalchemy import Index, PrimaryKeyConstraint, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
import contexts
|
||||
from constants import HIDDEN_VALUE
|
||||
from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE
|
||||
from core.helper import encrypter
|
||||
from core.variables import SecretVariable, Variable
|
||||
from factories import variable_factory
|
||||
@ -186,7 +186,7 @@ class Workflow(Base):
|
||||
features = json.loads(self._features)
|
||||
if features.get("file_upload", {}).get("image", {}).get("enabled", False):
|
||||
image_enabled = True
|
||||
image_number_limits = int(features["file_upload"]["image"].get("number_limits", 1))
|
||||
image_number_limits = int(features["file_upload"]["image"].get("number_limits", DEFAULT_FILE_NUMBER_LIMITS))
|
||||
image_transfer_methods = features["file_upload"]["image"].get(
|
||||
"transfer_methods", ["remote_url", "local_file"]
|
||||
)
|
||||
|
api/poetry.lock (generated, 2643 lines changed): file diff suppressed because it is too large.
@@ -50,6 +50,20 @@ numpy = "~1.26.4"
oci = "~2.135.1"
openai = "~1.61.0"
openpyxl = "~3.1.5"
opentelemetry-api = "1.27.0"
opentelemetry-distro = "0.48b0"
opentelemetry-exporter-otlp = "1.27.0"
opentelemetry-exporter-otlp-proto-common = "1.27.0"
opentelemetry-exporter-otlp-proto-grpc = "1.27.0"
opentelemetry-exporter-otlp-proto-http = "1.27.0"
opentelemetry-instrumentation = "0.48b0"
opentelemetry-instrumentation-flask = "0.48b0"
opentelemetry-instrumentation-sqlalchemy = "0.48b0"
opentelemetry-propagator-b3 = "1.27.0"
opentelemetry-proto = "1.27.0" # 1.28.0 depends on protobuf (>=5.0,<6.0), conflict with googleapis-common-protos (1.63.0)
opentelemetry-sdk = "1.27.0"
opentelemetry-semantic-conventions = "0.48b0"
opentelemetry-util-http = "0.48b0"
opik = "~1.3.4"
pandas = { version = "~2.2.2", extras = [
    "performance",
@ -571,10 +571,6 @@ class AccountService:
|
||||
return False
|
||||
|
||||
|
||||
def _get_login_cache_key(*, account_id: str, token: str):
|
||||
return f"account_login:{account_id}:{token}"
|
||||
|
||||
|
||||
class TenantService:
|
||||
@staticmethod
|
||||
def create_tenant(name: str, is_setup: Optional[bool] = False, is_from_dashboard: Optional[bool] = False) -> Tenant:
|
||||
|
@ -515,5 +515,10 @@ class WorkflowService:
|
||||
# Cannot delete a workflow that's currently in use by an app
|
||||
raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.name}'")
|
||||
|
||||
# Check if this workflow is published as a tool
|
||||
if workflow.tool_published:
|
||||
# Cannot delete a workflow that's published as a tool
|
||||
raise WorkflowInUseError("Cannot delete workflow that is published as a tool")
|
||||
|
||||
session.delete(workflow)
|
||||
return True
|
||||
|
api/test_workflow_deletion.py (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from models.model import App
|
||||
from models.workflow import Workflow
|
||||
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def workflow_setup():
|
||||
workflow_service = WorkflowService()
|
||||
session = MagicMock(spec=Session)
|
||||
tenant_id = "test-tenant-id"
|
||||
workflow_id = "test-workflow-id"
|
||||
|
||||
# Mock workflow
|
||||
workflow = MagicMock(spec=Workflow)
|
||||
workflow.id = workflow_id
|
||||
workflow.tenant_id = tenant_id
|
||||
workflow.version = "1.0" # Not a draft
|
||||
workflow.tool_published = False # Not published as a tool by default
|
||||
|
||||
# Mock app
|
||||
app = MagicMock(spec=App)
|
||||
app.id = "test-app-id"
|
||||
app.name = "Test App"
|
||||
app.workflow_id = None # Not used by an app by default
|
||||
|
||||
return {
|
||||
"workflow_service": workflow_service,
|
||||
"session": session,
|
||||
"tenant_id": tenant_id,
|
||||
"workflow_id": workflow_id,
|
||||
"workflow": workflow,
|
||||
"app": app,
|
||||
}
|
||||
|
||||
|
||||
def test_delete_workflow_success(workflow_setup):
|
||||
# Setup mocks
|
||||
workflow_setup["session"].scalar = MagicMock(
|
||||
side_effect=[workflow_setup["workflow"], None]
|
||||
) # Return workflow first, then None for app
|
||||
|
||||
# Call the method
|
||||
result = workflow_setup["workflow_service"].delete_workflow(
|
||||
session=workflow_setup["session"],
|
||||
workflow_id=workflow_setup["workflow_id"],
|
||||
tenant_id=workflow_setup["tenant_id"],
|
||||
)
|
||||
|
||||
# Verify
|
||||
assert result is True
|
||||
workflow_setup["session"].delete.assert_called_once_with(workflow_setup["workflow"])
|
||||
|
||||
|
||||
def test_delete_workflow_draft_error(workflow_setup):
|
||||
# Setup mocks
|
||||
workflow_setup["workflow"].version = "draft"
|
||||
workflow_setup["session"].scalar = MagicMock(return_value=workflow_setup["workflow"])
|
||||
|
||||
# Call the method and verify exception
|
||||
with pytest.raises(DraftWorkflowDeletionError):
|
||||
workflow_setup["workflow_service"].delete_workflow(
|
||||
session=workflow_setup["session"],
|
||||
workflow_id=workflow_setup["workflow_id"],
|
||||
tenant_id=workflow_setup["tenant_id"],
|
||||
)
|
||||
|
||||
# Verify
|
||||
workflow_setup["session"].delete.assert_not_called()
|
||||
|
||||
|
||||
def test_delete_workflow_in_use_by_app_error(workflow_setup):
|
||||
# Setup mocks
|
||||
workflow_setup["app"].workflow_id = workflow_setup["workflow_id"]
|
||||
workflow_setup["session"].scalar = MagicMock(
|
||||
side_effect=[workflow_setup["workflow"], workflow_setup["app"]]
|
||||
) # Return workflow first, then app
|
||||
|
||||
# Call the method and verify exception
|
||||
with pytest.raises(WorkflowInUseError) as excinfo:
|
||||
workflow_setup["workflow_service"].delete_workflow(
|
||||
session=workflow_setup["session"],
|
||||
workflow_id=workflow_setup["workflow_id"],
|
||||
tenant_id=workflow_setup["tenant_id"],
|
||||
)
|
||||
|
||||
# Verify error message contains app name
|
||||
assert "Cannot delete workflow that is currently in use by app" in str(excinfo.value)
|
||||
|
||||
# Verify
|
||||
workflow_setup["session"].delete.assert_not_called()
|
||||
|
||||
|
||||
def test_delete_workflow_published_as_tool_error(workflow_setup):
|
||||
# Setup mocks
|
||||
workflow_setup["workflow"].tool_published = True
|
||||
workflow_setup["session"].scalar = MagicMock(
|
||||
side_effect=[workflow_setup["workflow"], None]
|
||||
) # Return workflow first, then None for app
|
||||
|
||||
# Call the method and verify exception
|
||||
with pytest.raises(WorkflowInUseError) as excinfo:
|
||||
workflow_setup["workflow_service"].delete_workflow(
|
||||
session=workflow_setup["session"],
|
||||
workflow_id=workflow_setup["workflow_id"],
|
||||
tenant_id=workflow_setup["tenant_id"],
|
||||
)
|
||||
|
||||
# Verify error message
|
||||
assert "Cannot delete workflow that is published as a tool" in str(excinfo.value)
|
||||
|
||||
# Verify
|
||||
workflow_setup["session"].delete.assert_not_called()
|
@ -1,5 +1,7 @@
|
||||
import time
|
||||
|
||||
import psycopg2 # type: ignore
|
||||
|
||||
from core.rag.datasource.vdb.opengauss.opengauss import OpenGauss, OpenGaussConfig
|
||||
from tests.integration_tests.vdb.test_vector_store import (
|
||||
AbstractVectorTest,
|
||||
|
@ -45,7 +45,7 @@ def main():
|
||||
if is_tiflash_ready:
|
||||
break
|
||||
else:
|
||||
print(f"Attempt {attempt + 1} failed,retry in {retry_interval_seconds} seconds...")
|
||||
print(f"Attempt {attempt + 1} failed, retry in {retry_interval_seconds} seconds...")
|
||||
time.sleep(retry_interval_seconds)
|
||||
|
||||
if is_tiflash_ready:
|
||||
|
@ -425,8 +425,8 @@ def test_multi_colons_parse(setup_http_mock):
|
||||
result = node._run()
|
||||
assert result.process_data is not None
|
||||
assert result.outputs is not None
|
||||
resp = result.outputs
|
||||
|
||||
assert urlencode({"Redirect": "http://example2.com"}) in result.process_data.get("request", "")
|
||||
assert 'form-data; name="Redirect"\r\n\r\nhttp://example6.com' in result.process_data.get("request", "")
|
||||
# resp = result.outputs
|
||||
# assert "http://example3.com" == resp.get("headers", {}).get("referer")
|
||||
|
@ -46,13 +46,6 @@ def test_retry_default_value_partial_success():
|
||||
|
||||
def test_retry_failed():
|
||||
"""retry failed with success status"""
|
||||
error_code = """
|
||||
def main() -> dict:
|
||||
return {
|
||||
"result": 1 / 0,
|
||||
}
|
||||
"""
|
||||
|
||||
graph_config = {
|
||||
"edges": DEFAULT_VALUE_EDGE,
|
||||
"nodes": [
|
||||
|
@@ -1040,3 +1040,18 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
PLUGIN_TENCENT_COS_SECRET_KEY=
PLUGIN_TENCENT_COS_SECRET_ID=
PLUGIN_TENCENT_COS_REGION=

# ------------------------------
# OTLP Collector Configuration
# ------------------------------
ENABLE_OTEL=false
OTLP_BASE_ENDPOINT=http://localhost:4318
OTLP_API_KEY=
OTEL_EXPORTER_TYPE=otlp
OTEL_SAMPLING_RATE=0.1
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
OTEL_MAX_QUEUE_SIZE=2048
OTEL_MAX_EXPORT_BATCH_SIZE=512
OTEL_METRIC_EXPORT_INTERVAL=60000
OTEL_BATCH_EXPORT_TIMEOUT=10000
OTEL_METRIC_EXPORT_TIMEOUT=30000
@@ -28,6 +28,9 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
   - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
4. **SSL Certificate Setup**:
   - Refer `docker/certbot/README.md` to set up SSL certificates using Certbot.
5. **OpenTelemetry Collector Setup**:
   - Change `ENABLE_OTEL` to `true` in `.env`.
   - Configure `OTLP_BASE_ENDPOINT` properly.

### How to Deploy Middleware for Developing Dify

@@ -89,7 +92,11 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w
8. **CORS Configuration**:
   - `WEB_API_CORS_ALLOW_ORIGINS`, `CONSOLE_CORS_ALLOW_ORIGINS`: Settings for cross-origin resource sharing.

9. **Other Service-Specific Environment Variables**:
9. **OpenTelemetry Configuration**:
   - `ENABLE_OTEL`: Enable OpenTelemetry collector in api.
   - `OTLP_BASE_ENDPOINT`: Endpoint for your OTLP exporter.

10. **Other Service-Specific Environment Variables**:
   - Each service like `nginx`, `redis`, `db`, and vector databases have specific environment variables that are directly referenced in the `docker-compose.yaml`.

### Additional Information
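Note (not part of the diff): a minimal sketch of checking the effective OpenTelemetry settings from inside the api container (for example in a `flask shell` session), assuming the DifyConfig composition added earlier in this commit; the comments restate defaults from docker/.env.example.

from configs import dify_config

# Values arrive from docker/.env through the x-shared-env block in docker-compose.yaml.
print(dify_config.ENABLE_OTEL)         # False unless ENABLE_OTEL=true is set
print(dify_config.OTLP_BASE_ENDPOINT)  # exporter target, default http://localhost:4318
print(dify_config.OTEL_EXPORTER_TYPE)  # "otlp" uses the OTLP exporters; other values fall back to the console exporter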
@@ -90,10 +90,10 @@ services:
    volumes:
      - ./volumes/db/data:/var/lib/postgresql/data
    healthcheck:
      test: [ 'CMD', 'pg_isready' ]
      test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ]
      interval: 1s
      timeout: 3s
      retries: 30
      retries: 60

  # The redis cache.
  redis:
@@ -175,7 +175,8 @@ services:
    volumes:
      - ./volumes/plugin_daemon:/app/storage
    depends_on:
      - db
      db:
        condition: service_healthy

  # ssrf_proxy server
  # for more information, please refer to
@@ -456,6 +456,17 @@ x-shared-env: &shared-api-worker-env
  PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
  PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
  PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
  ENABLE_OTEL: ${ENABLE_OTEL:-false}
  OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318}
  OTLP_API_KEY: ${OTLP_API_KEY:-}
  OTEL_EXPORTER_TYPE: ${OTEL_EXPORTER_TYPE:-otlp}
  OTEL_SAMPLING_RATE: ${OTEL_SAMPLING_RATE:-0.1}
  OTEL_BATCH_EXPORT_SCHEDULE_DELAY: ${OTEL_BATCH_EXPORT_SCHEDULE_DELAY:-5000}
  OTEL_MAX_QUEUE_SIZE: ${OTEL_MAX_QUEUE_SIZE:-2048}
  OTEL_MAX_EXPORT_BATCH_SIZE: ${OTEL_MAX_EXPORT_BATCH_SIZE:-512}
  OTEL_METRIC_EXPORT_INTERVAL: ${OTEL_METRIC_EXPORT_INTERVAL:-60000}
  OTEL_BATCH_EXPORT_TIMEOUT: ${OTEL_BATCH_EXPORT_TIMEOUT:-10000}
  OTEL_METRIC_EXPORT_TIMEOUT: ${OTEL_METRIC_EXPORT_TIMEOUT:-30000}

services:
  # API service
@@ -548,10 +559,10 @@ services:
    volumes:
      - ./volumes/db/data:/var/lib/postgresql/data
    healthcheck:
      test: [ 'CMD', 'pg_isready' ]
      test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ]
      interval: 1s
      timeout: 3s
      retries: 30
      retries: 60

  # The redis cache.
  redis:
@@ -633,7 +644,8 @@ services:
    volumes:
      - ./volumes/plugin_daemon:/app/storage
    depends_on:
      - db
      db:
        condition: service_healthy

  # ssrf_proxy server
  # for more information, please refer to
web/.husky/pre-commit (mode changed: Executable file → Normal file, 0 lines changed)
@ -170,7 +170,7 @@ const DatasetCard = ({
|
||||
{dataset.description}
|
||||
</div>
|
||||
<div className={cn(
|
||||
'mt-1 h-[42px] shrink-0 items-center pb-[6px] pl-[14px] pr-[6px] pt-1',
|
||||
'mt-4 h-[42px] shrink-0 items-center pb-[6px] pl-[14px] pr-[6px] pt-1',
|
||||
tags.length ? 'flex' : '!hidden group-hover:!flex',
|
||||
)}>
|
||||
<div className={cn('flex w-0 grow items-center gap-1', !dataset.embedding_available && 'opacity-50 hover:opacity-100')} onClick={(e) => {
|
||||
|
@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { RiListUnordered } from '@remixicon/react'
|
||||
@ -67,6 +67,17 @@ const Doc = ({ apiBaseUrl }: DocProps) => {
|
||||
}
|
||||
}
|
||||
|
||||
const Template = useMemo(() => {
|
||||
switch (locale) {
|
||||
case LanguagesSupported[1]:
|
||||
return <TemplateZh apiBaseUrl={apiBaseUrl} />
|
||||
case LanguagesSupported[7]:
|
||||
return <TemplateJa apiBaseUrl={apiBaseUrl} />
|
||||
default:
|
||||
return <TemplateEn apiBaseUrl={apiBaseUrl} />
|
||||
}
|
||||
}, [apiBaseUrl, locale])
|
||||
|
||||
return (
|
||||
<div className="flex">
|
||||
<div className={`fixed right-20 top-32 z-10 transition-all ${isTocExpanded ? 'w-64' : 'w-10'}`}>
|
||||
@ -107,16 +118,7 @@ const Doc = ({ apiBaseUrl }: DocProps) => {
|
||||
)}
|
||||
</div>
|
||||
<article className='prose-xl prose mx-1 rounded-t-xl bg-white px-4 pt-16 sm:mx-12'>
|
||||
{(() => {
|
||||
switch (locale) {
|
||||
case LanguagesSupported[1]:
|
||||
return <TemplateZh apiBaseUrl={apiBaseUrl} />
|
||||
case LanguagesSupported[7]:
|
||||
return <TemplateJa apiBaseUrl={apiBaseUrl} />
|
||||
default:
|
||||
return <TemplateEn apiBaseUrl={apiBaseUrl} />
|
||||
}
|
||||
})()}
|
||||
{Template}
|
||||
</article>
|
||||
</div>
|
||||
)
|
||||
|
@ -1,3 +1,8 @@
|
||||
{/**
|
||||
* @typedef Props
|
||||
* @property {string} apiBaseUrl
|
||||
*/}
|
||||
|
||||
import { CodeGroup } from '@/app/components/develop/code.tsx'
|
||||
import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstruction, Paragraph } from '@/app/components/develop/md.tsx'
|
||||
|
||||
|
@ -1,3 +1,8 @@
|
||||
{/**
|
||||
* @typedef Props
|
||||
* @property {string} apiBaseUrl
|
||||
*/}
|
||||
|
||||
import { CodeGroup } from '@/app/components/develop/code.tsx'
|
||||
import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstruction, Paragraph } from '@/app/components/develop/md.tsx'
|
||||
|
||||
|
@ -1,3 +1,8 @@
|
||||
{/**
|
||||
* @typedef Props
|
||||
* @property {string} apiBaseUrl
|
||||
*/}
|
||||
|
||||
import { CodeGroup } from '@/app/components/develop/code.tsx'
|
||||
import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstruction, Paragraph } from '@/app/components/develop/md.tsx'
|
||||
|
||||
|
@ -7,7 +7,6 @@ import {
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import {
|
||||
RiClipboardLine,
|
||||
RiEditLine,
|
||||
RiResetLeftLine,
|
||||
RiThumbDownLine,
|
||||
RiThumbUpLine,
|
||||
@ -16,6 +15,7 @@ import type { ChatItem } from '../../types'
|
||||
import { useChatContext } from '../context'
|
||||
import copy from 'copy-to-clipboard'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import AnnotationCtrlButton from '@/app/components/base/features/new-feature-panel/annotation-reply/annotation-ctrl-button'
|
||||
import EditReplyModal from '@/app/components/app/annotation/edit-annotation-modal'
|
||||
import Log from '@/app/components/base/chat/chat/log'
|
||||
import ActionButton, { ActionButtonState } from '@/app/components/base/action-button'
|
||||
@ -134,9 +134,15 @@ const Operation: FC<OperationProps> = ({
|
||||
</ActionButton>
|
||||
)}
|
||||
{(config?.supportAnnotation && config.annotation_reply?.enabled) && (
|
||||
<ActionButton onClick={() => setIsShowReplyModal(true)}>
|
||||
<RiEditLine className='h-4 w-4' />
|
||||
</ActionButton>
|
||||
<AnnotationCtrlButton
|
||||
appId={config?.appId || ''}
|
||||
messageId={id}
|
||||
cached={!!annotation?.id}
|
||||
query={question}
|
||||
answer={content}
|
||||
onAdded={(id, authorName) => onAnnotationAdded?.(id, authorName, question, content, index)}
|
||||
onEdit={() => setIsShowReplyModal(true)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
@ -44,17 +44,17 @@ export default function Drawer({
|
||||
unmount={unmount}
|
||||
open={isOpen}
|
||||
onClose={() => !clickOutsideNotOpen && onClose()}
|
||||
className="fixed inset-0 z-30 overflow-y-auto"
|
||||
className="fixed inset-0 z-[80] overflow-y-auto"
|
||||
>
|
||||
<div className={cn('flex h-screen w-screen justify-end', positionCenter && '!justify-center')}>
|
||||
{/* mask */}
|
||||
<DialogBackdrop
|
||||
className={cn('fixed inset-0 z-40', mask && 'bg-black bg-opacity-30')}
|
||||
className={cn('fixed inset-0 z-[90]', mask && 'bg-black bg-opacity-30')}
|
||||
onClick={() => {
|
||||
!clickOutsideNotOpen && onClose()
|
||||
}}
|
||||
/>
|
||||
<div className={cn('relative z-50 flex w-full max-w-sm flex-col justify-between overflow-hidden bg-components-panel-bg p-6 text-left align-middle shadow-xl', panelClassname)}>
|
||||
<div className={cn('relative z-[100] flex w-full max-w-sm flex-col justify-between overflow-hidden bg-components-panel-bg p-6 text-left align-middle shadow-xl', panelClassname)}>
|
||||
<>
|
||||
<div className='flex justify-between'>
|
||||
{title && <DialogTitle
|
||||
|
@ -0,0 +1,79 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import {
|
||||
RiEditLine,
|
||||
RiFileEditLine,
|
||||
} from '@remixicon/react'
|
||||
import ActionButton from '@/app/components/base/action-button'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { addAnnotation } from '@/service/annotation'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { useModalContext } from '@/context/modal-context'
|
||||
|
||||
type Props = {
|
||||
appId: string
|
||||
messageId?: string
|
||||
cached: boolean
|
||||
query: string
|
||||
answer: string
|
||||
onAdded: (annotationId: string, authorName: string) => void
|
||||
onEdit: () => void
|
||||
}
|
||||
|
||||
const AnnotationCtrlButton: FC<Props> = ({
|
||||
cached,
|
||||
query,
|
||||
answer,
|
||||
appId,
|
||||
messageId,
|
||||
onAdded,
|
||||
onEdit,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const { plan, enableBilling } = useProviderContext()
|
||||
const isAnnotationFull = (enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse)
|
||||
const { setShowAnnotationFullModal } = useModalContext()
|
||||
const handleAdd = async () => {
|
||||
if (isAnnotationFull) {
|
||||
setShowAnnotationFullModal()
|
||||
return
|
||||
}
|
||||
const res: any = await addAnnotation(appId, {
|
||||
message_id: messageId,
|
||||
question: query,
|
||||
answer,
|
||||
})
|
||||
Toast.notify({
|
||||
message: t('common.api.actionSuccess') as string,
|
||||
type: 'success',
|
||||
})
|
||||
onAdded(res.id, res.account?.name)
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{cached && (
|
||||
<Tooltip
|
||||
popupContent={t('appDebug.feature.annotation.edit')}
|
||||
>
|
||||
<ActionButton onClick={onEdit}>
|
||||
<RiEditLine className='h-4 w-4' />
|
||||
</ActionButton>
|
||||
</Tooltip>
|
||||
)}
|
||||
{!cached && answer && (
|
||||
<Tooltip
|
||||
popupContent={t('appDebug.feature.annotation.add')}
|
||||
>
|
||||
<ActionButton onClick={handleAdd}>
|
||||
<RiFileEditLine className='h-4 w-4' />
|
||||
</ActionButton>
|
||||
</Tooltip>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
export default React.memo(AnnotationCtrlButton)
|
web/app/components/base/file-uploader/utils.spec.ts (new file, 614 lines)
@@ -0,0 +1,614 @@
|
||||
import mime from 'mime'
|
||||
import { upload } from '@/service/base'
|
||||
import {
|
||||
downloadFile,
|
||||
fileIsUploaded,
|
||||
fileUpload,
|
||||
getFileAppearanceType,
|
||||
getFileExtension,
|
||||
getFileNameFromUrl,
|
||||
getFilesInLogs,
|
||||
getProcessedFiles,
|
||||
getProcessedFilesFromResponse,
|
||||
getSupportFileExtensionList,
|
||||
getSupportFileType,
|
||||
isAllowedFileExtension,
|
||||
} from './utils'
|
||||
import { FileAppearanceTypeEnum } from './types'
|
||||
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { TransferMethod } from '@/types/app'
|
||||
import { FILE_EXTS } from '../prompt-editor/constants'
|
||||
|
||||
jest.mock('mime', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
getExtension: jest.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
jest.mock('@/service/base', () => ({
|
||||
upload: jest.fn(),
|
||||
}))
|
||||
|
||||
describe('file-uploader utils', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('fileUpload', () => {
|
||||
it('should handle successful file upload', async () => {
|
||||
const mockFile = new File(['test'], 'test.txt')
|
||||
const mockCallbacks = {
|
||||
onProgressCallback: jest.fn(),
|
||||
onSuccessCallback: jest.fn(),
|
||||
onErrorCallback: jest.fn(),
|
||||
}
|
||||
|
||||
jest.mocked(upload).mockResolvedValue({ id: '123' })
|
||||
|
||||
await fileUpload({
|
||||
file: mockFile,
|
||||
...mockCallbacks,
|
||||
})
|
||||
|
||||
expect(upload).toHaveBeenCalled()
|
||||
expect(mockCallbacks.onSuccessCallback).toHaveBeenCalledWith({ id: '123' })
|
||||
})
|
||||
})
|
||||
|
||||
describe('getFileExtension', () => {
|
||||
it('should get extension from mimetype', () => {
|
||||
jest.mocked(mime.getExtension).mockReturnValue('pdf')
|
||||
expect(getFileExtension('file', 'application/pdf')).toBe('pdf')
|
||||
})
|
||||
|
||||
it('should get extension from filename if mimetype fails', () => {
|
||||
jest.mocked(mime.getExtension).mockReturnValue(null)
|
||||
expect(getFileExtension('file.txt', '')).toBe('txt')
|
||||
expect(getFileExtension('file.txt.docx', '')).toBe('docx')
|
||||
expect(getFileExtension('file', '')).toBe('')
|
||||
})
|
||||
|
||||
it('should return empty string for remote files', () => {
|
||||
expect(getFileExtension('file.txt', '', true)).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('getFileAppearanceType', () => {
|
||||
it('should identify gif files', () => {
|
||||
jest.mocked(mime.getExtension).mockReturnValue('gif')
|
||||
expect(getFileAppearanceType('image.gif', 'image/gif'))
|
||||
.toBe(FileAppearanceTypeEnum.gif)
|
||||
})
|
||||
|
||||
it('should identify image files', () => {
|
||||
jest.mocked(mime.getExtension).mockReturnValue('jpg')
|
||||
expect(getFileAppearanceType('image.jpg', 'image/jpeg'))
|
||||
.toBe(FileAppearanceTypeEnum.image)
|
||||
|
||||
jest.mocked(mime.getExtension).mockReturnValue('jpeg')
|
||||
expect(getFileAppearanceType('image.jpeg', 'image/jpeg'))
|
||||
.toBe(FileAppearanceTypeEnum.image)
|
||||
|
||||
jest.mocked(mime.getExtension).mockReturnValue('png')
|
||||
expect(getFileAppearanceType('image.png', 'image/png'))
|
||||
.toBe(FileAppearanceTypeEnum.image)
|
||||
|
||||
jest.mocked(mime.getExtension).mockReturnValue('webp')
|
||||
expect(getFileAppearanceType('image.webp', 'image/webp'))
|
||||
.toBe(FileAppearanceTypeEnum.image)
|
||||
|
||||
jest.mocked(mime.getExtension).mockReturnValue('svg')
|
||||
expect(getFileAppearanceType('image.svg', 'image/svg+xml'))
|
||||
.toBe(FileAppearanceTypeEnum.image)
|
||||
})
|
||||
|
||||
it('should identify video files', () => {
|
||||
jest.mocked(mime.getExtension).mockReturnValue('mp4')
expect(getFileAppearanceType('video.mp4', 'video/mp4'))
.toBe(FileAppearanceTypeEnum.video)

jest.mocked(mime.getExtension).mockReturnValue('mov')
expect(getFileAppearanceType('video.mov', 'video/quicktime'))
.toBe(FileAppearanceTypeEnum.video)

jest.mocked(mime.getExtension).mockReturnValue('mpeg')
expect(getFileAppearanceType('video.mpeg', 'video/mpeg'))
.toBe(FileAppearanceTypeEnum.video)

jest.mocked(mime.getExtension).mockReturnValue('webm')
expect(getFileAppearanceType('video.web', 'video/webm'))
.toBe(FileAppearanceTypeEnum.video)
})

it('should identify audio files', () => {
jest.mocked(mime.getExtension).mockReturnValue('mp3')
expect(getFileAppearanceType('audio.mp3', 'audio/mpeg'))
.toBe(FileAppearanceTypeEnum.audio)

jest.mocked(mime.getExtension).mockReturnValue('m4a')
expect(getFileAppearanceType('audio.m4a', 'audio/mp4'))
.toBe(FileAppearanceTypeEnum.audio)

jest.mocked(mime.getExtension).mockReturnValue('wav')
expect(getFileAppearanceType('audio.wav', 'audio/vnd.wav'))
.toBe(FileAppearanceTypeEnum.audio)

jest.mocked(mime.getExtension).mockReturnValue('amr')
expect(getFileAppearanceType('audio.amr', 'audio/AMR'))
.toBe(FileAppearanceTypeEnum.audio)

jest.mocked(mime.getExtension).mockReturnValue('mpga')
expect(getFileAppearanceType('audio.mpga', 'audio/mpeg'))
.toBe(FileAppearanceTypeEnum.audio)
})

it('should identify code files', () => {
jest.mocked(mime.getExtension).mockReturnValue('html')
expect(getFileAppearanceType('index.html', 'text/html'))
.toBe(FileAppearanceTypeEnum.code)
})

it('should identify PDF files', () => {
jest.mocked(mime.getExtension).mockReturnValue('pdf')
expect(getFileAppearanceType('doc.pdf', 'application/pdf'))
.toBe(FileAppearanceTypeEnum.pdf)
})

it('should identify markdown files', () => {
jest.mocked(mime.getExtension).mockReturnValue('md')
expect(getFileAppearanceType('file.md', 'text/markdown'))
.toBe(FileAppearanceTypeEnum.markdown)

jest.mocked(mime.getExtension).mockReturnValue('markdown')
expect(getFileAppearanceType('file.markdown', 'text/markdown'))
.toBe(FileAppearanceTypeEnum.markdown)

jest.mocked(mime.getExtension).mockReturnValue('mdx')
expect(getFileAppearanceType('file.mdx', 'text/mdx'))
.toBe(FileAppearanceTypeEnum.markdown)
})

it('should identify excel files', () => {
jest.mocked(mime.getExtension).mockReturnValue('xlsx')
expect(getFileAppearanceType('doc.xlsx', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'))
.toBe(FileAppearanceTypeEnum.excel)

jest.mocked(mime.getExtension).mockReturnValue('xls')
expect(getFileAppearanceType('doc.xls', 'application/vnd.ms-excel'))
.toBe(FileAppearanceTypeEnum.excel)
})

it('should identify word files', () => {
jest.mocked(mime.getExtension).mockReturnValue('doc')
expect(getFileAppearanceType('doc.doc', 'application/msword'))
.toBe(FileAppearanceTypeEnum.word)

jest.mocked(mime.getExtension).mockReturnValue('docx')
expect(getFileAppearanceType('doc.docx', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'))
.toBe(FileAppearanceTypeEnum.word)
})

it('should identify ppt files', () => {
jest.mocked(mime.getExtension).mockReturnValue('ppt')
expect(getFileAppearanceType('doc.ppt', 'application/vnd.ms-powerpoint'))
.toBe(FileAppearanceTypeEnum.ppt)

jest.mocked(mime.getExtension).mockReturnValue('pptx')
expect(getFileAppearanceType('doc.pptx', 'application/vnd.openxmlformats-officedocument.presentationml.presentation'))
.toBe(FileAppearanceTypeEnum.ppt)
})

it('should identify document files', () => {
jest.mocked(mime.getExtension).mockReturnValue('txt')
expect(getFileAppearanceType('file.txt', 'text/plain'))
.toBe(FileAppearanceTypeEnum.document)

jest.mocked(mime.getExtension).mockReturnValue('csv')
expect(getFileAppearanceType('file.csv', 'text/csv'))
.toBe(FileAppearanceTypeEnum.document)

jest.mocked(mime.getExtension).mockReturnValue('msg')
expect(getFileAppearanceType('file.msg', 'application/vnd.ms-outlook'))
.toBe(FileAppearanceTypeEnum.document)

jest.mocked(mime.getExtension).mockReturnValue('eml')
expect(getFileAppearanceType('file.eml', 'message/rfc822'))
.toBe(FileAppearanceTypeEnum.document)

jest.mocked(mime.getExtension).mockReturnValue('xml')
expect(getFileAppearanceType('file.xml', 'application/rssxml'))
.toBe(FileAppearanceTypeEnum.document)

jest.mocked(mime.getExtension).mockReturnValue('epub')
expect(getFileAppearanceType('file.epub', 'application/epubzip'))
.toBe(FileAppearanceTypeEnum.document)
})

it('should handle null mime extension', () => {
jest.mocked(mime.getExtension).mockReturnValue(null)
expect(getFileAppearanceType('file.txt', 'text/plain'))
.toBe(FileAppearanceTypeEnum.document)
})
})

describe('getSupportFileType', () => {
it('should return custom type when isCustom is true', () => {
expect(getSupportFileType('file.txt', '', true))
.toBe(SupportUploadFileTypes.custom)
})

it('should return file type when isCustom is false', () => {
expect(getSupportFileType('file.txt', 'text/plain'))
.toBe(SupportUploadFileTypes.document)
})
})

describe('getProcessedFiles', () => {
it('should process files correctly', () => {
const files = [{
id: '123',
name: 'test.txt',
size: 1024,
type: 'text/plain',
progress: 100,
supportFileType: 'document',
transferMethod: TransferMethod.remote_url,
url: 'http://example.com',
uploadedId: '123',
}]

const result = getProcessedFiles(files)
expect(result[0]).toEqual({
type: 'document',
transfer_method: TransferMethod.remote_url,
url: 'http://example.com',
upload_file_id: '123',
})
})
})

describe('getProcessedFilesFromResponse', () => {
it('should process files correctly', () => {
const files = [{
related_id: '2a38e2ca-1295-415d-a51d-65d4ff9912d9',
extension: '.jpeg',
filename: 'test.jpeg',
size: 2881761,
mime_type: 'image/jpeg',
transfer_method: TransferMethod.local_file,
type: 'image',
url: 'https://upload.dify.dev/files/xxx/file-preview',
}]

const result = getProcessedFilesFromResponse(files)
expect(result[0]).toEqual({
id: '2a38e2ca-1295-415d-a51d-65d4ff9912d9',
name: 'test.jpeg',
size: 2881761,
type: 'image/jpeg',
progress: 100,
transferMethod: TransferMethod.local_file,
supportFileType: 'image',
uploadedId: '2a38e2ca-1295-415d-a51d-65d4ff9912d9',
url: 'https://upload.dify.dev/files/xxx/file-preview',
})
})
})

describe('getFileNameFromUrl', () => {
it('should extract filename from URL', () => {
expect(getFileNameFromUrl('http://example.com/path/file.txt'))
.toBe('file.txt')
})
})

describe('getSupportFileExtensionList', () => {
it('should handle custom file types', () => {
const result = getSupportFileExtensionList(
[SupportUploadFileTypes.custom],
['.pdf', '.txt', '.doc'],
)
expect(result).toEqual(['PDF', 'TXT', 'DOC'])
})

it('should handle standard file types', () => {
const mockFileExts = {
image: ['JPG', 'PNG'],
document: ['PDF', 'TXT'],
video: ['MP4', 'MOV'],
}

// Temporarily mock FILE_EXTS
const originalFileExts = { ...FILE_EXTS }
Object.assign(FILE_EXTS, mockFileExts)

const result = getSupportFileExtensionList(
['image', 'document'],
[],
)
expect(result).toEqual(['JPG', 'PNG', 'PDF', 'TXT'])

// Restore original FILE_EXTS
Object.assign(FILE_EXTS, originalFileExts)
})

it('should return empty array for empty inputs', () => {
const result = getSupportFileExtensionList([], [])
expect(result).toEqual([])
})

it('should prioritize custom types over standard types', () => {
const mockFileExts = {
image: ['JPG', 'PNG'],
}

// Temporarily mock FILE_EXTS
const originalFileExts = { ...FILE_EXTS }
Object.assign(FILE_EXTS, mockFileExts)

const result = getSupportFileExtensionList(
[SupportUploadFileTypes.custom, 'image'],
['.csv', '.xml'],
)
expect(result).toEqual(['CSV', 'XML'])

// Restore original FILE_EXTS
Object.assign(FILE_EXTS, originalFileExts)
})
})

describe('isAllowedFileExtension', () => {
it('should validate allowed file extensions', () => {
jest.mocked(mime.getExtension).mockReturnValue('pdf')
expect(isAllowedFileExtension(
'test.pdf',
'application/pdf',
['document'],
['.pdf'],
)).toBe(true)
})
})

describe('getFilesInLogs', () => {
const mockFileData = {
dify_model_identity: '__dify__file__',
related_id: '123',
filename: 'test.pdf',
size: 1024,
mime_type: 'application/pdf',
transfer_method: 'local_file',
type: 'document',
url: 'http://example.com/test.pdf',
}

it('should handle empty or null input', () => {
expect(getFilesInLogs(null)).toEqual([])
expect(getFilesInLogs({})).toEqual([])
expect(getFilesInLogs(undefined)).toEqual([])
})

it('should process single file object', () => {
const input = {
file1: mockFileData,
}

const expected = [{
varName: 'file1',
list: [{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
}],
}]

expect(getFilesInLogs(input)).toEqual(expected)
})

it('should process array of files', () => {
const input = {
files: [mockFileData, mockFileData],
}

const expected = [{
varName: 'files',
list: [
{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
},
{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
},
],
}]

expect(getFilesInLogs(input)).toEqual(expected)
})

it('should ignore non-file objects and arrays', () => {
const input = {
regularString: 'not a file',
regularNumber: 123,
regularArray: [1, 2, 3],
regularObject: { key: 'value' },
file: mockFileData,
}

const expected = [{
varName: 'file',
list: [{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
}],
}]

expect(getFilesInLogs(input)).toEqual(expected)
})

it('should handle mixed file types in array', () => {
const input = {
mixedFiles: [
mockFileData,
{ notAFile: true },
mockFileData,
],
}

const expected = [{
varName: 'mixedFiles',
list: [
{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
},
{
id: undefined,
name: undefined,
progress: 100,
size: 0,
supportFileType: undefined,
transferMethod: undefined,
type: undefined,
uploadedId: undefined,
url: undefined,
},
{
id: '123',
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
progress: 100,
transferMethod: 'local_file',
supportFileType: 'document',
uploadedId: '123',
url: 'http://example.com/test.pdf',
},
],
}]

expect(getFilesInLogs(input)).toEqual(expected)
})
})

describe('fileIsUploaded', () => {
it('should identify uploaded files', () => {
expect(fileIsUploaded({
uploadedId: '123',
progress: 100,
} as any)).toBe(true)
})

it('should identify remote files as uploaded', () => {
expect(fileIsUploaded({
transferMethod: TransferMethod.remote_url,
progress: 100,
} as any)).toBe(true)
})
})

describe('downloadFile', () => {
let mockAnchor: HTMLAnchorElement
let createElementMock: jest.SpyInstance
let appendChildMock: jest.SpyInstance
let removeChildMock: jest.SpyInstance

beforeEach(() => {
// Mock createElement and appendChild
mockAnchor = {
href: '',
download: '',
style: { display: '' },
target: '',
title: '',
click: jest.fn(),
} as unknown as HTMLAnchorElement

createElementMock = jest.spyOn(document, 'createElement').mockReturnValue(mockAnchor as any)
appendChildMock = jest.spyOn(document.body, 'appendChild').mockImplementation((node: Node) => {
return node
})
removeChildMock = jest.spyOn(document.body, 'removeChild').mockImplementation((node: Node) => {
return node
})
})

afterEach(() => {
jest.resetAllMocks()
})

it('should create and trigger download with correct attributes', () => {
const url = 'https://example.com/test.pdf'
const filename = 'test.pdf'

downloadFile(url, filename)

// Verify anchor element was created with correct properties
expect(createElementMock).toHaveBeenCalledWith('a')
expect(mockAnchor.href).toBe(url)
expect(mockAnchor.download).toBe(filename)
expect(mockAnchor.style.display).toBe('none')
expect(mockAnchor.target).toBe('_blank')
expect(mockAnchor.title).toBe(filename)

// Verify DOM operations
expect(appendChildMock).toHaveBeenCalledWith(mockAnchor)
expect(mockAnchor.click).toHaveBeenCalled()
expect(removeChildMock).toHaveBeenCalledWith(mockAnchor)
})

it('should handle empty filename', () => {
const url = 'https://example.com/test.pdf'
const filename = ''

downloadFile(url, filename)

expect(mockAnchor.download).toBe('')
expect(mockAnchor.title).toBe('')
})

it('should handle empty url', () => {
const url = ''
const filename = 'test.pdf'

downloadFile(url, filename)

expect(mockAnchor.href).toBe('')
})
})
})

@ -53,6 +53,6 @@ export const getInputVars = (text: string): ValueSelector[] => {
export const FILE_EXTS: Record<string, string[]> = {
[SupportUploadFileTypes.image]: ['JPG', 'JPEG', 'PNG', 'GIF', 'WEBP', 'SVG'],
[SupportUploadFileTypes.document]: ['TXT', 'MD', 'MDX', 'MARKDOWN', 'PDF', 'HTML', 'XLSX', 'XLS', 'DOC', 'DOCX', 'CSV', 'EML', 'MSG', 'PPTX', 'PPT', 'XML', 'EPUB'],
[SupportUploadFileTypes.audio]: ['MP3', 'M4A', 'WAV', 'WEBM', 'AMR', 'MPGA'],
[SupportUploadFileTypes.video]: ['MP4', 'MOV', 'MPEG', 'MPGA'],
[SupportUploadFileTypes.audio]: ['MP3', 'M4A', 'WAV', 'AMR', 'MPGA'],
[SupportUploadFileTypes.video]: ['MP4', 'MOV', 'MPEG', 'WEBM'],
}

@ -20,10 +20,10 @@ const Header: FC<Props> = ({
<div className='flex h-6 items-center justify-between'>
<div className='flex items-center'>
<div className='text-base font-medium text-text-secondary'>{t(`${I18N_PREFIX}.watercrawlTitle`)}</div>
<div className='ml-2 mr-2 w-px h-3.5 bg-divider-regular' />
<Button className='flex items-center gap-x-[1px] h-6 px-1.5' onClick={onSetting}>
<RiEqualizer2Line className='w-3.5 h-3.5 text-components-button-secondary-text' />
<span className='text-components-button-secondary-text text-xs font-medium px-[3px]'>
<div className='ml-2 mr-2 h-3.5 w-px bg-divider-regular' />
<Button className='flex h-6 items-center gap-x-[1px] px-1.5' onClick={onSetting}>
<RiEqualizer2Line className='h-3.5 w-3.5 text-components-button-secondary-text' />
<span className='px-[3px] text-xs font-medium text-components-button-secondary-text'>
{t(`${I18N_PREFIX}.configureWatercrawl`)}
</span>
</Button>
@ -34,7 +34,7 @@ const Header: FC<Props> = ({
rel='noopener noreferrer'
className='inline-flex items-center gap-x-1 text-xs font-medium text-text-accent'
>
<RiBookOpenLine className='w-3.5 h-3.5 text-text-accent' />
<RiBookOpenLine className='h-3.5 w-3.5 text-text-accent' />
<span>{t(`${I18N_PREFIX}.watercrawlDoc`)}</span>
</a>
</div>

@ -88,11 +88,11 @@ const ConfigWatercrawlModal: FC<Props> = ({

return (
<PortalToFollowElem open>
<PortalToFollowElemContent className='w-full h-full z-[60]'>
<PortalToFollowElemContent className='z-[60] h-full w-full'>
<div className='fixed inset-0 flex items-center justify-center bg-background-overlay'>
<div className='mx-2 w-[640px] max-h-[calc(100vh-120px)] bg-components-panel-bg shadow-xl rounded-2xl overflow-y-auto'>
<div className='mx-2 max-h-[calc(100vh-120px)] w-[640px] overflow-y-auto rounded-2xl bg-components-panel-bg shadow-xl'>
<div className='px-8 pt-8'>
<div className='flex justify-between items-center mb-4'>
<div className='mb-4 flex items-center justify-between'>
<div className='system-xl-semibold text-text-primary'>{t(`${I18N_PREFIX}.configWatercrawl`)}</div>
</div>

@ -113,10 +113,10 @@ const ConfigWatercrawlModal: FC<Props> = ({
placeholder={DEFAULT_BASE_URL}
/>
</div>
<div className='my-8 flex justify-between items-center h-8'>
<a className='flex items-center space-x-1 leading-[18px] text-xs font-normal text-text-accent' target='_blank' href='https://app.watercrawl.dev/'>
<div className='my-8 flex h-8 items-center justify-between'>
<a className='flex items-center space-x-1 text-xs font-normal leading-[18px] text-text-accent' target='_blank' href='https://app.watercrawl.dev/'>
<span>{t(`${I18N_PREFIX}.getApiKeyLinkText`)}</span>
<LinkExternal02 className='w-3 h-3' />
<LinkExternal02 className='h-3 w-3' />
</a>
<div className='flex'>
<Button
@ -139,11 +139,11 @@ const ConfigWatercrawlModal: FC<Props> = ({
</div>
</div>
<div className='border-t-[0.5px] border-t-divider-regular'>
<div className='flex justify-center items-center py-3 bg-background-section-burn text-xs text-text-tertiary'>
<Lock01 className='mr-1 w-3 h-3 text-text-tertiary' />
<div className='flex items-center justify-center bg-background-section-burn py-3 text-xs text-text-tertiary'>
<Lock01 className='mr-1 h-3 w-3 text-text-tertiary' />
{t('common.modelProvider.encrypted.front')}
<a
className='text-text-accent mx-1'
className='mx-1 text-text-accent'
target='_blank' rel='noopener noreferrer'
href='https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html'
>

@ -278,12 +278,13 @@ const ModelModal: FC<ModelModalProps> = ({
<PortalToFollowElem open>
<PortalToFollowElemContent className='z-[60] h-full w-full'>
<div className='fixed inset-0 flex items-center justify-center bg-black/[.25]'>
<div className='mx-2 max-h-[calc(100vh-120px)] w-[640px] overflow-y-auto rounded-2xl bg-components-panel-bg shadow-xl'>
<div className='mx-2 w-[640px] overflow-auto rounded-2xl bg-components-panel-bg shadow-xl'>
<div className='px-8 pt-8'>
<div className='mb-2 flex items-center'>
<div className='text-xl font-semibold text-text-primary'>{renderTitlePrefix()}</div>
</div>

<div className='max-h-[calc(100vh-320px)] overflow-y-auto'>
<Form
value={value}
onChange={handleValueChange}
@ -293,7 +294,6 @@ const ModelModal: FC<ModelModalProps> = ({
showOnVariableMap={showOnVariableMap}
isEditMode={isEditMode}
/>

<div className='mb-4 mt-1 border-t-[0.5px] border-t-divider-regular' />
<ModelLoadBalancingConfigs withSwitch {...{
draftConfig,
@ -302,6 +302,7 @@ const ModelModal: FC<ModelModalProps> = ({
currentCustomConfigurationModelFixedFields,
configurationMethod: configurateMethod,
}} />
</div>

<div className='sticky bottom-0 -mx-2 mt-2 flex flex-wrap items-center justify-between gap-y-2 bg-components-panel-bg px-2 pb-6 pt-4'>
{

@ -45,7 +45,10 @@ const RunOnce: FC<IRunOnceProps> = ({
const onClear = () => {
const newInputs: Record<string, any> = {}
promptConfig.prompt_variables.forEach((item) => {
if (item.type === 'text-input' || item.type === 'paragraph')
newInputs[item.key] = ''
else
newInputs[item.key] = undefined
})
onInputsChange(newInputs)
}
@ -63,7 +66,10 @@ const RunOnce: FC<IRunOnceProps> = ({
useEffect(() => {
const newInputs: Record<string, any> = {}
promptConfig.prompt_variables.forEach((item) => {
if (item.type === 'text-input' || item.type === 'paragraph')
newInputs[item.key] = ''
else
newInputs[item.key] = undefined
})
onInputsChange(newInputs)
}, [promptConfig.prompt_variables, onInputsChange])

@ -15,7 +15,8 @@ import { useToolTabs } from './hooks'
import ViewTypeSelect, { ViewType } from './view-type-select'
import cn from '@/utils/classnames'
import { useGetLanguage } from '@/context/i18n'
import PluginList from '@/app/components/workflow/block-selector/market-place-plugin/list'
import type { ListRef } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import PluginList, { type ListProps } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import ActionButton from '../../base/action-button'
import { RiAddLine } from '@remixicon/react'
import { PluginType } from '../../plugins/types'
@ -26,7 +27,7 @@ type AllToolsProps = {
className?: string
toolContentClassName?: string
searchText: string
tags: string[]
tags: ListProps['tags']
buildInTools: ToolWithProvider[]
customTools: ToolWithProvider[]
workflowTools: ToolWithProvider[]
@ -36,11 +37,14 @@ type AllToolsProps = {
onShowAddCustomCollectionModal?: () => void
selectedTools?: ToolValue[]
}

const DEFAULT_TAGS: AllToolsProps['tags'] = []

const AllTools = ({
className,
toolContentClassName,
searchText,
tags = [],
tags = DEFAULT_TAGS,
onSelect,
buildInTools,
workflowTools,
@ -97,7 +101,7 @@ const AllTools = ({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [searchText, tags, enable_marketplace])

const pluginRef = useRef(null)
const pluginRef = useRef<ListRef>(null)
const wrapElemRef = useRef<HTMLDivElement>(null)

return (
@ -136,7 +140,7 @@ const AllTools = ({
<div
ref={wrapElemRef}
className='max-h-[464px] overflow-y-auto'
onScroll={(pluginRef.current as any)?.handleScroll}
onScroll={pluginRef.current?.handleScroll}
>
<Tools
className={toolContentClassName}
@ -149,8 +153,9 @@ const AllTools = ({
/>
{/* Plugins from marketplace */}
{enable_marketplace && <PluginList
ref={pluginRef}
wrapElemRef={wrapElemRef}
list={notInstalledPlugins as any} ref={pluginRef}
list={notInstalledPlugins}
searchText={searchText}
toolContentClassName={toolContentClassName}
tags={tags}

@ -1,5 +1,5 @@
'use client'
import React, { useEffect, useImperativeHandle, useMemo, useRef } from 'react'
import React, { forwardRef, useEffect, useImperativeHandle, useMemo, useRef } from 'react'
import { useTranslation } from 'react-i18next'
import useStickyScroll, { ScrollPosition } from '../use-sticky-scroll'
import Item from './item'
@ -8,10 +8,9 @@ import cn from '@/utils/classnames'
import Link from 'next/link'
import { marketplaceUrlPrefix } from '@/config'
import { RiArrowRightUpLine, RiSearchLine } from '@remixicon/react'
// import { RiArrowRightUpLine } from '@remixicon/react'
import { noop } from 'lodash-es'

type Props = {
export type ListProps = {
wrapElemRef: React.RefObject<HTMLElement>
list: Plugin[]
searchText: string
@ -20,17 +19,16 @@ type Props = {
disableMaxWidth?: boolean
}

const List = (
{
ref,
export type ListRef = { handleScroll: () => void }

const List = forwardRef<ListRef, ListProps>(({
wrapElemRef,
searchText,
tags,
list,
toolContentClassName,
disableMaxWidth = false,
},
) => {
}, ref) => {
const { t } = useTranslation()
const hasFilter = !searchText
const hasRes = list.length > 0
@ -126,7 +124,7 @@ const List = (
</div>
</>
)
}
})

List.displayName = 'List'


@ -18,10 +18,13 @@ import { CollectionType } from '@/app/components/tools/types'
import useGetIcon from '@/app/components/plugins/install-plugin/base/use-get-icon'
import { useStrategyInfo } from '../../agent/use-config'
import { SwitchPluginVersion } from './switch-plugin-version'
import PluginList from '@/app/components/workflow/block-selector/market-place-plugin/list'
import type { ListRef } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import PluginList, { type ListProps } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import { useMarketplacePlugins } from '@/app/components/plugins/marketplace/hooks'
import { ToolTipContent } from '@/app/components/base/tooltip/content'

const DEFAULT_TAGS: ListProps['tags'] = []

const NotFoundWarn = (props: {
title: ReactNode,
description: ReactNode
@ -138,7 +141,7 @@ export const AgentStrategySelector = memo((props: AgentStrategySelectorProps) =>
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [query])

const pluginRef = useRef(null)
const pluginRef = useRef<ListRef>(null)

return <PortalToFollowElem open={open} onOpenChange={setOpen} placement='bottom'>
<PortalToFollowElemTrigger className='w-full'>
@ -213,10 +216,11 @@ export const AgentStrategySelector = memo((props: AgentStrategySelectorProps) =>
className='h-full max-h-full max-w-none overflow-y-auto'
indexBarClassName='top-0 xl:top-36' showWorkflowEmpty={false} hasSearchText={false} />
<PluginList
ref={pluginRef}
wrapElemRef={wrapElemRef}
list={notInstalledPlugins as any} ref={pluginRef}
list={notInstalledPlugins}
searchText={query}
tags={[]}
tags={DEFAULT_TAGS}
disableMaxWidth
/>
</main>

@ -322,9 +322,11 @@ const ChatVariableModal = ({
</div>
<div className='flex'>
{type === ChatVarType.String && (
<Input
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
// Input will remove \n\r, so use Textarea just like description area
<textarea
className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
value={value}
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
onChange={e => setValue(e.target.value)}
/>
)}

@ -131,12 +131,20 @@ const VariableModal = ({
<div className=''>
<div className='system-sm-semibold mb-1 flex h-6 items-center text-text-secondary'>{t('workflow.env.modal.value')}</div>
<div className='flex'>
<Input
{
type !== 'number' ? <textarea
className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
value={value}
placeholder={t('workflow.env.modal.valuePlaceholder') || ''}
onChange={e => setValue(e.target.value)}
/>
: <Input
placeholder={t('workflow.env.modal.valuePlaceholder') || ''}
value={value}
onChange={e => setValue(e.target.value)}
type={type !== 'number' ? 'text' : 'number'}
type="number"
/>
}
</div>
</div>
</div>

65
web/hooks/use-timestamp.spec.ts
Normal file
@ -0,0 +1,65 @@
import { renderHook } from '@testing-library/react'
import useTimestamp from './use-timestamp'

jest.mock('@/context/app-context', () => ({
useAppContext: jest.fn(() => ({
userProfile: {
id: '8b18e24b-1ac8-4262-aa5c-e9aa95c76846',
name: 'test',
avatar: null,
avatar_url: null,
email: 'test@dify.ai',
is_password_set: false,
interface_language: 'zh-Hans',
interface_theme: 'light',
timezone: 'Asia/Shanghai',
last_login_at: 1744188761,
last_login_ip: '127.0.0.1',
created_at: 1728444483,
},
})),
}))

describe('useTimestamp', () => {
describe('formatTime', () => {
it('should format unix timestamp correctly', () => {
const { result } = renderHook(() => useTimestamp())
const timestamp = 1704132000

expect(result.current.formatTime(timestamp, 'YYYY-MM-DD HH:mm:ss'))
.toBe('2024-01-02 02:00:00')
})

it('should format with different patterns', () => {
const { result } = renderHook(() => useTimestamp())
const timestamp = 1704132000

expect(result.current.formatTime(timestamp, 'MM/DD/YYYY'))
.toBe('01/02/2024')

expect(result.current.formatTime(timestamp, 'HH:mm'))
.toBe('02:00')
})
})

describe('formatDate', () => {
it('should format date string correctly', () => {
const { result } = renderHook(() => useTimestamp())
const dateString = '2024-01-01T12:00:00Z'

expect(result.current.formatDate(dateString, 'YYYY-MM-DD HH:mm:ss'))
.toBe('2024-01-01 20:00:00')
})

it('should format with different patterns', () => {
const { result } = renderHook(() => useTimestamp())
const dateString = '2024-01-01T12:00:00Z'

expect(result.current.formatDate(dateString, 'MM/DD/YYYY'))
.toBe('01/01/2024')

expect(result.current.formatDate(dateString, 'HH:mm'))
.toBe('20:00')
})
})
})

@ -147,7 +147,7 @@ const config: Config = {
// setupFiles: [],

// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
setupFilesAfterEnv: ['<rootDir>/jest.setup.ts'],

// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,

1
web/jest.setup.ts
Normal file
@ -0,0 +1 @@
import '@testing-library/jest-dom'

@ -14,17 +14,18 @@
const config = window[configKey];

// SVG icons for open and close states
const svgIcons = {
open: `<svg id="openIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
const svgIcons = `<svg id="openIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.7586 2L16.2412 2C17.0462 1.99999 17.7105 1.99998 18.2517 2.04419C18.8138 2.09012 19.3305 2.18868 19.8159 2.43598C20.5685 2.81947 21.1804 3.43139 21.5639 4.18404C21.8112 4.66937 21.9098 5.18608 21.9557 5.74818C21.9999 6.28937 21.9999 6.95373 21.9999 7.7587L22 14.1376C22.0004 14.933 22.0007 15.5236 21.8636 16.0353C21.4937 17.4156 20.4155 18.4938 19.0352 18.8637C18.7277 18.9461 18.3917 18.9789 17.9999 18.9918L17.9999 20.371C18 20.6062 18 20.846 17.9822 21.0425C17.9651 21.2305 17.9199 21.5852 17.6722 21.8955C17.3872 22.2525 16.9551 22.4602 16.4983 22.4597C16.1013 22.4593 15.7961 22.273 15.6386 22.1689C15.474 22.06 15.2868 21.9102 15.1031 21.7632L12.69 19.8327C12.1714 19.4178 12.0174 19.3007 11.8575 19.219C11.697 19.137 11.5262 19.0771 11.3496 19.0408C11.1737 19.0047 10.9803 19 10.3162 19H7.75858C6.95362 19 6.28927 19 5.74808 18.9558C5.18598 18.9099 4.66928 18.8113 4.18394 18.564C3.43129 18.1805 2.81937 17.5686 2.43588 16.816C2.18859 16.3306 2.09002 15.8139 2.0441 15.2518C1.99988 14.7106 1.99989 14.0463 1.9999 13.2413V7.75868C1.99989 6.95372 1.99988 6.28936 2.0441 5.74818C2.09002 5.18608 2.18859 4.66937 2.43588 4.18404C2.81937 3.43139 3.43129 2.81947 4.18394 2.43598C4.66928 2.18868 5.18598 2.09012 5.74808 2.04419C6.28927 1.99998 6.95364 1.99999 7.7586 2ZM10.5073 7.5C10.5073 6.67157 9.83575 6 9.00732 6C8.1789 6 7.50732 6.67157 7.50732 7.5C7.50732 8.32843 8.1789 9 9.00732 9C9.83575 9 10.5073 8.32843 10.5073 7.5ZM16.6073 11.7001C16.1669 11.3697 15.5426 11.4577 15.2105 11.8959C15.1488 11.9746 15.081 12.0486 15.0119 12.1207C14.8646 12.2744 14.6432 12.4829 14.3566 12.6913C13.7796 13.111 12.9818 13.5001 12.0073 13.5001C11.0328 13.5001 10.235 13.111 9.65799 12.6913C9.37138 12.4829 9.15004 12.2744 9.00274 12.1207C8.93366 12.0486 8.86581 11.9745 8.80418 11.8959C8.472 11.4577 7.84775 11.3697 7.40732 11.7001C6.96549 12.0314 6.87595 12.6582 7.20732 13.1001C7.20479 13.0968 7.21072 13.1043 7.22094 13.1171C7.24532 13.1478 7.29407 13.2091 7.31068 13.2289C7.36932 13.2987 7.45232 13.3934 7.55877 13.5045C7.77084 13.7258 8.08075 14.0172 8.48165 14.3088C9.27958 14.8891 10.4818 15.5001 12.0073 15.5001C13.5328 15.5001 14.735 14.8891 15.533 14.3088C15.9339 14.0172 16.2438 13.7258 16.4559 13.5045C16.5623 13.3934 16.6453 13.2987 16.704 13.2289C16.7333 13.1939 16.7567 13.165 16.7739 13.1432C17.1193 12.6969 17.0729 12.0493 16.6073 11.7001ZM15.0073 6C15.8358 6 16.5073 6.67157 16.5073 7.5C16.5073 8.32843 15.8358 9 15.0073 9C14.1789 9 13.5073 8.32843 13.5073 7.5C13.5073 6.67157 14.1789 6 15.0073 6Z" fill="white"/>
</svg>`,
close: `<svg id="closeIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
</svg>
<svg id="closeIcon" style="display:none" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M18 18L6 6M6 18L18 6" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>`
};
</svg>
`;

// Main function to embed the chatbot
async function embedChatbot() {
let isDragging = false

if (!config || !config.token) {
console.error(`${configKey} is empty or token is not provided`);
return;
@ -33,7 +34,9 @@
async function compressAndEncodeBase64(input) {
const uint8Array = new TextEncoder().encode(input);
const compressedStream = new Response(
new Blob([uint8Array]).stream().pipeThrough(new CompressionStream('gzip'))
new Blob([uint8Array])
.stream()
.pipeThrough(new CompressionStream("gzip"))
).arrayBuffer();
const compressedUint8Array = new Uint8Array(await compressedStream);
return btoa(String.fromCharCode(...compressedUint8Array));
@ -112,33 +115,30 @@

// Function to reset the iframe position
function resetIframePosition() {
if (window.innerWidth <= 640)
return
if (window.innerWidth <= 640) return;

const targetIframe = document.getElementById(iframeId);
const targetButton = document.getElementById(buttonId);
if (targetIframe && targetButton) {
const buttonRect = targetButton.getBoundingClientRect();

const buttonInBottom = buttonRect.top - 5 > targetIframe.clientHeight
const buttonInBottom = buttonRect.top - 5 > targetIframe.clientHeight;

if (buttonInBottom) {
targetIframe.style.bottom = '0px';
targetIframe.style.top = 'unset';
}
else {
targetIframe.style.bottom = 'unset';
targetIframe.style.top = '0px';
targetIframe.style.bottom = "0px";
targetIframe.style.top = "unset";
} else {
targetIframe.style.bottom = "unset";
targetIframe.style.top = "0px";
}

const buttonInRight = buttonRect.right > targetIframe.clientWidth;

if (buttonInRight) {
targetIframe.style.right = '0';
targetIframe.style.left = 'unset';
}
else {
targetIframe.style.right = 'unset';
targetIframe.style.right = "0";
targetIframe.style.left = "unset";
} else {
targetIframe.style.right = "unset";
targetIframe.style.left = 0;
}
}
|
||||
const displayDiv = document.createElement("div");
|
||||
displayDiv.style.cssText =
|
||||
"position: relative; display: flex; align-items: center; justify-content: center; width: 100%; height: 100%; z-index: 2147483647;";
|
||||
displayDiv.innerHTML = svgIcons.open;
|
||||
displayDiv.innerHTML = svgIcons;
|
||||
containerDiv.appendChild(displayDiv);
|
||||
document.body.appendChild(containerDiv);
|
||||
|
||||
// Add click event listener to toggle chatbot
|
||||
containerDiv.addEventListener("click", function () {
|
||||
containerDiv.addEventListener("click", handleClick);
|
||||
// Add touch event listener
|
||||
containerDiv.addEventListener("touchend", handleClick);
|
||||
|
||||
function handleClick() {
|
||||
if (isDragging) return;
|
||||
|
||||
const targetIframe = document.getElementById(iframeId);
|
||||
if (!targetIframe) {
|
||||
containerDiv.prepend(createIframe());
|
||||
containerDiv.appendChild(createIframe());
|
||||
resetIframePosition();
|
||||
this.title = "Exit (ESC)";
|
||||
displayDiv.innerHTML = svgIcons.close;
|
||||
document.addEventListener('keydown', handleEscKey);
|
||||
setSvgIcon("close");
|
||||
document.addEventListener("keydown", handleEscKey);
|
||||
return;
|
||||
}
|
||||
targetIframe.style.display = targetIframe.style.display === "none" ? "block" : "none";
|
||||
displayDiv.innerHTML = targetIframe.style.display === "none" ? svgIcons.open : svgIcons.close;
|
||||
targetIframe.style.display =
|
||||
targetIframe.style.display === "none" ? "block" : "none";
|
||||
targetIframe.style.display === "none"
|
||||
? setSvgIcon("open")
|
||||
: setSvgIcon("close");
|
||||
|
||||
if (targetIframe.style.display === "none") {
|
||||
document.removeEventListener('keydown', handleEscKey);
|
||||
document.removeEventListener("keydown", handleEscKey);
|
||||
} else {
|
||||
document.addEventListener('keydown', handleEscKey);
|
||||
document.addEventListener("keydown", handleEscKey);
|
||||
}
|
||||
|
||||
|
||||
resetIframePosition();
|
||||
});
|
||||
}
|
||||
|
||||
// Enable dragging if specified in config
|
||||
if (config.draggable) {
|
||||
@ -229,20 +237,39 @@
|
||||
|
||||
// Function to enable dragging of the chat button
|
||||
function enableDragging(element, axis) {
|
||||
let isDragging = false;
|
||||
let startX, startY;
|
||||
let startX, startY, startClientX, startClientY;
|
||||
|
||||
element.addEventListener("mousedown", startDragging);
|
||||
document.addEventListener("mousemove", drag);
|
||||
document.addEventListener("mouseup", stopDragging);
|
||||
element.addEventListener("touchstart", startDragging);
|
||||
|
||||
function startDragging(e) {
|
||||
isDragging = true;
|
||||
isDragging = false;
|
||||
if (e.type === "touchstart") {
|
||||
startX = e.touches[0].clientX - element.offsetLeft;
|
||||
startY = e.touches[0].clientY - element.offsetTop;
|
||||
startClientX = e.touches[0].clientX;
|
||||
startClientY = e.touches[0].clientY;
|
||||
} else {
|
||||
startX = e.clientX - element.offsetLeft;
|
||||
startY = e.clientY - element.offsetTop;
|
||||
}
|
||||
document.addEventListener("mousemove", drag);
|
||||
document.addEventListener("touchmove", drag, { passive: false });
|
||||
document.addEventListener("mouseup", stopDragging);
|
||||
document.addEventListener("touchend", stopDragging);
|
||||
e.preventDefault();
|
||||
}
|
||||
|
||||
function drag(e) {
|
||||
const touch = e.type === "touchmove" ? e.touches[0] : e;
|
||||
const deltaX = touch.clientX - startClientX;
|
||||
const deltaY = touch.clientY - startClientY;
|
||||
|
||||
// Determine whether it is a drag operation
|
||||
if (Math.abs(deltaX) > 8 || Math.abs(deltaY) > 8) {
|
||||
isDragging = true;
|
||||
}
|
||||
|
||||
if (!isDragging) return;
|
||||
|
||||
element.style.transition = "none";
|
||||
@ -252,11 +279,17 @@
|
||||
const targetIframe = document.getElementById(iframeId);
|
||||
if (targetIframe) {
|
||||
targetIframe.style.display = "none";
|
||||
element.querySelector("div").innerHTML = svgIcons.open;
|
||||
setSvgIcon("open");
|
||||
}
|
||||
|
||||
const newLeft = e.clientX - startX;
|
||||
const newBottom = window.innerHeight - e.clientY - startY;
|
||||
let newLeft, newBottom;
|
||||
if (e.type === "touchmove") {
|
||||
newLeft = e.touches[0].clientX - startX;
|
||||
newBottom = window.innerHeight - e.touches[0].clientY - startY;
|
||||
} else {
|
||||
newLeft = e.clientX - startX;
|
||||
newBottom = window.innerHeight - e.clientY - startY;
|
||||
}
|
||||
|
||||
const elementRect = element.getBoundingClientRect();
|
||||
const maxX = window.innerWidth - elementRect.width;
|
||||
@ -279,9 +312,16 @@
|
||||
}
|
||||
|
||||
function stopDragging() {
|
||||
setTimeout(() => {
|
||||
isDragging = false;
|
||||
}, 0);
|
||||
element.style.transition = "";
|
||||
element.style.cursor = "pointer";
|
||||
|
||||
document.removeEventListener("mousemove", drag);
|
||||
document.removeEventListener("touchmove", drag);
|
||||
document.removeEventListener("mouseup", stopDragging);
|
||||
document.removeEventListener("touchend", stopDragging);
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,18 +331,27 @@
|
||||
}
|
||||
}
|
||||
|
||||
function setSvgIcon(type = "open") {
|
||||
if (type === "open") {
|
||||
document.getElementById("openIcon").style.display = "block";
|
||||
document.getElementById("closeIcon").style.display = "none";
|
||||
} else {
|
||||
document.getElementById("openIcon").style.display = "none";
|
||||
document.getElementById("closeIcon").style.display = "block";
|
||||
}
|
||||
}
|
||||
|
||||
// Add esc Exit keyboard event triggered
|
||||
function handleEscKey(event) {
|
||||
if (event.key === 'Escape') {
|
||||
if (event.key === "Escape") {
|
||||
const targetIframe = document.getElementById(iframeId);
|
||||
const button = document.getElementById(buttonId);
|
||||
if (targetIframe && targetIframe.style.display !== 'none') {
|
||||
targetIframe.style.display = 'none';
|
||||
button.querySelector('div').innerHTML = svgIcons.open;
|
||||
if (targetIframe && targetIframe.style.display !== "none") {
|
||||
targetIframe.style.display = "none";
|
||||
setSvgIcon("open");
|
||||
}
|
||||
}
|
||||
}
|
||||
document.addEventListener('keydown', handleEscKey);
|
||||
document.addEventListener("keydown", handleEscKey);
|
||||
|
||||
// Set the embedChatbot function to run when the body is loaded,Avoid infinite nesting
|
||||
if (config?.dynamicScript) {
|
||||
|
16
web/public/embed.min.js
vendored
@ -1,8 +1,4 @@
|
||||
(()=>{let t="difyChatbotConfig",a="dify-chatbot-bubble-button",c="dify-chatbot-bubble-window",p=window[t],u={open:`<svg id="openIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.7586 2L16.2412 2C17.0462 1.99999 17.7105 1.99998 18.2517 2.04419C18.8138 2.09012 19.3305 2.18868 19.8159 2.43598C20.5685 2.81947 21.1804 3.43139 21.5639 4.18404C21.8112 4.66937 21.9098 5.18608 21.9557 5.74818C21.9999 6.28937 21.9999 6.95373 21.9999 7.7587L22 14.1376C22.0004 14.933 22.0007 15.5236 21.8636 16.0353C21.4937 17.4156 20.4155 18.4938 19.0352 18.8637C18.7277 18.9461 18.3917 18.9789 17.9999 18.9918L17.9999 20.371C18 20.6062 18 20.846 17.9822 21.0425C17.9651 21.2305 17.9199 21.5852 17.6722 21.8955C17.3872 22.2525 16.9551 22.4602 16.4983 22.4597C16.1013 22.4593 15.7961 22.273 15.6386 22.1689C15.474 22.06 15.2868 21.9102 15.1031 21.7632L12.69 19.8327C12.1714 19.4178 12.0174 19.3007 11.8575 19.219C11.697 19.137 11.5262 19.0771 11.3496 19.0408C11.1737 19.0047 10.9803 19 10.3162 19H7.75858C6.95362 19 6.28927 19 5.74808 18.9558C5.18598 18.9099 4.66928 18.8113 4.18394 18.564C3.43129 18.1805 2.81937 17.5686 2.43588 16.816C2.18859 16.3306 2.09002 15.8139 2.0441 15.2518C1.99988 14.7106 1.99989 14.0463 1.9999 13.2413V7.75868C1.99989 6.95372 1.99988 6.28936 2.0441 5.74818C2.09002 5.18608 2.18859 4.66937 2.43588 4.18404C2.81937 3.43139 3.43129 2.81947 4.18394 2.43598C4.66928 2.18868 5.18598 2.09012 5.74808 2.04419C6.28927 1.99998 6.95364 1.99999 7.7586 2ZM10.5073 7.5C10.5073 6.67157 9.83575 6 9.00732 6C8.1789 6 7.50732 6.67157 7.50732 7.5C7.50732 8.32843 8.1789 9 9.00732 9C9.83575 9 10.5073 8.32843 10.5073 7.5ZM16.6073 11.7001C16.1669 11.3697 15.5426 11.4577 15.2105 11.8959C15.1488 11.9746 15.081 12.0486 15.0119 12.1207C14.8646 12.2744 14.6432 12.4829 14.3566 12.6913C13.7796 13.111 12.9818 13.5001 12.0073 13.5001C11.0328 13.5001 10.235 13.111 9.65799 12.6913C9.37138 12.4829 9.15004 12.2744 9.00274 12.1207C8.93366 12.0486 8.86581 11.9745 8.80418 11.8959C8.472 11.4577 7.84775 11.3697 7.40732 11.7001C6.96549 12.0314 6.87595 12.6582 7.20732 13.1001C7.20479 13.0968 7.21072 13.1043 7.22094 13.1171C7.24532 13.1478 7.29407 13.2091 7.31068 13.2289C7.36932 13.2987 7.45232 13.3934 7.55877 13.5045C7.77084 13.7258 8.08075 14.0172 8.48165 14.3088C9.27958 14.8891 10.4818 15.5001 12.0073 15.5001C13.5328 15.5001 14.735 14.8891 15.533 14.3088C15.9339 14.0172 16.2438 13.7258 16.4559 13.5045C16.5623 13.3934 16.6453 13.2987 16.704 13.2289C16.7333 13.1939 16.7567 13.165 16.7739 13.1432C17.1193 12.6969 17.0729 12.0493 16.6073 11.7001ZM15.0073 6C15.8358 6 16.5073 6.67157 16.5073 7.5C16.5073 8.32843 15.8358 9 15.0073 9C14.1789 9 13.5073 8.32843 13.5073 7.5C13.5073 6.67157 14.1789 6 15.0073 6Z" fill="white"/>
|
||||
</svg>`,close:`<svg id="closeIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M18 18L6 6M6 18L18 6" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>`};async function e(){if(p&&p.token){var e=new URLSearchParams({...await(async()=>{var e=p?.inputs||{};let n={};return await Promise.all(Object.entries(e).map(async([e,t])=>{n[e]=await o(t)})),n})(),...await(async()=>{var e=p?.systemVariables||{};let n={};return await Promise.all(Object.entries(e).map(async([e,t])=>{n["sys."+e]=await o(t)})),n})()});let t=`${p.baseUrl||`https://${p.isDev?"dev.":""}udify.app`}/chatbot/${p.token}?`+e;e=i();async function o(e){e=(new TextEncoder).encode(e),e=new Response(new Blob([e]).stream().pipeThrough(new CompressionStream("gzip"))).arrayBuffer(),e=new Uint8Array(await e);return btoa(String.fromCharCode(...e))}function i(){var e=document.createElement("iframe");return e.allow="fullscreen;microphone",e.title="dify chatbot bubble window",e.id=c,e.src=t,e.style.cssText=`
|
||||
(()=>{let t="difyChatbotConfig",m="dify-chatbot-bubble-button",h="dify-chatbot-bubble-window",p=window[t];async function e(){let u=!1;if(p&&p.token){var e=new URLSearchParams({...await(async()=>{var e=p?.inputs||{};let n={};return await Promise.all(Object.entries(e).map(async([e,t])=>{n[e]=await o(t)})),n})(),...await(async()=>{var e=p?.systemVariables||{};let n={};return await Promise.all(Object.entries(e).map(async([e,t])=>{n["sys."+e]=await o(t)})),n})()});let t=`${p.baseUrl||`https://${p.isDev?"dev.":""}udify.app`}/chatbot/${p.token}?`+e;e=s();async function o(e){e=(new TextEncoder).encode(e),e=new Response(new Blob([e]).stream().pipeThrough(new CompressionStream("gzip"))).arrayBuffer(),e=new Uint8Array(await e);return btoa(String.fromCharCode(...e))}function s(){var e=document.createElement("iframe");return e.allow="fullscreen;microphone",e.title="dify chatbot bubble window",e.id=h,e.src=t,e.style.cssText=`
|
||||
position: absolute;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@ -18,7 +14,7 @@
|
||||
z-index: 2147483640;
|
||||
overflow: hidden;
|
||||
user-select: none;
|
||||
`,e}function r(){var e,t;window.innerWidth<=640||(e=document.getElementById(c),t=document.getElementById(a),e&&t&&((t=t.getBoundingClientRect()).top-5>e.clientHeight?(e.style.bottom="0px",e.style.top="unset"):(e.style.bottom="unset",e.style.top="0px"),t.right>e.clientWidth?(e.style.right="0",e.style.left="unset"):(e.style.right="unset",e.style.left=0)))}function n(){let n=document.createElement("div");Object.entries(p.containerProps||{}).forEach(([e,t])=>{"className"===e?n.classList.add(...t.split(" ")):"style"===e?"object"==typeof t?Object.assign(n.style,t):n.style.cssText=t:"function"==typeof t?n.addEventListener(e.replace(/^on/,"").toLowerCase(),t):n[e]=t}),n.id=a;var e=document.createElement("style");document.head.appendChild(e),e.sheet.insertRule(`
|
||||
`,e}function d(){var e,t;window.innerWidth<=640||(e=document.getElementById(h),t=document.getElementById(m),e&&t&&((t=t.getBoundingClientRect()).top-5>e.clientHeight?(e.style.bottom="0px",e.style.top="unset"):(e.style.bottom="unset",e.style.top="0px"),t.right>e.clientWidth?(e.style.right="0",e.style.left="unset"):(e.style.right="unset",e.style.left=0)))}function n(){let n=document.createElement("div");Object.entries(p.containerProps||{}).forEach(([e,t])=>{"className"===e?n.classList.add(...t.split(" ")):"style"===e?"object"==typeof t?Object.assign(n.style,t):n.style.cssText=t:"function"==typeof t?n.addEventListener(e.replace(/^on/,"").toLowerCase(),t):n[e]=t}),n.id=m;var e=document.createElement("style"),e=(document.head.appendChild(e),e.sheet.insertRule(`
|
||||
#${n.id} {
|
||||
position: fixed;
|
||||
bottom: var(--${n.id}-bottom, 1rem);
|
||||
@ -33,4 +29,10 @@
|
||||
cursor: pointer;
|
||||
z-index: 2147483647;
|
||||
}
|
||||
`);let t=document.createElement("div");if(t.style.cssText="position: relative; display: flex; align-items: center; justify-content: center; width: 100%; height: 100%; z-index: 2147483647;",t.innerHTML=u.open,n.appendChild(t),document.body.appendChild(n),n.addEventListener("click",function(){var e=document.getElementById(c);e?(e.style.display="none"===e.style.display?"block":"none",t.innerHTML="none"===e.style.display?u.open:u.close,"none"===e.style.display?document.removeEventListener("keydown",d):document.addEventListener("keydown",d),r()):(n.prepend(i()),r(),this.title="Exit (ESC)",t.innerHTML=u.close,document.addEventListener("keydown",d))}),p.draggable){var s=n;var l=p.dragAxis||"both";let i=!1,r,d;s.addEventListener("mousedown",function(e){i=!0,r=e.clientX-s.offsetLeft,d=e.clientY-s.offsetTop}),document.addEventListener("mousemove",function(e){var t,n,o;i&&(s.style.transition="none",s.style.cursor="grabbing",(t=document.getElementById(c))&&(t.style.display="none",s.querySelector("div").innerHTML=u.open),t=e.clientX-r,e=window.innerHeight-e.clientY-d,o=s.getBoundingClientRect(),n=window.innerWidth-o.width,o=window.innerHeight-o.height,"x"!==l&&"both"!==l||s.style.setProperty(`--${a}-left`,Math.max(0,Math.min(t,n))+"px"),"y"!==l&&"both"!==l||s.style.setProperty(`--${a}-bottom`,Math.max(0,Math.min(e,o))+"px"))}),document.addEventListener("mouseup",function(){i=!1,s.style.transition="",s.style.cursor="pointer"})}}e.style.display="none",document.body.appendChild(e),2048<t.length&&console.error("The URL is too long, please reduce the number of inputs to prevent the bot from failing to load"),document.getElementById(a)||n()}else console.error(t+" is empty or token is not provided")}function d(e){var t;"Escape"===e.key&&(e=document.getElementById(c),t=document.getElementById(a),e)&&"none"!==e.style.display&&(e.style.display="none",t.querySelector("div").innerHTML=u.open)}document.addEventListener("keydown",d),p?.dynamicScript?e():document.body.onload=e})();
|
||||
`),document.createElement("div"));function t(){var e;u||((e=document.getElementById(h))?(e.style.display="none"===e.style.display?"block":"none","none"===e.style.display?y("open"):y("close"),"none"===e.style.display?document.removeEventListener("keydown",l):document.addEventListener("keydown",l),d()):(n.appendChild(s()),d(),this.title="Exit (ESC)",y("close"),document.addEventListener("keydown",l)))}if(e.style.cssText="position: relative; display: flex; align-items: center; justify-content: center; width: 100%; height: 100%; z-index: 2147483647;",e.innerHTML=`<svg id="openIcon" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.7586 2L16.2412 2C17.0462 1.99999 17.7105 1.99998 18.2517 2.04419C18.8138 2.09012 19.3305 2.18868 19.8159 2.43598C20.5685 2.81947 21.1804 3.43139 21.5639 4.18404C21.8112 4.66937 21.9098 5.18608 21.9557 5.74818C21.9999 6.28937 21.9999 6.95373 21.9999 7.7587L22 14.1376C22.0004 14.933 22.0007 15.5236 21.8636 16.0353C21.4937 17.4156 20.4155 18.4938 19.0352 18.8637C18.7277 18.9461 18.3917 18.9789 17.9999 18.9918L17.9999 20.371C18 20.6062 18 20.846 17.9822 21.0425C17.9651 21.2305 17.9199 21.5852 17.6722 21.8955C17.3872 22.2525 16.9551 22.4602 16.4983 22.4597C16.1013 22.4593 15.7961 22.273 15.6386 22.1689C15.474 22.06 15.2868 21.9102 15.1031 21.7632L12.69 19.8327C12.1714 19.4178 12.0174 19.3007 11.8575 19.219C11.697 19.137 11.5262 19.0771 11.3496 19.0408C11.1737 19.0047 10.9803 19 10.3162 19H7.75858C6.95362 19 6.28927 19 5.74808 18.9558C5.18598 18.9099 4.66928 18.8113 4.18394 18.564C3.43129 18.1805 2.81937 17.5686 2.43588 16.816C2.18859 16.3306 2.09002 15.8139 2.0441 15.2518C1.99988 14.7106 1.99989 14.0463 1.9999 13.2413V7.75868C1.99989 6.95372 1.99988 6.28936 2.0441 5.74818C2.09002 5.18608 2.18859 4.66937 2.43588 4.18404C2.81937 3.43139 3.43129 2.81947 4.18394 2.43598C4.66928 2.18868 5.18598 2.09012 5.74808 2.04419C6.28927 1.99998 6.95364 1.99999 7.7586 2ZM10.5073 7.5C10.5073 6.67157 9.83575 6 9.00732 6C8.1789 6 7.50732 6.67157 7.50732 7.5C7.50732 8.32843 8.1789 9 9.00732 9C9.83575 9 10.5073 8.32843 10.5073 7.5ZM16.6073 11.7001C16.1669 11.3697 15.5426 11.4577 15.2105 11.8959C15.1488 11.9746 15.081 12.0486 15.0119 12.1207C14.8646 12.2744 14.6432 12.4829 14.3566 12.6913C13.7796 13.111 12.9818 13.5001 12.0073 13.5001C11.0328 13.5001 10.235 13.111 9.65799 12.6913C9.37138 12.4829 9.15004 12.2744 9.00274 12.1207C8.93366 12.0486 8.86581 11.9745 8.80418 11.8959C8.472 11.4577 7.84775 11.3697 7.40732 11.7001C6.96549 12.0314 6.87595 12.6582 7.20732 13.1001C7.20479 13.0968 7.21072 13.1043 7.22094 13.1171C7.24532 13.1478 7.29407 13.2091 7.31068 13.2289C7.36932 13.2987 7.45232 13.3934 7.55877 13.5045C7.77084 13.7258 8.08075 14.0172 8.48165 14.3088C9.27958 14.8891 10.4818 15.5001 12.0073 15.5001C13.5328 15.5001 14.735 14.8891 15.533 14.3088C15.9339 14.0172 16.2438 13.7258 16.4559 13.5045C16.5623 13.3934 16.6453 13.2987 16.704 13.2289C16.7333 13.1939 16.7567 13.165 16.7739 13.1432C17.1193 12.6969 17.0729 12.0493 16.6073 11.7001ZM15.0073 6C15.8358 6 16.5073 6.67157 16.5073 7.5C16.5073 8.32843 15.8358 9 15.0073 9C14.1789 9 13.5073 8.32843 13.5073 7.5C13.5073 6.67157 14.1789 6 15.0073 6Z" fill="white"/>
|
||||
</svg>
|
||||
<svg id="closeIcon" style="display:none" width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M18 18L6 6M6 18L18 6" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
`,n.appendChild(e),document.body.appendChild(n),n.addEventListener("click",t),n.addEventListener("touchend",t),p.draggable){var r=n;var a=p.dragAxis||"both";let s,d,t,l;function o(e){u=!1,"touchstart"===e.type?(s=e.touches[0].clientX-r.offsetLeft,d=e.touches[0].clientY-r.offsetTop,t=e.touches[0].clientX,l=e.touches[0].clientY):(s=e.clientX-r.offsetLeft,d=e.clientY-r.offsetTop),document.addEventListener("mousemove",i),document.addEventListener("touchmove",i,{passive:!1}),document.addEventListener("mouseup",c),document.addEventListener("touchend",c),e.preventDefault()}function i(n){var o="touchmove"===n.type?n.touches[0]:n,i=o.clientX-t,o=o.clientY-l;if(u=8<Math.abs(i)||8<Math.abs(o)?!0:u){r.style.transition="none",r.style.cursor="grabbing";i=document.getElementById(h);i&&(i.style.display="none",y("open"));let e,t;t="touchmove"===n.type?(e=n.touches[0].clientX-s,window.innerHeight-n.touches[0].clientY-d):(e=n.clientX-s,window.innerHeight-n.clientY-d);o=r.getBoundingClientRect(),i=window.innerWidth-o.width,n=window.innerHeight-o.height;"x"!==a&&"both"!==a||r.style.setProperty(`--${m}-left`,Math.max(0,Math.min(e,i))+"px"),"y"!==a&&"both"!==a||r.style.setProperty(`--${m}-bottom`,Math.max(0,Math.min(t,n))+"px")}}function c(){setTimeout(()=>{u=!1},0),r.style.transition="",r.style.cursor="pointer",document.removeEventListener("mousemove",i),document.removeEventListener("touchmove",i),document.removeEventListener("mouseup",c),document.removeEventListener("touchend",c)}r.addEventListener("mousedown",o),r.addEventListener("touchstart",o)}}e.style.display="none",document.body.appendChild(e),2048<t.length&&console.error("The URL is too long, please reduce the number of inputs to prevent the bot from failing to load"),document.getElementById(m)||n()}else console.error(t+" is empty or token is not provided")}function y(e="open"){"open"===e?(document.getElementById("openIcon").style.display="block",document.getElementById("closeIcon").style.display="none"):(document.getElementById("openIcon").style.display="none",document.getElementById("closeIcon").style.display="block")}function l(e){"Escape"===e.key&&(e=document.getElementById(h))&&"none"!==e.style.display&&(e.style.display="none",y("open"))}document.addEventListener("keydown",l),p?.dynamicScript?e():document.body.onload=e})();
@ -1,4 +1,5 @@
import { formatFileSize, formatNumber, formatTime } from './format'
import { downloadFile, formatFileSize, formatNumber, formatTime } from './format'

describe('formatNumber', () => {
  test('should correctly format integers', () => {
    expect(formatNumber(1234567)).toBe('1,234,567')
@ -59,3 +60,45 @@ describe('formatTime', () => {
    expect(formatTime(7200)).toBe('2.00 h')
  })
})

describe('downloadFile', () => {
  test('should create a link and trigger a download correctly', () => {
    // Mock data
    const blob = new Blob(['test content'], { type: 'text/plain' })
    const fileName = 'test-file.txt'
    const mockUrl = 'blob:mockUrl'

    // Mock URL.createObjectURL
    const createObjectURLMock = jest.fn().mockReturnValue(mockUrl)
    const revokeObjectURLMock = jest.fn()
    Object.defineProperty(window.URL, 'createObjectURL', { value: createObjectURLMock })
    Object.defineProperty(window.URL, 'revokeObjectURL', { value: revokeObjectURLMock })

    // Mock createElement and appendChild
    const mockLink = {
      href: '',
      download: '',
      click: jest.fn(),
      remove: jest.fn(),
    }
    const createElementMock = jest.spyOn(document, 'createElement').mockReturnValue(mockLink as any)
    const appendChildMock = jest.spyOn(document.body, 'appendChild').mockImplementation((node: Node) => {
      return node
    })

    // Call the function
    downloadFile({ data: blob, fileName })

    // Assertions
    expect(createObjectURLMock).toHaveBeenCalledWith(blob)
    expect(createElementMock).toHaveBeenCalledWith('a')
    expect(mockLink.href).toBe(mockUrl)
    expect(mockLink.download).toBe(fileName)
    expect(appendChildMock).toHaveBeenCalledWith(mockLink)
    expect(mockLink.click).toHaveBeenCalled()
    expect(mockLink.remove).toHaveBeenCalled()
    expect(revokeObjectURLMock).toHaveBeenCalledWith(mockUrl)

    // Clean up mocks
    jest.restoreAllMocks()
  })
})
295
web/utils/index.spec.ts
Normal file
@ -0,0 +1,295 @@
import {
  asyncRunSafe,
  canFindTool,
  correctModelProvider,
  correctToolProvider,
  fetchWithRetry,
  getPurifyHref,
  getTextWidthWithCanvas,
  randomString,
  removeSpecificQueryParam,
  sleep,
} from './index'

describe('sleep', () => {
  it('should wait for the specified time', async () => {
    const timeVariance = 10
    const sleepTime = 100
    const start = Date.now()
    await sleep(sleepTime)
    const elapsed = Date.now() - start
    expect(elapsed).toBeGreaterThanOrEqual(sleepTime - timeVariance)
  })
})

describe('asyncRunSafe', () => {
  it('should return [null, result] when promise resolves', async () => {
    const result = await asyncRunSafe(Promise.resolve('success'))
    expect(result).toEqual([null, 'success'])
  })

  it('should return [error] when promise rejects', async () => {
    const error = new Error('test error')
    const result = await asyncRunSafe(Promise.reject(error))
    expect(result).toEqual([error])
  })

  it('should return [Error] when promise rejects with undefined', async () => {
    // eslint-disable-next-line prefer-promise-reject-errors
    const result = await asyncRunSafe(Promise.reject())
    expect(result[0]).toBeInstanceOf(Error)
    expect(result[0]?.message).toBe('unknown error')
  })
})

describe('getTextWidthWithCanvas', () => {
  let originalCreateElement: any

  beforeEach(() => {
    // Store original implementation
    originalCreateElement = document.createElement

    // Mock canvas and context
    const measureTextMock = jest.fn().mockReturnValue({ width: 100 })
    const getContextMock = jest.fn().mockReturnValue({
      measureText: measureTextMock,
      font: '',
    })

    document.createElement = jest.fn().mockReturnValue({
      getContext: getContextMock,
    })
  })

  afterEach(() => {
    // Restore original implementation
    document.createElement = originalCreateElement
  })

  it('should return the width of text', () => {
    const width = getTextWidthWithCanvas('test text')
    expect(width).toBe(100)
  })

  it('should return 0 if context is not available', () => {
    // Override mock for this test
    document.createElement = jest.fn().mockReturnValue({
      getContext: () => null,
    })

    const width = getTextWidthWithCanvas('test text')
    expect(width).toBe(0)
  })
})

describe('randomString', () => {
  it('should generate string of specified length', () => {
    const result = randomString(10)
    expect(result.length).toBe(10)
  })

  it('should only contain valid characters', () => {
    const result = randomString(100)
    const validChars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_'
    for (const char of result)
      expect(validChars).toContain(char)
  })

  it('should generate different strings on consecutive calls', () => {
    const result1 = randomString(20)
    const result2 = randomString(20)
    expect(result1).not.toEqual(result2)
  })
})

describe('getPurifyHref', () => {
  it('should return empty string for falsy input', () => {
    expect(getPurifyHref('')).toBe('')
    expect(getPurifyHref(undefined as any)).toBe('')
  })

  it('should escape HTML characters', () => {
    expect(getPurifyHref('<script>alert("xss")</script>')).not.toContain('<script>')
  })
})

describe('fetchWithRetry', () => {
  it('should return successfully on first try', async () => {
    const successData = { status: 'success' }
    const promise = Promise.resolve(successData)

    const result = await fetchWithRetry(promise)

    expect(result).toEqual([null, successData])
  })

  // it('should retry and succeed on second attempt', async () => {
  //   let attemptCount = 0
  //   const mockFn = new Promise((resolve, reject) => {
  //     attemptCount++
  //     if (attemptCount === 1)
  //       reject(new Error('First attempt failed'))
  //     else
  //       resolve('success')
  //   })

  //   const result = await fetchWithRetry(mockFn)

  //   expect(result).toEqual([null, 'success'])
  //   expect(attemptCount).toBe(2)
  // })

  // it('should stop after max retries and return last error', async () => {
  //   const testError = new Error('Test error')
  //   const promise = Promise.reject(testError)

  //   const result = await fetchWithRetry(promise, 2)

  //   expect(result).toEqual([testError])
  // })

  // it('should handle non-Error rejection with custom error', async () => {
  //   const stringError = 'string error message'
  //   const promise = Promise.reject(stringError)

  //   const result = await fetchWithRetry(promise, 0)

  //   expect(result[0]).toBeInstanceOf(Error)
  //   expect(result[0]?.message).toBe('unknown error')
  // })

  // it('should use default 3 retries when retries parameter is not provided', async () => {
  //   let attempts = 0
  //   const mockFn = () => new Promise((resolve, reject) => {
  //     attempts++
  //     reject(new Error(`Attempt ${attempts} failed`))
  //   })

  //   await fetchWithRetry(mockFn())

  //   expect(attempts).toBe(4) // Initial attempt + 3 retries
  // })
})

describe('correctModelProvider', () => {
  it('should return empty string for falsy input', () => {
    expect(correctModelProvider('')).toBe('')
  })

  it('should return the provider if it already contains a slash', () => {
    expect(correctModelProvider('company/model')).toBe('company/model')
  })

  it('should format google provider correctly', () => {
    expect(correctModelProvider('google')).toBe('langgenius/gemini/google')
  })

  it('should format standard providers correctly', () => {
    expect(correctModelProvider('openai')).toBe('langgenius/openai/openai')
  })
})

describe('correctToolProvider', () => {
  it('should return empty string for falsy input', () => {
    expect(correctToolProvider('')).toBe('')
  })

  it('should return the provider if toolInCollectionList is true', () => {
    expect(correctToolProvider('any-provider', true)).toBe('any-provider')
  })

  it('should return the provider if it already contains a slash', () => {
    expect(correctToolProvider('company/tool')).toBe('company/tool')
  })

  it('should format special tool providers correctly', () => {
    expect(correctToolProvider('stepfun')).toBe('langgenius/stepfun_tool/stepfun')
    expect(correctToolProvider('jina')).toBe('langgenius/jina_tool/jina')
  })

  it('should format standard tool providers correctly', () => {
    expect(correctToolProvider('standard')).toBe('langgenius/standard/standard')
  })
})

describe('canFindTool', () => {
  it('should match when IDs are identical', () => {
    expect(canFindTool('tool-id', 'tool-id')).toBe(true)
  })

  it('should match when provider ID is formatted with standard pattern', () => {
    expect(canFindTool('langgenius/tool-id/tool-id', 'tool-id')).toBe(true)
  })

  it('should match when provider ID is formatted with tool pattern', () => {
    expect(canFindTool('langgenius/tool-id_tool/tool-id', 'tool-id')).toBe(true)
  })

  it('should not match when IDs are completely different', () => {
    expect(canFindTool('provider-a', 'tool-b')).toBe(false)
  })
})

describe('removeSpecificQueryParam', () => {
  let originalLocation: any
  let originalReplaceState: any

  beforeEach(() => {
    originalLocation = window.location
    originalReplaceState = window.history.replaceState

    const mockUrl = new URL('https://example.com?param1=value1&param2=value2&param3=value3')

    // Mock window.location using defineProperty to handle URL properly
    delete (window as any).location
    Object.defineProperty(window, 'location', {
      writable: true,
      value: {
        ...originalLocation,
        href: mockUrl.href,
        search: mockUrl.search,
        toString: () => mockUrl.toString(),
      },
    })

    window.history.replaceState = jest.fn()
  })

  afterEach(() => {
    Object.defineProperty(window, 'location', {
      writable: true,
      value: originalLocation,
    })
    window.history.replaceState = originalReplaceState
  })

  it('should remove a single query parameter', () => {
    removeSpecificQueryParam('param2')
    expect(window.history.replaceState).toHaveBeenCalledTimes(1)
    const replaceStateCall = (window.history.replaceState as jest.Mock).mock.calls[0]
    expect(replaceStateCall[0]).toBe(null)
    expect(replaceStateCall[1]).toBe('')
    expect(replaceStateCall[2]).toMatch(/param1=value1/)
    expect(replaceStateCall[2]).toMatch(/param3=value3/)
    expect(replaceStateCall[2]).not.toMatch(/param2=value2/)
  })

  it('should remove multiple query parameters', () => {
    removeSpecificQueryParam(['param1', 'param3'])
    expect(window.history.replaceState).toHaveBeenCalledTimes(1)
    const replaceStateCall = (window.history.replaceState as jest.Mock).mock.calls[0]
    expect(replaceStateCall[2]).toMatch(/param2=value2/)
    expect(replaceStateCall[2]).not.toMatch(/param1=value1/)
    expect(replaceStateCall[2]).not.toMatch(/param3=value3/)
  })

  it('should handle non-existent parameters gracefully', () => {
    removeSpecificQueryParam('nonexistent')

    expect(window.history.replaceState).toHaveBeenCalledTimes(1)
    const replaceStateCall = (window.history.replaceState as jest.Mock).mock.calls[0]
    expect(replaceStateCall[2]).toMatch(/param1=value1/)
    expect(replaceStateCall[2]).toMatch(/param2=value2/)
    expect(replaceStateCall[2]).toMatch(/param3=value3/)
  })
})