commit 6b6750b9ad
Merge branch 'refs/heads/main' into feat/workflow-parallel-support

# Conflicts:
#	api/services/app_generate_service.py
@@ -55,7 +55,7 @@ RUN apt-get update \
     && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
     && apt-get update \
     # For Security
-    && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.2-1 libldap-2.5-0=2.5.18+dfsg-2 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \
+    && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.2-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \
    && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*
@@ -1,6 +1,6 @@
 from typing import Optional
 
-from pydantic import AliasChoices, Field, NegativeInt, NonNegativeInt, PositiveInt, computed_field
+from pydantic import AliasChoices, Field, HttpUrl, NegativeInt, NonNegativeInt, PositiveInt, computed_field
 from pydantic_settings import BaseSettings
 
 from configs.feature.hosted_service import HostedServiceConfig
@@ -45,7 +45,7 @@ class CodeExecutionSandboxConfig(BaseSettings):
     Code Execution Sandbox configs
     """
 
-    CODE_EXECUTION_ENDPOINT: str = Field(
+    CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
         description="endpoint URL of code execution service",
         default="http://sandbox:8194",
     )
@@ -55,6 +55,21 @@ class CodeExecutionSandboxConfig(BaseSettings):
         default="dify-sandbox",
     )
 
+    CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
+        description="connect timeout in seconds for code execution request",
+        default=10.0,
+    )
+
+    CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
+        description="read timeout in seconds for code execution request",
+        default=60.0,
+    )
+
+    CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
+        description="write timeout in seconds for code execution request",
+        default=10.0,
+    )
+
     CODE_MAX_NUMBER: PositiveInt = Field(
         description="max depth for code execution",
         default=9223372036854775807,
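For reference, a minimal sketch (not part of the diff) of how these pydantic-settings fields behave: values are read from same-named environment variables, fall back to the declared defaults, and the new HttpUrl annotation rejects a malformed endpoint at startup rather than at request time. The class here is a simplified stand-in for the config above.

import os

from pydantic import Field, HttpUrl, ValidationError
from pydantic_settings import BaseSettings


class SandboxConfig(BaseSettings):
    CODE_EXECUTION_ENDPOINT: HttpUrl = Field(default="http://sandbox:8194")
    CODE_EXECUTION_CONNECT_TIMEOUT: float = Field(default=10.0)


os.environ["CODE_EXECUTION_ENDPOINT"] = "not-a-url"
try:
    SandboxConfig()  # env value fails URL validation
except ValidationError as e:
    print("rejected at startup:", e.errors()[0]["type"])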
@@ -13,6 +13,7 @@ from configs.middleware.storage.oci_storage_config import OCIStorageConfig
 from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
 from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
 from configs.middleware.vdb.chroma_config import ChromaConfig
+from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
 from configs.middleware.vdb.milvus_config import MilvusConfig
 from configs.middleware.vdb.myscale_config import MyScaleConfig
 from configs.middleware.vdb.opensearch_config import OpenSearchConfig
@@ -200,5 +201,6 @@ class MiddlewareConfig(
     TencentVectorDBConfig,
     TiDBVectorConfig,
     WeaviateConfig,
+    ElasticsearchConfig,
 ):
     pass
api/configs/middleware/vdb/elasticsearch_config.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class ElasticsearchConfig(BaseSettings):
+    """
+    Elasticsearch configs
+    """
+
+    ELASTICSEARCH_HOST: Optional[str] = Field(
+        description="Elasticsearch host",
+        default="127.0.0.1",
+    )
+
+    ELASTICSEARCH_PORT: PositiveInt = Field(
+        description="Elasticsearch port",
+        default=9200,
+    )
+
+    ELASTICSEARCH_USERNAME: Optional[str] = Field(
+        description="Elasticsearch username",
+        default="elastic",
+    )
+
+    ELASTICSEARCH_PASSWORD: Optional[str] = Field(
+        description="Elasticsearch password",
+        default="elastic",
+    )
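Hypothetical wiring, not part of the commit: constructing a client from the new settings, assuming the official `elasticsearch` Python package (8.x) is available.

from elasticsearch import Elasticsearch

config = ElasticsearchConfig()  # reads ELASTICSEARCH_* env vars, falls back to defaults
client = Elasticsearch(
    hosts=[f"http://{config.ELASTICSEARCH_HOST}:{config.ELASTICSEARCH_PORT}"],
    basic_auth=(config.ELASTICSEARCH_USERNAME, config.ELASTICSEARCH_PASSWORD),
)
print(client.ping())  # True if the cluster is reachable with these credentials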
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
 
     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="0.7.1",
+        default="0.7.2",
     )
 
     COMMIT_SHA: str = Field(
@@ -17,6 +17,7 @@ from controllers.console.app.error import (
 from controllers.console.app.wraps import get_app_model
 from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
+from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.errors.error import (
@@ -31,6 +32,7 @@ from libs.helper import uuid_value
 from libs.login import login_required
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
+from services.errors.llm import InvokeRateLimitError
 
 
 # define completion message api for user
@@ -135,6 +137,8 @@ class ChatMessageApi(Resource):
             raise ProviderQuotaExceededError()
         except ModelCurrentlyNotSupportError:
             raise ProviderModelCurrentlyNotSupportError()
+        except InvokeRateLimitError as ex:
+            raise InvokeRateLimitHttpError(ex.description)
         except InvokeError as e:
             raise CompletionRequestError(e.description)
         except (ValueError, AppInvokeQuotaExceededError) as e:
@@ -119,3 +119,11 @@ class TracingConfigCheckError(BaseHTTPException):
     error_code = "trace_config_check_error"
     description = "Invalid Credentials."
     code = 400
+
+
+class InvokeRateLimitError(BaseHTTPException):
+    """Raised when the Invoke returns rate limit error."""
+
+    error_code = "rate_limit_error"
+    description = "Rate Limit Error"
+    code = 429
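A self-contained sketch of the pattern this commit applies in both the console and web controllers: a service-layer rate-limit exception is caught and re-raised as the HTTP-layer error above, which the framework renders as a 429 response. The BaseHTTPException below is a simplified stand-in for Dify's.

class BaseHTTPException(Exception):
    error_code = "unknown"
    description = "Unknown"
    code = 500

    def __init__(self, description=None):
        super().__init__(description or self.description)


class ServiceInvokeRateLimitError(Exception):  # stand-in for services.errors.llm.InvokeRateLimitError
    description = "Rate Limit Error"


class InvokeRateLimitHttpError(BaseHTTPException):  # stand-in for the class added above
    error_code = "rate_limit_error"
    description = "Rate Limit Error"
    code = 429


def call_service():
    raise ServiceInvokeRateLimitError()


try:
    call_service()
except ServiceInvokeRateLimitError as ex:
    # the controller translates it; the framework maps .code to HTTP 429
    raise InvokeRateLimitHttpError(ex.description)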
@@ -32,6 +32,8 @@ class ModelConfigResource(Resource):
 
         new_app_model_config = AppModelConfig(
             app_id=app_model.id,
+            created_by=current_user.id,
+            updated_by=current_user.id,
         )
         new_app_model_config = new_app_model_config.from_model_config_dict(model_configuration)
 
@@ -1,3 +1,5 @@
+from datetime import datetime, timezone
+
 from flask_login import current_user
 from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, NotFound
@@ -71,6 +73,8 @@ class AppSite(Resource):
             if value is not None:
                 setattr(site, attr_name, value)
 
+        site.updated_by = current_user.id
+        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
         return site
@@ -93,6 +97,8 @@ class AppSiteAccessTokenReset(Resource):
             raise NotFound
 
         site.code = Site.generate_code(16)
+        site.updated_by = current_user.id
+        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
         return site
@@ -16,6 +16,60 @@ from libs.login import login_required
 from models.model import AppMode
 
 
+class DailyMessageStatistic(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @get_app_model
+    def get(self, app_model):
+        account = current_user
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        args = parser.parse_args()
+
+        sql_query = """
+        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(*) AS message_count
+        FROM messages where app_id = :app_id
+        """
+        arg_dict = {"tz": account.timezone, "app_id": app_model.id}
+
+        timezone = pytz.timezone(account.timezone)
+        utc_timezone = pytz.utc
+
+        if args["start"]:
+            start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
+            start_datetime = start_datetime.replace(second=0)
+
+            start_datetime_timezone = timezone.localize(start_datetime)
+            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
+
+            sql_query += " and created_at >= :start"
+            arg_dict["start"] = start_datetime_utc
+
+        if args["end"]:
+            end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
+            end_datetime = end_datetime.replace(second=0)
+
+            end_datetime_timezone = timezone.localize(end_datetime)
+            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
+
+            sql_query += " and created_at < :end"
+            arg_dict["end"] = end_datetime_utc
+
+        sql_query += " GROUP BY date order by date"
+
+        response_data = []
+
+        with db.engine.begin() as conn:
+            rs = conn.execute(db.text(sql_query), arg_dict)
+            for i in rs:
+                response_data.append({"date": str(i.date), "message_count": i.message_count})
+
+        return jsonify({"data": response_data})
+
+
 class DailyConversationStatistic(Resource):
     @setup_required
     @login_required
@@ -419,6 +473,7 @@ WHERE app_id = :app_id"""
         return jsonify({"data": response_data})
 
 
+api.add_resource(DailyMessageStatistic, "/apps/<uuid:app_id>/statistics/daily-messages")
 api.add_resource(DailyConversationStatistic, "/apps/<uuid:app_id>/statistics/daily-conversations")
 api.add_resource(DailyTerminalsStatistic, "/apps/<uuid:app_id>/statistics/daily-end-users")
 api.add_resource(DailyTokenCostStatistic, "/apps/<uuid:app_id>/statistics/token-costs")
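For reference, a hypothetical client call against the new endpoint; the host, token, and app id are placeholders, and the start/end values use the "%Y-%m-%d %H:%M" format the parser above expects.

import requests

resp = requests.get(
    "https://<console-host>/console/api/apps/<app_id>/statistics/daily-messages",  # placeholder URL
    params={"start": "2024-08-01 00:00", "end": "2024-08-07 00:00"},
    headers={"Authorization": "Bearer <console-token>"},  # placeholder token
)
print(resp.json())  # e.g. {"data": [{"date": "2024-08-01", "message_count": 42}, ...]}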
@@ -599,6 +599,7 @@ class DocumentDetailApi(DocumentResource):
                 "hit_count": document.hit_count,
                 "display_status": document.display_status,
                 "doc_form": document.doc_form,
+                "doc_language": document.doc_language,
             }
         else:
             process_rules = DatasetService.get_process_rules(dataset_id)
@@ -631,6 +632,7 @@ class DocumentDetailApi(DocumentResource):
                 "hit_count": document.hit_count,
                 "display_status": document.display_status,
                 "doc_form": document.doc_form,
+                "doc_language": document.doc_language,
             }
 
         return response, 200
@@ -15,6 +15,7 @@ from controllers.web.error import (
     ProviderNotInitializeError,
     ProviderQuotaExceededError,
 )
+from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
 from controllers.web.wraps import WebApiResource
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
@@ -24,6 +25,7 @@ from libs import helper
 from libs.helper import uuid_value
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
+from services.errors.llm import InvokeRateLimitError
 
 
 # define completion api for user
@@ -120,6 +122,8 @@ class ChatApi(WebApiResource):
             raise ProviderQuotaExceededError()
         except ModelCurrentlyNotSupportError:
             raise ProviderModelCurrentlyNotSupportError()
+        except InvokeRateLimitError as ex:
+            raise InvokeRateLimitHttpError(ex.description)
         except InvokeError as e:
             raise CompletionRequestError(e.description)
         except ValueError as e:
@@ -125,3 +125,11 @@ class WebSSOAuthRequiredError(BaseHTTPException):
     error_code = "web_sso_auth_required"
     description = "Web SSO authentication required."
     code = 401
+
+
+class InvokeRateLimitError(BaseHTTPException):
+    """Raised when the Invoke returns rate limit error."""
+
+    error_code = "rate_limit_error"
+    description = "Rate Limit Error"
+    code = 429
@@ -43,3 +43,8 @@ class ModelCurrentlyNotSupportError(Exception):
     Custom exception raised when the model not support
     """
     description = "Model Currently Not Support"
+
+
+class InvokeRateLimitError(Exception):
+    """Raised when the Invoke returns rate limit error."""
+    description = "Rate Limit Error"
@@ -15,12 +15,6 @@ from core.helper.code_executor.template_transformer import TemplateTransformer
 
 logger = logging.getLogger(__name__)
 
-# Code Executor
-CODE_EXECUTION_ENDPOINT = dify_config.CODE_EXECUTION_ENDPOINT
-CODE_EXECUTION_API_KEY = dify_config.CODE_EXECUTION_API_KEY
-
-CODE_EXECUTION_TIMEOUT = Timeout(connect=10, write=10, read=60, pool=None)
-
 class CodeExecutionException(Exception):
     pass
 
@@ -71,10 +65,10 @@ class CodeExecutor:
         :param code: code
         :return:
         """
-        url = URL(CODE_EXECUTION_ENDPOINT) / 'v1' / 'sandbox' / 'run'
+        url = URL(str(dify_config.CODE_EXECUTION_ENDPOINT)) / 'v1' / 'sandbox' / 'run'
 
         headers = {
-            'X-Api-Key': CODE_EXECUTION_API_KEY
+            'X-Api-Key': dify_config.CODE_EXECUTION_API_KEY
         }
 
         data = {
@@ -85,7 +79,12 @@ class CodeExecutor:
         }
 
         try:
-            response = post(str(url), json=data, headers=headers, timeout=CODE_EXECUTION_TIMEOUT)
+            response = post(str(url), json=data, headers=headers,
+                            timeout=Timeout(
+                                connect=dify_config.CODE_EXECUTION_CONNECT_TIMEOUT,
+                                read=dify_config.CODE_EXECUTION_READ_TIMEOUT,
+                                write=dify_config.CODE_EXECUTION_WRITE_TIMEOUT,
+                                pool=None))
             if response.status_code == 503:
                 raise CodeExecutionException('Code execution service is unavailable')
             elif response.status_code != 200:
@@ -133,4 +132,3 @@ class CodeExecutor:
                 raise e
 
         return template_transformer.transform_response(response)
-
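For context, a small sketch of the per-phase timeout semantics this hunk adopts, assuming `Timeout` and `post` come from httpx as the surrounding imports suggest: connect, read, and write each get their own budget instead of sharing one hardcoded constant.

import httpx

# each phase is bounded independently; pool=None disables the pool-acquire timeout
timeout = httpx.Timeout(connect=10.0, read=60.0, write=10.0, pool=None)
# resp = httpx.post("http://sandbox:8194/v1/sandbox/run", json={}, timeout=timeout)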
@@ -58,7 +58,8 @@ class HostingConfiguration:
 
         self.moderation_config = self.init_moderation_config(config)
 
-    def init_azure_openai(self, app_config: Config) -> HostingProvider:
+    @staticmethod
+    def init_azure_openai(app_config: Config) -> HostingProvider:
         quota_unit = QuotaUnit.TIMES
         if app_config.get("HOSTED_AZURE_OPENAI_ENABLED"):
             credentials = {
@@ -145,7 +146,8 @@ class HostingConfiguration:
             quota_unit=quota_unit,
         )
 
-    def init_anthropic(self, app_config: Config) -> HostingProvider:
+    @staticmethod
+    def init_anthropic(app_config: Config) -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
         quotas = []
 
@@ -180,7 +182,8 @@ class HostingConfiguration:
             quota_unit=quota_unit,
         )
 
-    def init_minimax(self, app_config: Config) -> HostingProvider:
+    @staticmethod
+    def init_minimax(app_config: Config) -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
         if app_config.get("HOSTED_MINIMAX_ENABLED"):
             quotas = [FreeHostingQuota()]
@@ -197,7 +200,8 @@ class HostingConfiguration:
             quota_unit=quota_unit,
         )
 
-    def init_spark(self, app_config: Config) -> HostingProvider:
+    @staticmethod
+    def init_spark(app_config: Config) -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
         if app_config.get("HOSTED_SPARK_ENABLED"):
             quotas = [FreeHostingQuota()]
@@ -214,7 +218,8 @@ class HostingConfiguration:
             quota_unit=quota_unit,
         )
 
-    def init_zhipuai(self, app_config: Config) -> HostingProvider:
+    @staticmethod
+    def init_zhipuai(app_config: Config) -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
         if app_config.get("HOSTED_ZHIPUAI_ENABLED"):
             quotas = [FreeHostingQuota()]
@@ -231,7 +236,8 @@ class HostingConfiguration:
             quota_unit=quota_unit,
        )
 
-    def init_moderation_config(self, app_config: Config) -> HostedModerationConfig:
+    @staticmethod
+    def init_moderation_config(app_config: Config) -> HostedModerationConfig:
         if app_config.get("HOSTED_MODERATION_ENABLED") \
                 and app_config.get("HOSTED_MODERATION_PROVIDERS"):
             return HostedModerationConfig(
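The recurring refactor in this commit (here and in the indexing-runner, model-manager, and provider-manager hunks below): methods that never touch `self` become @staticmethod. A minimal sketch with a simplified stand-in signature shows that existing `self.init_anthropic(cfg)`-style call sites keep working, while the method can now also be called without an instance.

class HostingConfiguration:
    @staticmethod
    def init_anthropic(app_config: dict) -> str:  # simplified stand-in types
        return "anthropic" if app_config.get("HOSTED_ANTHROPIC_ENABLED") else "disabled"

print(HostingConfiguration.init_anthropic({"HOSTED_ANTHROPIC_ENABLED": True}))  # "anthropic"
print(HostingConfiguration().init_anthropic({}))  # instance call still works: "disabled"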
@@ -411,7 +411,8 @@ class IndexingRunner:
 
         return text_docs
 
-    def filter_string(self, text):
+    @staticmethod
+    def filter_string(text):
         text = re.sub(r'<\|', '<', text)
         text = re.sub(r'\|>', '>', text)
         text = re.sub(r'[\x00-\x08\x0B\x0C\x0E-\x1F\x7F\xEF\xBF\xBE]', '', text)
@@ -419,7 +420,8 @@ class IndexingRunner:
         text = re.sub('\uFFFE', '', text)
         return text
 
-    def _get_splitter(self, processing_rule: DatasetProcessRule,
+    @staticmethod
+    def _get_splitter(processing_rule: DatasetProcessRule,
                       embedding_model_instance: Optional[ModelInstance]) -> TextSplitter:
         """
         Get the NodeParser object according to the processing rule.
@@ -611,7 +613,8 @@ class IndexingRunner:
 
         return all_documents
 
-    def _document_clean(self, text: str, processing_rule: DatasetProcessRule) -> str:
+    @staticmethod
+    def _document_clean(text: str, processing_rule: DatasetProcessRule) -> str:
         """
         Clean the document text according to the processing rules.
         """
@@ -640,7 +643,8 @@ class IndexingRunner:
 
         return text
 
-    def format_split_text(self, text):
+    @staticmethod
+    def format_split_text(text):
         regex = r"Q\d+:\s*(.*?)\s*A\d+:\s*([\s\S]*?)(?=Q\d+:|$)"
         matches = re.findall(regex, text, re.UNICODE)
 
@@ -704,7 +708,8 @@ class IndexingRunner:
             }
         )
 
-    def _process_keyword_index(self, flask_app, dataset_id, document_id, documents):
+    @staticmethod
+    def _process_keyword_index(flask_app, dataset_id, document_id, documents):
         with flask_app.app_context():
             dataset = Dataset.query.filter_by(id=dataset_id).first()
             if not dataset:
@@ -715,6 +720,7 @@ class IndexingRunner:
             document_ids = [document.metadata['doc_id'] for document in documents]
             db.session.query(DocumentSegment).filter(
                 DocumentSegment.document_id == document_id,
+                DocumentSegment.dataset_id == dataset_id,
                 DocumentSegment.index_node_id.in_(document_ids),
                 DocumentSegment.status == "indexing"
             ).update({
@@ -746,6 +752,7 @@ class IndexingRunner:
             document_ids = [document.metadata['doc_id'] for document in chunk_documents]
             db.session.query(DocumentSegment).filter(
                 DocumentSegment.document_id == dataset_document.id,
+                DocumentSegment.dataset_id == dataset.id,
                 DocumentSegment.index_node_id.in_(document_ids),
                 DocumentSegment.status == "indexing"
             ).update({
@@ -758,13 +765,15 @@ class IndexingRunner:
 
         return tokens
 
-    def _check_document_paused_status(self, document_id: str):
+    @staticmethod
+    def _check_document_paused_status(document_id: str):
         indexing_cache_key = 'document_{}_is_paused'.format(document_id)
         result = redis_client.get(indexing_cache_key)
         if result:
             raise DocumentIsPausedException()
 
-    def _update_document_index_status(self, document_id: str, after_indexing_status: str,
+    @staticmethod
+    def _update_document_index_status(document_id: str, after_indexing_status: str,
                                       extra_update_params: Optional[dict] = None) -> None:
         """
         Update the document indexing status.
@@ -786,14 +795,16 @@ class IndexingRunner:
         DatasetDocument.query.filter_by(id=document_id).update(update_params)
         db.session.commit()
 
-    def _update_segments_by_document(self, dataset_document_id: str, update_params: dict) -> None:
+    @staticmethod
+    def _update_segments_by_document(dataset_document_id: str, update_params: dict) -> None:
         """
         Update the document segment by document id.
         """
         DocumentSegment.query.filter_by(document_id=dataset_document_id).update(update_params)
         db.session.commit()
 
-    def batch_add_segments(self, segments: list[DocumentSegment], dataset: Dataset):
+    @staticmethod
+    def batch_add_segments(segments: list[DocumentSegment], dataset: Dataset):
         """
         Batch add segments index processing
         """
@@ -44,7 +44,8 @@ class ModelInstance:
             credentials=self.credentials
         )
 
-    def _fetch_credentials_from_bundle(self, provider_model_bundle: ProviderModelBundle, model: str) -> dict:
+    @staticmethod
+    def _fetch_credentials_from_bundle(provider_model_bundle: ProviderModelBundle, model: str) -> dict:
         """
         Fetch credentials from provider model bundle
         :param provider_model_bundle: provider model bundle
@@ -63,7 +64,8 @@ class ModelInstance:
 
         return credentials
 
-    def _get_load_balancing_manager(self, configuration: ProviderConfiguration,
+    @staticmethod
+    def _get_load_balancing_manager(configuration: ProviderConfiguration,
                                     model_type: ModelType,
                                     model: str,
                                     credentials: dict) -> Optional["LBModelManager"]:
@@ -515,8 +517,8 @@ class LBModelManager:
         res = cast(bool, res)
         return res
 
-    @classmethod
-    def get_config_in_cooldown_and_ttl(cls, tenant_id: str,
+    @staticmethod
+    def get_config_in_cooldown_and_ttl(tenant_id: str,
                                        provider: str,
                                        model_type: ModelType,
                                        model: str,
[Binary files not shown: two provider icon images added, 21 KiB and 10 KiB]
@@ -0,0 +1,17 @@
+import logging
+
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+
+logger = logging.getLogger(__name__)
+
+
+class AzureAIStudioProvider(ModelProvider):
+    def validate_provider_credentials(self, credentials: dict) -> None:
+        """
+        Validate provider credentials
+
+        if validate failed, raise exception
+
+        :param credentials: provider credentials, credentials form defined in `provider_credential_schema`.
+        """
+        pass
@@ -0,0 +1,65 @@
+provider: azure_ai_studio
+label:
+  zh_Hans: Azure AI Studio
+  en_US: Azure AI Studio
+icon_small:
+  en_US: icon_s_en.png
+icon_large:
+  en_US: icon_l_en.png
+description:
+  en_US: Azure AI Studio
+  zh_Hans: Azure AI Studio
+background: "#93c5fd"
+help:
+  title:
+    en_US: How to deploy customized model on Azure AI Studio
+    zh_Hans: 如何在Azure AI Studio上的私有化部署的模型
+  url:
+    en_US: https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models
+    zh_Hans: https://learn.microsoft.com/zh-cn/azure/ai-studio/how-to/deploy-models
+supported_model_types:
+  - llm
+  - rerank
+configurate_methods:
+  - customizable-model
+model_credential_schema:
+  model:
+    label:
+      en_US: Model Name
+      zh_Hans: 模型名称
+    placeholder:
+      en_US: Enter your model name
+      zh_Hans: 输入模型名称
+  credential_form_schemas:
+    - variable: endpoint
+      label:
+        en_US: Azure AI Studio Endpoint
+      type: text-input
+      required: true
+      placeholder:
+        zh_Hans: 请输入你的Azure AI Studio推理端点
+        en_US: 'Enter your API Endpoint, eg: https://example.com'
+    - variable: api_key
+      required: true
+      label:
+        en_US: API Key
+        zh_Hans: API Key
+      type: secret-input
+      placeholder:
+        en_US: Enter your Azure AI Studio API Key
+        zh_Hans: 在此输入您的 Azure AI Studio API Key
+      show_on:
+        - variable: __model_type
+          value: llm
+    - variable: jwt_token
+      required: true
+      label:
+        en_US: JWT Token
+        zh_Hans: JWT令牌
+      type: secret-input
+      placeholder:
+        en_US: Enter your Azure AI Studio JWT Token
+        zh_Hans: 在此输入您的 Azure AI Studio 推理 API Key
+      show_on:
+        - variable: __model_type
+          value: rerank
@@ -0,0 +1,334 @@
+import logging
+from collections.abc import Generator
+from typing import Any, Optional, Union
+
+from azure.ai.inference import ChatCompletionsClient
+from azure.ai.inference.models import StreamingChatCompletionsUpdate
+from azure.core.credentials import AzureKeyCredential
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    DecodeError,
+    DeserializationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceModifiedError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    SerializationError,
+    ServiceRequestError,
+    ServiceResponseError,
+)
+
+from core.model_runtime.callbacks.base_callback import Callback
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import (
+    AssistantPromptMessage,
+    PromptMessage,
+    PromptMessageTool,
+)
+from core.model_runtime.entities.model_entities import (
+    AIModelEntity,
+    FetchFrom,
+    I18nObject,
+    ModelType,
+    ParameterRule,
+    ParameterType,
+)
+from core.model_runtime.errors.invoke import (
+    InvokeAuthorizationError,
+    InvokeBadRequestError,
+    InvokeConnectionError,
+    InvokeError,
+    InvokeServerUnavailableError,
+)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+
+logger = logging.getLogger(__name__)
+
+
+class AzureAIStudioLargeLanguageModel(LargeLanguageModel):
+    """
+    Model class for Azure AI Studio large language model.
+    """
+
+    client: Any = None
+
+    from azure.ai.inference.models import StreamingChatCompletionsUpdate
+
+    def _invoke(
+        self,
+        model: str,
+        credentials: dict,
+        prompt_messages: list[PromptMessage],
+        model_parameters: dict,
+        tools: Optional[list[PromptMessageTool]] = None,
+        stop: Optional[list[str]] = None,
+        stream: bool = True,
+        user: Optional[str] = None,
+    ) -> Union[LLMResult, Generator]:
+        """
+        Invoke large language model
+
+        :param model: model name
+        :param credentials: model credentials
+        :param prompt_messages: prompt messages
+        :param model_parameters: model parameters
+        :param tools: tools for tool calling
+        :param stop: stop words
+        :param stream: is stream response
+        :param user: unique user id
+        :return: full response or stream response chunk generator result
+        """
+
+        if not self.client:
+            endpoint = credentials.get("endpoint")
+            api_key = credentials.get("api_key")
+            self.client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(api_key))
+
+        messages = [{"role": msg.role.value, "content": msg.content} for msg in prompt_messages]
+
+        payload = {
+            "messages": messages,
+            "max_tokens": model_parameters.get("max_tokens", 4096),
+            "temperature": model_parameters.get("temperature", 0),
+            "top_p": model_parameters.get("top_p", 1),
+            "stream": stream,
+        }
+
+        if stop:
+            payload["stop"] = stop
+
+        if tools:
+            payload["tools"] = [tool.model_dump() for tool in tools]
+
+        try:
+            response = self.client.complete(**payload)
+
+            if stream:
+                return self._handle_stream_response(response, model, prompt_messages)
+            else:
+                return self._handle_non_stream_response(response, model, prompt_messages, credentials)
+        except Exception as e:
+            raise self._transform_invoke_error(e)
+
+    def _handle_stream_response(self, response, model: str, prompt_messages: list[PromptMessage]) -> Generator:
+        for chunk in response:
+            if isinstance(chunk, StreamingChatCompletionsUpdate):
+                if chunk.choices:
+                    delta = chunk.choices[0].delta
+                    if delta.content:
+                        yield LLMResultChunk(
+                            model=model,
+                            prompt_messages=prompt_messages,
+                            delta=LLMResultChunkDelta(
+                                index=0,
+                                message=AssistantPromptMessage(content=delta.content, tool_calls=[]),
+                            ),
+                        )
+
+    def _handle_non_stream_response(
+        self, response, model: str, prompt_messages: list[PromptMessage], credentials: dict
+    ) -> LLMResult:
+        assistant_text = response.choices[0].message.content
+        assistant_prompt_message = AssistantPromptMessage(content=assistant_text)
+        usage = self._calc_response_usage(
+            model, credentials, response.usage.prompt_tokens, response.usage.completion_tokens
+        )
+        result = LLMResult(model=model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage)
+
+        if hasattr(response, "system_fingerprint"):
+            result.system_fingerprint = response.system_fingerprint
+
+        return result
+
+    def _invoke_result_generator(
+        self,
+        model: str,
+        result: Generator,
+        credentials: dict,
+        prompt_messages: list[PromptMessage],
+        model_parameters: dict,
+        tools: Optional[list[PromptMessageTool]] = None,
+        stop: Optional[list[str]] = None,
+        stream: bool = True,
+        user: Optional[str] = None,
+        callbacks: Optional[list[Callback]] = None,
+    ) -> Generator:
+        """
+        Invoke result generator
+
+        :param result: result generator
+        :return: result generator
+        """
+        callbacks = callbacks or []
+        prompt_message = AssistantPromptMessage(content="")
+        usage = None
+        system_fingerprint = None
+        real_model = model
+
+        try:
+            for chunk in result:
+                if isinstance(chunk, dict):
+                    content = chunk["choices"][0]["message"]["content"]
+                    usage = chunk["usage"]
+                    chunk = LLMResultChunk(
+                        model=model,
+                        prompt_messages=prompt_messages,
+                        delta=LLMResultChunkDelta(
+                            index=0,
+                            message=AssistantPromptMessage(content=content, tool_calls=[]),
+                        ),
+                        system_fingerprint=chunk.get("system_fingerprint"),
+                    )
+
+                yield chunk
+
+                self._trigger_new_chunk_callbacks(
+                    chunk=chunk,
+                    model=model,
+                    credentials=credentials,
+                    prompt_messages=prompt_messages,
+                    model_parameters=model_parameters,
+                    tools=tools,
+                    stop=stop,
+                    stream=stream,
+                    user=user,
+                    callbacks=callbacks,
+                )
+
+                prompt_message.content += chunk.delta.message.content
+                real_model = chunk.model
+                if hasattr(chunk.delta, "usage"):
+                    usage = chunk.delta.usage
+
+                if chunk.system_fingerprint:
+                    system_fingerprint = chunk.system_fingerprint
+        except Exception as e:
+            raise self._transform_invoke_error(e)
+
+        self._trigger_after_invoke_callbacks(
+            model=model,
+            result=LLMResult(
+                model=real_model,
+                prompt_messages=prompt_messages,
+                message=prompt_message,
+                usage=usage if usage else LLMUsage.empty_usage(),
+                system_fingerprint=system_fingerprint,
+            ),
+            credentials=credentials,
+            prompt_messages=prompt_messages,
+            model_parameters=model_parameters,
+            tools=tools,
+            stop=stop,
+            stream=stream,
+            user=user,
+            callbacks=callbacks,
+        )
+
+    def get_num_tokens(
+        self,
+        model: str,
+        credentials: dict,
+        prompt_messages: list[PromptMessage],
+        tools: Optional[list[PromptMessageTool]] = None,
+    ) -> int:
+        """
+        Get number of tokens for given prompt messages
+
+        :param model: model name
+        :param credentials: model credentials
+        :param prompt_messages: prompt messages
+        :param tools: tools for tool calling
+        :return:
+        """
+        # Implement token counting logic here
+        # Might need to use a tokenizer specific to the Azure AI Studio model
+        return 0
+
+    def validate_credentials(self, model: str, credentials: dict) -> None:
+        """
+        Validate model credentials
+
+        :param model: model name
+        :param credentials: model credentials
+        :return:
+        """
+        try:
+            endpoint = credentials.get("endpoint")
+            api_key = credentials.get("api_key")
+            client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(api_key))
+            client.get_model_info()
+        except Exception as ex:
+            raise CredentialsValidateFailedError(str(ex))
+
+    @property
+    def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
+        """
+        Map model invoke error to unified error
+        The key is the error type thrown to the caller
+        The value is the error type thrown by the model,
+        which needs to be converted into a unified error type for the caller.
+
+        :return: Invoke error mapping
+        """
+        return {
+            InvokeConnectionError: [
+                ServiceRequestError,
+            ],
+            InvokeServerUnavailableError: [
+                ServiceResponseError,
+            ],
+            InvokeAuthorizationError: [
+                ClientAuthenticationError,
+            ],
+            InvokeBadRequestError: [
+                HttpResponseError,
+                DecodeError,
+                ResourceExistsError,
+                ResourceNotFoundError,
+                ResourceModifiedError,
+                ResourceNotModifiedError,
+                SerializationError,
+                DeserializationError,
+            ],
+        }
+
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+        """
+        Used to define customizable model schema
+        """
+        rules = [
+            ParameterRule(
+                name="temperature",
+                type=ParameterType.FLOAT,
+                use_template="temperature",
+                label=I18nObject(zh_Hans="温度", en_US="Temperature"),
+            ),
+            ParameterRule(
+                name="top_p",
+                type=ParameterType.FLOAT,
+                use_template="top_p",
+                label=I18nObject(zh_Hans="Top P", en_US="Top P"),
+            ),
+            ParameterRule(
+                name="max_tokens",
+                type=ParameterType.INT,
+                use_template="max_tokens",
+                min=1,
+                default=512,
+                label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
+            ),
+        ]
+
+        entity = AIModelEntity(
+            model=model,
+            label=I18nObject(en_US=model),
+            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
+            model_type=ModelType.LLM,
+            features=[],
+            model_properties={},
+            parameter_rules=rules,
+        )
+
+        return entity
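For reference, a standalone sketch of the client pattern this provider wraps, assuming the azure-ai-inference package and a deployed serverless endpoint; the endpoint URL and key are placeholders.

from azure.ai.inference import ChatCompletionsClient
from azure.core.credentials import AzureKeyCredential

client = ChatCompletionsClient(
    endpoint="https://<deployment>.inference.ai.azure.com",  # placeholder
    credential=AzureKeyCredential("<api-key>"),              # placeholder
)
response = client.complete(
    messages=[{"role": "user", "content": "Hello"}],
    max_tokens=128,
    stream=False,
)
print(response.choices[0].message.content)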
@@ -0,0 +1,164 @@
+import json
+import logging
+import os
+import ssl
+import urllib.request
+from typing import Optional
+
+from core.model_runtime.entities.common_entities import I18nObject
+from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType
+from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult
+from core.model_runtime.errors.invoke import (
+    InvokeAuthorizationError,
+    InvokeBadRequestError,
+    InvokeConnectionError,
+    InvokeError,
+    InvokeRateLimitError,
+    InvokeServerUnavailableError,
+)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.rerank_model import RerankModel
+
+logger = logging.getLogger(__name__)
+
+
+class AzureRerankModel(RerankModel):
+    """
+    Model class for Azure AI Studio rerank model.
+    """
+
+    def _allow_self_signed_https(self, allowed):
+        # bypass the server certificate verification on client side
+        if allowed and not os.environ.get("PYTHONHTTPSVERIFY", "") and getattr(ssl, "_create_unverified_context", None):
+            ssl._create_default_https_context = ssl._create_unverified_context
+
+    def _azure_rerank(self, query_input: str, docs: list[str], endpoint: str, api_key: str):
+        # self._allow_self_signed_https(True)  # Enable if using self-signed certificate
+
+        data = {"inputs": query_input, "docs": docs}
+
+        body = json.dumps(data).encode("utf-8")
+        headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}
+
+        req = urllib.request.Request(endpoint, body, headers)
+
+        try:
+            with urllib.request.urlopen(req) as response:
+                result = response.read()
+                return json.loads(result)
+        except urllib.error.HTTPError as error:
+            logger.error(f"The request failed with status code: {error.code}")
+            logger.error(error.info())
+            logger.error(error.read().decode("utf8", "ignore"))
+            raise
+
+    def _invoke(
+        self,
+        model: str,
+        credentials: dict,
+        query: str,
+        docs: list[str],
+        score_threshold: Optional[float] = None,
+        top_n: Optional[int] = None,
+        user: Optional[str] = None,
+    ) -> RerankResult:
+        """
+        Invoke rerank model
+
+        :param model: model name
+        :param credentials: model credentials
+        :param query: search query
+        :param docs: docs for reranking
+        :param score_threshold: score threshold
+        :param top_n: top n
+        :param user: unique user id
+        :return: rerank result
+        """
+        try:
+            if len(docs) == 0:
+                return RerankResult(model=model, docs=[])
+
+            endpoint = credentials.get("endpoint")
+            api_key = credentials.get("jwt_token")
+
+            if not endpoint or not api_key:
+                raise ValueError("Azure endpoint and API key must be provided in credentials")
+
+            result = self._azure_rerank(query, docs, endpoint, api_key)
+            logger.info(f"Azure rerank result: {result}")
+
+            rerank_documents = []
+            for idx, (doc, score_dict) in enumerate(zip(docs, result)):
+                score = score_dict["score"]
+                rerank_document = RerankDocument(index=idx, text=doc, score=score)
+
+                if score_threshold is None or score >= score_threshold:
+                    rerank_documents.append(rerank_document)
+
+            rerank_documents.sort(key=lambda x: x.score, reverse=True)
+
+            if top_n:
+                rerank_documents = rerank_documents[:top_n]
+
+            return RerankResult(model=model, docs=rerank_documents)
+
+        except Exception as e:
+            logger.exception(f"Exception in Azure rerank: {e}")
+            raise
+
+    def validate_credentials(self, model: str, credentials: dict) -> None:
+        """
+        Validate model credentials
+
+        :param model: model name
+        :param credentials: model credentials
+        :return:
+        """
+        try:
+            self._invoke(
+                model=model,
+                credentials=credentials,
+                query="What is the capital of the United States?",
+                docs=[
+                    "Carson City is the capital city of the American state of Nevada. At the 2010 United States "
+                    "Census, Carson City had a population of 55,274.",
+                    "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that "
+                    "are a political division controlled by the United States. Its capital is Saipan.",
+                ],
+                score_threshold=0.8,
+            )
+        except Exception as ex:
+            raise CredentialsValidateFailedError(str(ex))
+
+    @property
+    def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
+        """
+        Map model invoke error to unified error
+        The key is the error type thrown to the caller
+        The value is the error type thrown by the model,
+        which needs to be converted into a unified error type for the caller.
+
+        :return: Invoke error mapping
+        """
+        return {
+            InvokeConnectionError: [urllib.error.URLError],
+            InvokeServerUnavailableError: [urllib.error.HTTPError],
+            InvokeRateLimitError: [InvokeRateLimitError],
+            InvokeAuthorizationError: [InvokeAuthorizationError],
+            InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError, json.JSONDecodeError],
+        }
+
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+        """
+        used to define customizable model schema
+        """
+        entity = AIModelEntity(
+            model=model,
+            label=I18nObject(en_US=model),
+            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
+            model_type=ModelType.RERANK,
+            model_properties={},
+            parameter_rules=[],
+        )
+
+        return entity
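For reference, the request and response shapes the rerank model above assumes: the payload keys ("inputs", "docs") and the per-document "score" field come from the code in the diff, while the endpoint URL and token below are placeholders.

import json
import urllib.request

body = json.dumps({"inputs": "capital of the US?", "docs": ["doc a", "doc b"]}).encode("utf-8")
req = urllib.request.Request(
    "https://<rerank-deployment>.inference.ai.azure.com/score",  # placeholder endpoint
    body,
    {"Content-Type": "application/json", "Authorization": "Bearer <jwt-token>"},  # placeholder token
)
# with urllib.request.urlopen(req) as resp:
#     scores = json.loads(resp.read())  # expected shape: [{"score": 0.93}, {"score": 0.12}]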
@@ -649,7 +649,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOAI_API_Compat, LargeLanguageModel):
         else:
             raise ValueError(f"Got unknown type {message}")
 
-        if message.name:
+        if message.name and message_dict.get("role", "") != "tool":
             message_dict["name"] = message.name
 
         return message_dict
@@ -137,9 +137,19 @@ class TongyiTextEmbeddingModel(_CommonTongyi, TextEmbeddingModel):
                 input=text,
                 text_type="document",
             )
-            data = response.output["embeddings"][0]
-            embeddings.append(data["embedding"])
+            if response.output and "embeddings" in response.output and response.output["embeddings"]:
+                data = response.output["embeddings"][0]
+                if "embedding" in data:
+                    embeddings.append(data["embedding"])
+                else:
+                    raise ValueError("Embedding data is missing in the response.")
+            else:
+                raise ValueError("Response output is missing or does not contain embeddings.")
 
-            embedding_used_tokens += response.usage["total_tokens"]
+            if response.usage and "total_tokens" in response.usage:
+                embedding_used_tokens += response.usage["total_tokens"]
+            else:
+                raise ValueError("Response usage is missing or does not contain total tokens.")
 
         return [list(map(float, e)) for e in embeddings], embedding_used_tokens
@@ -32,6 +32,9 @@ from core.model_runtime.entities.message_entities import (
     UserPromptMessage,
 )
 
+DEFAULT_V2_ENDPOINT = "maas-api.ml-platform-cn-beijing.volces.com"
+DEFAULT_V3_ENDPOINT = "https://ark.cn-beijing.volces.com/api/v3"
+
 
 class ArkClientV3:
     endpoint_id: Optional[str] = None
@@ -43,16 +46,24 @@ class ArkClientV3:
 
     @staticmethod
     def is_legacy(credentials: dict) -> bool:
+        # match default v2 endpoint
         if ArkClientV3.is_compatible_with_legacy(credentials):
             return False
-        sdk_version = credentials.get("sdk_version", "v2")
-        return sdk_version != "v3"
+        # match default v3 endpoint
+        if credentials.get("api_endpoint_host") == DEFAULT_V3_ENDPOINT:
+            return False
+        # only v3 support api_key
+        if credentials.get("auth_method") == "api_key":
+            return False
+        # these cases are considered as sdk v2
+        # - modified default v2 endpoint
+        # - modified default v3 endpoint and auth without api_key
+        return True
 
     @staticmethod
     def is_compatible_with_legacy(credentials: dict) -> bool:
-        sdk_version = credentials.get("sdk_version")
         endpoint = credentials.get("api_endpoint_host")
-        return sdk_version is None and endpoint == "maas-api.ml-platform-cn-beijing.volces.com"
+        return endpoint == DEFAULT_V2_ENDPOINT
 
     @classmethod
     def from_credentials(cls, credentials):
@@ -64,7 +75,7 @@ class ArkClientV3:
             "sk": credentials['volc_secret_access_key'],
         }
         if cls.is_compatible_with_legacy(credentials):
-            args["base_url"] = "https://ark.cn-beijing.volces.com/api/v3"
+            args["base_url"] = DEFAULT_V3_ENDPOINT
 
         client = ArkClientV3(
             **args
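Worked examples of the new routing logic, using hypothetical credential dicts; `is_legacy` returning False means the v3 SDK path is taken (the default v2 endpoint case is handled by the compatibility shim, which rewrites its base_url to the v3 endpoint).

creds_v2 = {"api_endpoint_host": "maas-api.ml-platform-cn-beijing.volces.com"}
creds_v3 = {"api_endpoint_host": "https://ark.cn-beijing.volces.com/api/v3"}
creds_key = {"api_endpoint_host": "https://custom.example.com", "auth_method": "api_key"}
creds_custom = {"api_endpoint_host": "https://custom.example.com"}

assert ArkClientV3.is_legacy(creds_v2) is False      # default v2 endpoint -> compatible path
assert ArkClientV3.is_legacy(creds_v3) is False      # default v3 endpoint
assert ArkClientV3.is_legacy(creds_key) is False     # api_key auth implies v3
assert ArkClientV3.is_legacy(creds_custom) is True   # modified endpoint without api_key -> sdk v2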
@@ -38,7 +38,7 @@ configs: dict[str, ModelConfig] = {
     ),
     'Doubao-lite-128k': ModelConfig(
         properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL]
+        features=[]
     ),
     'Skylark2-pro-4k': ModelConfig(
         properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT),
@@ -54,23 +54,23 @@ configs: dict[str, ModelConfig] = {
     ),
     'Moonshot-v1-8k': ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[]
+        features=[ModelFeature.TOOL_CALL]
     ),
     'Moonshot-v1-32k': ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=16384, mode=LLMMode.CHAT),
-        features=[]
+        features=[ModelFeature.TOOL_CALL]
     ),
     'Moonshot-v1-128k': ModelConfig(
         properties=ModelProperties(context_size=131072, max_tokens=65536, mode=LLMMode.CHAT),
-        features=[]
+        features=[ModelFeature.TOOL_CALL]
     ),
     'GLM3-130B': ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[]
+        features=[ModelFeature.TOOL_CALL]
     ),
     'GLM3-130B-Fin': ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[]
+        features=[ModelFeature.TOOL_CALL]
     ),
     'Mistral-7B': ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=2048, mode=LLMMode.CHAT),
@ -64,7 +64,7 @@ model_credential_schema:
         en_US: API Endpoint Host
         zh_Hans: API Endpoint Host
       type: text-input
-      default: maas-api.ml-platform-cn-beijing.volces.com
+      default: https://ark.cn-beijing.volces.com/api/v3
       placeholder:
         en_US: Enter your API Endpoint Host
         zh_Hans: 输入 API Endpoint Host
@ -21,7 +21,6 @@ class LangfuseConfig(BaseTracingConfig):
     """
     public_key: str
     secret_key: str
-    project_key: str
     host: str = 'https://api.langfuse.com'

     @field_validator("host")
@ -350,7 +350,8 @@ class ProviderManager:

         return default_model

-    def _get_all_providers(self, tenant_id: str) -> dict[str, list[Provider]]:
+    @staticmethod
+    def _get_all_providers(tenant_id: str) -> dict[str, list[Provider]]:
         """
         Get all provider records of the workspace.

@ -369,7 +370,8 @@ class ProviderManager:

         return provider_name_to_provider_records_dict

-    def _get_all_provider_models(self, tenant_id: str) -> dict[str, list[ProviderModel]]:
+    @staticmethod
+    def _get_all_provider_models(tenant_id: str) -> dict[str, list[ProviderModel]]:
         """
         Get all provider model records of the workspace.

@ -389,7 +391,8 @@ class ProviderManager:

         return provider_name_to_provider_model_records_dict

-    def _get_all_preferred_model_providers(self, tenant_id: str) -> dict[str, TenantPreferredModelProvider]:
+    @staticmethod
+    def _get_all_preferred_model_providers(tenant_id: str) -> dict[str, TenantPreferredModelProvider]:
         """
         Get All preferred provider types of the workspace.

@ -408,7 +411,8 @@ class ProviderManager:

         return provider_name_to_preferred_provider_type_records_dict

-    def _get_all_provider_model_settings(self, tenant_id: str) -> dict[str, list[ProviderModelSetting]]:
+    @staticmethod
+    def _get_all_provider_model_settings(tenant_id: str) -> dict[str, list[ProviderModelSetting]]:
         """
         Get All provider model settings of the workspace.

@ -427,7 +431,8 @@ class ProviderManager:

         return provider_name_to_provider_model_settings_dict

-    def _get_all_provider_load_balancing_configs(self, tenant_id: str) -> dict[str, list[LoadBalancingModelConfig]]:
+    @staticmethod
+    def _get_all_provider_load_balancing_configs(tenant_id: str) -> dict[str, list[LoadBalancingModelConfig]]:
         """
         Get All provider load balancing configs of the workspace.

@ -458,7 +463,8 @@ class ProviderManager:

         return provider_name_to_provider_load_balancing_model_configs_dict

-    def _init_trial_provider_records(self, tenant_id: str,
+    @staticmethod
+    def _init_trial_provider_records(tenant_id: str,
                                      provider_name_to_provider_records_dict: dict[str, list]) -> dict[str, list]:
         """
         Initialize trial provider records if not exists.
@ -791,7 +797,8 @@ class ProviderManager:
             credentials=current_using_credentials
         )

-    def _choice_current_using_quota_type(self, quota_configurations: list[QuotaConfiguration]) -> ProviderQuotaType:
+    @staticmethod
+    def _choice_current_using_quota_type(quota_configurations: list[QuotaConfiguration]) -> ProviderQuotaType:
         """
         Choice current using quota type.
         paid quotas > provider free quotas > hosting trial quotas
@ -818,7 +825,8 @@ class ProviderManager:

         raise ValueError('No quota type available')

-    def _extract_secret_variables(self, credential_form_schemas: list[CredentialFormSchema]) -> list[str]:
+    @staticmethod
+    def _extract_secret_variables(credential_form_schemas: list[CredentialFormSchema]) -> list[str]:
         """
         Extract secret input form variables.
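Reviewer note: these helpers never touched self, so @staticmethod is a pure signature cleanup; existing instance-style call sites keep working, e.g. (sketch, not part of the diff):

    class ProviderManager:
        @staticmethod
        def _get_all_providers(tenant_id: str) -> dict:
            return {}  # stand-in for the real DB query

        def load(self, tenant_id: str) -> dict:
            # attribute lookup on the instance still resolves the staticmethod
            return self._get_all_providers(tenant_id)

    assert ProviderManager().load("tenant-1") == {}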
@ -1,5 +1,7 @@
 import json
-from typing import Any
+import logging
+from typing import Any, Optional
+from urllib.parse import urlparse

 import requests
 from elasticsearch import Elasticsearch
@ -7,16 +9,20 @@ from flask import current_app
 from pydantic import BaseModel, model_validator

 from core.rag.datasource.entity.embedding import Embeddings
+from core.rag.datasource.vdb.field import Field
 from core.rag.datasource.vdb.vector_base import BaseVector
 from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
 from core.rag.datasource.vdb.vector_type import VectorType
 from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
 from models.dataset import Dataset

+logger = logging.getLogger(__name__)


 class ElasticSearchConfig(BaseModel):
     host: str
-    port: str
+    port: int
     username: str
     password: str
|
|||||||
def __init__(self, index_name: str, config: ElasticSearchConfig, attributes: list):
|
def __init__(self, index_name: str, config: ElasticSearchConfig, attributes: list):
|
||||||
super().__init__(index_name.lower())
|
super().__init__(index_name.lower())
|
||||||
self._client = self._init_client(config)
|
self._client = self._init_client(config)
|
||||||
|
self._version = self._get_version()
|
||||||
|
self._check_version()
|
||||||
self._attributes = attributes
|
self._attributes = attributes
|
||||||
|
|
||||||
def _init_client(self, config: ElasticSearchConfig) -> Elasticsearch:
|
def _init_client(self, config: ElasticSearchConfig) -> Elasticsearch:
|
||||||
try:
|
try:
|
||||||
|
parsed_url = urlparse(config.host)
|
||||||
|
if parsed_url.scheme in ['http', 'https']:
|
||||||
|
hosts = f'{config.host}:{config.port}'
|
||||||
|
else:
|
||||||
|
hosts = f'http://{config.host}:{config.port}'
|
||||||
client = Elasticsearch(
|
client = Elasticsearch(
|
||||||
hosts=f'{config.host}:{config.port}',
|
hosts=hosts,
|
||||||
basic_auth=(config.username, config.password),
|
basic_auth=(config.username, config.password),
|
||||||
request_timeout=100000,
|
request_timeout=100000,
|
||||||
retry_on_timeout=True,
|
retry_on_timeout=True,
|
||||||
@ -53,42 +66,27 @@ class ElasticSearchVector(BaseVector):
|
|||||||
|
|
||||||
return client
|
return client
|
||||||
|
|
||||||
|
def _get_version(self) -> str:
|
||||||
|
info = self._client.info()
|
||||||
|
return info['version']['number']
|
||||||
|
|
||||||
|
def _check_version(self):
|
||||||
|
if self._version < '8.0.0':
|
||||||
|
raise ValueError("Elasticsearch vector database version must be greater than 8.0.0")
|
||||||
|
|
||||||
def get_type(self) -> str:
|
def get_type(self) -> str:
|
||||||
return 'elasticsearch'
|
return 'elasticsearch'
|
||||||
|
|
||||||
def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
|
def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
|
||||||
uuids = self._get_uuids(documents)
|
uuids = self._get_uuids(documents)
|
||||||
texts = [d.page_content for d in documents]
|
for i in range(len(documents)):
|
||||||
metadatas = [d.metadata for d in documents]
|
|
||||||
|
|
||||||
if not self._client.indices.exists(index=self._collection_name):
|
|
||||||
dim = len(embeddings[0])
|
|
||||||
mapping = {
|
|
||||||
"properties": {
|
|
||||||
"text": {
|
|
||||||
"type": "text"
|
|
||||||
},
|
|
||||||
"vector": {
|
|
||||||
"type": "dense_vector",
|
|
||||||
"index": True,
|
|
||||||
"dims": dim,
|
|
||||||
"similarity": "l2_norm"
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self._client.indices.create(index=self._collection_name, mappings=mapping)
|
|
||||||
|
|
||||||
added_ids = []
|
|
||||||
for i, text in enumerate(texts):
|
|
||||||
self._client.index(index=self._collection_name,
|
self._client.index(index=self._collection_name,
|
||||||
id=uuids[i],
|
id=uuids[i],
|
||||||
document={
|
document={
|
||||||
"text": text,
|
Field.CONTENT_KEY.value: documents[i].page_content,
|
||||||
"vector": embeddings[i] if embeddings[i] else None,
|
Field.VECTOR.value: embeddings[i] if embeddings[i] else None,
|
||||||
"metadata": metadatas[i] if metadatas[i] else {},
|
Field.METADATA_KEY.value: documents[i].metadata if documents[i].metadata else {}
|
||||||
})
|
})
|
||||||
added_ids.append(uuids[i])
|
|
||||||
|
|
||||||
self._client.indices.refresh(index=self._collection_name)
|
self._client.indices.refresh(index=self._collection_name)
|
||||||
return uuids
|
return uuids
|
||||||
|
|
||||||
@ -116,28 +114,21 @@ class ElasticSearchVector(BaseVector):
|
|||||||
self._client.indices.delete(index=self._collection_name)
|
self._client.indices.delete(index=self._collection_name)
|
||||||
|
|
||||||
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
|
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
|
||||||
query_str = {
|
top_k = kwargs.get("top_k", 10)
|
||||||
"query": {
|
knn = {
|
||||||
"script_score": {
|
"field": Field.VECTOR.value,
|
||||||
"query": {
|
"query_vector": query_vector,
|
||||||
"match_all": {}
|
"k": top_k
|
||||||
},
|
|
||||||
"script": {
|
|
||||||
"source": "cosineSimilarity(params.query_vector, 'vector') + 1.0",
|
|
||||||
"params": {
|
|
||||||
"query_vector": query_vector
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
results = self._client.search(index=self._collection_name, body=query_str)
|
results = self._client.search(index=self._collection_name, knn=knn, size=top_k)
|
||||||
|
|
||||||
docs_and_scores = []
|
docs_and_scores = []
|
||||||
for hit in results['hits']['hits']:
|
for hit in results['hits']['hits']:
|
||||||
docs_and_scores.append(
|
docs_and_scores.append(
|
||||||
(Document(page_content=hit['_source']['text'], metadata=hit['_source']['metadata']), hit['_score']))
|
(Document(page_content=hit['_source'][Field.CONTENT_KEY.value],
|
||||||
|
vector=hit['_source'][Field.VECTOR.value],
|
||||||
|
metadata=hit['_source'][Field.METADATA_KEY.value]), hit['_score']))
|
||||||
|
|
||||||
docs = []
|
docs = []
|
||||||
for doc, score in docs_and_scores:
|
for doc, score in docs_and_scores:
|
||||||
@ -146,25 +137,61 @@ class ElasticSearchVector(BaseVector):
|
|||||||
doc.metadata['score'] = score
|
doc.metadata['score'] = score
|
||||||
docs.append(doc)
|
docs.append(doc)
|
||||||
|
|
||||||
# Sort the documents by score in descending order
|
|
||||||
docs = sorted(docs, key=lambda x: x.metadata['score'], reverse=True)
|
|
||||||
|
|
||||||
return docs
|
return docs
|
||||||
|
|
||||||
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
|
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
|
||||||
query_str = {
|
query_str = {
|
||||||
"match": {
|
"match": {
|
||||||
"text": query
|
Field.CONTENT_KEY.value: query
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
results = self._client.search(index=self._collection_name, query=query_str)
|
results = self._client.search(index=self._collection_name, query=query_str)
|
||||||
docs = []
|
docs = []
|
||||||
for hit in results['hits']['hits']:
|
for hit in results['hits']['hits']:
|
||||||
docs.append(Document(page_content=hit['_source']['text'], metadata=hit['_source']['metadata']))
|
docs.append(Document(
|
||||||
|
page_content=hit['_source'][Field.CONTENT_KEY.value],
|
||||||
|
vector=hit['_source'][Field.VECTOR.value],
|
||||||
|
metadata=hit['_source'][Field.METADATA_KEY.value],
|
||||||
|
))
|
||||||
|
|
||||||
return docs
|
return docs
|
||||||
|
|
||||||
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
|
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
|
||||||
return self.add_texts(texts, embeddings, **kwargs)
|
metadatas = [d.metadata for d in texts]
|
||||||
|
self.create_collection(embeddings, metadatas)
|
||||||
|
self.add_texts(texts, embeddings, **kwargs)
|
||||||
|
|
||||||
|
def create_collection(
|
||||||
|
self, embeddings: list, metadatas: Optional[list[dict]] = None, index_params: Optional[dict] = None
|
||||||
|
):
|
||||||
|
lock_name = f'vector_indexing_lock_{self._collection_name}'
|
||||||
|
with redis_client.lock(lock_name, timeout=20):
|
||||||
|
collection_exist_cache_key = f'vector_indexing_{self._collection_name}'
|
||||||
|
if redis_client.get(collection_exist_cache_key):
|
||||||
|
logger.info(f"Collection {self._collection_name} already exists.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not self._client.indices.exists(index=self._collection_name):
|
||||||
|
dim = len(embeddings[0])
|
||||||
|
mappings = {
|
||||||
|
"properties": {
|
||||||
|
Field.CONTENT_KEY.value: {"type": "text"},
|
||||||
|
Field.VECTOR.value: { # Make sure the dimension is correct here
|
||||||
|
"type": "dense_vector",
|
||||||
|
"dims": dim,
|
||||||
|
"similarity": "cosine"
|
||||||
|
},
|
||||||
|
Field.METADATA_KEY.value: {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"doc_id": {"type": "keyword"} # Map doc_id to keyword type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self._client.indices.create(index=self._collection_name, mappings=mappings)
|
||||||
|
|
||||||
|
redis_client.set(collection_exist_cache_key, 1, ex=3600)
|
||||||
|
|
||||||
|
|
||||||
class ElasticSearchVectorFactory(AbstractVectorFactory):
|
class ElasticSearchVectorFactory(AbstractVectorFactory):
|
||||||
|
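Reviewer note: create_collection mirrors the lock-plus-cache pattern the other vector stores in this codebase appear to use: a Redis lock serializes concurrent workers, and a cache key short-circuits the check for an hour afterwards. Condensed sketch (not part of the diff):

    from extensions.ext_redis import redis_client

    def ensure_index(name: str, create_fn) -> None:
        with redis_client.lock(f'vector_indexing_lock_{name}', timeout=20):
            cache_key = f'vector_indexing_{name}'
            if redis_client.get(cache_key):
                return  # another worker already created the index
            create_fn()  # runs at most once per cache window
            redis_client.set(cache_key, 1, ex=3600)  # remember for an hour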
@ -122,7 +122,7 @@ class MyScaleVector(BaseVector):

     def _search(self, dist: str, order: SortOrder, **kwargs: Any) -> list[Document]:
         top_k = kwargs.get("top_k", 5)
-        score_threshold = kwargs.get("score_threshold", 0.0)
+        score_threshold = kwargs.get('score_threshold') or 0.0
         where_str = f"WHERE dist < {1 - score_threshold}" if \
             self._metric.upper() == "COSINE" and order == SortOrder.ASC and score_threshold > 0.0 else ""
         sql = f"""
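Reviewer note: the subtle part of this one-liner is that dict.get's default only applies when the key is absent, not when it is present with value None, so an explicit score_threshold=None used to leak through and break the comparison below. The chunk_overlap fix in the next hunk is the same pattern:

    kwargs = {'score_threshold': None}
    print(kwargs.get('score_threshold', 0.0))    # None -> TypeError in `score_threshold > 0.0`
    print(kwargs.get('score_threshold') or 0.0)  # 0.0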
@ -57,7 +57,7 @@ class BaseIndexProcessor(ABC):

             character_splitter = FixedRecursiveCharacterTextSplitter.from_encoder(
                 chunk_size=segmentation["max_tokens"],
-                chunk_overlap=segmentation.get('chunk_overlap', 0),
+                chunk_overlap=segmentation.get('chunk_overlap', 0) or 0,
                 fixed_separator=separator,
                 separators=["\n\n", "。", ". ", " ", ""],
                 embedding_model_instance=embedding_model_instance
@ -148,7 +148,7 @@ class ToolParameter(BaseModel):
     form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm")
     llm_description: Optional[str] = None
     required: Optional[bool] = False
-    default: Optional[Union[int, str]] = None
+    default: Optional[Union[float, int, str]] = None
     min: Optional[Union[float, int]] = None
     max: Optional[Union[float, int]] = None
     options: Optional[list[ToolParameterOption]] = None
@ -10,6 +10,7 @@
 - wikipedia
 - nominatim
 - yahoo
+- alphavantage
 - arxiv
 - pubmed
 - stablediffusion
@ -30,5 +31,7 @@
 - dingtalk
 - feishu
 - feishu_base
+- feishu_document
+- feishu_message
 - slack
 - tianditu
@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg width="56px" height="56px" viewBox="0 0 56 56" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+    <title>形状结合</title>
+    <g id="设计规范" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+        <path d="M56,0 L56,56 L0,56 L0,0 L56,0 Z M31.6063018,12 L24.3936982,12 L24.1061064,12.7425499 L12.6071308,42.4324141 L12,44 L19.7849972,44 L20.0648488,43.2391815 L22.5196173,36.5567427 L33.4780427,36.5567427 L35.9351512,43.2391815 L36.2150028,44 L44,44 L43.3928692,42.4324141 L31.8938936,12.7425499 L31.6063018,12 Z M28.0163803,21.5755126 L31.1613993,30.2523823 L24.8432808,30.2523823 L28.0163803,21.5755126 Z" id="形状结合" fill="#2F4F4F"></path>
+    </g>
+</svg>
(new SVG icon, 780 B)
api/core/tools/provider/builtin/alphavantage/alphavantage.py (new file, 22 lines)
@ -0,0 +1,22 @@
+from typing import Any
+
+from core.tools.errors import ToolProviderCredentialValidationError
+from core.tools.provider.builtin.alphavantage.tools.query_stock import QueryStockTool
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+
+
+class AlphaVantageProvider(BuiltinToolProviderController):
+    def _validate_credentials(self, credentials: dict[str, Any]) -> None:
+        try:
+            QueryStockTool().fork_tool_runtime(
+                runtime={
+                    "credentials": credentials,
+                }
+            ).invoke(
+                user_id='',
+                tool_parameters={
+                    "code": "AAPL",  # Apple Inc.
+                },
+            )
+        except Exception as e:
+            raise ToolProviderCredentialValidationError(str(e))
@ -0,0 +1,31 @@
+identity:
+  author: zhuhao
+  name: alphavantage
+  label:
+    en_US: AlphaVantage
+    zh_Hans: AlphaVantage
+    pt_BR: AlphaVantage
+  description:
+    en_US: AlphaVantage is an online platform that provides financial market data and APIs, making it convenient for individual investors and developers to access stock quotes, technical indicators, and stock analysis.
+    zh_Hans: AlphaVantage是一个在线平台,它提供金融市场数据和API,便于个人投资者和开发者获取股票报价、技术指标和股票分析。
+    pt_BR: AlphaVantage is an online platform that provides financial market data and APIs, making it convenient for individual investors and developers to access stock quotes, technical indicators, and stock analysis.
+  icon: icon.svg
+  tags:
+    - finance
+credentials_for_provider:
+  api_key:
+    type: secret-input
+    required: true
+    label:
+      en_US: AlphaVantage API key
+      zh_Hans: AlphaVantage API key
+      pt_BR: AlphaVantage API key
+    placeholder:
+      en_US: Please input your AlphaVantage API key
+      zh_Hans: 请输入你的 AlphaVantage API key
+      pt_BR: Please input your AlphaVantage API key
+    help:
+      en_US: Get your AlphaVantage API key from AlphaVantage
+      zh_Hans: 从 AlphaVantage 获取您的 AlphaVantage API key
+      pt_BR: Get your AlphaVantage API key from AlphaVantage
+    url: https://www.alphavantage.co/support/#api-key
@ -0,0 +1,49 @@
+from typing import Any, Union
+
+import requests
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+ALPHAVANTAGE_API_URL = "https://www.alphavantage.co/query"
+
+
+class QueryStockTool(BuiltinTool):
+
+    def _invoke(self,
+                user_id: str,
+                tool_parameters: dict[str, Any],
+                ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
+
+        stock_code = tool_parameters.get('code', '')
+        if not stock_code:
+            return self.create_text_message('Please tell me your stock code')
+
+        if 'api_key' not in self.runtime.credentials or not self.runtime.credentials.get('api_key'):
+            return self.create_text_message("Alpha Vantage API key is required.")
+
+        params = {
+            "function": "TIME_SERIES_DAILY",
+            "symbol": stock_code,
+            "outputsize": "compact",
+            "datatype": "json",
+            "apikey": self.runtime.credentials['api_key']
+        }
+        response = requests.get(url=ALPHAVANTAGE_API_URL, params=params)
+        response.raise_for_status()
+        result = self._handle_response(response.json())
+        return self.create_json_message(result)
+
+    def _handle_response(self, response: dict[str, Any]) -> dict[str, Any]:
+        result = response.get('Time Series (Daily)', {})
+        if not result:
+            return {}
+        stock_result = {}
+        for k, v in result.items():
+            stock_result[k] = {}
+            stock_result[k]['open'] = v.get('1. open')
+            stock_result[k]['high'] = v.get('2. high')
+            stock_result[k]['low'] = v.get('3. low')
+            stock_result[k]['close'] = v.get('4. close')
+            stock_result[k]['volume'] = v.get('5. volume')
+        return stock_result
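Reviewer note: a sketch of what _handle_response returns for a trimmed TIME_SERIES_DAILY payload (the key names are the ones the code reads; the values are made up):

    payload = {
        "Time Series (Daily)": {
            "2024-07-18": {
                "1. open": "224.00", "2. high": "230.00", "3. low": "223.00",
                "4. close": "229.50", "5. volume": "66034585",
            }
        }
    }
    # QueryStockTool()._handle_response(payload) ->
    # {"2024-07-18": {"open": "224.00", "high": "230.00", "low": "223.00",
    #                 "close": "229.50", "volume": "66034585"}}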
@ -0,0 +1,27 @@
+identity:
+  name: query_stock
+  author: zhuhao
+  label:
+    en_US: query_stock
+    zh_Hans: query_stock
+    pt_BR: query_stock
+description:
+  human:
+    en_US: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol.
+    zh_Hans: 获取指定股票代码的每日开盘价、每日最高价、每日最低价、每日收盘价和每日交易量等信息。
+    pt_BR: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol
+  llm: Retrieve information such as daily opening price, daily highest price, daily lowest price, daily closing price, and daily trading volume for a specified stock symbol
+parameters:
+  - name: code
+    type: string
+    required: true
+    label:
+      en_US: stock code
+      zh_Hans: 股票代码
+      pt_BR: stock code
+    human_description:
+      en_US: stock code
+      zh_Hans: 股票代码
+      pt_BR: stock code
+    llm_description: stock code for query from alphavantage
+    form: llm
@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="64px" height="64px" style="shape-rendering:geometricPrecision; text-rendering:geometricPrecision; image-rendering:optimizeQuality; fill-rule:evenodd; clip-rule:evenodd" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g><path style="opacity:1" fill="#fefefe" d="M -0.5,-0.5 C 20.8333,-0.5 42.1667,-0.5 63.5,-0.5C 63.5,20.8333 63.5,42.1667 63.5,63.5C 42.1667,63.5 20.8333,63.5 -0.5,63.5C -0.5,42.1667 -0.5,20.8333 -0.5,-0.5 Z"/></g>
+<g><path style="opacity:1" fill="#346df3" d="M 47.5,33.5 C 43.3272,29.8779 38.9939,29.7112 34.5,33C 32.682,35.4897 30.3487,37.3231 27.5,38.5C 23.5003,43.5136 24.167,47.847 29.5,51.5C 24.1563,51.666 18.8229,51.4994 13.5,51C 13,50.5 12.5,50 12,49.5C 11.3333,36.8333 11.3333,24.1667 12,11.5C 12.5,11 13,10.5 13.5,10C 24.1667,9.33333 34.8333,9.33333 45.5,10C 46,10.5 46.5,11 47,11.5C 47.4997,18.8258 47.6663,26.1591 47.5,33.5 Z"/></g>
+<g><path style="opacity:1" fill="#f9fafe" d="M 20.5,19.5 C 25.1785,19.3342 29.8452,19.5008 34.5,20C 35.8333,21 35.8333,22 34.5,23C 29.8333,23.6667 25.1667,23.6667 20.5,23C 19.3157,21.8545 19.3157,20.6879 20.5,19.5 Z"/></g>
+<g><path style="opacity:1" fill="#f3f6fe" d="M 20.5,27.5 C 22.5273,27.3379 24.5273,27.5045 26.5,28C 27.8333,29 27.8333,30 26.5,31C 24.5,31.6667 22.5,31.6667 20.5,31C 19.3157,29.8545 19.3157,28.6879 20.5,27.5 Z"/></g>
+<g><path style="opacity:1" fill="#36d4c1" d="M 47.5,33.5 C 48.7298,35.2972 49.3964,37.2972 49.5,39.5C 51.3904,39.2965 52.8904,39.9632 54,41.5C 55.1825,45.2739 54.3492,48.4406 51.5,51C 44.1742,51.4997 36.8409,51.6663 29.5,51.5C 24.167,47.847 23.5003,43.5136 27.5,38.5C 30.3487,37.3231 32.682,35.4897 34.5,33C 38.9939,29.7112 43.3272,29.8779 47.5,33.5 Z"/></g>
+</svg>
(new SVG icon, 1.8 KiB)
@ -0,0 +1,15 @@
+from core.tools.errors import ToolProviderCredentialValidationError
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class FeishuDocumentProvider(BuiltinToolProviderController):
+    def _validate_credentials(self, credentials: dict) -> None:
+        app_id = credentials.get('app_id')
+        app_secret = credentials.get('app_secret')
+        if not app_id or not app_secret:
+            raise ToolProviderCredentialValidationError("app_id and app_secret is required")
+        try:
+            assert FeishuRequest(app_id, app_secret).tenant_access_token is not None
+        except Exception as e:
+            raise ToolProviderCredentialValidationError(str(e))
@ -0,0 +1,34 @@
+identity:
+  author: Doug Lea
+  name: feishu_document
+  label:
+    en_US: Lark Cloud Document
+    zh_Hans: 飞书云文档
+  description:
+    en_US: Lark Cloud Document
+    zh_Hans: 飞书云文档
+  icon: icon.svg
+  tags:
+    - social
+    - productivity
+credentials_for_provider:
+  app_id:
+    type: text-input
+    required: true
+    label:
+      en_US: APP ID
+    placeholder:
+      en_US: Please input your feishu app id
+      zh_Hans: 请输入你的飞书 app id
+    help:
+      en_US: Get your app_id and app_secret from Feishu
+      zh_Hans: 从飞书获取您的 app_id 和 app_secret
+    url: https://open.feishu.cn
+  app_secret:
+    type: secret-input
+    required: true
+    label:
+      en_US: APP Secret
+    placeholder:
+      en_US: Please input your app secret
+      zh_Hans: 请输入你的飞书 app secret
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from core.tools.entities.tool_entities import ToolInvokeMessage
|
||||||
|
from core.tools.tool.builtin_tool import BuiltinTool
|
||||||
|
from core.tools.utils.feishu_api_utils import FeishuRequest
|
||||||
|
|
||||||
|
|
||||||
|
class CreateDocumentTool(BuiltinTool):
|
||||||
|
def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
|
||||||
|
app_id = self.runtime.credentials.get('app_id')
|
||||||
|
app_secret = self.runtime.credentials.get('app_secret')
|
||||||
|
client = FeishuRequest(app_id, app_secret)
|
||||||
|
|
||||||
|
title = tool_parameters.get('title')
|
||||||
|
content = tool_parameters.get('content')
|
||||||
|
folder_token = tool_parameters.get('folder_token')
|
||||||
|
|
||||||
|
res = client.create_document(title, content, folder_token)
|
||||||
|
return self.create_json_message(res)
|
@ -0,0 +1,47 @@
+identity:
+  name: create_document
+  author: Doug Lea
+  label:
+    en_US: Create Lark document
+    zh_Hans: 创建飞书文档
+description:
+  human:
+    en_US: Create Lark document
+    zh_Hans: 创建飞书文档,支持创建空文档和带内容的文档,支持 markdown 语法创建。
+  llm: A tool for creating Feishu documents.
+parameters:
+  - name: title
+    type: string
+    required: false
+    label:
+      en_US: Document title
+      zh_Hans: 文档标题
+    human_description:
+      en_US: Document title, only supports plain text content.
+      zh_Hans: 文档标题,只支持纯文本内容。
+    llm_description: 文档标题,只支持纯文本内容,可以为空。
+    form: llm
+
+  - name: content
+    type: string
+    required: false
+    label:
+      en_US: Document content
+      zh_Hans: 文档内容
+    human_description:
+      en_US: Document content, supports markdown syntax, can be empty.
+      zh_Hans: 文档内容,支持 markdown 语法,可以为空。
+    llm_description: 文档内容,支持 markdown 语法,可以为空。
+    form: llm
+
+  - name: folder_token
+    type: string
+    required: false
+    label:
+      en_US: folder_token
+      zh_Hans: 文档所在文件夹的 Token
+    human_description:
+      en_US: The token of the folder where the document is located. If it is not passed or is empty, it means the root directory.
+      zh_Hans: 文档所在文件夹的 Token,不传或传空表示根目录。
+    llm_description: 文档所在文件夹的 Token,不传或传空表示根目录。
+    form: llm
@ -0,0 +1,17 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class GetDocumentRawContentTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get('app_id')
+        app_secret = self.runtime.credentials.get('app_secret')
+        client = FeishuRequest(app_id, app_secret)
+
+        document_id = tool_parameters.get('document_id')
+
+        res = client.get_document_raw_content(document_id)
+        return self.create_json_message(res)
@ -0,0 +1,23 @@
+identity:
+  name: get_document_raw_content
+  author: Doug Lea
+  label:
+    en_US: Get Document Raw Content
+    zh_Hans: 获取文档纯文本内容
+description:
+  human:
+    en_US: Get document raw content
+    zh_Hans: 获取文档纯文本内容
+  llm: A tool for getting the plain text content of Feishu documents
+parameters:
+  - name: document_id
+    type: string
+    required: true
+    label:
+      en_US: document_id
+      zh_Hans: 飞书文档的唯一标识
+    human_description:
+      en_US: Unique ID of Feishu document document_id
+      zh_Hans: 飞书文档的唯一标识 document_id
+    llm_description: 飞书文档的唯一标识 document_id
+    form: llm
@ -0,0 +1,19 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class ListDocumentBlockTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get('app_id')
+        app_secret = self.runtime.credentials.get('app_secret')
+        client = FeishuRequest(app_id, app_secret)
+
+        document_id = tool_parameters.get('document_id')
+        page_size = tool_parameters.get('page_size', 500)
+        page_token = tool_parameters.get('page_token', '')
+
+        res = client.list_document_block(document_id, page_token, page_size)
+        return self.create_json_message(res)
@ -0,0 +1,48 @@
+identity:
+  name: list_document_block
+  author: Doug Lea
+  label:
+    en_US: List Document Block
+    zh_Hans: 获取飞书文档所有块
+description:
+  human:
+    en_US: List document block
+    zh_Hans: 获取飞书文档所有块的富文本内容并分页返回。
+  llm: A tool to get all blocks of Feishu documents
+parameters:
+  - name: document_id
+    type: string
+    required: true
+    label:
+      en_US: document_id
+      zh_Hans: 飞书文档的唯一标识
+    human_description:
+      en_US: Unique ID of Feishu document document_id
+      zh_Hans: 飞书文档的唯一标识 document_id
+    llm_description: 飞书文档的唯一标识 document_id
+    form: llm
+
+  - name: page_size
+    type: number
+    required: false
+    default: 500
+    label:
+      en_US: page_size
+      zh_Hans: 分页大小
+    human_description:
+      en_US: Paging size, the default and maximum value is 500.
+      zh_Hans: 分页大小, 默认值和最大值为 500。
+    llm_description: 分页大小, 表示一次请求最多返回多少条数据,默认值和最大值为 500。
+    form: llm
+
+  - name: page_token
+    type: string
+    required: false
+    label:
+      en_US: page_token
+      zh_Hans: 分页标记
+    human_description:
+      en_US: Pagination tag, used to paginate query results so that more items can be obtained in the next traversal.
+      zh_Hans: 分页标记,用于分页查询结果,以便下次遍历时获取更多项。
+    llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。
+    form: llm
@ -0,0 +1,19 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class CreateDocumentTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get('app_id')
+        app_secret = self.runtime.credentials.get('app_secret')
+        client = FeishuRequest(app_id, app_secret)
+
+        document_id = tool_parameters.get('document_id')
+        content = tool_parameters.get('content')
+        position = tool_parameters.get('position')
+
+        res = client.write_document(document_id, content, position)
+        return self.create_json_message(res)
@ -0,0 +1,56 @@
+identity:
+  name: write_document
+  author: Doug Lea
+  label:
+    en_US: Write Document
+    zh_Hans: 在飞书文档中新增内容
+description:
+  human:
+    en_US: Adding new content to Lark documents
+    zh_Hans: 在飞书文档中新增内容
+  llm: A tool for adding new content to Lark documents.
+parameters:
+  - name: document_id
+    type: string
+    required: true
+    label:
+      en_US: document_id
+      zh_Hans: 飞书文档的唯一标识
+    human_description:
+      en_US: Unique ID of Feishu document document_id
+      zh_Hans: 飞书文档的唯一标识 document_id
+    llm_description: 飞书文档的唯一标识 document_id
+    form: llm
+
+  - name: content
+    type: string
+    required: true
+    label:
+      en_US: document content
+      zh_Hans: 文档内容
+    human_description:
+      en_US: Document content, supports markdown syntax, can be empty.
+      zh_Hans: 文档内容,支持 markdown 语法,可以为空。
+    llm_description:
+    form: llm
+
+  - name: position
+    type: select
+    required: true
+    default: start
+    label:
+      en_US: Choose where to add content
+      zh_Hans: 选择添加内容的位置
+    human_description:
+      en_US: Please fill in start or end to add content at the beginning or end of the document respectively.
+      zh_Hans: 请填入 start 或 end, 分别表示在文档开头(start)或结尾(end)添加内容。
+    form: llm
+    options:
+      - value: start
+        label:
+          en_US: start
+          zh_Hans: 在文档开头添加内容
+      - value: end
+        label:
+          en_US: end
+          zh_Hans: 在文档结尾添加内容
@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="64px" height="64px" viewBox="0 0 64 64" enable-background="new 0 0 64 64" xml:space="preserve"> <image id="image0" width="64" height="64" x="0" y="0"
+  xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAMAAACdt4HsAAAAIGNIUk0AAHomAACAhAAA+gAAAIDo
+AAB1MAAA6mAAADqYAAAXcJy6UTwAAAC9UExURf///////+bs/vL2/qa/+n+j+E1/9TNt9FmI9nOa
++Obt/sza/GaR97PI+9nk/aa/+5m2+oCk+Iyt+Yys+eXt/oCj+L/R+4yt+HOb+Ex/9TOA6jOi2jO8
+zTPJxzPWwDOa3eb69zN67X/l2DOb3TPPw0DZxLPv55nq4LPw6DOB6vL9+0B29TOo16bt4zPCynPj
+00zbyDN08WbgzzOH50DYxFmI9bLI+5nr34zn3OX699n384zo21ndyzTWwJnq37nAcdIAAAABdFJO
+U/4a4wd9AAAAAWJLR0QAiAUdSAAAAAlwSFlzAAAWJQAAFiUBSVIk8AAAAAd0SU1FB+gHEggfEk4D
+XiUAAAFOSURBVFjD7dVZU8IwFAXgpq2NtFFRUVTKtYC4gCvu6///WcCMI9Cc3CR2fLLn/XyT3KRp
+IComqIEa+GMgDMNfA1G8lsh51htx6g9kSi5HbfgBm6v1eZLUA9iSKE1nYFviqMgNMPVn44xcgB1p
+jnIAmpLLrhVoST6ZDdizAMoCZNKWjAdsC8BLWACRtS9lygH7DkDMAW0H4IADlANwyAEJUzzq5F2i
+bn5cMIC53svpJ/3CHxic0FKGp75Ah0o585uB1ic69zmFnt6nYQEBfA9yAFDf/SZeEMwIfgtjAFxi
+4AoBcA/XGLiBAHoPcJ9uISAaWv/OABAGWuOKgIgrbgHM0TDEiQnQHnavY0Tfwz0GCgMA/kweVxm/
+y2gJD4UJQJd5wE6gfIxlIXlsPz1rwIsRwNGFkR8gXicVASHe3j++u5+zfHlugU8N1MD/AQI2U2Cm
+Yux2lsz2AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDI0LTA3LTE4VDA4OjMxOjE4KzAwOjAwPdC6HgAA
+ACV0RVh0ZGF0ZTptb2RpZnkAMjAyNC0wNy0xOFQwODozMToxOCswMDowMEyNAqIAAAAodEVYdGRh
+dGU6dGltZXN0YW1wADIwMjQtMDctMThUMDg6MzE6MTgrMDA6MDAbmCN9AAAAAElFTkSuQmCC" />
+</svg>
(new SVG icon, 1.6 KiB)
@ -0,0 +1,15 @@
+from core.tools.errors import ToolProviderCredentialValidationError
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class FeishuMessageProvider(BuiltinToolProviderController):
+    def _validate_credentials(self, credentials: dict) -> None:
+        app_id = credentials.get('app_id')
+        app_secret = credentials.get('app_secret')
+        if not app_id or not app_secret:
+            raise ToolProviderCredentialValidationError("app_id and app_secret is required")
+        try:
+            assert FeishuRequest(app_id, app_secret).tenant_access_token is not None
+        except Exception as e:
+            raise ToolProviderCredentialValidationError(str(e))
@ -0,0 +1,34 @@
+identity:
+  author: Doug Lea
+  name: feishu_message
+  label:
+    en_US: Lark Message
+    zh_Hans: 飞书消息
+  description:
+    en_US: Lark Message
+    zh_Hans: 飞书消息
+  icon: icon.svg
+  tags:
+    - social
+    - productivity
+credentials_for_provider:
+  app_id:
+    type: text-input
+    required: true
+    label:
+      en_US: APP ID
+    placeholder:
+      en_US: Please input your feishu app id
+      zh_Hans: 请输入你的飞书 app id
+    help:
+      en_US: Get your app_id and app_secret from Feishu
+      zh_Hans: 从飞书获取您的 app_id 和 app_secret
+    url: https://open.feishu.cn
+  app_secret:
+    type: secret-input
+    required: true
+    label:
+      en_US: APP Secret
+    placeholder:
+      en_US: Please input your app secret
+      zh_Hans: 请输入你的飞书 app secret
@ -0,0 +1,20 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class SendBotMessageTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get('app_id')
+        app_secret = self.runtime.credentials.get('app_secret')
+        client = FeishuRequest(app_id, app_secret)
+
+        receive_id_type = tool_parameters.get('receive_id_type')
+        receive_id = tool_parameters.get('receive_id')
+        msg_type = tool_parameters.get('msg_type')
+        content = tool_parameters.get('content')
+
+        res = client.send_bot_message(receive_id_type, receive_id, msg_type, content)
+        return self.create_json_message(res)
@ -0,0 +1,91 @@
+identity:
+  name: send_bot_message
+  author: Doug Lea
+  label:
+    en_US: Send Bot Message
+    zh_Hans: 发送飞书应用消息
+description:
+  human:
+    en_US: Send bot message
+    zh_Hans: 发送飞书应用消息
+  llm: A tool for sending Feishu application messages.
+parameters:
+  - name: receive_id_type
+    type: select
+    required: true
+    options:
+      - value: open_id
+        label:
+          en_US: open id
+          zh_Hans: open id
+      - value: union_id
+        label:
+          en_US: union id
+          zh_Hans: union id
+      - value: user_id
+        label:
+          en_US: user id
+          zh_Hans: user id
+      - value: email
+        label:
+          en_US: email
+          zh_Hans: email
+      - value: chat_id
+        label:
+          en_US: chat id
+          zh_Hans: chat id
+    label:
+      en_US: User ID Type
+      zh_Hans: 用户 ID 类型
+    human_description:
+      en_US: User ID Type
+      zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。
+    llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。
+    form: llm
+
+  - name: receive_id
+    type: string
+    required: true
+    label:
+      en_US: Receive Id
+      zh_Hans: 消息接收者的 ID
+    human_description:
+      en_US: The ID of the message receiver. The ID type should correspond to the query parameter receive_id_type.
+      zh_Hans: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。
+    llm_description: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。
+    form: llm
+
+  - name: msg_type
+    type: string
+    required: true
+    options:
+      - value: text
+        label:
+          en_US: text
+          zh_Hans: 文本
+      - value: interactive
+        label:
+          en_US: message card
+          zh_Hans: 消息卡片
+    label:
+      en_US: Message type
+      zh_Hans: 消息类型
+    human_description:
+      en_US: Message type, optional values are, text (text), interactive (message card).
+      zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。
+    llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。
+    form: llm
+
+  - name: content
+    type: string
+    required: true
+    label:
+      en_US: Message content
+      zh_Hans: 消息内容
+    human_description:
+      en_US: Message content
+      zh_Hans: |
+        消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容,
+        具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json
+    llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。
+    form: llm
@ -0,0 +1,19 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class SendWebhookMessageTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get('app_id')
+        app_secret = self.runtime.credentials.get('app_secret')
+        client = FeishuRequest(app_id, app_secret)
+
+        webhook = tool_parameters.get('webhook')
+        msg_type = tool_parameters.get('msg_type')
+        content = tool_parameters.get('content')
+
+        res = client.send_webhook_message(webhook, msg_type, content)
+        return self.create_json_message(res)
@ -0,0 +1,58 @@
+identity:
+  name: send_webhook_message
+  author: Doug Lea
+  label:
+    en_US: Send Webhook Message
+    zh_Hans: 使用自定义机器人发送飞书消息
+description:
+  human:
+    en_US: Send webhook message
+    zh_Hans: 使用自定义机器人发送飞书消息
+  llm: A tool for sending Lark messages using a custom robot.
+parameters:
+  - name: webhook
+    type: string
+    required: true
+    label:
+      en_US: webhook
+      zh_Hans: webhook 的地址
+    human_description:
+      en_US: The address of the webhook
+      zh_Hans: webhook 的地址
+    llm_description: webhook 的地址
+    form: llm
+
+  - name: msg_type
+    type: string
+    required: true
+    options:
+      - value: text
+        label:
+          en_US: text
+          zh_Hans: 文本
+      - value: interactive
+        label:
+          en_US: message card
+          zh_Hans: 消息卡片
+    label:
+      en_US: Message type
+      zh_Hans: 消息类型
+    human_description:
+      en_US: Message type, optional values are, text (text), interactive (message card).
+      zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。
+    llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。
+    form: llm
+
+  - name: content
+    type: string
+    required: true
+    label:
+      en_US: Message content
+      zh_Hans: 消息内容
+    human_description:
+      en_US: Message content
+      zh_Hans: |
+        消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容,
+        具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json
+    llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。
+    form: llm
@ -85,7 +85,7 @@ parameters:
   - name: guidance_scale
     type: number
     required: true
-    default: 7
+    default: 7.5
     min: 0
     max: 100
     label:
143
api/core/tools/utils/feishu_api_utils.py
Normal file
143
api/core/tools/utils/feishu_api_utils.py
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
import httpx
|
||||||
|
|
||||||
|
from extensions.ext_redis import redis_client
|
||||||
|
|
||||||
|
|
||||||
|
class FeishuRequest:
|
||||||
|
def __init__(self, app_id: str, app_secret: str):
|
||||||
|
self.app_id = app_id
|
||||||
|
self.app_secret = app_secret
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tenant_access_token(self):
|
||||||
|
feishu_tenant_access_token = f"tools:{self.app_id}:feishu_tenant_access_token"
|
||||||
|
if redis_client.exists(feishu_tenant_access_token):
|
||||||
|
return redis_client.get(feishu_tenant_access_token).decode()
|
||||||
|
res = self.get_tenant_access_token(self.app_id, self.app_secret)
|
||||||
|
redis_client.setex(feishu_tenant_access_token, res.get("expire"), res.get("tenant_access_token"))
|
||||||
|
return res.get("tenant_access_token")
|
||||||
|
|
||||||
|
def _send_request(self, url: str, method: str = "post", require_token: bool = True, payload: dict = None,
|
||||||
|
params: dict = None):
|
||||||
|
headers = {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"user-agent": "Dify",
|
||||||
|
}
|
||||||
|
if require_token:
|
||||||
|
headers["tenant-access-token"] = f"{self.tenant_access_token}"
|
||||||
|
res = httpx.request(method=method, url=url, headers=headers, json=payload, params=params, timeout=30).json()
|
||||||
|
if res.get("code") != 0:
|
||||||
|
raise Exception(res)
|
||||||
|
        return res

    def get_tenant_access_token(self, app_id: str, app_secret: str) -> dict:
        """
        API url: https://open.feishu.cn/document/server-docs/authentication-management/access-token/tenant_access_token_internal
        Example Response:
        {
            "code": 0,
            "msg": "ok",
            "tenant_access_token": "t-caecc734c2e3328a62489fe0648c4b98779515d3",
            "expire": 7200
        }
        """
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/access_token/get_tenant_access_token"
        payload = {
            "app_id": app_id,
            "app_secret": app_secret
        }
        res = self._send_request(url, require_token=False, payload=payload)
        return res

    def create_document(self, title: str, content: str, folder_token: str) -> dict:
        """
        API url: https://open.larkoffice.com/document/server-docs/docs/docs/docx-v1/document/create
        Example Response:
        {
            "data": {
                "title": "title",
                "url": "https://svi136aogf123.feishu.cn/docx/VWbvd4fEdoW0WSxaY1McQTz8n7d",
                "type": "docx",
                "token": "VWbvd4fEdoW0WSxaY1McQTz8n7d"
            },
            "log_id": "021721281231575fdbddc0200ff00060a9258ec0000103df61b5d",
            "code": 0,
            "msg": "创建飞书文档成功,请查看"
        }
        """
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/create_document"
        payload = {
            "title": title,
            "content": content,
            "folder_token": folder_token,
        }
        res = self._send_request(url, payload=payload)
        return res.get("data")

    def write_document(self, document_id: str, content: str, position: str = "start") -> dict:
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/write_document"
        payload = {
            "document_id": document_id,
            "content": content,
            "position": position
        }
        res = self._send_request(url, payload=payload)
        return res.get("data")

    def get_document_raw_content(self, document_id: str) -> dict:
        """
        API url: https://open.larkoffice.com/document/server-docs/docs/docs/docx-v1/document/raw_content
        Example Response:
        {
            "code": 0,
            "msg": "success",
            "data": {
                "content": "云文档\n多人实时协同,插入一切元素。不仅是在线文档,更是强大的创作和互动工具\n云文档:专为协作而生\n"
            }
        }
        """
        params = {
            "document_id": document_id,
        }
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/get_document_raw_content"
        res = self._send_request(url, method="get", params=params)
        return res.get("data").get("content")

    def list_document_block(self, document_id: str, page_token: str, page_size: int = 500) -> dict:
        """
        API url: https://open.larkoffice.com/document/server-docs/docs/docs/docx-v1/document/list
        """
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/list_document_block"
        params = {
            "document_id": document_id,
            "page_size": page_size,
            "page_token": page_token,
        }
        res = self._send_request(url, method="get", params=params)
        return res.get("data")

    def send_bot_message(self, receive_id_type: str, receive_id: str, msg_type: str, content: str) -> dict:
        """
        API url: https://open.larkoffice.com/document/server-docs/im-v1/message/create
        """
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/message/send_bot_message"
        params = {
            "receive_id_type": receive_id_type,
        }
        payload = {
            "receive_id": receive_id,
            "msg_type": msg_type,
            "content": content,
        }
        res = self._send_request(url, params=params, payload=payload)
        return res.get("data")

    def send_webhook_message(self, webhook: str, msg_type: str, content: str) -> dict:
        url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/message/send_webhook_message"
        payload = {
            "webhook": webhook,
            "msg_type": msg_type,
            "content": content,
        }
        res = self._send_request(url, require_token=False, payload=payload)
        return res
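Note: the helpers above all funnel through an internal _send_request against the lark-plugin gateway. A minimal standalone sketch of the same token call, using requests directly; the timeout value and the error-raising convention here are assumptions for illustration, not part of this change:

# Hedged sketch only: drives the same gateway the methods above wrap.
import requests

PLUGIN_BASE = "https://lark-plugin-api.solutionsuite.cn/lark-plugin"

def fetch_tenant_access_token(app_id: str, app_secret: str) -> str:
    resp = requests.post(
        f"{PLUGIN_BASE}/access_token/get_tenant_access_token",
        json={"app_id": app_id, "app_secret": app_secret},
        timeout=30,  # assumed; not specified by this diff
    )
    resp.raise_for_status()
    data = resp.json()
    if data.get("code") != 0:  # Feishu-style envelope: code 0 means success
        raise RuntimeError(data.get("msg", "unknown error"))
    return data["tenant_access_token"]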
@@ -26,7 +26,6 @@ def load_yaml_file(file_path: str, ignore_error: bool = True, default_value: Any
             raise YAMLError(f'Failed to load YAML file {file_path}: {e}')
     except Exception as e:
         if ignore_error:
-            logger.debug(f'Failed to load YAML file {file_path}: {e}')
             return default_value
         else:
             raise e

@@ -88,9 +88,9 @@ class CodeNode(BaseNode):
             else:
                 raise ValueError(f"Output variable `{variable}` must be a string")

-        if len(value) > dify_config.CODE_MAX_STRING_ARRAY_LENGTH:
+        if len(value) > dify_config.CODE_MAX_STRING_LENGTH:
             raise ValueError(f'The length of output variable `{variable}` must be'
-                             f' less than {dify_config.CODE_MAX_STRING_ARRAY_LENGTH} characters')
+                             f' less than {dify_config.CODE_MAX_STRING_LENGTH} characters')

         return value.replace('\x00', '')

@@ -5,10 +5,6 @@ from pydantic import BaseModel, ValidationInfo, field_validator
 from configs import dify_config
 from core.workflow.entities.base_node_data_entities import BaseNodeData

-MAX_CONNECT_TIMEOUT = dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT
-MAX_READ_TIMEOUT = dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT
-MAX_WRITE_TIMEOUT = dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT
-
-
 class HttpRequestNodeAuthorizationConfig(BaseModel):
     type: Literal[None, 'basic', 'bearer', 'custom']

@@ -41,9 +37,9 @@ class HttpRequestNodeBody(BaseModel):


 class HttpRequestNodeTimeout(BaseModel):
-    connect: int = MAX_CONNECT_TIMEOUT
+    connect: int = dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT
-    read: int = MAX_READ_TIMEOUT
+    read: int = dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT
-    write: int = MAX_WRITE_TIMEOUT
+    write: int = dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT


 class HttpRequestNodeData(BaseNodeData):

@@ -4,15 +4,13 @@ from mimetypes import guess_extension
 from os import path
 from typing import Any, cast

+from configs import dify_config
 from core.app.segments import parser
 from core.file.file_obj import FileTransferMethod, FileType, FileVar
 from core.tools.tool_file_manager import ToolFileManager
 from core.workflow.entities.node_entities import NodeRunResult, NodeType
 from core.workflow.nodes.base_node import BaseNode
 from core.workflow.nodes.http_request.entities import (
-    MAX_CONNECT_TIMEOUT,
-    MAX_READ_TIMEOUT,
-    MAX_WRITE_TIMEOUT,
     HttpRequestNodeData,
     HttpRequestNodeTimeout,
 )

@@ -20,9 +18,9 @@ from core.workflow.nodes.http_request.http_executor import HttpExecutor, HttpExe
 from models.workflow import WorkflowNodeExecutionStatus

 HTTP_REQUEST_DEFAULT_TIMEOUT = HttpRequestNodeTimeout(
-    connect=min(10, MAX_CONNECT_TIMEOUT),
+    connect=min(10, dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT),
-    read=min(60, MAX_READ_TIMEOUT),
+    read=min(60, dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT),
-    write=min(20, MAX_WRITE_TIMEOUT),
+    write=min(20, dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT),
 )

@@ -42,9 +40,9 @@ class HttpRequestNode(BaseNode):
             'body': {'type': 'none'},
             'timeout': {
                 **HTTP_REQUEST_DEFAULT_TIMEOUT.model_dump(),
-                'max_connect_timeout': MAX_CONNECT_TIMEOUT,
+                'max_connect_timeout': dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
-                'max_read_timeout': MAX_READ_TIMEOUT,
+                'max_read_timeout': dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
-                'max_write_timeout': MAX_WRITE_TIMEOUT,
+                'max_write_timeout': dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
             },
         },
     }

@@ -96,17 +94,18 @@ class HttpRequestNode(BaseNode):
             },
         )

-    def _get_request_timeout(self, node_data: HttpRequestNodeData) -> HttpRequestNodeTimeout:
+    @staticmethod
+    def _get_request_timeout(node_data: HttpRequestNodeData) -> HttpRequestNodeTimeout:
         timeout = node_data.timeout
         if timeout is None:
             return HTTP_REQUEST_DEFAULT_TIMEOUT

-        timeout.connect = timeout.connect or HTTP_REQUEST_DEFAULT_TIMEOUT.connect
-        timeout.connect = min(timeout.connect, MAX_CONNECT_TIMEOUT)
+        timeout.connect = min(timeout.connect or HTTP_REQUEST_DEFAULT_TIMEOUT.connect,
+                              dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT)
-        timeout.read = timeout.read or HTTP_REQUEST_DEFAULT_TIMEOUT.read
-        timeout.read = min(timeout.read, MAX_READ_TIMEOUT)
+        timeout.read = min(timeout.read or HTTP_REQUEST_DEFAULT_TIMEOUT.read,
+                           dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT)
-        timeout.write = timeout.write or HTTP_REQUEST_DEFAULT_TIMEOUT.write
-        timeout.write = min(timeout.write, MAX_WRITE_TIMEOUT)
+        timeout.write = min(timeout.write or HTTP_REQUEST_DEFAULT_TIMEOUT.write,
+                            dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT)
         return timeout

     @classmethod
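Note: the timeout hunks above replace the module-level MAX_*_TIMEOUT constants with direct dify_config reads (so config changes take effect without re-importing the module) and collapse the default-then-cap logic into a single min(...) per field. A minimal sketch of that clamp rule, with illustrative numbers:

# Hedged sketch of the rule _get_request_timeout now applies per field:
# fall back to the default when unset, then cap at the configured maximum.
from typing import Optional

def clamp_timeout(value: Optional[int], default: int, maximum: int) -> int:
    return min(value or default, maximum)

assert clamp_timeout(None, 10, 300) == 10   # unset -> default
assert clamp_timeout(600, 60, 300) == 300   # above the cap -> maximum
assert clamp_timeout(30, 60, 300) == 30     # within range -> unchanged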
@@ -17,7 +17,7 @@ class AdvancedSettings(BaseModel):
     """
     Group.
     """
-    output_type: Literal['string', 'number', 'array', 'object']
+    output_type: Literal['string', 'number', 'object', 'array[string]', 'array[number]', 'array[object]']
     variables: list[list[str]]
     group_name: str

@@ -17,6 +17,8 @@ def handle(sender, **kwargs):
         default_language=account.interface_language,
         customize_token_strategy="not_allow",
         code=Site.generate_code(16),
+        created_by=app.created_by,
+        updated_by=app.updated_by,
     )

     db.session.add(site)

@@ -35,6 +35,9 @@ class S3Storage(BaseStorage):
             # if bucket not exists, create it
             if e.response["Error"]["Code"] == "404":
                 self.client.create_bucket(Bucket=self.bucket_name)
+            # if bucket is not accessible, pass, maybe the bucket is existing but not accessible
+            elif e.response["Error"]["Code"] == "403":
+                pass
             else:
                 # other error, raise exception
                 raise
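Note: the new 403 branch above distinguishes "bucket exists but is not accessible to this identity" from "bucket missing". A self-contained sketch of the surrounding head_bucket flow, assuming boto3; the function name and client setup are illustrative, not from this change:

# Hedged sketch, not the project's actual class: probe a bucket the way the
# hunk above does, treating 404 as "create it" and 403 as "exists elsewhere".
import boto3
from botocore.exceptions import ClientError

def ensure_bucket(client, bucket_name: str) -> None:
    try:
        client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        code = e.response["Error"]["Code"]
        if code == "404":
            client.create_bucket(Bucket=bucket_name)
        elif code == "403":
            pass  # bucket likely exists but is not accessible; let later calls surface real errors
        else:
            raise

# ensure_bucket(boto3.client("s3"), "my-bucket")  # illustrative call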
@@ -1,5 +1,6 @@
 from flask_restful import fields

+from fields.workflow_fields import workflow_partial_fields
 from libs.helper import AppIconUrlField, TimestampField

 app_detail_kernel_fields = {

@@ -39,7 +40,10 @@ model_config_fields = {
     "completion_prompt_config": fields.Raw(attribute="completion_prompt_config_dict"),
     "dataset_configs": fields.Raw(attribute="dataset_configs_dict"),
     "file_upload": fields.Raw(attribute="file_upload_dict"),
+    "created_by": fields.String,
     "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
 }

 app_detail_fields = {

@@ -52,8 +56,12 @@ app_detail_fields = {
     "enable_site": fields.Boolean,
     "enable_api": fields.Boolean,
     "model_config": fields.Nested(model_config_fields, attribute="app_model_config", allow_null=True),
+    "workflow": fields.Nested(workflow_partial_fields, allow_null=True),
     "tracing": fields.Raw,
+    "created_by": fields.String,
     "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
 }

 prompt_config_fields = {

@@ -63,6 +71,10 @@ prompt_config_fields = {
 model_config_partial_fields = {
     "model": fields.Raw(attribute="model_dict"),
     "pre_prompt": fields.String,
+    "created_by": fields.String,
+    "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
 }

 tag_fields = {"id": fields.String, "name": fields.String, "type": fields.String}

@@ -78,7 +90,11 @@ app_partial_fields = {
     "icon_background": fields.String,
     "icon_url": AppIconUrlField,
     "model_config": fields.Nested(model_config_partial_fields, attribute="app_model_config", allow_null=True),
+    "workflow": fields.Nested(workflow_partial_fields, allow_null=True),
+    "created_by": fields.String,
     "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
     "tags": fields.List(fields.Nested(tag_fields)),
 }

@@ -124,6 +140,10 @@ site_fields = {
     "prompt_public": fields.Boolean,
     "app_base_url": fields.String,
     "show_workflow_steps": fields.Boolean,
+    "created_by": fields.String,
+    "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
 }

 app_detail_fields_with_site = {

@@ -138,9 +158,13 @@ app_detail_fields_with_site = {
     "enable_site": fields.Boolean,
     "enable_api": fields.Boolean,
     "model_config": fields.Nested(model_config_fields, attribute="app_model_config", allow_null=True),
+    "workflow": fields.Nested(workflow_partial_fields, allow_null=True),
     "site": fields.Nested(site_fields),
     "api_base_url": fields.String,
+    "created_by": fields.String,
     "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
     "deleted_tools": fields.List(fields.String),
 }

@@ -111,6 +111,7 @@ conversation_fields = {
     "from_end_user_id": fields.String,
     "from_end_user_session_id": fields.String(),
     "from_account_id": fields.String,
+    "from_account_name": fields.String,
     "read_at": TimestampField,
     "created_at": TimestampField,
     "annotation": fields.Nested(annotation_fields, allow_null=True),

@@ -146,6 +147,7 @@ conversation_with_summary_fields = {
     "from_end_user_id": fields.String,
     "from_end_user_session_id": fields.String,
     "from_account_id": fields.String,
+    "from_account_name": fields.String,
     "name": fields.String,
     "summary": fields.String(attribute="summary_or_query"),
     "read_at": TimestampField,

@@ -53,3 +53,11 @@ workflow_fields = {
     "environment_variables": fields.List(EnvironmentVariableField()),
     "conversation_variables": fields.List(fields.Nested(conversation_variable_fields)),
 }
+
+workflow_partial_fields = {
+    "id": fields.String,
+    "created_by": fields.String,
+    "created_at": TimestampField,
+    "updated_by": fields.String,
+    "updated_at": TimestampField,
+}
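Note: the new workflow_partial_fields above is a flask_restful field map; marshalling projects an object through the map and drops any keys the map does not name, which is how the "workflow" sub-objects in the app fields stay partial. A small illustrative sketch (the sample dict is made up):

# Hedged sketch: how flask_restful's marshal() applies a field map like
# workflow_partial_fields; unnamed keys are filtered out of the result.
from flask_restful import fields, marshal

partial = {"id": fields.String, "created_by": fields.String}
print(marshal({"id": "wf-1", "created_by": "user-1", "secret": "dropped"}, partial))
# -> OrderedDict([('id', 'wf-1'), ('created_by', 'user-1')])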
@@ -0,0 +1,52 @@
+"""add created_by and updated_by to app, modelconfig, and site
+
+Revision ID: d0187d6a88dd
+Revises: 2dbe42621d96
+Create Date: 2024-08-25 04:41:18.157397
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+import models as models
+
+# revision identifiers, used by Alembic.
+revision = "d0187d6a88dd"
+down_revision = "2dbe42621d96"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("app_model_configs", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
+        batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
+
+    with op.batch_alter_table("apps", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
+        batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
+
+    with op.batch_alter_table("sites", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
+        batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("sites", schema=None) as batch_op:
+        batch_op.drop_column("updated_by")
+        batch_op.drop_column("created_by")
+
+    with op.batch_alter_table("apps", schema=None) as batch_op:
+        batch_op.drop_column("updated_by")
+        batch_op.drop_column("created_by")
+
+    with op.batch_alter_table("app_model_configs", schema=None) as batch_op:
+        batch_op.drop_column("updated_by")
+        batch_op.drop_column("created_by")
+
+    # ### end Alembic commands ###

@@ -82,7 +82,9 @@ class App(db.Model):
     is_universal = db.Column(db.Boolean, nullable=False, server_default=db.text('false'))
     tracing = db.Column(db.Text, nullable=True)
     max_active_requests = db.Column(db.Integer, nullable=True)
+    created_by = db.Column(StringUUID, nullable=True)
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
+    updated_by = db.Column(StringUUID, nullable=True)
     updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))

     @property

@@ -221,7 +223,9 @@ class AppModelConfig(db.Model):
     provider = db.Column(db.String(255), nullable=True)
     model_id = db.Column(db.String(255), nullable=True)
     configs = db.Column(db.JSON, nullable=True)
+    created_by = db.Column(StringUUID, nullable=True)
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
+    updated_by = db.Column(StringUUID, nullable=True)
     updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
     opening_statement = db.Column(db.Text)
     suggested_questions = db.Column(db.Text)

@@ -490,7 +494,6 @@ class InstalledApp(db.Model):
         return tenant


-
 class Conversation(db.Model):
     __tablename__ = 'conversations'
     __table_args__ = (

@@ -623,6 +626,15 @@ class Conversation(db.Model):

         return None

+    @property
+    def from_account_name(self):
+        if self.from_account_id:
+            account = db.session.query(Account).filter(Account.id == self.from_account_id).first()
+            if account:
+                return account.name
+
+        return None
+
     @property
     def in_debug_mode(self):
         return self.override_model_configs is not None

@@ -1107,7 +1119,9 @@ class Site(db.Model):
     customize_token_strategy = db.Column(db.String(255), nullable=False)
     prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text('false'))
     status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
+    created_by = db.Column(StringUUID, nullable=True)
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
+    updated_by = db.Column(StringUUID, nullable=True)
     updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
     code = db.Column(db.String(255))
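Note: the d0187d6a88dd revision above is a plain Alembic revision backing the nullable audit columns added to the models. A hedged sketch of applying it programmatically; this assumes an alembic.ini in the working directory, and the project may instead drive migrations through its own wrapper:

# Hedged sketch: programmatic equivalent of `alembic upgrade d0187d6a88dd`.
from alembic.config import main as alembic_main

alembic_main(argv=["upgrade", "d0187d6a88dd"])      # add the nullable audit columns
# alembic_main(argv=["downgrade", "2dbe42621d96"])  # revert to the previous revision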
api/poetry.lock (generated, 406 changes)
@@ -551,6 +551,69 @@ files = [
 [package.dependencies]
 cryptography = "*"

+[[package]]
+name = "azure-ai-inference"
+version = "1.0.0b3"
+description = "Microsoft Azure Ai Inference Client Library for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "azure-ai-inference-1.0.0b3.tar.gz", hash = "sha256:1e99dc74c3b335a457500311bbbadb348f54dc4c12252a93cb8ab78d6d217ff0"},
+    {file = "azure_ai_inference-1.0.0b3-py3-none-any.whl", hash = "sha256:6734ca7334c809a170beb767f1f1455724ab3f006cb60045e42a833c0e764403"},
+]
+
+[package.dependencies]
+azure-core = ">=1.30.0"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.6.0"
+
+[[package]]
+name = "azure-ai-ml"
+version = "1.19.0"
+description = "Microsoft Azure Machine Learning Client Library for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "azure-ai-ml-1.19.0.tar.gz", hash = "sha256:94bb1afbb0497e539ae75455fc4a51b6942b5b68b3a275727ecce6ceb250eff9"},
+    {file = "azure_ai_ml-1.19.0-py3-none-any.whl", hash = "sha256:f0385af06efbeae1f83113613e45343508d1288fd2f05857619e7c7d4d4f5302"},
+]
+
+[package.dependencies]
+azure-common = ">=1.1"
+azure-core = ">=1.23.0"
+azure-mgmt-core = ">=1.3.0"
+azure-storage-blob = ">=12.10.0"
+azure-storage-file-datalake = ">=12.2.0"
+azure-storage-file-share = "*"
+colorama = "*"
+isodate = "*"
+jsonschema = ">=4.0.0"
+marshmallow = ">=3.5"
+msrest = ">=0.6.18"
+opencensus-ext-azure = "*"
+opencensus-ext-logging = "*"
+pydash = ">=6.0.0"
+pyjwt = "*"
+pyyaml = ">=5.1.0"
+strictyaml = "*"
+tqdm = "*"
+typing-extensions = "*"
+
+[package.extras]
+designer = ["mldesigner"]
+mount = ["azureml-dataprep-rslex (>=2.22.0)"]
+
+[[package]]
+name = "azure-common"
+version = "1.1.28"
+description = "Microsoft Azure Client Library for Python (Common)"
+optional = false
+python-versions = "*"
+files = [
+    {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"},
+    {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"},
+]
+
 [[package]]
 name = "azure-core"
 version = "1.30.2"

@@ -587,6 +650,20 @@ cryptography = ">=2.5"
 msal = ">=1.24.0"
 msal-extensions = ">=0.3.0"

+[[package]]
+name = "azure-mgmt-core"
+version = "1.4.0"
+description = "Microsoft Azure Management Core Library for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "azure-mgmt-core-1.4.0.zip", hash = "sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae"},
+    {file = "azure_mgmt_core-1.4.0-py3-none-any.whl", hash = "sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d"},
+]
+
+[package.dependencies]
+azure-core = ">=1.26.2,<2.0.0"
+
 [[package]]
 name = "azure-storage-blob"
 version = "12.13.0"

@@ -603,6 +680,42 @@ azure-core = ">=1.23.1,<2.0.0"
 cryptography = ">=2.1.4"
 msrest = ">=0.6.21"

+[[package]]
+name = "azure-storage-file-datalake"
+version = "12.8.0"
+description = "Microsoft Azure File DataLake Storage Client Library for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "azure-storage-file-datalake-12.8.0.zip", hash = "sha256:12e6306e5efb5ca28e0ccd9fa79a2c61acd589866d6109fe5601b18509da92f4"},
+    {file = "azure_storage_file_datalake-12.8.0-py3-none-any.whl", hash = "sha256:b6cf5733fe794bf3c866efbe3ce1941409e35b6b125028ac558b436bf90f2de7"},
+]
+
+[package.dependencies]
+azure-core = ">=1.23.1,<2.0.0"
+azure-storage-blob = ">=12.13.0,<13.0.0"
+msrest = ">=0.6.21"
+
+[[package]]
+name = "azure-storage-file-share"
+version = "12.17.0"
+description = "Microsoft Azure Azure File Share Storage Client Library for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "azure-storage-file-share-12.17.0.tar.gz", hash = "sha256:f7b2c6cfc1b7cb80097a53b1ed2efa9e545b49a291430d369cdb49fafbc841d6"},
+    {file = "azure_storage_file_share-12.17.0-py3-none-any.whl", hash = "sha256:c4652759a9d529bf08881bb53275bf38774bb643746b849d27c47118f9cf923d"},
+]
+
+[package.dependencies]
+azure-core = ">=1.28.0"
+cryptography = ">=2.1.4"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+aio = ["azure-core[aio] (>=1.28.0)"]
+
 [[package]]
 name = "backoff"
 version = "2.2.1"

@@ -3952,6 +4065,41 @@ files = [
 [package.dependencies]
 ply = "*"

+[[package]]
+name = "jsonschema"
+version = "4.23.0"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
+    {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+jsonschema-specifications = ">=2023.03.6"
+referencing = ">=0.28.4"
+rpds-py = ">=0.7.1"
+
+[package.extras]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2023.12.1"
+description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
+    {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
+]
+
+[package.dependencies]
+referencing = ">=0.31.0"
+
 [[package]]
 name = "kaleido"
 version = "0.2.1"

@@ -5277,6 +5425,65 @@ typing-extensions = ">=4.7,<5"
 [package.extras]
 datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]

+[[package]]
+name = "opencensus"
+version = "0.11.4"
+description = "A stats collection and distributed tracing framework"
+optional = false
+python-versions = "*"
+files = [
+    {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"},
+    {file = "opencensus-0.11.4.tar.gz", hash = "sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"},
+]
+
+[package.dependencies]
+google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""}
+opencensus-context = ">=0.1.3"
+six = ">=1.16,<2.0"
+
+[[package]]
+name = "opencensus-context"
+version = "0.1.3"
+description = "OpenCensus Runtime Context"
+optional = false
+python-versions = "*"
+files = [
+    {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"},
+    {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"},
+]
+
+[[package]]
+name = "opencensus-ext-azure"
+version = "1.1.13"
+description = "OpenCensus Azure Monitor Exporter"
+optional = false
+python-versions = "*"
+files = [
+    {file = "opencensus-ext-azure-1.1.13.tar.gz", hash = "sha256:aec30472177005379ba56a702a097d618c5f57558e1bb6676ec75f948130692a"},
+    {file = "opencensus_ext_azure-1.1.13-py2.py3-none-any.whl", hash = "sha256:06001fac6f8588ba00726a3a7c6c7f2fc88bc8ad12a65afdca657923085393dd"},
+]
+
+[package.dependencies]
+azure-core = ">=1.12.0,<2.0.0"
+azure-identity = ">=1.5.0,<2.0.0"
+opencensus = ">=0.11.4,<1.0.0"
+psutil = ">=5.6.3"
+requests = ">=2.19.0"
+
+[[package]]
+name = "opencensus-ext-logging"
+version = "0.1.1"
+description = "OpenCensus logging Integration"
+optional = false
+python-versions = "*"
+files = [
+    {file = "opencensus-ext-logging-0.1.1.tar.gz", hash = "sha256:c203b70f034151dada529f543af330ba17aaffec27d8a5267d03c713eb1de334"},
+    {file = "opencensus_ext_logging-0.1.1-py2.py3-none-any.whl", hash = "sha256:cfdaf5da5d8b195ff3d1af87a4066a6621a28046173f6be4b0b6caec4a3ca89f"},
+]
+
+[package.dependencies]
+opencensus = ">=0.8.0,<1.0.0"
+
 [[package]]
 name = "openpyxl"
 version = "3.1.5"

@@ -6021,6 +6228,35 @@ files = [
 {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"},
 ]

+[[package]]
+name = "psutil"
+version = "6.0.0"
+description = "Cross-platform lib for process and system monitoring in Python."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+    {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"},
+    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"},
+    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"},
+    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"},
+    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"},
+    {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"},
+    {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"},
+    {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"},
+    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"},
+    {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"},
+    {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"},
+    {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"},
+    {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"},
+    {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"},
+    {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
 [[package]]
 name = "psycopg2-binary"
 version = "2.9.9"

@@ -6403,6 +6639,23 @@ azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0
 toml = ["tomli (>=2.0.1)"]
 yaml = ["pyyaml (>=6.0.1)"]

+[[package]]
+name = "pydash"
+version = "8.0.3"
+description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pydash-8.0.3-py3-none-any.whl", hash = "sha256:c16871476822ee6b59b87e206dd27888240eff50a7b4cd72a4b80b43b6b994d7"},
+    {file = "pydash-8.0.3.tar.gz", hash = "sha256:1b27cd3da05b72f0e5ff786c523afd82af796936462e631ffd1b228d91f8b9aa"},
+]
+
+[package.dependencies]
+typing-extensions = ">3.10,<4.6.0 || >4.6.0"
+
+[package.extras]
+dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"]
+
 [[package]]
 name = "pygments"
 version = "2.18.0"

@@ -6568,13 +6821,13 @@ files = [

 [[package]]
 name = "pytest"
-version = "8.1.2"
+version = "8.3.2"
 description = "pytest: simple powerful testing with Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest-8.1.2-py3-none-any.whl", hash = "sha256:6c06dc309ff46a05721e6fd48e492a775ed8165d2ecdf57f156a80c7e95bb142"},
+    {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
-    {file = "pytest-8.1.2.tar.gz", hash = "sha256:f3c45d1d5eed96b01a2aea70dee6a4a366d51d38f9957768083e4fecfc77f3ef"},
+    {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
 ]

 [package.dependencies]

@@ -6582,11 +6835,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
 exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
-pluggy = ">=1.4,<2.0"
+pluggy = ">=1.5,<2"
 tomli = {version = ">=1", markers = "python_version < \"3.11\""}

 [package.extras]
-testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]

 [[package]]
 name = "pytest-benchmark"

@@ -7170,6 +7423,21 @@ hiredis = {version = ">1.0.0", optional = true, markers = "extra == \"hiredis\""
 hiredis = ["hiredis (>1.0.0)"]
 ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]

+[[package]]
+name = "referencing"
+version = "0.35.1"
+description = "JSON Referencing + Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
+    {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+rpds-py = ">=0.7.0"
+
 [[package]]
 name = "regex"
 version = "2024.7.24"

@@ -7377,6 +7645,118 @@ pygments = ">=2.13.0,<3.0.0"
 [package.extras]
 jupyter = ["ipywidgets (>=7.5.1,<9)"]

+[[package]]
+name = "rpds-py"
+version = "0.20.0"
+description = "Python bindings to Rust's persistent data structures (rpds)"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
+    {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
+    {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
+    {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
+    {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
+    {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
+    {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
+    {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
+    {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
+    {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
+    {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
+    {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
+    {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
+    {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
+    {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
+    {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
+    {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
+    {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
+    {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
+    {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
+    {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
+    {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
+    {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
+    {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
+    {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"},
+    {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"},
+    {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"},
+    {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"},
+    {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"},
+    {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"},
+    {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"},
+    {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"},
+    {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"},
+    {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"},
+    {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"},
+    {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"},
+    {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"},
+    {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"},
+    {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"},
+    {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"},
+    {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"},
+    {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"},
+    {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"},
+    {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"},
+    {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"},
+    {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"},
+    {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"},
+    {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
+    {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"},
+    {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"},
+    {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
+]
+
 [[package]]
 name = "rsa"
 version = "4.9"

@@ -7987,6 +8367,20 @@ anyio = ">=3.4.0,<5"
 [package.extras]
 full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]

+[[package]]
+name = "strictyaml"
+version = "1.7.3"
+description = "Strict, typed YAML parser"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "strictyaml-1.7.3-py3-none-any.whl", hash = "sha256:fb5c8a4edb43bebb765959e420f9b3978d7f1af88c80606c03fb420888f5d1c7"},
+    {file = "strictyaml-1.7.3.tar.gz", hash = "sha256:22f854a5fcab42b5ddba8030a0e4be51ca89af0267961c8d6cfa86395586c407"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.6.0"
+
 [[package]]
 name = "sympy"
 version = "1.13.2"

@@ -9669,4 +10063,4 @@ cffi = ["cffi (>=1.11)"]
|
|||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = ">=3.10,<3.13"
|
python-versions = ">=3.10,<3.13"
|
||||||
content-hash = "04f970820de691f40fc9fb30f5ff0618b0f1a04d3315b14467fb88e475fa1243"
|
content-hash = "e4c00268514d26bd07c6b72925e0e3b4558ec972895d252e60e9571e3ac38895"
|
||||||
|
@@ -188,6 +188,8 @@ zhipuai = "1.0.7"
 # Related transparent dependencies with pinned verion
 # required by main implementations
 ############################################################
+azure-ai-ml = "^1.19.0"
+azure-ai-inference = "^1.0.0b3"
 volcengine-python-sdk = {extras = ["ark"], version = "^1.0.98"}
 [tool.poetry.group.indriect.dependencies]
 kaleido = "0.2.1"
@@ -241,7 +243,7 @@ optional = true
 
 [tool.poetry.group.dev.dependencies]
 coverage = "~7.2.4"
-pytest = "~8.1.1"
+pytest = "~8.3.2"
 pytest-benchmark = "~4.0.0"
 pytest-env = "~1.1.3"
 pytest-mock = "~3.14.0"
@@ -346,6 +346,8 @@ class AppDslService:
         app_model_config = AppModelConfig()
         app_model_config = app_model_config.from_model_config_dict(model_config_data)
         app_model_config.app_id = app.id
+        app_model_config.created_by = account.id
+        app_model_config.updated_by = account.id
 
         db.session.add(app_model_config)
         db.session.commit()
@@ -390,6 +392,8 @@ class AppDslService:
             icon_background=icon_background,
             enable_site=True,
             enable_api=True,
+            created_by=account.id,
+            updated_by=account.id,
         )
 
         db.session.add(app)
@@ -1,6 +1,8 @@
 from collections.abc import Generator
 from typing import Any, Union
 
+from openai._exceptions import RateLimitError
+
 from configs import dify_config
 from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
 from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator
@@ -11,6 +13,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom
 from core.app.features.rate_limiting import RateLimit
 from models.model import Account, App, AppMode, EndUser
 from models.workflow import Workflow
+from services.errors.llm import InvokeRateLimitError
 from services.workflow_service import WorkflowService
 
 
@@ -87,6 +90,8 @@ class AppGenerateService:
                 )
             else:
                 raise ValueError(f"Invalid app mode {app_model.mode}")
+        except RateLimitError as e:
+            raise InvokeRateLimitError(str(e))
         finally:
             if not streaming:
                 rate_limit.exit(request_id)
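The hunk above wraps generation so that the OpenAI SDK's RateLimitError surfaces as Dify's own InvokeRateLimitError, keeping controllers decoupled from the vendor exception type. A minimal sketch of that translation pattern, assuming a caller-supplied callable; the helper name is hypothetical, not code from this PR:

from collections.abc import Callable
from typing import Any

from openai._exceptions import RateLimitError

from services.errors.llm import InvokeRateLimitError


def call_with_rate_limit_translation(generate: Callable[[], Any]) -> Any:
    # Re-raise the SDK's rate-limit error as the service-level error,
    # so callers depend only on services.errors.llm.
    try:
        return generate()
    except RateLimitError as e:
        raise InvokeRateLimitError(str(e))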
@@ -127,6 +127,8 @@ class AppService:
         app.tenant_id = tenant_id
         app.api_rph = args.get("api_rph", 0)
         app.api_rpm = args.get("api_rpm", 0)
+        app.created_by = account.id
+        app.updated_by = account.id
 
         db.session.add(app)
         db.session.flush()
@@ -134,6 +136,8 @@ class AppService:
         if default_model_config:
             app_model_config = AppModelConfig(**default_model_config)
             app_model_config.app_id = app.id
+            app_model_config.created_by = account.id
+            app_model_config.updated_by = account.id
             db.session.add(app_model_config)
             db.session.flush()
 
@@ -217,6 +221,7 @@ class AppService:
         app.icon_type = args.get("icon_type", "emoji")
         app.icon = args.get("icon")
         app.icon_background = args.get("icon_background")
+        app.updated_by = current_user.id
         app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
@@ -233,6 +238,7 @@ class AppService:
         :return: App instance
         """
         app.name = name
+        app.updated_by = current_user.id
         app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
@@ -248,6 +254,7 @@ class AppService:
         """
         app.icon = icon
         app.icon_background = icon_background
+        app.updated_by = current_user.id
         app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
@@ -264,6 +271,7 @@ class AppService:
             return app
 
         app.enable_site = enable_site
+        app.updated_by = current_user.id
         app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
 
@@ -280,6 +288,7 @@ class AppService:
             return app
 
         app.enable_api = enable_api
+        app.updated_by = current_user.id
         app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()
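The AppService hunks above repeat one pattern: every create path stamps created_by, and every mutation stamps updated_by next to the existing updated_at refresh. A hypothetical helper (not part of this PR) that captures the same audit-stamp pattern in one place:

from datetime import datetime, timezone


def stamp_audit_fields(record, account_id: str, *, is_new: bool = False) -> None:
    # Mirror the pattern the hunks above apply by hand on App and
    # AppModelConfig rows: record who touched the row, and when.
    if is_new:
        record.created_by = account_id
    record.updated_by = account_id
    record.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)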
@@ -136,7 +136,9 @@ class DatasetService:
         return datasets.items, datasets.total
 
     @staticmethod
-    def create_empty_dataset(tenant_id: str, name: str, indexing_technique: Optional[str], account: Account):
+    def create_empty_dataset(
+        tenant_id: str, name: str, indexing_technique: Optional[str], account: Account, permission: Optional[str]
+    ):
         # check if dataset name already exists
         if Dataset.query.filter_by(name=name, tenant_id=tenant_id).first():
             raise DatasetNameDuplicateError(f"Dataset with name {name} already exists.")
@@ -153,6 +155,7 @@ class DatasetService:
         dataset.tenant_id = tenant_id
         dataset.embedding_model_provider = embedding_model.provider if embedding_model else None
         dataset.embedding_model = embedding_model.model if embedding_model else None
+        dataset.permission = permission if permission else DatasetPermissionEnum.ONLY_ME
         db.session.add(dataset)
         db.session.commit()
         return dataset
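With the widened signature, callers can pass a dataset permission and rely on the ONLY_ME fallback when they pass None. A hypothetical call site under the new signature; the tenant and account variables are placeholders, not values from this PR:

from services.dataset_service import DatasetService

dataset = DatasetService.create_empty_dataset(
    tenant_id=tenant_id,                # placeholder: current workspace id
    name="support-kb",                  # placeholder dataset name
    indexing_technique="high_quality",
    account=current_account,            # placeholder: the acting Account
    permission=None,                    # None falls back to DatasetPermissionEnum.ONLY_ME
)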
api/services/errors/llm.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from typing import Optional
+
+
+class InvokeError(Exception):
+    """Base class for all LLM exceptions."""
+
+    description: Optional[str] = None
+
+    def __init__(self, description: Optional[str] = None) -> None:
+        self.description = description
+
+    def __str__(self):
+        return self.description or self.__class__.__name__
+
+
+class InvokeRateLimitError(InvokeError):
+    """Raised when the Invoke returns rate limit error."""
+
+    description = "Rate Limit Error"
@@ -26,16 +26,15 @@ class OpsService:
         decrypt_tracing_config = OpsTraceManager.decrypt_tracing_config(
             tenant_id, tracing_provider, trace_config_data.tracing_config
         )
+        new_decrypt_tracing_config = OpsTraceManager.obfuscated_decrypt_token(tracing_provider, decrypt_tracing_config)
+
         if tracing_provider == "langfuse" and (
             "project_key" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_key")
         ):
             project_key = OpsTraceManager.get_trace_config_project_key(decrypt_tracing_config, tracing_provider)
-            decrypt_tracing_config["project_key"] = project_key
+            new_decrypt_tracing_config.update({"project_key": project_key})
 
-        decrypt_tracing_config = OpsTraceManager.obfuscated_decrypt_token(tracing_provider, decrypt_tracing_config)
-
-        trace_config_data.tracing_config = decrypt_tracing_config
+        trace_config_data.tracing_config = new_decrypt_tracing_config
         return trace_config_data.to_dict()
 
     @classmethod
@@ -79,7 +78,7 @@ class OpsService:
         # get tenant id
         tenant_id = db.session.query(App).filter(App.id == app_id).first().tenant_id
         tracing_config = OpsTraceManager.encrypt_tracing_config(tenant_id, tracing_provider, tracing_config)
-        if tracing_provider == "langfuse":
+        if tracing_provider == "langfuse" and project_key:
             tracing_config["project_key"] = project_key
         trace_config_data = TraceAppConfig(
             app_id=app_id,
@@ -74,6 +74,8 @@ class WorkflowConverter:
         new_app.api_rph = app_model.api_rph
         new_app.is_demo = False
         new_app.is_public = app_model.is_public
+        new_app.created_by = account.id
+        new_app.updated_by = account.id
         db.session.add(new_app)
         db.session.flush()
         db.session.commit()
@@ -0,0 +1,113 @@
+import os
+from collections.abc import Generator
+
+import pytest
+
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (
+    AssistantPromptMessage,
+    ImagePromptMessageContent,
+    PromptMessageTool,
+    SystemPromptMessage,
+    TextPromptMessageContent,
+    UserPromptMessage,
+)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.azure_ai_studio.llm.llm import AzureAIStudioLargeLanguageModel
+from tests.integration_tests.model_runtime.__mock.azure_ai_studio import setup_azure_ai_studio_mock
+
+
+@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True)
+def test_validate_credentials(setup_azure_ai_studio_mock):
+    model = AzureAIStudioLargeLanguageModel()
+
+    with pytest.raises(CredentialsValidateFailedError):
+        model.validate_credentials(
+            model="gpt-35-turbo",
+            credentials={"api_key": "invalid_key", "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")},
+        )
+
+    model.validate_credentials(
+        model="gpt-35-turbo",
+        credentials={
+            "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"),
+            "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"),
+        },
+    )
+
+
+@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True)
+def test_invoke_model(setup_azure_ai_studio_mock):
+    model = AzureAIStudioLargeLanguageModel()
+
+    result = model.invoke(
+        model="gpt-35-turbo",
+        credentials={
+            "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"),
+            "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"),
+        },
+        prompt_messages=[
+            SystemPromptMessage(
+                content="You are a helpful AI assistant.",
+            ),
+            UserPromptMessage(content="Hello World!"),
+        ],
+        model_parameters={"temperature": 0.0, "max_tokens": 100},
+        stream=False,
+        user="abc-123",
+    )
+
+    assert isinstance(result, LLMResult)
+    assert len(result.message.content) > 0
+
+
+@pytest.mark.parametrize("setup_azure_ai_studio_mock", [["chat"]], indirect=True)
+def test_invoke_stream_model(setup_azure_ai_studio_mock):
+    model = AzureAIStudioLargeLanguageModel()
+
+    result = model.invoke(
+        model="gpt-35-turbo",
+        credentials={
+            "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"),
+            "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"),
+        },
+        prompt_messages=[
+            SystemPromptMessage(
+                content="You are a helpful AI assistant.",
+            ),
+            UserPromptMessage(content="Hello World!"),
+        ],
+        model_parameters={"temperature": 0.0, "max_tokens": 100},
+        stream=True,
+        user="abc-123",
+    )
+
+    assert isinstance(result, Generator)
+
+    for chunk in result:
+        assert isinstance(chunk, LLMResultChunk)
+        assert isinstance(chunk.delta, LLMResultChunkDelta)
+        assert isinstance(chunk.delta.message, AssistantPromptMessage)
+        if chunk.delta.finish_reason is not None:
+            assert chunk.delta.usage is not None
+            assert chunk.delta.usage.completion_tokens > 0
+
+
+def test_get_num_tokens():
+    model = AzureAIStudioLargeLanguageModel()
+
+    num_tokens = model.get_num_tokens(
+        model="gpt-35-turbo",
+        credentials={
+            "api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"),
+            "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"),
+        },
+        prompt_messages=[
+            SystemPromptMessage(
+                content="You are a helpful AI assistant.",
+            ),
+            UserPromptMessage(content="Hello World!"),
+        ],
+    )
+
+    assert num_tokens == 21
@@ -0,0 +1,17 @@
+import os
+
+import pytest
+
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.azure_ai_studio.azure_ai_studio import AzureAIStudioProvider
+
+
+def test_validate_provider_credentials():
+    provider = AzureAIStudioProvider()
+
+    with pytest.raises(CredentialsValidateFailedError):
+        provider.validate_provider_credentials(credentials={})
+
+    provider.validate_provider_credentials(
+        credentials={"api_key": os.getenv("AZURE_AI_STUDIO_API_KEY"), "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")}
+    )
@@ -0,0 +1,50 @@
+import os
+
+import pytest
+
+from core.model_runtime.entities.rerank_entities import RerankResult
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.azure_ai_studio.rerank.rerank import AzureAIStudioRerankModel
+
+
+def test_validate_credentials():
+    model = AzureAIStudioRerankModel()
+
+    with pytest.raises(CredentialsValidateFailedError):
+        model.validate_credentials(
+            model="azure-ai-studio-rerank-v1",
+            credentials={"api_key": "invalid_key", "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE")},
+            query="What is the capital of the United States?",
+            docs=[
+                "Carson City is the capital city of the American state of Nevada. At the 2010 United States "
+                "Census, Carson City had a population of 55,274.",
+                "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that "
+                "are a political division controlled by the United States. Its capital is Saipan.",
+            ],
+            score_threshold=0.8,
+        )
+
+
+def test_invoke_model():
+    model = AzureAIStudioRerankModel()
+
+    result = model.invoke(
+        model="azure-ai-studio-rerank-v1",
+        credentials={
+            "api_key": os.getenv("AZURE_AI_STUDIO_JWT_TOKEN"),
+            "api_base": os.getenv("AZURE_AI_STUDIO_API_BASE"),
+        },
+        query="What is the capital of the United States?",
+        docs=[
+            "Carson City is the capital city of the American state of Nevada. At the 2010 United States "
+            "Census, Carson City had a population of 55,274.",
+            "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that "
+            "are a political division controlled by the United States. Its capital is Saipan.",
+        ],
+        score_threshold=0.8,
+    )
+
+    assert isinstance(result, RerankResult)
+    assert len(result.docs) == 1
+    assert result.docs[0].index == 1
+    assert result.docs[0].score >= 0.8
@@ -3,6 +3,7 @@ from textwrap import dedent
 
 import pytest
 from flask import Flask
+from yarl import URL
 
 from configs.app_config import DifyConfig
 
@@ -84,3 +85,6 @@ def test_flask_configs(example_env_file):
     assert config["CONSOLE_WEB_URL"] == "https://example.com"
     assert config["CONSOLE_CORS_ALLOW_ORIGINS"] == ["https://example.com"]
     assert config["WEB_API_CORS_ALLOW_ORIGINS"] == ["*"]
+
+    assert str(config["CODE_EXECUTION_ENDPOINT"]) == "http://sandbox:8194/"
+    assert str(URL(str(config["CODE_EXECUTION_ENDPOINT"])) / "v1") == "http://sandbox:8194/v1"
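The two new assertions pin down how the pydantic HttpUrl field type (now used for CODE_EXECUTION_ENDPOINT) interacts with yarl path joining: HttpUrl normalizes the bare authority to a trailing-slash URL, and yarl's / operator appends a segment without doubling the slash. A standalone sketch of the same behavior, independent of DifyConfig:

from pydantic import HttpUrl, TypeAdapter
from yarl import URL

endpoint = TypeAdapter(HttpUrl).validate_python("http://sandbox:8194")
assert str(endpoint) == "http://sandbox:8194/"  # HttpUrl adds the trailing slash
assert str(URL(str(endpoint)) / "v1") == "http://sandbox:8194/v1"  # yarl joins cleanly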
@@ -2,7 +2,7 @@ version: '3'
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.7.1
+    image: langgenius/dify-api:0.7.2
     restart: always
     environment:
       # Startup mode, 'api' starts the API server.
@@ -229,7 +229,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.7.1
+    image: langgenius/dify-api:0.7.2
     restart: always
     environment:
       CONSOLE_WEB_URL: ''
@@ -400,7 +400,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.7.1
+    image: langgenius/dify-web:0.7.2
     restart: always
     environment:
       # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
@@ -190,7 +190,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.7.1
+    image: langgenius/dify-api:0.7.2
     restart: always
     environment:
       # Use the shared environment variables.
@@ -210,7 +210,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.7.1
+    image: langgenius/dify-api:0.7.2
     restart: always
     environment:
       # Use the shared environment variables.
@@ -229,7 +229,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.7.1
+    image: langgenius/dify-web:0.7.2
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -4,7 +4,7 @@ import dayjs from 'dayjs'
 import quarterOfYear from 'dayjs/plugin/quarterOfYear'
 import { useTranslation } from 'react-i18next'
 import type { PeriodParams } from '@/app/components/app/overview/appChart'
-import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/appChart'
+import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, MessagesChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/appChart'
 import type { Item } from '@/app/components/base/select'
 import { SimpleSelect } from '@/app/components/base/select'
 import { TIME_PERIOD_LIST } from '@/app/components/app/log/filter'
@@ -79,6 +79,11 @@ export default function ChartView({ appId }: IChartViewProps) {
           <CostChart period={period} id={appId} />
         </div>
       )}
+      {!isWorkflow && isChatApp && (
+        <div className='grid gap-6 grid-cols-1 xl:grid-cols-2 w-full mb-6'>
+          <MessagesChart period={period} id={appId} />
+        </div>
+      )}
       {isWorkflow && (
         <div className='grid gap-6 grid-cols-1 xl:grid-cols-2 w-full mb-6'>
           <WorkflowMessagesChart period={period} id={appId} />
@@ -280,7 +280,7 @@ const Annotation: FC<Props> = ({
         onSave={async (embeddingModel, score) => {
           if (
             embeddingModel.embedding_model_name !== annotationConfig?.embedding_model?.embedding_model_name
-            && embeddingModel.embedding_provider_name !== annotationConfig?.embedding_model?.embedding_provider_name
+            || embeddingModel.embedding_provider_name !== annotationConfig?.embedding_model?.embedding_provider_name
           ) {
             const { job_id: jobId }: any = await updateAnnotationStatus(appDetail.id, AnnotationEnableStatus.enable, embeddingModel, score)
             await ensureJobCompleted(jobId, AnnotationEnableStatus.enable)
@@ -98,7 +98,7 @@ const AnnotationReplyConfig: FC<Props> = ({
     let isEmbeddingModelChanged = false
     if (
       embeddingModel.embedding_model_name !== annotationConfig.embedding_model.embedding_model_name
-      && embeddingModel.embedding_provider_name !== annotationConfig.embedding_model.embedding_provider_name
+      || embeddingModel.embedding_provider_name !== annotationConfig.embedding_model.embedding_provider_name
     ) {
       await onEmbeddingChange(embeddingModel)
       isEmbeddingModelChanged = true
@@ -32,7 +32,7 @@ const Toolbox: FC<ToolboxProps> = ({
       )
     }
     {
-      (showAnnotation || true) && (
+      showAnnotation && (
         <Annotation
           onEmbeddingChange={onEmbeddingChange}
           onScoreChange={onScoreChange}
@@ -1,6 +1,6 @@
 'use client'
 import type { FC } from 'react'
-import React from 'react'
+import React, { useMemo } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useRouter } from 'next/navigation'
 import cn from '@/utils/classnames'
@@ -23,10 +23,14 @@ const LogAnnotation: FC<Props> = ({
   const router = useRouter()
   const appDetail = useAppStore(state => state.appDetail)
 
-  const options = [
+  const options = useMemo(() => {
+    if (appDetail?.mode === 'completion')
+      return [{ value: PageType.log, text: t('appLog.title') }]
+    return [
     { value: PageType.log, text: t('appLog.title') },
     { value: PageType.annotation, text: t('appAnnotation.title') },
   ]
+  }, [appDetail])
 
   if (!appDetail) {
     return (
@@ -678,7 +678,7 @@ const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh }) => {
         </thead>
         <tbody className="text-gray-500">
           {logs.data.map((log: any) => {
-            const endUser = log.from_end_user_session_id
+            const endUser = log.from_end_user_session_id || log.from_account_name
             const leftValue = get(log, isChatMode ? 'name' : 'message.inputs.query') || (!isChatMode ? (get(log, 'message.query') || get(log, 'message.inputs.default_input')) : '') || ''
             const rightValue = get(log, isChatMode ? 'message_count' : 'message.answer')
             return <tr
@@ -10,8 +10,8 @@ import { useTranslation } from 'react-i18next'
 import { formatNumber } from '@/utils/format'
 import Basic from '@/app/components/app-sidebar/basic'
 import Loading from '@/app/components/base/loading'
-import type { AppDailyConversationsResponse, AppDailyEndUsersResponse, AppTokenCostsResponse } from '@/models/app'
+import type { AppDailyConversationsResponse, AppDailyEndUsersResponse, AppDailyMessagesResponse, AppTokenCostsResponse } from '@/models/app'
-import { getAppDailyConversations, getAppDailyEndUsers, getAppStatistics, getAppTokenCosts, getWorkflowDailyConversations } from '@/service/apps'
+import { getAppDailyConversations, getAppDailyEndUsers, getAppDailyMessages, getAppStatistics, getAppTokenCosts, getWorkflowDailyConversations } from '@/service/apps'
 const valueFormatter = (v: string | number) => v
 
 const COLOR_TYPE_MAP = {
@@ -36,12 +36,15 @@ const COMMON_COLOR_MAP = {
 }
 
 type IColorType = 'green' | 'orange' | 'blue'
-type IChartType = 'conversations' | 'endUsers' | 'costs' | 'workflowCosts'
+type IChartType = 'messages' | 'conversations' | 'endUsers' | 'costs' | 'workflowCosts'
 type IChartConfigType = { colorType: IColorType; showTokens?: boolean }
 
 const commonDateFormat = 'MMM D, YYYY'
 
 const CHART_TYPE_CONFIG: Record<string, IChartConfigType> = {
+  messages: {
+    colorType: 'green',
+  },
   conversations: {
     colorType: 'green',
   },
@@ -89,7 +92,7 @@ export type IChartProps = {
   unit?: string
   yMax?: number
   chartType: IChartType
-  chartData: AppDailyConversationsResponse | AppDailyEndUsersResponse | AppTokenCostsResponse | { data: Array<{ date: string; count: number }> }
+  chartData: AppDailyMessagesResponse | AppDailyConversationsResponse | AppDailyEndUsersResponse | AppTokenCostsResponse | { data: Array<{ date: string; count: number }> }
 }
 
 const Chart: React.FC<IChartProps> = ({
@@ -258,6 +261,20 @@ const getDefaultChartData = ({ start, end, key = 'count' }: { start: string; end
   })
 }
 
+export const MessagesChart: FC<IBizChartProps> = ({ id, period }) => {
+  const { t } = useTranslation()
+  const { data: response } = useSWR({ url: `/apps/${id}/statistics/daily-messages`, params: period.query }, getAppDailyMessages)
+  if (!response)
+    return <Loading />
+  const noDataFlag = !response.data || response.data.length === 0
+  return <Chart
+    basicInfo={{ title: t('appOverview.analysis.totalMessages.title'), explanation: t('appOverview.analysis.totalMessages.explanation'), timePeriod: period.name }}
+    chartData={!noDataFlag ? response : { data: getDefaultChartData(period.query ?? defaultPeriod) }}
+    chartType='messages'
+    {...(noDataFlag && { yMax: 500 })}
+  />
+}
+
 export const ConversationsChart: FC<IBizChartProps> = ({ id, period }) => {
   const { t } = useTranslation()
   const { data: response } = useSWR({ url: `/apps/${id}/statistics/daily-conversations`, params: period.query }, getAppDailyConversations)
@@ -265,7 +282,7 @@ export const ConversationsChart: FC<IBizChartProps> = ({ id, period }) => {
     return <Loading />
   const noDataFlag = !response.data || response.data.length === 0
   return <Chart
-    basicInfo={{ title: t('appOverview.analysis.totalMessages.title'), explanation: t('appOverview.analysis.totalMessages.explanation'), timePeriod: period.name }}
+    basicInfo={{ title: t('appOverview.analysis.totalConversations.title'), explanation: t('appOverview.analysis.totalConversations.explanation'), timePeriod: period.name }}
     chartData={!noDataFlag ? response : { data: getDefaultChartData(period.query ?? defaultPeriod) }}
     chartType='conversations'
     {...(noDataFlag && { yMax: 500 })}
@@ -91,7 +91,7 @@ const WorkflowAppLogList: FC<ILogs> = ({ logs, appDetail, onRefresh }) => {
         </thead>
         <tbody className="text-gray-700 text-[13px]">
           {logs.data.map((log: WorkflowAppLogDetail) => {
-            const endUser = log.created_by_end_user ? log.created_by_end_user.session_id : defaultValue
+            const endUser = log.created_by_end_user ? log.created_by_end_user.session_id : log.created_by_account ? log.created_by_account.name : defaultValue
             return <tr
               key={log.id}
               className={`border-b border-gray-200 h-8 hover:bg-gray-50 cursor-pointer ${currentLog?.id !== log.id ? '' : 'bg-gray-50'}`}
Some files were not shown because too many files have changed in this diff.