fix: enable_marketplace always true.

Garfield Dai 2025-04-09 15:34:40 +08:00
parent c0358d8d0c
commit fe61815b7b
11 changed files with 1649 additions and 2319 deletions

View File

@@ -5,6 +5,7 @@ on:
     branches:
       - "main"
       - "deploy/dev"
+      - "1.0.0-fix"
   release:
     types: [published]

View File

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum


-class IndexType(str, Enum):
+class IndexType(StrEnum):
     PARAGRAPH_INDEX = "text_model"
     QA_INDEX = "qa_model"
     PARENT_CHILD_INDEX = "hierarchical_model"
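
Note: the Enum → StrEnum swap is mechanical but not purely cosmetic. A minimal sketch (not part of this commit) of the difference on Python 3.11+, where StrEnum was introduced:

    from enum import Enum, StrEnum

    class OldIndexType(str, Enum):  # pre-3.11 idiom
        PARAGRAPH_INDEX = "text_model"

    class IndexType(StrEnum):  # 3.11+ replacement
        PARAGRAPH_INDEX = "text_model"

    # Both compare equal to the raw string value:
    assert IndexType.PARAGRAPH_INDEX == "text_model"
    assert OldIndexType.PARAGRAPH_INDEX == "text_model"

    # But str() of the old mixin yields the member name, while StrEnum
    # yields the bare value -- what serializers and configs usually expect:
    print(str(OldIndexType.PARAGRAPH_INDEX))  # "OldIndexType.PARAGRAPH_INDEX"
    print(str(IndexType.PARAGRAPH_INDEX))     # "text_model"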

api/poetry.lock (generated, 3927 lines changed)

File diff suppressed because it is too large

View File

@@ -905,7 +905,7 @@ class DocumentService:
             ).first()
             if document:
                 document.dataset_process_rule_id = dataset_process_rule.id  # type: ignore
-                document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 document.created_from = created_from
                 document.doc_form = knowledge_config.doc_form
                 document.doc_language = knowledge_config.doc_language
@@ -1872,7 +1872,7 @@ class SegmentService:
                 if cache_result is not None:
                     continue
                 segment.enabled = False
-                segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment.disabled_by = current_user.id
                 db.session.add(segment)
                 real_deal_segmment_ids.append(segment.id)
@@ -1964,7 +1964,7 @@ class SegmentService:
                         child_chunk.content = child_chunk_update_args.content
                         child_chunk.word_count = len(child_chunk.content)
                         child_chunk.updated_by = current_user.id
-                        child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                        child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                         child_chunk.type = "customized"
                         update_child_chunks.append(child_chunk)
                     else:
@@ -2021,7 +2021,7 @@ class SegmentService:
             child_chunk.content = content
             child_chunk.word_count = len(content)
             child_chunk.updated_by = current_user.id
-            child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             child_chunk.type = "customized"
             db.session.add(child_chunk)
             VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)
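
The datetime.timezone.utc → datetime.UTC rewrites repeated throughout this commit are behavior-preserving: datetime.UTC, added in Python 3.11, is an alias for datetime.timezone.utc. A quick sketch (not from the diff) of the pattern every touched call site uses:

    import datetime

    # The two spellings name the same object on Python 3.11+:
    assert datetime.UTC is datetime.timezone.utc

    # Take the current UTC time, then strip tzinfo so the value can be
    # stored in a naive-datetime column while still representing UTC:
    naive_utc_now = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    print(naive_utc_now.tzinfo)  # None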

View File

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Literal, Optional

 from pydantic import BaseModel
@@ -11,7 +11,7 @@ class SegmentUpdateEntity(BaseModel):
     enabled: Optional[bool] = None


-class ParentMode(str, Enum):
+class ParentMode(StrEnum):
     FULL_DOC = "full-doc"
     PARAGRAPH = "paragraph"

View File

@@ -89,12 +89,10 @@ class FeatureService:
         if dify_config.ENTERPRISE_ENABLED:
             system_features.enable_web_sso_switch_component = True
-            system_features.enable_marketplace = False

             cls._fulfill_params_from_enterprise(system_features)

-        if dify_config.MARKETPLACE_ENABLED:
-            system_features.enable_marketplace = True
-
         return system_features

     @classmethod
@@ -105,6 +103,7 @@ class FeatureService:
         system_features.is_allow_register = dify_config.ALLOW_REGISTER
         system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
         system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
+        system_features.enable_marketplace = dify_config.MARKETPLACE_ENABLED

     @classmethod
     def _fulfill_params_from_env(cls, features: FeatureModel):
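
This is the hunk the commit title refers to: the old code set enable_marketplace = False in the enterprise branch, then a later check flipped it back to True whenever MARKETPLACE_ENABLED was set, so the flag was effectively always true. The fix assigns the flag once, straight from config. A stripped-down sketch of the before/after control flow (plain dicts and module constants stand in for the real FeatureService plumbing; the True default for MARKETPLACE_ENABLED is an assumption made to reproduce the symptom):

    ENTERPRISE_ENABLED = True
    MARKETPLACE_ENABLED = True  # assumed default; this is what made the ordering a bug

    def features_before() -> dict:
        features = {"enable_marketplace": True}
        if ENTERPRISE_ENABLED:
            features["enable_marketplace"] = False
        if MARKETPLACE_ENABLED:  # ran after, silently clobbered the False
            features["enable_marketplace"] = True
        return features

    def features_after() -> dict:
        # Single assignment from config; no later branch can flip it back.
        return {"enable_marketplace": MARKETPLACE_ENABLED}

    assert features_before()["enable_marketplace"] is True  # the reported bug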

View File

@@ -51,7 +51,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
             return
@@ -80,7 +80,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
         db.session.commit()

View File

@@ -97,7 +97,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
                 {
                     "error": str(e),
                     "status": "error",
-                    "disabled_at": datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     "enabled": False,
                 }
             )

View File

@@ -48,7 +48,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
             redis_client.delete(retry_indexing_cache_key)
@@ -76,7 +76,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
@@ -86,7 +86,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         except Exception as ex:
             document.indexing_status = "error"
             document.error = str(ex)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))

View File

@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         redis_client.delete(sync_indexing_cache_key)
@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         db.session.commit()

         document.indexing_status = "parsing"
-        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()
@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     except Exception as ex:
         document.indexing_status = "error"
         document.error = str(ex)
-        document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()
         logging.info(click.style(str(ex), fg="yellow"))

View File

@@ -426,6 +426,7 @@ services:
       SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
       PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
       INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
+      MARKETPLACE_ENABLED: false
     depends_on:
       - db
       - redis
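
With FeatureService now reading the flag straight from dify_config.MARKETPLACE_ENABLED, this compose entry is what actually turns the marketplace off for this deployment. A hedged sketch of how such an env var typically reaches the config object via pydantic-settings (assumed machinery; the real Dify config class may be laid out differently):

    import os

    from pydantic_settings import BaseSettings

    class MarketplaceConfig(BaseSettings):
        MARKETPLACE_ENABLED: bool = True  # assumed opt-out default

    os.environ["MARKETPLACE_ENABLED"] = "false"  # what the compose file sets
    print(MarketplaceConfig().MARKETPLACE_ENABLED)  # False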