Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-18 00:45:53 +08:00)
fix: enable_marketplace always true.

commit fe61815b7b
parent c0358d8d0c
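In short: enable_marketplace previously ended up true regardless of configuration. This commit makes it follow the MARKETPLACE_ENABLED setting and forces it off when ENTERPRISE_ENABLED is set. Along the way it adds the "1.0.0-fix" branch to the build-push workflow, migrates several (str, Enum) classes to StrEnum, swaps datetime.timezone.utc for datetime.UTC, regenerates api/poetry.lock, and defaults MARKETPLACE_ENABLED to false in the Docker Compose file.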
.github/workflows/build-push.yml (vendored, 1 change)
@@ -5,6 +5,7 @@ on:
     branches:
       - "main"
       - "deploy/dev"
+      - "1.0.0-fix"
   release:
     types: [published]
@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum


-class IndexType(str, Enum):
+class IndexType(StrEnum):
     PARAGRAPH_INDEX = "text_model"
     QA_INDEX = "qa_model"
     PARENT_CHILD_INDEX = "hierarchical_model"
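The (str, Enum) to StrEnum migration is not purely cosmetic on Python 3.11+, where formatting of mixed-in enums changed to include the class name. A minimal sketch of the difference (assuming Python 3.11+; OldIndexType is a hypothetical name for the pre-change pattern, not from this commit):

    from enum import Enum, StrEnum  # StrEnum requires Python 3.11+

    class OldIndexType(str, Enum):      # the pre-change pattern
        PARAGRAPH_INDEX = "text_model"

    class IndexType(StrEnum):           # the post-change pattern
        PARAGRAPH_INDEX = "text_model"

    # Both compare equal to the raw string value:
    assert OldIndexType.PARAGRAPH_INDEX == "text_model"
    assert IndexType.PARAGRAPH_INDEX == "text_model"

    # But on 3.11+ only the StrEnum member formats as its plain value:
    print(f"{OldIndexType.PARAGRAPH_INDEX}")  # OldIndexType.PARAGRAPH_INDEX
    print(f"{IndexType.PARAGRAPH_INDEX}")     # text_model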
api/poetry.lock (generated, 3927 changes)
File diff suppressed because it is too large.
@@ -905,7 +905,7 @@ class DocumentService:
             ).first()
             if document:
                 document.dataset_process_rule_id = dataset_process_rule.id  # type: ignore
-                document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 document.created_from = created_from
                 document.doc_form = knowledge_config.doc_form
                 document.doc_language = knowledge_config.doc_language
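The datetime change repeated throughout this commit is a pure alias swap: datetime.UTC was added in Python 3.11 as shorthand for datetime.timezone.utc. A minimal sketch of the pattern being touched:

    import datetime

    # datetime.UTC (Python 3.11+) is the same object as datetime.timezone.utc
    assert datetime.UTC is datetime.timezone.utc

    # the pattern in these hunks: take the current UTC time, then store it naive
    updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    assert updated_at.tzinfo is None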
@@ -1872,7 +1872,7 @@ class SegmentService:
             if cache_result is not None:
                 continue
             segment.enabled = False
-            segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             segment.disabled_by = current_user.id
             db.session.add(segment)
             real_deal_segmment_ids.append(segment.id)
@@ -1964,7 +1964,7 @@ class SegmentService:
                     child_chunk.content = child_chunk_update_args.content
                     child_chunk.word_count = len(child_chunk.content)
                     child_chunk.updated_by = current_user.id
-                    child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                    child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                     child_chunk.type = "customized"
                     update_child_chunks.append(child_chunk)
                 else:
@@ -2021,7 +2021,7 @@ class SegmentService:
             child_chunk.content = content
             child_chunk.word_count = len(content)
             child_chunk.updated_by = current_user.id
-            child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             child_chunk.type = "customized"
             db.session.add(child_chunk)
             VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Literal, Optional

 from pydantic import BaseModel
@@ -11,7 +11,7 @@ class SegmentUpdateEntity(BaseModel):
     enabled: Optional[bool] = None


-class ParentMode(str, Enum):
+class ParentMode(StrEnum):
     FULL_DOC = "full-doc"
     PARAGRAPH = "paragraph"

@@ -89,12 +89,10 @@ class FeatureService:

         if dify_config.ENTERPRISE_ENABLED:
             system_features.enable_web_sso_switch_component = True
+            system_features.enable_marketplace = False

             cls._fulfill_params_from_enterprise(system_features)

-        if dify_config.MARKETPLACE_ENABLED:
-            system_features.enable_marketplace = True
-
         return system_features

     @classmethod
@@ -105,6 +103,7 @@ class FeatureService:
         system_features.is_allow_register = dify_config.ALLOW_REGISTER
         system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
         system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
+        system_features.enable_marketplace = dify_config.MARKETPLACE_ENABLED

     @classmethod
     def _fulfill_params_from_env(cls, features: FeatureModel):
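Based only on the two FeatureService hunks above, the marketplace flag now has an env-driven default plus an enterprise override. A minimal, hypothetical sketch of that precedence (names simplified; this is not the actual FeatureService code, and it assumes the env-driven fulfillment runs before the enterprise branch, as the hunks suggest):

    from dataclasses import dataclass

    @dataclass
    class SystemFeatures:
        enable_marketplace: bool = False

    def get_system_features(marketplace_enabled: bool, enterprise_enabled: bool) -> SystemFeatures:
        features = SystemFeatures()
        # env-driven default, as added at @@ -105,6 +103,7 @@
        features.enable_marketplace = marketplace_enabled
        if enterprise_enabled:
            # enterprise deployments force the marketplace off, as added at @@ -89,12 +89,10 @@
            features.enable_marketplace = False
        return features

    assert get_system_features(True, False).enable_marketplace is True
    assert get_system_features(True, True).enable_marketplace is False
    assert get_system_features(False, False).enable_marketplace is False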
@@ -51,7 +51,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
             return
@@ -80,7 +80,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
                 db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
     db.session.commit()
@@ -97,7 +97,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
                 {
                     "error": str(e),
                     "status": "error",
-                    "disabled_at": datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     "enabled": False,
                 }
             )
@@ -48,7 +48,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
             redis_client.delete(retry_indexing_cache_key)
@@ -76,7 +76,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()

@@ -86,7 +86,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         except Exception as ex:
             document.indexing_status = "error"
             document.error = str(ex)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))
@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         redis_client.delete(sync_indexing_cache_key)
@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         db.session.commit()

         document.indexing_status = "parsing"
-        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()

@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     except Exception as ex:
         document.indexing_status = "error"
         document.error = str(ex)
-        document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()
         logging.info(click.style(str(ex), fg="yellow"))
@@ -426,6 +426,7 @@ services:
       SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
       PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
       INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
+      MARKETPLACE_ENABLED: false
     depends_on:
       - db
       - redis
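With the Compose change above, the api container now defaults MARKETPLACE_ENABLED to false, and the FeatureService hunk earlier reads that setting via dify_config.MARKETPLACE_ENABLED. A minimal, hypothetical sketch of how such a boolean env toggle is typically parsed (dify actually uses pydantic-settings for its config; the helper below is illustrative only):

    import os

    def env_flag(name: str, default: bool = False) -> bool:
        # hypothetical helper: interpret common truthy spellings of an env var
        raw = os.getenv(name)
        if raw is None:
            return default
        return raw.strip().lower() in ("1", "true", "yes", "on")

    marketplace_enabled = env_flag("MARKETPLACE_ENABLED")  # False under the new compose default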
|
Loading…
x
Reference in New Issue
Block a user