feat: llm text stream support for workflow app (#3798)

Co-authored-by: JzoNg <jzongcode@gmail.com>
Authored by takatost on 2024-04-28 17:37:00 +08:00, committed by GitHub
parent 8e4989ed03
commit ff67a6d338
27 changed files with 549 additions and 58 deletions

View File

@@ -28,9 +28,9 @@ from core.app.entities.task_entities import (
     AdvancedChatTaskState,
     ChatbotAppBlockingResponse,
     ChatbotAppStreamResponse,
+    ChatflowStreamGenerateRoute,
     ErrorStreamResponse,
     MessageEndStreamResponse,
-    StreamGenerateRoute,
     StreamResponse,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
@@ -343,7 +343,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
             **extras
         )

-    def _get_stream_generate_routes(self) -> dict[str, StreamGenerateRoute]:
+    def _get_stream_generate_routes(self) -> dict[str, ChatflowStreamGenerateRoute]:
         """
         Get stream generate routes.
         :return:
@@ -366,7 +366,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                 continue

            for start_node_id in start_node_ids:
-                stream_generate_routes[start_node_id] = StreamGenerateRoute(
+                stream_generate_routes[start_node_id] = ChatflowStreamGenerateRoute(
                     answer_node_id=answer_node_id,
                     generate_route=generate_route
                 )
@@ -430,15 +430,14 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
             for route_chunk in route_chunks:
                 if route_chunk.type == 'text':
                     route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                    for token in route_chunk.text:
-                        # handle output moderation chunk
-                        should_direct_answer = self._handle_output_moderation_chunk(token)
-                        if should_direct_answer:
-                            continue
-
-                        self._task_state.answer += token
-                        yield self._message_to_stream_response(token, self._message.id)
-                        time.sleep(0.01)
+
+                    # handle output moderation chunk
+                    should_direct_answer = self._handle_output_moderation_chunk(route_chunk.text)
+                    if should_direct_answer:
+                        continue
+
+                    self._task_state.answer += route_chunk.text
+                    yield self._message_to_stream_response(route_chunk.text, self._message.id)
                 else:
                     break
@@ -463,10 +462,8 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
             for route_chunk in route_chunks:
                 if route_chunk.type == 'text':
                     route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                    for token in route_chunk.text:
-                        self._task_state.answer += token
-                        yield self._message_to_stream_response(token, self._message.id)
-                        time.sleep(0.01)
+                    self._task_state.answer += route_chunk.text
+                    yield self._message_to_stream_response(route_chunk.text, self._message.id)
                 else:
                     route_chunk = cast(VarGenerateRouteChunk, route_chunk)
                     value_selector = route_chunk.value_selector
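
Note on the two hunks above: the answer stream previously emitted one stream response per character of the chunk text, throttled with time.sleep(0.01); it now forwards the whole chunk text as a single response. A minimal sketch of the difference (the emit callback is a stand-in for self._message_to_stream_response and is not part of this commit):

    import time

    def stream_per_character(chunk_text: str, emit) -> None:
        # previous behaviour: one event per character, throttled by a short sleep
        for token in chunk_text:
            emit(token)
            time.sleep(0.01)

    def stream_per_chunk(chunk_text: str, emit) -> None:
        # new behaviour: the chunk text is forwarded as one event
        emit(chunk_text)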

View File

@@ -28,11 +28,13 @@ from core.app.entities.task_entities import (
     WorkflowAppBlockingResponse,
     WorkflowAppStreamResponse,
     WorkflowFinishStreamResponse,
+    WorkflowStreamGenerateNodes,
     WorkflowTaskState,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
-from core.workflow.entities.node_entities import SystemVariable
+from core.workflow.entities.node_entities import NodeType, SystemVariable
+from core.workflow.nodes.end.end_node import EndNode
 from extensions.ext_database import db
 from models.account import Account
 from models.model import EndUser
@@ -40,6 +42,7 @@ from models.workflow import (
     Workflow,
     WorkflowAppLog,
     WorkflowAppLogCreatedFrom,
+    WorkflowNodeExecution,
     WorkflowRun,
 )
@@ -83,6 +86,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
         }

         self._task_state = WorkflowTaskState()
+        self._stream_generate_nodes = self._get_stream_generate_nodes()

     def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
         """
@@ -167,6 +171,14 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 )
             elif isinstance(event, QueueNodeStartedEvent):
                 workflow_node_execution = self._handle_node_start(event)
+
+                # search stream_generate_routes if node id is answer start at node
+                if not self._task_state.current_stream_generate_state and event.node_id in self._stream_generate_nodes:
+                    self._task_state.current_stream_generate_state = self._stream_generate_nodes[event.node_id]
+
+                    # generate stream outputs when node started
+                    yield from self._generate_stream_outputs_when_node_started()
+
                 yield self._workflow_node_start_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
@@ -174,6 +186,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 )
             elif isinstance(event, QueueNodeSucceededEvent | QueueNodeFailedEvent):
                 workflow_node_execution = self._handle_node_finished(event)
+
                 yield self._workflow_node_finish_to_stream_response(
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution
@@ -193,6 +206,11 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 if delta_text is None:
                     continue

+                if not self._is_stream_out_support(
+                    event=event
+                ):
+                    continue
+
                 self._task_state.answer += delta_text
                 yield self._text_chunk_to_stream_response(delta_text)
             elif isinstance(event, QueueMessageReplaceEvent):
@@ -254,3 +272,140 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
             task_id=self._application_generate_entity.task_id,
             text=TextReplaceStreamResponse.Data(text=text)
         )
+
+    def _get_stream_generate_nodes(self) -> dict[str, WorkflowStreamGenerateNodes]:
+        """
+        Get stream generate nodes.
+        :return:
+        """
+        # find all answer nodes
+        graph = self._workflow.graph_dict
+        end_node_configs = [
+            node for node in graph['nodes']
+            if node.get('data', {}).get('type') == NodeType.END.value
+        ]
+
+        # parse stream output node value selectors of end nodes
+        stream_generate_routes = {}
+        for node_config in end_node_configs:
+            # get generate route for stream output
+            end_node_id = node_config['id']
+            generate_nodes = EndNode.extract_generate_nodes(graph, node_config)
+            start_node_ids = self._get_end_start_at_node_ids(graph, end_node_id)
+            if not start_node_ids:
+                continue
+
+            for start_node_id in start_node_ids:
+                stream_generate_routes[start_node_id] = WorkflowStreamGenerateNodes(
+                    end_node_id=end_node_id,
+                    stream_node_ids=generate_nodes
+                )
+
+        return stream_generate_routes
+
+    def _get_end_start_at_node_ids(self, graph: dict, target_node_id: str) \
+            -> list[str]:
+        """
+        Get end start at node id.
+        :param graph: graph
+        :param target_node_id: target node ID
+        :return:
+        """
+        nodes = graph.get('nodes')
+        edges = graph.get('edges')
+
+        # fetch all ingoing edges from source node
+        ingoing_edges = []
+        for edge in edges:
+            if edge.get('target') == target_node_id:
+                ingoing_edges.append(edge)
+
+        if not ingoing_edges:
+            return []
+
+        start_node_ids = []
+        for ingoing_edge in ingoing_edges:
+            source_node_id = ingoing_edge.get('source')
+            source_node = next((node for node in nodes if node.get('id') == source_node_id), None)
+            if not source_node:
+                continue
+
+            node_type = source_node.get('data', {}).get('type')
+            if node_type in [
+                NodeType.IF_ELSE.value,
+                NodeType.QUESTION_CLASSIFIER.value
+            ]:
+                start_node_id = target_node_id
+                start_node_ids.append(start_node_id)
+            elif node_type == NodeType.START.value:
+                start_node_id = source_node_id
+                start_node_ids.append(start_node_id)
+            else:
+                sub_start_node_ids = self._get_end_start_at_node_ids(graph, source_node_id)
+                if sub_start_node_ids:
+                    start_node_ids.extend(sub_start_node_ids)
+
+        return start_node_ids
+
+    def _generate_stream_outputs_when_node_started(self) -> Generator:
+        """
+        Generate stream outputs.
+        :return:
+        """
+        if self._task_state.current_stream_generate_state:
+            stream_node_ids = self._task_state.current_stream_generate_state.stream_node_ids
+
+            for node_id, node_execution_info in self._task_state.ran_node_execution_infos.items():
+                if node_id not in stream_node_ids:
+                    continue
+
+                node_execution_info = self._task_state.ran_node_execution_infos[node_id]
+
+                # get chunk node execution
+                route_chunk_node_execution = db.session.query(WorkflowNodeExecution).filter(
+                    WorkflowNodeExecution.id == node_execution_info.workflow_node_execution_id).first()
+
+                if not route_chunk_node_execution:
+                    continue
+
+                outputs = route_chunk_node_execution.outputs_dict
+
+                if not outputs:
+                    continue
+
+                # get value from outputs
+                text = outputs.get('text')
+
+                if text:
+                    self._task_state.answer += text
+                    yield self._text_chunk_to_stream_response(text)
+
+    def _is_stream_out_support(self, event: QueueTextChunkEvent) -> bool:
+        """
+        Is stream out support
+        :param event: queue text chunk event
+        :return:
+        """
+        if not event.metadata:
+            return False
+
+        if 'node_id' not in event.metadata:
+            return False
+
+        node_id = event.metadata.get('node_id')
+        node_type = event.metadata.get('node_type')
+        stream_output_value_selector = event.metadata.get('value_selector')
+        if not stream_output_value_selector:
+            return False
+
+        if not self._task_state.current_stream_generate_state:
+            return False
+
+        if node_id not in self._task_state.current_stream_generate_state.stream_node_ids:
+            return False
+
+        if node_type != NodeType.LLM:
+            # only LLM support chunk stream output
+            return False
+
+        return True
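
For orientation, the helpers added above cooperate as follows: _get_stream_generate_nodes keys a WorkflowStreamGenerateNodes entry by the node at which streaming may begin, and _get_end_start_at_node_ids finds that node by walking edges backwards from each end node. A rough, self-contained sketch of that backtracking on a hypothetical three-node graph (node ids are invented and plain strings stand in for the NodeType.*.value constants; this is an illustration, not code from this commit):

    # Hypothetical graph in the dict shape the methods above expect.
    graph = {
        'nodes': [
            {'id': 'start_1', 'data': {'type': 'start'}},
            {'id': 'llm_1', 'data': {'type': 'llm'}},
            {'id': 'end_1', 'data': {'type': 'end'}},
        ],
        'edges': [
            {'source': 'start_1', 'target': 'llm_1'},
            {'source': 'llm_1', 'target': 'end_1'},
        ],
    }

    def end_start_at_node_ids(graph: dict, target_node_id: str) -> list[str]:
        # Mirrors the backtracking in _get_end_start_at_node_ids: walk ingoing edges.
        nodes, edges = graph['nodes'], graph['edges']
        ingoing = [e for e in edges if e['target'] == target_node_id]
        start_node_ids: list[str] = []
        for edge in ingoing:
            source = next(n for n in nodes if n['id'] == edge['source'])
            node_type = source['data']['type']
            if node_type in ('if-else', 'question-classifier'):
                # branching node: streaming starts at the end node itself
                start_node_ids.append(target_node_id)
            elif node_type == 'start':
                start_node_ids.append(source['id'])
            else:
                # keep walking backwards through intermediate nodes (e.g. the LLM)
                start_node_ids.extend(end_start_at_node_ids(graph, source['id']))
        return start_node_ids

    assert end_start_at_node_ids(graph, 'end_1') == ['start_1']
    # _get_stream_generate_nodes() would therefore key the route by 'start_1', with
    # end_node_id='end_1' and stream_node_ids=['llm_1'] (the LLM whose 'text' output
    # the end node selects). When QueueNodeStartedEvent arrives for 'start_1', that
    # entry becomes current_stream_generate_state, and subsequent QueueTextChunkEvents
    # from 'llm_1' pass the _is_stream_out_support check and are streamed immediately.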

View File

@@ -6,6 +6,7 @@ from core.app.entities.queue_entities import (
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
+    QueueTextChunkEvent,
     QueueWorkflowFailedEvent,
     QueueWorkflowStartedEvent,
     QueueWorkflowSucceededEvent,
@@ -119,7 +120,15 @@ class WorkflowEventTriggerCallback(BaseWorkflowCallback):
         """
         Publish text chunk
         """
-        pass
+        self._queue_manager.publish(
+            QueueTextChunkEvent(
+                text=text,
+                metadata={
+                    "node_id": node_id,
+                    **metadata
+                }
+            ), PublishFrom.APPLICATION_MANAGER
+        )

     def on_event(self, event: AppQueueEvent) -> None:
         """

View File

@@ -9,9 +9,17 @@ from core.workflow.entities.node_entities import NodeType
 from core.workflow.nodes.answer.entities import GenerateRouteChunk


-class StreamGenerateRoute(BaseModel):
+class WorkflowStreamGenerateNodes(BaseModel):
     """
-    StreamGenerateRoute entity
+    WorkflowStreamGenerateNodes entity
+    """
+    end_node_id: str
+    stream_node_ids: list[str]
+
+
+class ChatflowStreamGenerateRoute(BaseModel):
+    """
+    ChatflowStreamGenerateRoute entity
     """
     answer_node_id: str
     generate_route: list[GenerateRouteChunk]
@@ -55,6 +63,8 @@ class WorkflowTaskState(TaskState):
     ran_node_execution_infos: dict[str, NodeExecutionInfo] = {}
     latest_node_execution_info: Optional[NodeExecutionInfo] = None

+    current_stream_generate_state: Optional[WorkflowStreamGenerateNodes] = None
+

 class AdvancedChatTaskState(WorkflowTaskState):
     """
@@ -62,7 +72,7 @@ class AdvancedChatTaskState(WorkflowTaskState):
     """
     usage: LLMUsage

-    current_stream_generate_state: Optional[StreamGenerateRoute] = None
+    current_stream_generate_state: Optional[ChatflowStreamGenerateRoute] = None


 class StreamEvent(Enum):
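
The split above gives each app mode its own stream-routing entity: workflow apps record which upstream node ids may stream toward an end node, while chatflow keeps the original answer-route structure under the new ChatflowStreamGenerateRoute name. A small hedged example of the workflow-side entity (ids are hypothetical):

    from core.app.entities.task_entities import WorkflowStreamGenerateNodes

    state = WorkflowStreamGenerateNodes(
        end_node_id='end_1',
        stream_node_ids=['llm_1'],
    )
    # WorkflowTaskState.current_stream_generate_state holds one of these while
    # the matching start-at node is running.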

View File

@@ -36,6 +36,49 @@ class EndNode(BaseNode):
             outputs=outputs
         )

+    @classmethod
+    def extract_generate_nodes(cls, graph: dict, config: dict) -> list[str]:
+        """
+        Extract generate nodes
+        :param graph: graph
+        :param config: node config
+        :return:
+        """
+        node_data = cls._node_data_cls(**config.get("data", {}))
+        node_data = cast(cls._node_data_cls, node_data)
+
+        return cls.extract_generate_nodes_from_node_data(graph, node_data)
+
+    @classmethod
+    def extract_generate_nodes_from_node_data(cls, graph: dict, node_data: EndNodeData) -> list[str]:
+        """
+        Extract generate nodes from node data
+        :param graph: graph
+        :param node_data: node data object
+        :return:
+        """
+        nodes = graph.get('nodes')
+        node_mapping = {node.get('id'): node for node in nodes}
+
+        variable_selectors = node_data.outputs
+
+        generate_nodes = []
+        for variable_selector in variable_selectors:
+            if not variable_selector.value_selector:
+                continue
+
+            node_id = variable_selector.value_selector[0]
+            if node_id != 'sys' and node_id in node_mapping:
+                node = node_mapping[node_id]
+                node_type = node.get('data', {}).get('type')
+                if node_type == NodeType.LLM.value and variable_selector.value_selector[1] == 'text':
+                    generate_nodes.append(node_id)
+
+        # remove duplicates
+        generate_nodes = list(set(generate_nodes))
+
+        return generate_nodes
+
     @classmethod
     def _extract_variable_selector_to_variable_mapping(cls, node_data: BaseNodeData) -> dict[str, list[str]]:
         """

View File

@@ -8,9 +8,8 @@ import { useParams } from 'next/navigation'
 import { HandThumbDownIcon, HandThumbUpIcon } from '@heroicons/react/24/outline'
 import { useBoolean } from 'ahooks'
 import { HashtagIcon } from '@heroicons/react/24/solid'
-// import PromptLog from '@/app/components/app/chat/log'
+import ResultTab from './result-tab'
 import { Markdown } from '@/app/components/base/markdown'
-import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
 import Loading from '@/app/components/base/loading'
 import Toast from '@/app/components/base/toast'
 import AudioBtn from '@/app/components/base/audio-btn'
@@ -26,7 +25,6 @@ import EditReplyModal from '@/app/components/app/annotation/edit-annotation-moda
 import { useStore as useAppStore } from '@/app/components/app/store'
 import WorkflowProcessItem from '@/app/components/base/chat/chat/answer/workflow-process'
 import type { WorkflowProcess } from '@/app/components/base/chat/types'
-import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'

 const MAX_DEPTH = 3
@@ -293,23 +291,17 @@ const GenerationItem: FC<IGenerationItemProps> = ({
           <div className={`flex ${contentClassName}`}>
             <div className='grow w-0'>
               {workflowProcessData && (
-                <WorkflowProcessItem grayBg data={workflowProcessData} expand={workflowProcessData.expand} />
+                <WorkflowProcessItem grayBg hideInfo data={workflowProcessData} expand={workflowProcessData.expand} />
+              )}
+              {workflowProcessData && !isError && (
+                <ResultTab data={workflowProcessData} content={content} />
               )}
               {isError && (
                 <div className='text-gray-400 text-sm'>{t('share.generation.batchFailed.outputPlaceholder')}</div>
               )}
-              {!isError && (typeof content === 'string') && (
+              {!workflowProcessData && !isError && (typeof content === 'string') && (
                 <Markdown content={content} />
               )}
-              {!isError && (typeof content !== 'string') && (
-                <CodeEditor
-                  readOnly
-                  title={<div/>}
-                  language={CodeLanguage.json}
-                  value={content}
-                  isJSONStringifyBeauty
-                />
-              )}
             </div>
           </div>
@@ -427,7 +419,11 @@ const GenerationItem: FC<IGenerationItemProps> = ({
               </>
             )}
           </div>
-          <div className='text-xs text-gray-500'>{content?.length} {t('common.unit.char')}</div>
+          <div>
+            {!workflowProcessData && (
+              <div className='text-xs text-gray-500'>{content?.length} {t('common.unit.char')}</div>
+            )}
+          </div>
         </div>
       </div>

View File

@@ -0,0 +1,74 @@
import {
  memo,
  useEffect,
  // useRef,
  useState,
} from 'react'
import cn from 'classnames'
import { useTranslation } from 'react-i18next'
// import Loading from '@/app/components/base/loading'
import { Markdown } from '@/app/components/base/markdown'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
import type { WorkflowProcess } from '@/app/components/base/chat/types'
// import { WorkflowRunningStatus } from '@/app/components/workflow/types'

const ResultTab = ({
  data,
  content,
}: {
  data?: WorkflowProcess
  content: any
}) => {
  const { t } = useTranslation()
  const [currentTab, setCurrentTab] = useState<string>('DETAIL')

  const switchTab = async (tab: string) => {
    setCurrentTab(tab)
  }
  useEffect(() => {
    if (data?.resultText)
      switchTab('RESULT')
    else
      switchTab('DETAIL')
  }, [data?.resultText])

  return (
    <div className='grow relative flex flex-col'>
      {data?.resultText && (
        <div className='shrink-0 flex items-center mb-2 border-b-[0.5px] border-[rgba(0,0,0,0.05)]'>
          <div
            className={cn(
              'mr-6 py-3 border-b-2 border-transparent text-[13px] font-semibold leading-[18px] text-gray-400 cursor-pointer',
              currentTab === 'RESULT' && '!border-[rgb(21,94,239)] text-gray-700',
            )}
            onClick={() => switchTab('RESULT')}
          >{t('runLog.result')}</div>
          <div
            className={cn(
              'mr-6 py-3 border-b-2 border-transparent text-[13px] font-semibold leading-[18px] text-gray-400 cursor-pointer',
              currentTab === 'DETAIL' && '!border-[rgb(21,94,239)] text-gray-700',
            )}
            onClick={() => switchTab('DETAIL')}
          >{t('runLog.detail')}</div>
        </div>
      )}
      <div className={cn('grow bg-white')}>
        {currentTab === 'RESULT' && (
          <Markdown content={data?.resultText || ''} />
        )}
        {currentTab === 'DETAIL' && content && (
          <CodeEditor
            readOnly
            title={<div>JSON OUTPUT</div>}
            language={CodeLanguage.json}
            value={content}
            isJSONStringifyBeauty
          />
        )}
      </div>
    </div>
  )
}

export default ResultTab

View File

@@ -54,6 +54,7 @@ export type WorkflowProcess = {
   status: WorkflowRunningStatus
   tracing: NodeTracing[]
   expand?: boolean // for UI
+  resultText?: string
 }

 export type ChatItem = IChatItem & {

View File

@@ -0,0 +1,5 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="image-indent-left">
<path id="Icon" d="M21 9.25H15M21 4H3M21 14.75H15M21 20H3M4.6 16H9.4C9.96005 16 10.2401 16 10.454 15.891C10.6422 15.7951 10.7951 15.6422 10.891 15.454C11 15.2401 11 14.9601 11 14.4V9.6C11 9.03995 11 8.75992 10.891 8.54601C10.7951 8.35785 10.6422 8.20487 10.454 8.10899C10.2401 8 9.96005 8 9.4 8H4.6C4.03995 8 3.75992 8 3.54601 8.10899C3.35785 8.20487 3.20487 8.35785 3.10899 8.54601C3 8.75992 3 9.03995 3 9.6V14.4C3 14.9601 3 15.2401 3.10899 15.454C3.20487 15.6422 3.35785 15.7951 3.54601 15.891C3.75992 16 4.03995 16 4.6 16Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
</svg>


View File

@@ -0,0 +1,39 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "24",
"height": "24",
"viewBox": "0 0 24 24",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "image-indent-left"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Icon",
"d": "M21 9.25H15M21 4H3M21 14.75H15M21 20H3M4.6 16H9.4C9.96005 16 10.2401 16 10.454 15.891C10.6422 15.7951 10.7951 15.6422 10.891 15.454C11 15.2401 11 14.9601 11 14.4V9.6C11 9.03995 11 8.75992 10.891 8.54601C10.7951 8.35785 10.6422 8.20487 10.454 8.10899C10.2401 8 9.96005 8 9.4 8H4.6C4.03995 8 3.75992 8 3.54601 8.10899C3.35785 8.20487 3.20487 8.35785 3.10899 8.54601C3 8.75992 3 9.03995 3 9.6V14.4C3 14.9601 3 15.2401 3.10899 15.454C3.20487 15.6422 3.35785 15.7951 3.54601 15.891C3.75992 16 4.03995 16 4.6 16Z",
"stroke": "currentColor",
"stroke-width": "2",
"stroke-linecap": "round",
"stroke-linejoin": "round"
},
"children": []
}
]
}
]
},
"name": "ImageIndentLeft"
}

View File

@@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './ImageIndentLeft.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
Icon.displayName = 'ImageIndentLeft'
export default Icon

View File

@@ -1,6 +1,7 @@
 export { default as AlignLeft } from './AlignLeft'
 export { default as BezierCurve03 } from './BezierCurve03'
 export { default as Colors } from './Colors'
+export { default as ImageIndentLeft } from './ImageIndentLeft'
 export { default as LeftIndent02 } from './LeftIndent02'
 export { default as LetterSpacing01 } from './LetterSpacing01'
 export { default as TypeSquare } from './TypeSquare'

View File

@@ -201,6 +201,7 @@ const Result: FC<IResultProps> = ({
          status: WorkflowRunningStatus.Running,
          tracing: [],
          expand: false,
+         resultText: '',
        })
        setRespondingFalse()
      },
@@ -243,15 +244,25 @@ const Result: FC<IResultProps> = ({
        }))
        if (!data.outputs)
          setCompletionRes('')
-       else if (Object.keys(data.outputs).length > 1)
-         setCompletionRes(data.outputs)
        else
-         setCompletionRes(data.outputs[Object.keys(data.outputs)[0]])
+         setCompletionRes(data.outputs)
        setRespondingFalse()
        setMessageId(tempMessageId)
        onCompleted(getCompletionRes(), taskId, true)
        isEnd = true
      },
+     onTextChunk: (params) => {
+       const { data: { text } } = params
+       setWorkflowProccessData(produce(getWorkflowProccessData()!, (draft) => {
+         draft.resultText += text
+       }))
+     },
+     onTextReplace: (params) => {
+       const { data: { text } } = params
+       setWorkflowProccessData(produce(getWorkflowProccessData()!, (draft) => {
+         draft.resultText = text
+       }))
+     },
    },
    isInstalledApp,
    installedAppInfo?.id,

View File

@@ -124,6 +124,7 @@ export const useWorkflowRun = () => {
        status: WorkflowRunningStatus.Running,
      },
      tracing: [],
+     resultText: '',
    })

    ssePost(
@@ -284,6 +285,27 @@ export const useWorkflowRun = () => {
          if (onNodeFinished)
            onNodeFinished(params)
        },
+       onTextChunk: (params) => {
+         const { data: { text } } = params
+         const {
+           workflowRunningData,
+           setWorkflowRunningData,
+         } = workflowStore.getState()
+         setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
+           draft.resultTabActive = true
+           draft.resultText += text
+         }))
+       },
+       onTextReplace: (params) => {
+         const { data: { text } } = params
+         const {
+           workflowRunningData,
+           setWorkflowRunningData,
+         } = workflowStore.getState()
+         setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
+           draft.resultText = text
+         }))
+       },
        ...restCallback,
      },
    )

View File

@@ -1,12 +1,12 @@
 import {
   memo,
   useEffect,
-  useRef,
+  // useRef,
   useState,
 } from 'react'
 import cn from 'classnames'
 import { useTranslation } from 'react-i18next'
-import OutputPanel from '../run/output-panel'
+import ResultText from '../run/result-text'
 import ResultPanel from '../run/result-panel'
 import TracingPanel from '../run/tracing-panel'
 import {
@@ -32,22 +32,15 @@ const WorkflowPreview = () => {
     setCurrentTab(tab)
   }

-  const [height, setHieght] = useState(0)
-  const ref = useRef<HTMLDivElement>(null)
-
   useEffect(() => {
     if (showDebugAndPreviewPanel && showInputsPanel)
       setCurrentTab('INPUT')
   }, [showDebugAndPreviewPanel, showInputsPanel])

-  const adjustResultHeight = () => {
-    if (ref.current)
-      setHieght(ref.current?.clientHeight - 16 - 16 - 2 - 1)
-  }
-
   useEffect(() => {
-    adjustResultHeight()
-  }, [])
+    if ((workflowRunningData?.result.status === WorkflowRunningStatus.Succeeded || workflowRunningData?.result.status === WorkflowRunningStatus.Failed) && !workflowRunningData.resultText)
+      switchTab('DETAIL')
+  }, [workflowRunningData])

   return (
     <div className={`
@@ -107,7 +100,7 @@ const WorkflowPreview = () => {
           }}
         >{t('runLog.tracing')}</div>
       </div>
-      <div ref={ref} className={cn(
+      <div className={cn(
         'grow bg-white h-0 overflow-y-auto rounded-b-2xl',
         (currentTab === 'RESULT' || currentTab === 'TRACING') && '!bg-gray-50',
       )}>
@@ -115,11 +108,11 @@ const WorkflowPreview = () => {
           <InputsPanel onRun={() => switchTab('RESULT')} />
         )}
         {currentTab === 'RESULT' && (
-          <OutputPanel
+          <ResultText
             isRunning={workflowRunningData?.result?.status === WorkflowRunningStatus.Running || !workflowRunningData?.result}
-            outputs={workflowRunningData?.result?.outputs}
+            outputs={workflowRunningData?.resultText}
             error={workflowRunningData?.result?.error}
-            height={height}
+            onClick={() => switchTab('DETAIL')}
           />
         )}
         {currentTab === 'DETAIL' && (

View File

@@ -0,0 +1,56 @@
'use client'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'
import { ImageIndentLeft } from '@/app/components/base/icons/src/vender/line/editor'
import { Markdown } from '@/app/components/base/markdown'
import LoadingAnim from '@/app/components/app/chat/loading-anim'

type ResultTextProps = {
  isRunning?: boolean
  outputs?: any
  error?: string
  onClick?: () => void
}

const ResultText: FC<ResultTextProps> = ({
  isRunning,
  outputs,
  error,
  onClick,
}) => {
  const { t } = useTranslation()
  return (
    <div className='bg-gray-50 py-2'>
      {isRunning && !outputs && (
        <div className='pt-4 pl-[26px]'>
          <LoadingAnim type='text' />
        </div>
      )}
      {!isRunning && error && (
        <div className='px-4'>
          <div className='px-3 py-[10px] rounded-lg !bg-[#fef3f2] border-[0.5px] border-[rbga(0,0,0,0.05)] shadow-xs'>
            <div className='text-xs leading-[18px] text-[#d92d20]'>{error}</div>
          </div>
        </div>
      )}
      {!isRunning && !outputs && !error && (
        <div className='mt-[120px] px-4 py-2 flex flex-col items-center text-[13px] leading-[18px] text-gray-500'>
          <ImageIndentLeft className='w-6 h-6 text-gray-400' />
          <div className='mr-2'>{t('runLog.resultEmpty.title')}</div>
          <div>
            {t('runLog.resultEmpty.tipLeft')}
            <span onClick={onClick} className='cursor-pointer text-primary-600'>{t('runLog.resultEmpty.link')}</span>
            {t('runLog.resultEmpty.tipRight')}
          </div>
        </div>
      )}
      {outputs && (
        <div className='px-4 py-2'>
          <Markdown content={outputs} />
        </div>
      )}
    </div>
  )
}

export default ResultText

View File

@@ -19,11 +19,16 @@ import type {
 } from './types'
 import { WorkflowContext } from './context'

+type PreviewRunningData = WorkflowRunningData & {
+  resultTabActive?: boolean
+  resultText?: string
+}
+
 type Shape = {
   appId: string
   panelWidth: number
-  workflowRunningData?: WorkflowRunningData
-  setWorkflowRunningData: (workflowData?: WorkflowRunningData) => void
+  workflowRunningData?: PreviewRunningData
+  setWorkflowRunningData: (workflowData: PreviewRunningData) => void
   historyWorkflowData?: HistoryWorkflowData
   setHistoryWorkflowData: (historyWorkflowData?: HistoryWorkflowData) => void
   showRunHistory: boolean

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Gesamtzeichen',
     steps: 'Ausführungsschritte',
   },
+  resultEmpty: {
+    title: 'Dieser Lauf gibt nur das JSON-Format aus',
+    tipLeft: 'Bitte gehen Sie zum ',
+    Link: 'Detailpanel',
+    tipRight: 'ansehen.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total Tokens',
     steps: 'Run Steps',
   },
+  resultEmpty: {
+    title: 'This run only output JSON format,',
+    tipLeft: 'please go to the ',
+    link: 'detail panel',
+    tipRight: ' view it.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total des jetons',
     steps: 'Étapes d\'exécution',
   },
+  resultEmpty: {
+    title: 'Cela exécute uniquement le format de sortie JSON,',
+    tipLeft: 'veuillez aller à ',
+    link: 'panneau de détail',
+    tipRight: ' visualisez-le.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'トークンの合計',
     steps: '実行ステップ',
   },
+  resultEmpty: {
+    title: 'この実行では JSON 形式のみが出力されます',
+    tipLeft: 'にアクセスしてください',
+    link: '詳細パネル',
+    tipRight: '表示します。',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Total de Tokens',
     steps: 'Passos de Execução',
   },
+  resultEmpty: {
+    title: 'Esta execução apenas produz o formato JSON,',
+    tipLeft: 'por favor vá para ',
+    link: 'painel de detalhes',
+    tipRight: ' veja.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Загальна кількість токенів',
     steps: 'Кроки виконання',
   },
+  resultEmpty: {
+    title: 'Цей запуск лише вихідного формату JSON,',
+    tipLeft: 'будь ласка, перейдіть до ',
+    link: 'панель деталей',
+    tipRight: ' переглянути.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: 'Tổng số token',
     steps: 'Các bước chạy',
   },
+  resultEmpty: {
+    title: 'Chạy này chỉ xuất ra định dạng JSON,',
+    tipLeft: 'vui lòng truy cập ',
+    link: 'bảng chi tiết',
+    tipRight: ' xem nó.',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: '总 token 数',
     steps: '运行步数',
   },
+  resultEmpty: {
+    title: '本次运行仅输出JSON格式',
+    tipLeft: '请转到',
+    link: '详细信息面板',
+    tipRight: '查看它。',
+  },
 }

 export default translation

View File

@@ -18,6 +18,12 @@ const translation = {
     tokens: '總 token 數',
     steps: '執行步數',
   },
+  resultEmpty: {
+    title: '本運行僅輸出JSON格式',
+    tipLeft: '請到',
+    link: '詳細資訊面板',
+    tipRight: '查看它。',
+  },
 }

 export default translation

View File

@@ -1,4 +1,4 @@
-import type { IOnCompleted, IOnData, IOnError, IOnFile, IOnMessageEnd, IOnMessageReplace, IOnNodeFinished, IOnNodeStarted, IOnThought, IOnWorkflowFinished, IOnWorkflowStarted } from './base'
+import type { IOnCompleted, IOnData, IOnError, IOnFile, IOnMessageEnd, IOnMessageReplace, IOnNodeFinished, IOnNodeStarted, IOnTextChunk, IOnTextReplace, IOnThought, IOnWorkflowFinished, IOnWorkflowStarted } from './base'
 import {
   del as consoleDel, get as consoleGet, patch as consolePatch, post as consolePost,
   delPublic as del, getPublic as get, patchPublic as patch, postPublic as post, ssePost,
@@ -72,11 +72,15 @@ export const sendWorkflowMessage = async (
     onNodeStarted,
     onNodeFinished,
     onWorkflowFinished,
+    onTextChunk,
+    onTextReplace,
   }: {
     onWorkflowStarted: IOnWorkflowStarted
     onNodeStarted: IOnNodeStarted
     onNodeFinished: IOnNodeFinished
     onWorkflowFinished: IOnWorkflowFinished
+    onTextChunk: IOnTextChunk
+    onTextReplace: IOnTextReplace
   },
   isInstalledApp: boolean,
   installedAppId = '',
@@ -86,7 +90,7 @@ export const sendWorkflowMessage = async (
       ...body,
       response_mode: 'streaming',
     },
-  }, { onNodeStarted, onWorkflowStarted, onWorkflowFinished, isPublicAPI: !isInstalledApp, onNodeFinished })
+  }, { onNodeStarted, onWorkflowStarted, onWorkflowFinished, isPublicAPI: !isInstalledApp, onNodeFinished, onTextChunk, onTextReplace })
 }

 export const fetchAppInfo = async () => {