From 5f89e84eafa7b34c299c0606783e012d4a76c77d Mon Sep 17 00:00:00 2001 From: dnazarenkoo <134951516+dnazarenkoo@users.noreply.github.com> Date: Sun, 30 Jul 2023 14:02:18 +0300 Subject: [PATCH 01/19] feat: add logs context (#3190) * feat: add the ability to share a link to a log line * fix: update tooltip * fix: resolve comments regarding query params * fix: resolve comments * feat: add logs context * feat: add highlighting active items * fix: resolve comments * feat: fix showing log lines * fix: update logs ordering * fix: update page size and logs saving * fix: update related to comments * feat: logs context is updated --------- Co-authored-by: Palash Gupta --- frontend/package.json | 1 + frontend/src/api/metrics/getQueryRange.ts | 1 + .../LogDetail/LogDetail.interfaces.ts | 5 +- frontend/src/components/LogDetail/index.tsx | 6 +- .../src/components/Logs/ListLogView/index.tsx | 78 +++++- .../src/components/Logs/ListLogView/styles.ts | 16 +- .../src/components/Logs/RawLogView/index.tsx | 142 +++++++++- .../src/components/Logs/RawLogView/styles.ts | 44 +++- .../src/components/Logs/TableView/types.ts | 9 +- .../Logs/TableView/useTableView.tsx | 85 +++++- frontend/src/constants/query.ts | 2 + frontend/src/constants/theme.ts | 1 + .../container/LogsContextList/ShowButton.tsx | 36 +++ .../src/container/LogsContextList/configs.ts | 9 + .../src/container/LogsContextList/index.tsx | 198 ++++++++++++++ .../src/container/LogsContextList/styles.ts | 25 ++ .../src/container/LogsContextList/utils.ts | 52 ++++ .../container/LogsExplorerContext/index.tsx | 109 ++++++++ .../container/LogsExplorerContext/styles.ts | 30 +++ .../container/LogsExplorerContext/types.ts | 6 + .../LogsExplorerContext/useInitialQuery.ts | 36 +++ .../container/LogsExplorerContext/utils.ts | 22 ++ .../InfinityTableView/LogsCustomTable.tsx | 6 + .../InfinityTableView/config.ts | 2 +- .../InfinityTableView/index.tsx | 249 +++++++++++------- .../InfinityTableView/styles.ts | 14 +- .../InfinityTableView/types.ts | 3 +- .../LogsExplorerList.interfaces.ts | 5 +- .../src/container/LogsExplorerList/index.tsx | 55 ++-- .../src/container/LogsExplorerViews/index.tsx | 119 +++------ frontend/src/container/LogsTable/index.tsx | 77 +----- frontend/src/hooks/logs/configs.ts | 1 + frontend/src/hooks/logs/types.ts | 24 ++ frontend/src/hooks/logs/useActiveLog.ts | 127 +++++++++ frontend/src/hooks/logs/useCopyLogLink.ts | 85 ++++++ .../queryBuilder/useGetExplorerQueryRange.ts | 2 + frontend/src/pages/Logs/index.tsx | 19 +- frontend/src/types/api/index.ts | 3 +- frontend/src/types/api/logs/log.ts | 2 +- frontend/src/utils/getAlphaColor.ts | 14 + frontend/yarn.lock | 14 + 41 files changed, 1393 insertions(+), 341 deletions(-) create mode 100644 frontend/src/container/LogsContextList/ShowButton.tsx create mode 100644 frontend/src/container/LogsContextList/configs.ts create mode 100644 frontend/src/container/LogsContextList/index.tsx create mode 100644 frontend/src/container/LogsContextList/styles.ts create mode 100644 frontend/src/container/LogsContextList/utils.ts create mode 100644 frontend/src/container/LogsExplorerContext/index.tsx create mode 100644 frontend/src/container/LogsExplorerContext/styles.ts create mode 100644 frontend/src/container/LogsExplorerContext/types.ts create mode 100644 frontend/src/container/LogsExplorerContext/useInitialQuery.ts create mode 100644 frontend/src/container/LogsExplorerContext/utils.ts create mode 100644 frontend/src/hooks/logs/configs.ts create mode 100644 frontend/src/hooks/logs/types.ts create mode 100644 
frontend/src/hooks/logs/useActiveLog.ts create mode 100644 frontend/src/hooks/logs/useCopyLogLink.ts create mode 100644 frontend/src/utils/getAlphaColor.ts diff --git a/frontend/package.json b/frontend/package.json index c6208a9a82..dd64d0bc14 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -46,6 +46,7 @@ "chartjs-adapter-date-fns": "^2.0.0", "chartjs-plugin-annotation": "^1.4.0", "color": "^4.2.1", + "color-alpha": "1.1.3", "cross-env": "^7.0.3", "css-loader": "4.3.0", "css-minimizer-webpack-plugin": "^3.2.0", diff --git a/frontend/src/api/metrics/getQueryRange.ts b/frontend/src/api/metrics/getQueryRange.ts index 9a0a22bda7..bc70d19832 100644 --- a/frontend/src/api/metrics/getQueryRange.ts +++ b/frontend/src/api/metrics/getQueryRange.ts @@ -18,6 +18,7 @@ export const getMetricsQueryRange = async ( error: null, message: response.data.status, payload: response.data, + params: props, }; } catch (error) { return ErrorResponseHandler(error as AxiosError); diff --git a/frontend/src/components/LogDetail/LogDetail.interfaces.ts b/frontend/src/components/LogDetail/LogDetail.interfaces.ts index 198e2abdcd..a67dfc10c8 100644 --- a/frontend/src/components/LogDetail/LogDetail.interfaces.ts +++ b/frontend/src/components/LogDetail/LogDetail.interfaces.ts @@ -1,9 +1,10 @@ +import { DrawerProps } from 'antd'; import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC'; import { ActionItemProps } from 'container/LogDetailedView/ActionItem'; import { ILog } from 'types/api/logs/log'; export type LogDetailProps = { log: ILog | null; - onClose: () => void; } & Pick & - Pick; + Pick & + Pick; diff --git a/frontend/src/components/LogDetail/index.tsx b/frontend/src/components/LogDetail/index.tsx index 8ea0709fbd..b787322ca7 100644 --- a/frontend/src/components/LogDetail/index.tsx +++ b/frontend/src/components/LogDetail/index.tsx @@ -11,10 +11,6 @@ function LogDetail({ onAddToQuery, onClickActionItem, }: LogDetailProps): JSX.Element { - const onDrawerClose = (): void => { - onClose(); - }; - const items = useMemo( () => [ { @@ -43,7 +39,7 @@ function LogDetail({ title="Log Details" placement="right" closable - onClose={onDrawerClose} + onClose={onClose} open={log !== null} style={{ overscrollBehavior: 'contain' }} destroyOnClose diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index 91d0787a95..b56614edcf 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -1,9 +1,19 @@ import { blue, grey, orange } from '@ant-design/colors'; -import { CopyFilled, ExpandAltOutlined } from '@ant-design/icons'; +import { + CopyFilled, + ExpandAltOutlined, + LinkOutlined, + MonitorOutlined, +} from '@ant-design/icons'; import Convert from 'ansi-to-html'; import { Button, Divider, Row, Typography } from 'antd'; +import LogDetail from 'components/LogDetail'; +import LogsExplorerContext from 'container/LogsExplorerContext'; import dayjs from 'dayjs'; import dompurify from 'dompurify'; +import { useActiveLog } from 'hooks/logs/useActiveLog'; +import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; +import { useIsDarkMode } from 'hooks/useDarkMode'; import { useNotifications } from 'hooks/useNotifications'; // utils import { FlatLogData } from 'lib/logs/flatLogData'; @@ -85,24 +95,40 @@ function LogSelectedField({ type ListLogViewProps = { logData: ILog; - onOpenDetailedView: (log: ILog) => void; selectedFields: IField[]; -} & Pick; +}; function ListLogView({ logData, 
selectedFields, - onOpenDetailedView, - onAddToQuery, }: ListLogViewProps): JSX.Element { const flattenLogData = useMemo(() => FlatLogData(logData), [logData]); + const isDarkMode = useIsDarkMode(); const [, setCopy] = useCopyToClipboard(); const { notifications } = useNotifications(); + const { isHighlighted, isLogsExplorerPage, onLogCopy } = useCopyLogLink( + logData.id, + ); + const { + activeLog: activeContextLog, + onSetActiveLog: handleSetActiveContextLog, + onClearActiveLog: handleClearActiveContextLog, + } = useActiveLog(); + const { + activeLog, + onSetActiveLog, + onClearActiveLog, + onAddToQuery, + } = useActiveLog(); const handleDetailedView = useCallback(() => { - onOpenDetailedView(logData); - }, [logData, onOpenDetailedView]); + onSetActiveLog(logData); + }, [logData, onSetActiveLog]); + + const handleShowContext = useCallback(() => { + handleSetActiveContextLog(logData); + }, [logData, handleSetActiveContextLog]); const handleCopyJSON = (): void => { setCopy(JSON.stringify(logData, null, 2)); @@ -125,7 +151,7 @@ function ListLogView({ ); return ( - +
<> @@ -169,6 +195,42 @@ function ListLogView({ > Copy JSON + + {isLogsExplorerPage && ( + <> + + + + )} + + {activeContextLog && ( + + )} + ); diff --git a/frontend/src/components/Logs/ListLogView/styles.ts b/frontend/src/components/Logs/ListLogView/styles.ts index 313f5b9e0c..452bb653fa 100644 --- a/frontend/src/components/Logs/ListLogView/styles.ts +++ b/frontend/src/components/Logs/ListLogView/styles.ts @@ -1,12 +1,26 @@ import { Card, Typography } from 'antd'; +import { themeColors } from 'constants/theme'; import styled from 'styled-components'; +import getAlphaColor from 'utils/getAlphaColor'; -export const Container = styled(Card)` +export const Container = styled(Card)<{ + $isDarkMode: boolean; + $isActiveLog: boolean; +}>` width: 100% !important; margin-bottom: 0.3rem; .ant-card-body { padding: 0.3rem 0.6rem; } + + ${({ $isDarkMode, $isActiveLog }): string => + $isActiveLog + ? `background-color: ${ + $isDarkMode + ? getAlphaColor(themeColors.white)[10] + : getAlphaColor(themeColors.black)[10] + };` + : ''} `; export const Text = styled(Typography.Text)` diff --git a/frontend/src/components/Logs/RawLogView/index.tsx b/frontend/src/components/Logs/RawLogView/index.tsx index 76d12c1a22..e926e73643 100644 --- a/frontend/src/components/Logs/RawLogView/index.tsx +++ b/frontend/src/components/Logs/RawLogView/index.tsx @@ -1,16 +1,32 @@ -import { ExpandAltOutlined } from '@ant-design/icons'; -// const Convert = require('ansi-to-html'); +import { + ExpandAltOutlined, + LinkOutlined, + MonitorOutlined, +} from '@ant-design/icons'; import Convert from 'ansi-to-html'; +import { Button, DrawerProps, Tooltip } from 'antd'; +import LogDetail from 'components/LogDetail'; +import LogsExplorerContext from 'container/LogsExplorerContext'; import dayjs from 'dayjs'; import dompurify from 'dompurify'; +import { useActiveLog } from 'hooks/logs/useActiveLog'; +import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; // hooks import { useIsDarkMode } from 'hooks/useDarkMode'; -import { useCallback, useMemo } from 'react'; +import { + KeyboardEvent, + MouseEvent, + MouseEventHandler, + useCallback, + useMemo, + useState, +} from 'react'; // interfaces import { ILog } from 'types/api/logs/log'; // styles import { + ActionButtonsWrapper, ExpandIconWrapper, RawLogContent, RawLogViewContainer, @@ -19,15 +35,34 @@ import { const convert = new Convert(); interface RawLogViewProps { + isActiveLog?: boolean; + isReadOnly?: boolean; data: ILog; linesPerRow: number; - onClickExpand: (log: ILog) => void; } function RawLogView(props: RawLogViewProps): JSX.Element { - const { data, linesPerRow, onClickExpand } = props; + const { isActiveLog = false, isReadOnly = false, data, linesPerRow } = props; + + const { isHighlighted, isLogsExplorerPage, onLogCopy } = useCopyLogLink( + data.id, + ); + const { + activeLog: activeContextLog, + onSetActiveLog: handleSetActiveContextLog, + onClearActiveLog: handleClearActiveContextLog, + } = useActiveLog(); + const { + activeLog, + onSetActiveLog, + onClearActiveLog, + onAddToQuery, + } = useActiveLog(); + + const [hasActionButtons, setHasActionButtons] = useState(false); const isDarkMode = useIsDarkMode(); + const isReadOnlyLog = !isLogsExplorerPage || isReadOnly; const text = useMemo( () => @@ -38,8 +73,43 @@ function RawLogView(props: RawLogViewProps): JSX.Element { ); const handleClickExpand = useCallback(() => { - onClickExpand(data); - }, [onClickExpand, data]); + if (activeContextLog || isReadOnly) return; + + onSetActiveLog(data); + }, [activeContextLog, isReadOnly, 
data, onSetActiveLog]); + + const handleCloseLogDetail: DrawerProps['onClose'] = useCallback( + ( + event: MouseEvent | KeyboardEvent, + ) => { + event.preventDefault(); + event.stopPropagation(); + + onClearActiveLog(); + }, + [onClearActiveLog], + ); + + const handleMouseEnter = useCallback(() => { + if (isReadOnlyLog) return; + + setHasActionButtons(true); + }, [isReadOnlyLog]); + + const handleMouseLeave = useCallback(() => { + if (isReadOnlyLog) return; + + setHasActionButtons(false); + }, [isReadOnlyLog]); + + const handleShowContext: MouseEventHandler = useCallback( + (event) => { + event.preventDefault(); + event.stopPropagation(); + handleSetActiveContextLog(data); + }, + [data, handleSetActiveContextLog], + ); const html = useMemo( () => ({ @@ -48,19 +118,69 @@ function RawLogView(props: RawLogViewProps): JSX.Element { [text], ); + const mouseActions = useMemo( + () => ({ onMouseEnter: handleMouseEnter, onMouseLeave: handleMouseLeave }), + [handleMouseEnter, handleMouseLeave], + ); + return ( - - - - + {!isReadOnly && ( + + + + )} + + + + {hasActionButtons && ( + + + + + ); +} + +export default ShowButton; diff --git a/frontend/src/container/LogsContextList/configs.ts b/frontend/src/container/LogsContextList/configs.ts new file mode 100644 index 0000000000..2fbb159b9d --- /dev/null +++ b/frontend/src/container/LogsContextList/configs.ts @@ -0,0 +1,9 @@ +import { OrderByPayload } from 'types/api/queryBuilder/queryBuilderData'; + +export const INITIAL_PAGE_SIZE = 5; +export const LOGS_MORE_PAGE_SIZE = 10; + +export const getOrderByTimestamp = (order: string): OrderByPayload => ({ + columnName: 'timestamp', + order, +}); diff --git a/frontend/src/container/LogsContextList/index.tsx b/frontend/src/container/LogsContextList/index.tsx new file mode 100644 index 0000000000..0433a5f636 --- /dev/null +++ b/frontend/src/container/LogsContextList/index.tsx @@ -0,0 +1,198 @@ +import RawLogView from 'components/Logs/RawLogView'; +import Spinner from 'components/Spinner'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; +import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { memo, useCallback, useEffect, useMemo, useState } from 'react'; +import { Virtuoso } from 'react-virtuoso'; +import { SuccessResponse } from 'types/api'; +import { ILog } from 'types/api/logs/log'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +import { + getOrderByTimestamp, + INITIAL_PAGE_SIZE, + LOGS_MORE_PAGE_SIZE, +} from './configs'; +import ShowButton from './ShowButton'; +import { EmptyText, ListContainer } from './styles'; +import { getRequestData } from './utils'; + +interface LogsContextListProps { + isEdit: boolean; + query: Query; + log: ILog; + order: string; + filters: TagFilter | null; +} + +function LogsContextList({ + isEdit, + query, + log, + order, + filters, +}: LogsContextListProps): JSX.Element { + const isDarkMode = useIsDarkMode(); + const [logs, setLogs] = useState([]); + const [page, setPage] = useState(1); + + const firstLog = useMemo(() => logs[0], [logs]); + const lastLog = useMemo(() => logs[logs.length - 1], [logs]); + const orderByTimestamp = useMemo(() => getOrderByTimestamp(order), [order]); + + const logsMorePageSize = useMemo(() => (page - 1) * LOGS_MORE_PAGE_SIZE, [ + page, + ]); + const pageSize = 
useMemo( + () => (page <= 1 ? INITIAL_PAGE_SIZE : logsMorePageSize + INITIAL_PAGE_SIZE), + [page, logsMorePageSize], + ); + const isDisabledFetch = useMemo(() => logs.length < pageSize, [ + logs.length, + pageSize, + ]); + + const currentStagedQueryData = useMemo(() => { + if (!query || query.builder.queryData.length !== 1) return null; + + return query.builder.queryData[0]; + }, [query]); + + const initialLogsRequest = useMemo( + () => + getRequestData({ + stagedQueryData: currentStagedQueryData, + query, + log, + orderByTimestamp, + page, + }), + [currentStagedQueryData, page, log, query, orderByTimestamp], + ); + + const [requestData, setRequestData] = useState( + initialLogsRequest, + ); + + const handleSuccess = useCallback( + (data: SuccessResponse) => { + const currentData = data?.payload.data.newResult.data.result || []; + + if (currentData.length > 0 && currentData[0].list) { + const currentLogs: ILog[] = currentData[0].list.map((item) => ({ + ...item.data, + timestamp: item.timestamp, + })); + + if (order === FILTERS.ASC) { + const reversedCurrentLogs = currentLogs.reverse(); + setLogs((prevLogs) => [...reversedCurrentLogs, ...prevLogs]); + } else { + setLogs((prevLogs) => [...prevLogs, ...currentLogs]); + } + } + }, + [order], + ); + + const { isError, isFetching } = useGetExplorerQueryRange( + requestData, + PANEL_TYPES.LIST, + { + keepPreviousData: true, + onSuccess: handleSuccess, + }, + ); + + const handleShowNextLines = useCallback(() => { + if (isDisabledFetch) return; + + const log = order === FILTERS.ASC ? firstLog : lastLog; + + const newRequestData = getRequestData({ + stagedQueryData: currentStagedQueryData, + query, + log, + orderByTimestamp, + page: page + 1, + pageSize: LOGS_MORE_PAGE_SIZE, + }); + + setPage((prevPage) => prevPage + 1); + setRequestData(newRequestData); + }, [ + query, + firstLog, + lastLog, + page, + order, + currentStagedQueryData, + isDisabledFetch, + orderByTimestamp, + ]); + + useEffect(() => { + if (!isEdit) return; + + const newRequestData = getRequestData({ + stagedQueryData: currentStagedQueryData, + query, + log, + orderByTimestamp, + page: 1, + }); + + setPage(1); + setLogs([]); + setRequestData(newRequestData); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filters]); + + const getItemContent = useCallback( + (_: number, log: ILog): JSX.Element => ( + + ), + [], + ); + + return ( + <> + {order === FILTERS.ASC && ( + + )} + + + {((!logs.length && !isFetching) || isError) && ( + No Data + )} + {isFetching && } + + + + + {order === FILTERS.DESC && ( + + )} + + ); +} + +export default memo(LogsContextList); diff --git a/frontend/src/container/LogsContextList/styles.ts b/frontend/src/container/LogsContextList/styles.ts new file mode 100644 index 0000000000..85cf3128f1 --- /dev/null +++ b/frontend/src/container/LogsContextList/styles.ts @@ -0,0 +1,25 @@ +import { Space, Typography } from 'antd'; +import { themeColors } from 'constants/theme'; +import styled from 'styled-components'; + +export const ListContainer = styled.div<{ $isDarkMode: boolean }>` + position: relative; + margin: 0 -1.5rem; + height: 10rem; + overflow-y: scroll; + + background-color: ${({ $isDarkMode }): string => + $isDarkMode ? 
themeColors.darkGrey : themeColors.lightgrey}; +`; + +export const ShowButtonWrapper = styled(Space)` + margin: 0.625rem 0; +`; + +export const EmptyText = styled(Typography)` + padding: 0 1.5rem; + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%, -50%); +`; diff --git a/frontend/src/container/LogsContextList/utils.ts b/frontend/src/container/LogsContextList/utils.ts new file mode 100644 index 0000000000..2fc832d8e9 --- /dev/null +++ b/frontend/src/container/LogsContextList/utils.ts @@ -0,0 +1,52 @@ +import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData'; +import { ILog } from 'types/api/logs/log'; +import { + IBuilderQuery, + OrderByPayload, + Query, +} from 'types/api/queryBuilder/queryBuilderData'; + +import { INITIAL_PAGE_SIZE } from './configs'; + +type GetRequestDataProps = { + query: Query | null; + stagedQueryData: IBuilderQuery | null; + log: ILog; + orderByTimestamp: OrderByPayload; + page: number; + pageSize?: number; +}; + +export const getRequestData = ({ + query, + stagedQueryData, + log, + orderByTimestamp, + page, + pageSize = INITIAL_PAGE_SIZE, +}: GetRequestDataProps): Query | null => { + if (!query) return null; + + const paginateData = getPaginationQueryData({ + currentStagedQueryData: stagedQueryData, + listItemId: log ? log.id : null, + orderByTimestamp, + page, + pageSize, + }); + + const data: Query = { + ...query, + builder: { + ...query.builder, + queryData: query.builder.queryData.map((item) => ({ + ...item, + ...paginateData, + pageSize, + orderBy: [orderByTimestamp], + })), + }, + }; + + return data; +}; diff --git a/frontend/src/container/LogsExplorerContext/index.tsx b/frontend/src/container/LogsExplorerContext/index.tsx new file mode 100644 index 0000000000..746148f3d1 --- /dev/null +++ b/frontend/src/container/LogsExplorerContext/index.tsx @@ -0,0 +1,109 @@ +import { EditFilled } from '@ant-design/icons'; +import { Typography } from 'antd'; +import Modal from 'antd/es/modal/Modal'; +import RawLogView from 'components/Logs/RawLogView'; +import LogsContextList from 'container/LogsContextList'; +import { FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; +import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { memo, useCallback, useMemo, useState } from 'react'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +import { EditButton, TitleWrapper } from './styles'; +import { LogsExplorerContextProps } from './types'; +import useInitialQuery from './useInitialQuery'; + +function LogsExplorerContext({ + log, + onClose, +}: LogsExplorerContextProps): JSX.Element | null { + const initialContextQuery = useInitialQuery(log); + + const [contextQuery, setContextQuery] = useState(initialContextQuery); + const [filters, setFilters] = useState(null); + const [isEdit, setIsEdit] = useState(false); + + const isDarkMode = useIsDarkMode(); + + const handleClickEditButton = useCallback( + () => setIsEdit((prevValue) => !prevValue), + [], + ); + + const handleSearch = useCallback( + (tagFilters: TagFilter): void => { + const tagFiltersLength = tagFilters.items.length; + + if ( + (!tagFiltersLength && (!filters || !filters.items.length)) || + tagFiltersLength === filters?.items.length + ) + return; + + const nextQuery: Query = { + ...contextQuery, + builder: { + ...contextQuery.builder, + queryData: contextQuery.builder.queryData.map((item) => ({ + ...item, + filters: tagFilters, + })), + 
}, + }; + + setFilters(tagFilters); + setContextQuery(nextQuery); + }, + [contextQuery, filters], + ); + + const contextListParams = useMemo( + () => ({ log, isEdit, filters, query: contextQuery }), + [isEdit, log, filters, contextQuery], + ); + + return ( + + Logs Context + + } + onClick={handleClickEditButton} + /> + + } + > + {isEdit && ( + + )} + + + + + ); +} + +export default memo(LogsExplorerContext); diff --git a/frontend/src/container/LogsExplorerContext/styles.ts b/frontend/src/container/LogsExplorerContext/styles.ts new file mode 100644 index 0000000000..2236c20d53 --- /dev/null +++ b/frontend/src/container/LogsExplorerContext/styles.ts @@ -0,0 +1,30 @@ +import { Button, Space } from 'antd'; +import { themeColors } from 'constants/theme'; +import styled from 'styled-components'; +import getAlphaColor from 'utils/getAlphaColor'; + +export const TitleWrapper = styled(Space.Compact)` + justify-content: space-between; + align-items: center; +`; + +export const EditButton = styled(Button)<{ $isDarkMode: boolean }>` + margin-right: 0.938rem; + width: 1.375rem !important; + height: 1.375rem; + position: absolute; + + top: 1rem; + right: 1.563rem; + padding: 0; + + border-radius: 0.125rem; + + border-start-start-radius: 0.125rem !important; + border-end-start-radius: 0.125rem !important; + + color: ${({ $isDarkMode }): string => + $isDarkMode + ? getAlphaColor(themeColors.white)[45] + : getAlphaColor(themeColors.black)[45]}; +`; diff --git a/frontend/src/container/LogsExplorerContext/types.ts b/frontend/src/container/LogsExplorerContext/types.ts new file mode 100644 index 0000000000..343171a740 --- /dev/null +++ b/frontend/src/container/LogsExplorerContext/types.ts @@ -0,0 +1,6 @@ +import { ILog } from 'types/api/logs/log'; + +export interface LogsExplorerContextProps { + log: ILog; + onClose: VoidFunction; +} diff --git a/frontend/src/container/LogsExplorerContext/useInitialQuery.ts b/frontend/src/container/LogsExplorerContext/useInitialQuery.ts new file mode 100644 index 0000000000..7c0f49029c --- /dev/null +++ b/frontend/src/container/LogsExplorerContext/useInitialQuery.ts @@ -0,0 +1,36 @@ +import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { ILog } from 'types/api/logs/log'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { DataSource } from 'types/common/queryBuilder'; + +import { getFiltersFromResources } from './utils'; + +const useInitialQuery = (log: ILog): Query => { + const { updateAllQueriesOperators } = useQueryBuilder(); + const resourcesFilters = getFiltersFromResources(log.resources_string); + + const updatedAllQueriesOperator = updateAllQueriesOperators( + initialQueriesMap.logs, + PANEL_TYPES.LIST, + DataSource.LOGS, + ); + + const data: Query = { + ...updatedAllQueriesOperator, + builder: { + ...updatedAllQueriesOperator.builder, + queryData: updatedAllQueriesOperator.builder.queryData.map((item) => ({ + ...item, + filters: { + ...item.filters, + items: [...item.filters.items, ...resourcesFilters], + }, + })), + }, + }; + + return data; +}; + +export default useInitialQuery; diff --git a/frontend/src/container/LogsExplorerContext/utils.ts b/frontend/src/container/LogsExplorerContext/utils.ts new file mode 100644 index 0000000000..93456ce0cd --- /dev/null +++ b/frontend/src/container/LogsExplorerContext/utils.ts @@ -0,0 +1,22 @@ +import { OPERATORS } from 'constants/queryBuilder'; +import { ILog } from 'types/api/logs/log'; +import { 
TagFilterItem } from 'types/api/queryBuilder/queryBuilderData'; +import { v4 as uuid } from 'uuid'; + +export const getFiltersFromResources = ( + resources: ILog['resources_string'], +): TagFilterItem[] => + Object.keys(resources).map((key: string) => { + const resourceValue = resources[key] as string; + return { + id: uuid(), + key: { + key, + dataType: 'string', + type: 'resource', + isColumn: false, + }, + op: OPERATORS['='], + value: resourceValue, + }; + }); diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/LogsCustomTable.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/LogsCustomTable.tsx index d7ba10fb01..a5cc609492 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/LogsCustomTable.tsx +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/LogsCustomTable.tsx @@ -1,3 +1,4 @@ +import Spinner from 'components/Spinner'; import { dragColumnParams } from 'hooks/useDragColumns/configs'; import ReactDragListView from 'react-drag-listview'; import { TableComponents } from 'react-virtuoso'; @@ -5,13 +6,18 @@ import { TableComponents } from 'react-virtuoso'; import { TableStyled } from './styles'; interface LogsCustomTableProps { + isLoading?: boolean; handleDragEnd: (fromIndex: number, toIndex: number) => void; } export const LogsCustomTable = ({ + isLoading, handleDragEnd, }: LogsCustomTableProps): TableComponents['Table'] => function CustomTable({ style, children }): JSX.Element { + if (isLoading) { + return ; + } return ( ['TableRow'] = ({ children, context, ...props - // eslint-disable-next-line react/jsx-props-no-spreading -}) => {children}; - -function InfinityTable({ - tableViewProps, - infitiyTableProps, -}: InfinityTableProps): JSX.Element | null { - const { onEndReached } = infitiyTableProps; - const { dataSource, columns } = useTableView(tableViewProps); - const { draggedColumns, onDragColumns } = useDragColumns< - Record - >(LOCALSTORAGE.LOGS_LIST_COLUMNS); - - const tableColumns = useMemo( - () => getDraggedColumns>(columns, draggedColumns), - [columns, draggedColumns], - ); - - const handleDragEnd = useCallback( - (fromIndex: number, toIndex: number) => - onDragColumns(tableColumns, fromIndex, toIndex), - [tableColumns, onDragColumns], - ); - - const itemContent = useCallback( - (index: number, log: Record): JSX.Element => ( - <> - {tableColumns.map((column) => { - if (!column.render) return Empty; - - const element: ColumnTypeRender> = column.render( - log[column.key as keyof Record], - log, - index, - ); - - const elementWithChildren = element as Exclude< - ColumnTypeRender>, - ReactNode - >; - - const children = elementWithChildren.children as ReactElement; - const props = elementWithChildren.props as Record; - - return ( - - {cloneElement(children, props)} - - ); - })} - - ), - [tableColumns], - ); - - const tableHeader = useCallback( - () => ( - - {tableColumns.map((column) => { - const isDragColumn = column.key !== 'expand'; - - return ( - - {column.title as string} - - ); - })} - - ), - [tableColumns], - ); +}) => { + const isDarkMode = useIsDarkMode(); + const { isHighlighted } = useCopyLogLink(props.item.id); return ( - + + {children} + ); -} +}; + +const InfinityTable = forwardRef( + function InfinityTableView( + { isLoading, tableViewProps, infitiyTableProps }, + ref, + ): JSX.Element | null { + const { + activeLog: activeContextLog, + onSetActiveLog: handleSetActiveContextLog, + onClearActiveLog: handleClearActiveContextLog, + } = useActiveLog(); + const { + activeLog, + onSetActiveLog, + 
onClearActiveLog, + onAddToQuery, + } = useActiveLog(); + + const { onEndReached } = infitiyTableProps; + const { dataSource, columns } = useTableView({ + ...tableViewProps, + onClickExpand: onSetActiveLog, + onOpenLogsContext: handleSetActiveContextLog, + }); + const { draggedColumns, onDragColumns } = useDragColumns< + Record + >(LOCALSTORAGE.LOGS_LIST_COLUMNS); + + const tableColumns = useMemo( + () => getDraggedColumns>(columns, draggedColumns), + [columns, draggedColumns], + ); + + const handleDragEnd = useCallback( + (fromIndex: number, toIndex: number) => + onDragColumns(tableColumns, fromIndex, toIndex), + [tableColumns, onDragColumns], + ); + + const itemContent = useCallback( + (index: number, log: Record): JSX.Element => ( + <> + {tableColumns.map((column) => { + if (!column.render) return Empty; + + const element: ColumnTypeRender> = column.render( + log[column.key as keyof Record], + log, + index, + ); + + const elementWithChildren = element as Exclude< + ColumnTypeRender>, + ReactNode + >; + + const children = elementWithChildren.children as ReactElement; + const props = elementWithChildren.props as Record; + + return ( + + {cloneElement(children, props)} + + ); + })} + + ), + [tableColumns], + ); + + const tableHeader = useCallback( + () => ( + + {tableColumns.map((column) => { + const isDragColumn = column.key !== 'expand'; + + return ( + + {column.title as string} + + ); + })} + + ), + [tableColumns], + ); + + return ( + <> + + + {activeContextLog && ( + + )} + + + ); + }, +); export default InfinityTable; diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts b/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts index 024ba88a9e..8fe4fd50c5 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts @@ -22,7 +22,19 @@ export const TableCellStyled = styled.td` background-color: ${themeColors.lightBlack}; `; -export const TableRowStyled = styled.tr` +export const TableRowStyled = styled.tr<{ + $isDarkMode: boolean; + $isActiveLog: boolean; +}>` + td { + ${({ $isDarkMode, $isActiveLog }): string => + $isActiveLog + ? `background-color: ${ + $isDarkMode ? 
'rgba(255,255,255,0.1)' : 'rgba(0, 0, 0, 0.1)' + };` + : ''} + } + &:hover { ${TableCellStyled} { background-color: #1d1d1d; diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/types.ts b/frontend/src/container/LogsExplorerList/InfinityTableView/types.ts index bf0e5a654c..fb8eb23170 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/types.ts +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/types.ts @@ -1,7 +1,8 @@ import { UseTableViewProps } from 'components/Logs/TableView/types'; export type InfinityTableProps = { - tableViewProps: UseTableViewProps; + isLoading?: boolean; + tableViewProps: Omit; infitiyTableProps: { onEndReached: (index: number) => void; }; diff --git a/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts b/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts index 6862fe5ee9..ba68c67eb8 100644 --- a/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts +++ b/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts @@ -1,4 +1,3 @@ -import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC'; import { ILog } from 'types/api/logs/log'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; @@ -7,6 +6,4 @@ export type LogsExplorerListProps = { currentStagedQueryData: IBuilderQuery | null; logs: ILog[]; onEndReached: (index: number) => void; - onExpand: (log: ILog) => void; - onOpenDetailedView: (log: ILog) => void; -} & Pick; +}; diff --git a/frontend/src/container/LogsExplorerList/index.tsx b/frontend/src/container/LogsExplorerList/index.tsx index 184e4e73f0..56c997c669 100644 --- a/frontend/src/container/LogsExplorerList/index.tsx +++ b/frontend/src/container/LogsExplorerList/index.tsx @@ -8,10 +8,11 @@ import ExplorerControlPanel from 'container/ExplorerControlPanel'; import { Heading } from 'container/LogsTable/styles'; import { useOptionsMenu } from 'container/OptionsMenu'; import { contentStyle } from 'container/Trace/Search/config'; +import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import useFontFaceObserver from 'hooks/useFontObserver'; -import { memo, useCallback, useMemo } from 'react'; -import { Virtuoso } from 'react-virtuoso'; +import { memo, useCallback, useEffect, useMemo, useRef } from 'react'; +import { Virtuoso, VirtuosoHandle } from 'react-virtuoso'; // interfaces import { ILog } from 'types/api/logs/log'; import { DataSource, StringOperators } from 'types/common/queryBuilder'; @@ -29,13 +30,13 @@ function LogsExplorerList({ isLoading, currentStagedQueryData, logs, - onOpenDetailedView, onEndReached, - onExpand, - onAddToQuery, }: LogsExplorerListProps): JSX.Element { + const ref = useRef(null); const { initialDataSource } = useQueryBuilder(); + const { activeLogId } = useCopyLogLink(); + const { options, config } = useOptionsMenu({ storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS, dataSource: initialDataSource || DataSource.METRICS, @@ -43,6 +44,11 @@ function LogsExplorerList({ currentStagedQueryData?.aggregateOperator || StringOperators.NOOP, }); + const activeLogIndex = useMemo( + () => logs.findIndex(({ id }) => id === activeLogId), + [logs, activeLogId], + ); + useFontFaceObserver( [ { @@ -65,35 +71,27 @@ function LogsExplorerList({ (_: number, log: ILog): JSX.Element => { if (options.format === 'raw') { return ( - + ); } return ( - + ); }, - [ - options.format, - options.maxLines, - selectedFields, - onOpenDetailedView, - 
onAddToQuery, - onExpand, - ], + [options.format, options.maxLines, selectedFields], ); + useEffect(() => { + if (!activeLogId || activeLogIndex < 0) return; + + ref?.current?.scrollToIndex({ + index: activeLogIndex, + align: 'start', + behavior: 'smooth', + }); + }, [activeLogId, activeLogIndex]); + const renderContent = useMemo(() => { const components = isLoading ? { @@ -104,11 +102,12 @@ function LogsExplorerList({ if (options.format === 'table') { return ( (null); const [page, setPage] = useState(1); const [logs, setLogs] = useState([]); const [requestData, setRequestData] = useState(null); @@ -167,16 +156,16 @@ function LogsExplorerViews(): JSX.Element { keepPreviousData: true, enabled: !isLimit, }, + { + ...(timeRange && + activeLogId && + !logs.length && { + start: timeRange.start, + end: timeRange.end, + }), + }, ); - const handleSetActiveLog = useCallback((nextActiveLog: ILog) => { - setActiveLog(nextActiveLog); - }, []); - - const handleClearActiveLog = useCallback(() => { - setActiveLog(null); - }, []); - const getUpdateQuery = useCallback( (newPanelType: PANEL_TYPES): Query => { let query = updateAllQueriesOperators( @@ -245,51 +234,6 @@ function LogsExplorerViews(): JSX.Element { [currentStagedQueryData, orderByTimestamp], ); - const handleAddToQuery = useCallback( - (fieldKey: string, fieldValue: string, operator: string): void => { - const keysAutocomplete: BaseAutocompleteData[] = - queryClient.getQueryData>( - [QueryBuilderKeys.GET_AGGREGATE_KEYS], - { exact: false }, - )?.payload.attributeKeys || []; - - const existAutocompleteKey = chooseAutocompleteFromCustomValue( - keysAutocomplete, - fieldKey, - ); - - const currentOperator = - Object.keys(OPERATORS).find((op) => op === operator) || ''; - - const nextQuery: Query = { - ...currentQuery, - builder: { - ...currentQuery.builder, - queryData: currentQuery.builder.queryData.map((item) => ({ - ...item, - filters: { - ...item.filters, - items: [ - ...item.filters.items.filter( - (item) => item.key?.id !== existAutocompleteKey.id, - ), - { - id: uuid(), - key: existAutocompleteKey, - op: currentOperator, - value: fieldValue, - }, - ], - }, - })), - }, - }; - - redirectWithQueryBuilderData(nextQuery); - }, - [currentQuery, queryClient, redirectWithQueryBuilderData], - ); - const handleEndReached = useCallback( (index: number) => { if (isLimit) return; @@ -397,14 +341,24 @@ function LogsExplorerViews(): JSX.Element { }, [panelType, isMultipleQueries, isGroupByExist, handleChangeView]); useEffect(() => { + const currentParams = data?.params as Omit; const currentData = data?.payload.data.newResult.data.result || []; if (currentData.length > 0 && currentData[0].list) { const currentLogs: ILog[] = currentData[0].list.map((item) => ({ ...item.data, timestamp: item.timestamp, })); - setLogs((prevLogs) => [...prevLogs, ...currentLogs]); + const newLogs = [...logs, ...currentLogs]; + + setLogs(newLogs); + onTimeRangeChange({ + start: currentParams?.start, + end: timeRange?.end || currentParams?.end, + pageSize: newLogs.length, + }); } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [data]); useEffect(() => { @@ -415,14 +369,28 @@ function LogsExplorerViews(): JSX.Element { const newRequestData = getRequestData(stagedQuery, { page: 1, log: null, - pageSize, + pageSize: + timeRange?.pageSize && activeLogId ? 
timeRange?.pageSize : pageSize, }); setLogs([]); setPage(1); setRequestData(newRequestData); currentMinTimeRef.current = minTime; + + if (!activeLogId) { + onTimeRangeChange(null); + } } - }, [stagedQuery, requestData, getRequestData, pageSize, minTime]); + }, [ + stagedQuery, + requestData, + getRequestData, + pageSize, + minTime, + timeRange, + activeLogId, + onTimeRangeChange, + ]); const tabsItems: TabsProps['items'] = useMemo( () => [ @@ -441,10 +409,7 @@ function LogsExplorerViews(): JSX.Element { isLoading={isFetching} currentStagedQueryData={currentStagedQueryData} logs={logs} - onOpenDetailedView={handleSetActiveLog} onEndReached={handleEndReached} - onExpand={handleSetActiveLog} - onAddToQuery={handleAddToQuery} /> ), }, @@ -472,9 +437,7 @@ function LogsExplorerViews(): JSX.Element { isFetching, currentStagedQueryData, logs, - handleSetActiveLog, handleEndReached, - handleAddToQuery, data, isError, ], @@ -524,12 +487,6 @@ function LogsExplorerViews(): JSX.Element { onChange={handleChangeView} destroyInactiveTabPane /> - diff --git a/frontend/src/container/LogsTable/index.tsx b/frontend/src/container/LogsTable/index.tsx index 73fdaa8b4b..729b9c6b94 100644 --- a/frontend/src/container/LogsTable/index.tsx +++ b/frontend/src/container/LogsTable/index.tsx @@ -4,18 +4,13 @@ import ListLogView from 'components/Logs/ListLogView'; import RawLogView from 'components/Logs/RawLogView'; import LogsTableView from 'components/Logs/TableView'; import Spinner from 'components/Spinner'; -import ROUTES from 'constants/routes'; import { contentStyle } from 'container/Trace/Search/config'; import useFontFaceObserver from 'hooks/useFontObserver'; -import { getGeneratedFilterQueryString } from 'lib/getGeneratedFilterQueryString'; import { memo, useCallback, useMemo } from 'react'; -import { useDispatch, useSelector } from 'react-redux'; -import { useHistory } from 'react-router-dom'; +import { useSelector } from 'react-redux'; import { Virtuoso } from 'react-virtuoso'; import { AppState } from 'store/reducers'; // interfaces -import { SET_DETAILED_LOG_DATA } from 'types/actions/logs'; -import { ILog } from 'types/api/logs/log'; import { ILogsReducer } from 'types/reducer/logs'; // styles @@ -26,15 +21,10 @@ export type LogViewMode = 'raw' | 'table' | 'list'; type LogsTableProps = { viewMode: LogViewMode; linesPerRow: number; - onClickExpand: (logData: ILog) => void; }; function LogsTable(props: LogsTableProps): JSX.Element { - const { viewMode, onClickExpand, linesPerRow } = props; - - const history = useHistory(); - - const dispatch = useDispatch(); + const { viewMode, linesPerRow } = props; useFontFaceObserver( [ @@ -52,7 +42,6 @@ function LogsTable(props: LogsTableProps): JSX.Element { const { logs, fields: { selected }, - searchFilter: { queryString }, isLoading, liveTail, } = useSelector((state) => state.logs); @@ -67,75 +56,23 @@ function LogsTable(props: LogsTableProps): JSX.Element { liveTail, ]); - const handleOpenDetailedView = useCallback( - (logData: ILog) => { - dispatch({ - type: SET_DETAILED_LOG_DATA, - payload: logData, - }); - }, - [dispatch], - ); - - const handleAddToQuery = useCallback( - (fieldKey: string, fieldValue: string, operator: string) => { - const updatedQueryString = getGeneratedFilterQueryString( - fieldKey, - fieldValue, - operator, - queryString, - ); - - history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`); - }, - [history, queryString], - ); - const getItemContent = useCallback( (index: number): JSX.Element => { const log = logs[index]; if (viewMode === 'raw') { 
- return ( - - ); + return ; } - return ( - - ); + return ; }, - [ - logs, - viewMode, - selected, - linesPerRow, - onClickExpand, - handleOpenDetailedView, - handleAddToQuery, - ], + [logs, viewMode, selected, linesPerRow], ); const renderContent = useMemo(() => { if (viewMode === 'table') { return ( - + ); } @@ -148,7 +85,7 @@ function LogsTable(props: LogsTableProps): JSX.Element { /> ); - }, [getItemContent, linesPerRow, logs, onClickExpand, selected, viewMode]); + }, [getItemContent, linesPerRow, logs, selected, viewMode]); if (isLoading) { return ; diff --git a/frontend/src/hooks/logs/configs.ts b/frontend/src/hooks/logs/configs.ts new file mode 100644 index 0000000000..12dc8d9615 --- /dev/null +++ b/frontend/src/hooks/logs/configs.ts @@ -0,0 +1 @@ +export const HIGHLIGHTED_DELAY = 10000; diff --git a/frontend/src/hooks/logs/types.ts b/frontend/src/hooks/logs/types.ts new file mode 100644 index 0000000000..3776ba606a --- /dev/null +++ b/frontend/src/hooks/logs/types.ts @@ -0,0 +1,24 @@ +import { MouseEventHandler } from 'react'; +import { ILog } from 'types/api/logs/log'; + +export type LogTimeRange = { + start: number; + end: number; + pageSize: number; +}; + +export type UseCopyLogLink = { + isHighlighted: boolean; + isLogsExplorerPage: boolean; + activeLogId: string | null; + timeRange: LogTimeRange | null; + onLogCopy: MouseEventHandler; + onTimeRangeChange: (newTimeRange: LogTimeRange | null) => void; +}; + +export type UseActiveLog = { + activeLog: ILog | null; + onSetActiveLog: (log: ILog) => void; + onClearActiveLog: () => void; + onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void; +}; diff --git a/frontend/src/hooks/logs/useActiveLog.ts b/frontend/src/hooks/logs/useActiveLog.ts new file mode 100644 index 0000000000..004d7e1d92 --- /dev/null +++ b/frontend/src/hooks/logs/useActiveLog.ts @@ -0,0 +1,127 @@ +import { OPERATORS, QueryBuilderKeys } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { getGeneratedFilterQueryString } from 'lib/getGeneratedFilterQueryString'; +import { chooseAutocompleteFromCustomValue } from 'lib/newQueryBuilder/chooseAutocompleteFromCustomValue'; +import { useCallback, useMemo, useState } from 'react'; +import { useQueryClient } from 'react-query'; +import { useDispatch, useSelector } from 'react-redux'; +import { useHistory, useLocation } from 'react-router-dom'; +import { AppState } from 'store/reducers'; +import { SET_DETAILED_LOG_DATA } from 'types/actions/logs'; +import { SuccessResponse } from 'types/api'; +import { ILog } from 'types/api/logs/log'; +import { + BaseAutocompleteData, + IQueryAutocompleteResponse, +} from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { ILogsReducer } from 'types/reducer/logs'; +import { v4 as uuid } from 'uuid'; + +import { UseActiveLog } from './types'; + +export const useActiveLog = (): UseActiveLog => { + const dispatch = useDispatch(); + + const { + searchFilter: { queryString }, + } = useSelector((state) => state.logs); + const queryClient = useQueryClient(); + const { pathname } = useLocation(); + const history = useHistory(); + const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder(); + + const isLogsPage = useMemo(() => pathname === ROUTES.LOGS, [pathname]); + + const [activeLog, setActiveLog] = useState(null); + + const onSetDetailedLogData = useCallback( + (logData: ILog) => { + 
dispatch({ + type: SET_DETAILED_LOG_DATA, + payload: logData, + }); + }, + [dispatch], + ); + + const onSetActiveLog = useCallback( + (nextActiveLog: ILog): void => { + if (isLogsPage) { + onSetDetailedLogData(nextActiveLog); + } else { + setActiveLog(nextActiveLog); + } + }, + [isLogsPage, onSetDetailedLogData], + ); + + const onClearActiveLog = useCallback((): void => setActiveLog(null), []); + + const onAddToQueryExplorer = useCallback( + (fieldKey: string, fieldValue: string, operator: string): void => { + const keysAutocomplete: BaseAutocompleteData[] = + queryClient.getQueryData>( + [QueryBuilderKeys.GET_AGGREGATE_KEYS], + { exact: false }, + )?.payload.attributeKeys || []; + + const existAutocompleteKey = chooseAutocompleteFromCustomValue( + keysAutocomplete, + fieldKey, + ); + + const currentOperator = + Object.keys(OPERATORS).find((op) => op === operator) || ''; + + const nextQuery: Query = { + ...currentQuery, + builder: { + ...currentQuery.builder, + queryData: currentQuery.builder.queryData.map((item) => ({ + ...item, + filters: { + ...item.filters, + items: [ + ...item.filters.items.filter( + (item) => item.key?.id !== existAutocompleteKey.id, + ), + { + id: uuid(), + key: existAutocompleteKey, + op: currentOperator, + value: fieldValue, + }, + ], + }, + })), + }, + }; + + redirectWithQueryBuilderData(nextQuery); + }, + [currentQuery, queryClient, redirectWithQueryBuilderData], + ); + + const onAddToQueryLogs = useCallback( + (fieldKey: string, fieldValue: string, operator: string) => { + const updatedQueryString = getGeneratedFilterQueryString( + fieldKey, + fieldValue, + operator, + queryString, + ); + + history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`); + }, + [history, queryString], + ); + + return { + activeLog, + onSetActiveLog, + onClearActiveLog, + onAddToQuery: isLogsPage ? 
onAddToQueryLogs : onAddToQueryExplorer, + }; +}; diff --git a/frontend/src/hooks/logs/useCopyLogLink.ts b/frontend/src/hooks/logs/useCopyLogLink.ts new file mode 100644 index 0000000000..81c768618f --- /dev/null +++ b/frontend/src/hooks/logs/useCopyLogLink.ts @@ -0,0 +1,85 @@ +import { QueryParams } from 'constants/query'; +import ROUTES from 'constants/routes'; +import { useNotifications } from 'hooks/useNotifications'; +import useUrlQuery from 'hooks/useUrlQuery'; +import useUrlQueryData from 'hooks/useUrlQueryData'; +import { + MouseEventHandler, + useCallback, + useEffect, + useMemo, + useState, +} from 'react'; +import { useLocation } from 'react-router-dom'; +import { useCopyToClipboard } from 'react-use'; + +import { HIGHLIGHTED_DELAY } from './configs'; +import { LogTimeRange, UseCopyLogLink } from './types'; + +export const useCopyLogLink = (logId?: string): UseCopyLogLink => { + const urlQuery = useUrlQuery(); + const { pathname } = useLocation(); + const [, setCopy] = useCopyToClipboard(); + const { notifications } = useNotifications(); + + const { + queryData: timeRange, + redirectWithQuery: onTimeRangeChange, + } = useUrlQueryData(QueryParams.timeRange, null); + + const { queryData: activeLogId } = useUrlQueryData( + QueryParams.activeLogId, + null, + ); + + const isActiveLog = useMemo(() => activeLogId === logId, [activeLogId, logId]); + const [isHighlighted, setIsHighlighted] = useState(isActiveLog); + + const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [ + pathname, + ]); + + const onLogCopy: MouseEventHandler = useCallback( + (event) => { + if (!logId) return; + + event.preventDefault(); + event.stopPropagation(); + + const range = JSON.stringify(timeRange); + + urlQuery.delete(QueryParams.activeLogId); + urlQuery.delete(QueryParams.timeRange); + urlQuery.set(QueryParams.activeLogId, `"${logId}"`); + urlQuery.set(QueryParams.timeRange, range); + + const link = `${window.location.origin}${pathname}?${urlQuery.toString()}`; + + setCopy(link); + notifications.success({ + message: 'Copied to clipboard', + }); + }, + [logId, notifications, timeRange, urlQuery, pathname, setCopy], + ); + + useEffect(() => { + if (!isActiveLog) return; + + const timer = setTimeout(() => setIsHighlighted(false), HIGHLIGHTED_DELAY); + + // eslint-disable-next-line consistent-return + return (): void => { + clearTimeout(timer); + }; + }, [isActiveLog]); + + return { + isHighlighted, + isLogsExplorerPage, + activeLogId, + timeRange, + onLogCopy, + onTimeRangeChange, + }; +}; diff --git a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts index 408e598727..85a588d59e 100644 --- a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts +++ b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts @@ -16,6 +16,7 @@ export const useGetExplorerQueryRange = ( requestData: Query | null, panelType: PANEL_TYPES | null, options?: UseQueryOptions, Error>, + params?: Record, ): UseQueryResult, Error> => { const { isEnabledQuery } = useQueryBuilder(); const { selectedTime: globalSelectedInterval, minTime, maxTime } = useSelector< @@ -46,6 +47,7 @@ export const useGetExplorerQueryRange = ( selectedTime: 'GLOBAL_TIME', globalSelectedInterval, query: requestData || initialQueriesMap.metrics, + params, }, { ...options, diff --git a/frontend/src/pages/Logs/index.tsx b/frontend/src/pages/Logs/index.tsx index 4810d3e00b..f9ebd04bb7 100644 --- a/frontend/src/pages/Logs/index.tsx +++ 
b/frontend/src/pages/Logs/index.tsx @@ -14,8 +14,7 @@ import { useLocation } from 'react-router-dom'; import { Dispatch } from 'redux'; import { AppState } from 'store/reducers'; import AppActions from 'types/actions'; -import { SET_DETAILED_LOG_DATA, SET_LOGS_ORDER } from 'types/actions/logs'; -import { ILog } from 'types/api/logs/log'; +import { SET_LOGS_ORDER } from 'types/actions/logs'; import { ILogsReducer } from 'types/reducer/logs'; import { @@ -33,16 +32,6 @@ function Logs(): JSX.Element { const { order } = useSelector((store) => store.logs); const location = useLocation(); - const showExpandedLog = useCallback( - (logData: ILog) => { - dispatch({ - type: SET_DETAILED_LOG_DATA, - payload: logData, - }); - }, - [dispatch], - ); - const { viewModeOptionList, viewModeOption, @@ -141,11 +130,7 @@ function Logs(): JSX.Element { - + diff --git a/frontend/src/types/api/index.ts b/frontend/src/types/api/index.ts index fbd8db5bf1..c2f4d44ca0 100644 --- a/frontend/src/types/api/index.ts +++ b/frontend/src/types/api/index.ts @@ -7,9 +7,10 @@ export interface ErrorResponse { message: null; } -export interface SuccessResponse { +export interface SuccessResponse { statusCode: SuccessStatusCode; message: string; payload: T; error: null; + params?: P; } diff --git a/frontend/src/types/api/logs/log.ts b/frontend/src/types/api/logs/log.ts index a7c00885ff..af498be262 100644 --- a/frontend/src/types/api/logs/log.ts +++ b/frontend/src/types/api/logs/log.ts @@ -8,7 +8,7 @@ export interface ILog { severityText: string; severityNumber: number; body: string; - resourcesString: Record; + resources_string: Record; attributesString: Record; attributesInt: Record; attributesFloat: Record; diff --git a/frontend/src/utils/getAlphaColor.ts b/frontend/src/utils/getAlphaColor.ts new file mode 100644 index 0000000000..b57417a1ae --- /dev/null +++ b/frontend/src/utils/getAlphaColor.ts @@ -0,0 +1,14 @@ +import colorAlpha from 'color-alpha'; + +type GetAlphaColor = Record<0 | 10 | 25 | 45 | 75 | 100, string>; + +const getAlphaColor = (color: string): GetAlphaColor => ({ + 0: colorAlpha(color, 0), + 10: colorAlpha(color, 0.1), + 25: colorAlpha(color, 0.25), + 45: colorAlpha(color, 0.45), + 75: colorAlpha(color, 0.75), + 100: colorAlpha(color, 1), +}); + +export default getAlphaColor; diff --git a/frontend/yarn.lock b/frontend/yarn.lock index de53c86183..f1f6cfbe71 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -4226,6 +4226,13 @@ collection-visit@^1.0.0: map-visit "^1.0.0" object-visit "^1.0.0" +color-alpha@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-alpha/-/color-alpha-1.1.3.tgz#71250189e9f02bba8261a94d5e7d5f5606d1749a" + integrity sha512-krPYBO1RSO5LH4AGb/b6z70O1Ip2o0F0+0cVFN5FN99jfQtZFT08rQyg+9oOBNJYAn3SRwJIFC8jUEOKz7PisA== + dependencies: + color-parse "^1.4.1" + color-convert@^1.9.0: version "1.9.3" resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" @@ -4250,6 +4257,13 @@ color-name@^1.0.0, color-name@~1.1.4: resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-parse@^1.4.1: + version "1.4.2" + resolved "https://registry.yarnpkg.com/color-parse/-/color-parse-1.4.2.tgz#78651f5d34df1a57f997643d86f7f87268ad4eb5" + integrity sha512-RI7s49/8yqDj3fECFZjUI1Yi0z/Gq1py43oNJivAIIDSyJiOZLfYCRQEgn8HEVAj++PcRe8AnL2XF0fRJ3BTnA== + dependencies: + color-name "^1.0.0" + color-string@^1.9.0: version "1.9.1" resolved 
"https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz" From fabdf87ed1beecf794a5c7e78a6742be2daac7c7 Mon Sep 17 00:00:00 2001 From: dnazarenkoo <134951516+dnazarenkoo@users.noreply.github.com> Date: Mon, 31 Jul 2023 10:57:07 +0300 Subject: [PATCH 02/19] fix: add highlight color for all views (logs context) (#3233) * fix: add higlight color for all of views * fix: resolve comments --- .../src/components/Logs/ListLogView/index.tsx | 4 +--- .../src/components/Logs/ListLogView/styles.ts | 13 ++--------- .../src/components/Logs/RawLogView/styles.ts | 22 ++++++------------- frontend/src/constants/theme.ts | 1 + .../InfinityTableView/index.tsx | 3 --- .../InfinityTableView/styles.ts | 16 ++++++-------- frontend/src/utils/logs.ts | 22 +++++++++++++++++++ 7 files changed, 40 insertions(+), 41 deletions(-) create mode 100644 frontend/src/utils/logs.ts diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index b56614edcf..c5aa7a184d 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -13,7 +13,6 @@ import dayjs from 'dayjs'; import dompurify from 'dompurify'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; -import { useIsDarkMode } from 'hooks/useDarkMode'; import { useNotifications } from 'hooks/useNotifications'; // utils import { FlatLogData } from 'lib/logs/flatLogData'; @@ -104,7 +103,6 @@ function ListLogView({ }: ListLogViewProps): JSX.Element { const flattenLogData = useMemo(() => FlatLogData(logData), [logData]); - const isDarkMode = useIsDarkMode(); const [, setCopy] = useCopyToClipboard(); const { notifications } = useNotifications(); const { isHighlighted, isLogsExplorerPage, onLogCopy } = useCopyLogLink( @@ -151,7 +149,7 @@ function ListLogView({ ); return ( - +
<> diff --git a/frontend/src/components/Logs/ListLogView/styles.ts b/frontend/src/components/Logs/ListLogView/styles.ts index 452bb653fa..0db1baafe5 100644 --- a/frontend/src/components/Logs/ListLogView/styles.ts +++ b/frontend/src/components/Logs/ListLogView/styles.ts @@ -1,10 +1,8 @@ import { Card, Typography } from 'antd'; -import { themeColors } from 'constants/theme'; import styled from 'styled-components'; -import getAlphaColor from 'utils/getAlphaColor'; +import { getActiveLogBackground } from 'utils/logs'; export const Container = styled(Card)<{ - $isDarkMode: boolean; $isActiveLog: boolean; }>` width: 100% !important; @@ -13,14 +11,7 @@ export const Container = styled(Card)<{ padding: 0.3rem 0.6rem; } - ${({ $isDarkMode, $isActiveLog }): string => - $isActiveLog - ? `background-color: ${ - $isDarkMode - ? getAlphaColor(themeColors.white)[10] - : getAlphaColor(themeColors.black)[10] - };` - : ''} + ${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)} `; export const Text = styled(Typography.Text)` diff --git a/frontend/src/components/Logs/RawLogView/styles.ts b/frontend/src/components/Logs/RawLogView/styles.ts index 162ee211a4..b4be783a2a 100644 --- a/frontend/src/components/Logs/RawLogView/styles.ts +++ b/frontend/src/components/Logs/RawLogView/styles.ts @@ -1,8 +1,7 @@ -import { blue, orange } from '@ant-design/colors'; +import { blue } from '@ant-design/colors'; import { Col, Row, Space } from 'antd'; -import { themeColors } from 'constants/theme'; import styled from 'styled-components'; -import getAlphaColor from 'utils/getAlphaColor'; +import { getActiveLogBackground, getDefaultLogBackground } from 'utils/logs'; export const RawLogViewContainer = styled(Row)<{ $isDarkMode: boolean; @@ -17,19 +16,12 @@ export const RawLogViewContainer = styled(Row)<{ transition: background-color 0.2s ease-in; - ${({ $isActiveLog }): string => - $isActiveLog ? `background-color: ${orange[3]};` : ''} + ${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)} - ${({ $isReadOnly, $isDarkMode }): string => - !$isReadOnly - ? `&:hover { - background-color: ${ - $isDarkMode - ? getAlphaColor(themeColors.white)[10] - : getAlphaColor(themeColors.black)[10] - }; - }` - : ''} + ${({ $isReadOnly, $isDarkMode, $isActiveLog }): string => + $isActiveLog + ? 
getActiveLogBackground() + : getDefaultLogBackground($isReadOnly, $isDarkMode)} `; export const ExpandIconWrapper = styled(Col)` diff --git a/frontend/src/constants/theme.ts b/frontend/src/constants/theme.ts index 33e4881409..354ea190a7 100644 --- a/frontend/src/constants/theme.ts +++ b/frontend/src/constants/theme.ts @@ -44,6 +44,7 @@ const themeColors = { lightWhite: '#ffffffd9', borderLightGrey: '#d9d9d9', borderDarkGrey: '#424242', + bckgGrey: '#1d1d1d', }; export { themeColors }; diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx index 13101f38d0..b5a30e1568 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx @@ -5,7 +5,6 @@ import { LOCALSTORAGE } from 'constants/localStorage'; import LogsExplorerContext from 'container/LogsExplorerContext'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; -import { useIsDarkMode } from 'hooks/useDarkMode'; import useDragColumns from 'hooks/useDragColumns'; import { getDraggedColumns } from 'hooks/useDragColumns/utils'; import { @@ -38,12 +37,10 @@ const CustomTableRow: TableComponents['TableRow'] = ({ context, ...props }) => { - const isDarkMode = useIsDarkMode(); const { isHighlighted } = useCopyLogLink(props.item.id); return ( ` td { - ${({ $isDarkMode, $isActiveLog }): string => - $isActiveLog - ? `background-color: ${ - $isDarkMode ? 'rgba(255,255,255,0.1)' : 'rgba(0, 0, 0, 0.1)' - };` - : ''} + ${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)} } &:hover { ${TableCellStyled} { - background-color: #1d1d1d; + ${({ $isActiveLog }): string => + $isActiveLog + ? getActiveLogBackground() + : `background-color: ${themeColors.bckgGrey};`} } } `; @@ -45,7 +43,7 @@ export const TableRowStyled = styled.tr<{ export const TableHeaderCellStyled = styled.th` padding: 0.5rem; border-inline-end: 1px solid rgba(253, 253, 253, 0.12); - background-color: #1d1d1d; + background-color: ${themeColors.bckgGrey}; ${({ isDragColumn }): string => (isDragColumn ? 'cursor: col-resize;' : '')} &:first-child { diff --git a/frontend/src/utils/logs.ts b/frontend/src/utils/logs.ts new file mode 100644 index 0000000000..66a6ba28a6 --- /dev/null +++ b/frontend/src/utils/logs.ts @@ -0,0 +1,22 @@ +import { orange } from '@ant-design/colors'; +import { themeColors } from 'constants/theme'; +import getAlphaColor from 'utils/getAlphaColor'; + +export const getDefaultLogBackground = ( + isReadOnly: boolean, + isDarkMode: boolean, +): string => { + if (isReadOnly) return ''; + return `&:hover { + background-color: ${ + isDarkMode + ? 
getAlphaColor(themeColors.white)[10] + : getAlphaColor(themeColors.black)[10] + }; + }`; +}; + +export const getActiveLogBackground = (isActiveLog = true): string => { + if (!isActiveLog) return ''; + return `background-color: ${orange[3]};`; +}; From 4397c5349405ffaf55d6e518c06af927facf1627 Mon Sep 17 00:00:00 2001 From: vasukapil Date: Mon, 31 Jul 2023 18:43:25 +0530 Subject: [PATCH 03/19] fix : Hide limit param in list view of traces #3173 (#3187) * fix : Hide limit param in list view of traces #3173 * Hide Limit blue text also removed * changes made such that LIMIT is only hidden for Traces List and not for Logs * code refactored and filterConfigs used * minor change * unnecessary filter code removed * lint issue fixed and develop branch merged * minor change * eslint fix --------- Co-authored-by: Palash Gupta Co-authored-by: Vishal Sharma --- .../QueryBuilder/components/Query/Query.tsx | 25 +++++++++++-------- .../TracesExplorer/QuerySection/index.tsx | 1 + .../hooks/queryBuilder/useQueryOperations.ts | 1 - 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.tsx b/frontend/src/container/QueryBuilder/components/Query/Query.tsx index 7f8b4c1cdc..9f4fca9b12 100644 --- a/frontend/src/container/QueryBuilder/components/Query/Query.tsx +++ b/frontend/src/container/QueryBuilder/components/Query/Query.tsx @@ -210,16 +210,19 @@ export const Query = memo(function Query({ default: { return ( <> - - - - - - - - - - + {!filterConfigs?.limit?.isHidden && ( + + + + + + + + + + + )} + {!filterConfigs?.having?.isHidden && ( @@ -232,7 +235,6 @@ export const Query = memo(function Query({ )} - @@ -251,6 +253,7 @@ export const Query = memo(function Query({ panelType, isMetricsDataSource, query, + filterConfigs?.limit?.isHidden, filterConfigs?.having?.isHidden, handleChangeLimit, handleChangeHavingFilter, diff --git a/frontend/src/container/TracesExplorer/QuerySection/index.tsx b/frontend/src/container/TracesExplorer/QuerySection/index.tsx index 80f2925b37..279f57b5df 100644 --- a/frontend/src/container/TracesExplorer/QuerySection/index.tsx +++ b/frontend/src/container/TracesExplorer/QuerySection/index.tsx @@ -20,6 +20,7 @@ function QuerySection(): JSX.Element { const isList = panelTypes === PANEL_TYPES.LIST; const config: QueryBuilderProps['filterConfigs'] = { stepInterval: { isHidden: false, isDisabled: true }, + limit: { isHidden: isList, isDisabled: true }, having: { isHidden: isList, isDisabled: true }, }; diff --git a/frontend/src/hooks/queryBuilder/useQueryOperations.ts b/frontend/src/hooks/queryBuilder/useQueryOperations.ts index 8e0251d0b5..0d2f8f0cf2 100644 --- a/frontend/src/hooks/queryBuilder/useQueryOperations.ts +++ b/frontend/src/hooks/queryBuilder/useQueryOperations.ts @@ -152,7 +152,6 @@ export const useQueryOperations: UseQueryOperations = ({ () => query.dataSource === DataSource.METRICS, [query.dataSource], ); - const isTracePanelType = useMemo(() => panelType === PANEL_TYPES.TRACE, [ panelType, ]); From 1a3e46cecd0bc5bd7e32d3830d81e833f59c7be3 Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Mon, 31 Jul 2023 21:34:42 +0530 Subject: [PATCH 04/19] feat: integrate pipelines API (#2457) * chore: integrate pipelines API * fix: limit support integrated in pipelines * fix: interface to string * fix: json parser and allow deleting all pipelines * fix: output modified if operators are disabled * fix: validation updated for operators * fix: expression check added * fix: regex expression check added * fix: remove operator 
validation updated * fix: tests updated for pipeline builder * fix: fix error messages in http handler * fix: dont return payload if there is an error * fix: extracting userId from context moved to auth package * fix: api errors moved to http handler * fix: get version logic updated * fix: deployment result message updated * fix: pipeline builder edgecase fixed and tests updated * fix: get failing postablePipeline tests to pass --------- Co-authored-by: Vishal Sharma Co-authored-by: Raj --- ee/query-service/app/api/api.go | 34 +-- ee/query-service/app/server.go | 24 +- go.mod | 1 + go.sum | 2 + pkg/query-service/agentConf/db.go | 15 +- pkg/query-service/agentConf/manager.go | 4 +- pkg/query-service/app/http_handler.go | 154 ++++++++++- .../app/logparsingpipeline/controller.go | 132 ++++++++++ .../app/logparsingpipeline/db.go | 198 +++++++++++++++ .../app/logparsingpipeline/pipelineBuilder.go | 77 ++++++ .../pipelineBuilder_test.go | 175 +++++++++++++ .../logparsingpipeline/postablePipeline.go | 182 +++++++++++++ .../postablePipeline_test.go | 239 ++++++++++++++++++ .../app/logparsingpipeline/sqlite/init.go | 35 +++ pkg/query-service/app/server.go | 22 +- pkg/query-service/auth/jwt.go | 19 ++ 16 files changed, 1269 insertions(+), 44 deletions(-) create mode 100644 pkg/query-service/app/logparsingpipeline/controller.go create mode 100644 pkg/query-service/app/logparsingpipeline/db.go create mode 100644 pkg/query-service/app/logparsingpipeline/pipelineBuilder.go create mode 100644 pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go create mode 100644 pkg/query-service/app/logparsingpipeline/postablePipeline.go create mode 100644 pkg/query-service/app/logparsingpipeline/postablePipeline_test.go create mode 100644 pkg/query-service/app/logparsingpipeline/sqlite/init.go diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index c9d839fd39..89a9ed24cb 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -8,6 +8,7 @@ import ( "go.signoz.io/signoz/ee/query-service/interfaces" "go.signoz.io/signoz/ee/query-service/license" baseapp "go.signoz.io/signoz/pkg/query-service/app" + "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" baseint "go.signoz.io/signoz/pkg/query-service/interfaces" basemodel "go.signoz.io/signoz/pkg/query-service/model" rules "go.signoz.io/signoz/pkg/query-service/rules" @@ -15,14 +16,15 @@ import ( ) type APIHandlerOptions struct { - DataConnector interfaces.DataConnector - SkipConfig *basemodel.SkipConfig - PreferDelta bool - PreferSpanMetrics bool - AppDao dao.ModelDao - RulesManager *rules.Manager - FeatureFlags baseint.FeatureLookup - LicenseManager *license.Manager + DataConnector interfaces.DataConnector + SkipConfig *basemodel.SkipConfig + PreferDelta bool + PreferSpanMetrics bool + AppDao dao.ModelDao + RulesManager *rules.Manager + FeatureFlags baseint.FeatureLookup + LicenseManager *license.Manager + LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController } type APIHandler struct { @@ -34,13 +36,15 @@ type APIHandler struct { func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) { baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{ - Reader: opts.DataConnector, - SkipConfig: opts.SkipConfig, - PerferDelta: opts.PreferDelta, - PreferSpanMetrics: opts.PreferSpanMetrics, - AppDao: opts.AppDao, - RuleManager: opts.RulesManager, - FeatureFlags: opts.FeatureFlags}) + Reader: opts.DataConnector, + SkipConfig: opts.SkipConfig, + PerferDelta: 
opts.PreferDelta, + PreferSpanMetrics: opts.PreferSpanMetrics, + AppDao: opts.AppDao, + RuleManager: opts.RulesManager, + FeatureFlags: opts.FeatureFlags, + LogsParsingPipelineController: opts.LogsParsingPipelineController, + }) if err != nil { return nil, err diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index a74738eef5..5004d36ab1 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -31,6 +31,7 @@ import ( baseapp "go.signoz.io/signoz/pkg/query-service/app" "go.signoz.io/signoz/pkg/query-service/app/dashboards" baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer" + "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" "go.signoz.io/signoz/pkg/query-service/app/opamp" opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model" baseauth "go.signoz.io/signoz/pkg/query-service/auth" @@ -157,6 +158,12 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { return nil, err } + // ingestion pipelines manager + logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite") + if err != nil { + return nil, err + } + // start the usagemanager usageManager, err := usage.New("sqlite", localDB, lm.GetRepo(), reader.GetConn()) if err != nil { @@ -170,14 +177,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { telemetry.GetInstance().SetReader(reader) apiOpts := api.APIHandlerOptions{ - DataConnector: reader, - SkipConfig: skipConfig, - PreferDelta: serverOptions.PreferDelta, - PreferSpanMetrics: serverOptions.PreferSpanMetrics, - AppDao: modelDao, - RulesManager: rm, - FeatureFlags: lm, - LicenseManager: lm, + DataConnector: reader, + SkipConfig: skipConfig, + PreferDelta: serverOptions.PreferDelta, + PreferSpanMetrics: serverOptions.PreferSpanMetrics, + AppDao: modelDao, + RulesManager: rm, + FeatureFlags: lm, + LicenseManager: lm, + LogsParsingPipelineController: logParsingPipelineController, } apiHandler, err := api.NewAPIHandler(apiOpts) diff --git a/go.mod b/go.mod index 753e8fd613..59ef611a4c 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.18 require ( github.com/ClickHouse/clickhouse-go/v2 v2.5.1 github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb + github.com/antonmedv/expr v1.12.4 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230523034029-2b7ff773052c github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230517094211-cd3f3f0aea85 github.com/coreos/go-oidc/v3 v3.4.0 diff --git a/go.sum b/go.sum index 5e49f85e71..e9f289de7e 100644 --- a/go.sum +++ b/go.sum @@ -95,6 +95,8 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8V github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antonmedv/expr v1.12.4 h1:YRkeF7r0cejMS47bDYe3Jyes7L9t1AhpunC+Duq+R9k= +github.com/antonmedv/expr v1.12.4/go.mod h1:FPC8iWArxls7axbVLsW+kpg1mz29A1b2M6jt+hZfDkU= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-metrics v0.4.0 h1:yCQqn7dwca4ITXb+CbubHmedzaQYHhNhrEXLYUeEe8Q= diff --git a/pkg/query-service/agentConf/db.go b/pkg/query-service/agentConf/db.go index c3a84f6b3a..3deb592778 100644 
--- a/pkg/query-service/agentConf/db.go +++ b/pkg/query-service/agentConf/db.go @@ -104,7 +104,8 @@ func (r *Repo) GetLatestVersion(ctx context.Context, typ ElementTypeDef) (*Confi FROM agent_config_versions WHERE element_type=$2)`, typ, typ) if err != nil { - zap.S().Error("failed get latest config version for element:", typ, err) + // intially the table will be empty + return nil, err } return &c, err } @@ -115,8 +116,9 @@ func (r *Repo) insertConfig(ctx context.Context, userId string, c *ConfigVersion return fmt.Errorf("element type is required for creating agent config version") } - if len(elements) == 0 { - zap.S().Error("insert config called with no elements", c.ElementType) + // allowing empty elements for logs - use case is deleting all pipelines + if len(elements) == 0 && c.ElementType != ElementTypeLogPipelines { + zap.S().Error("insert config called with no elements ", c.ElementType) return fmt.Errorf("config must have atleast one element") } @@ -136,7 +138,12 @@ func (r *Repo) insertConfig(ctx context.Context, userId string, c *ConfigVersion } } - c.Version = updateVersion(configVersion.Version) + if configVersion != nil { + c.Version = updateVersion(configVersion.Version) + } else { + // first version + c.Version = 1 + } defer func() { if fnerr != nil { diff --git a/pkg/query-service/agentConf/manager.go b/pkg/query-service/agentConf/manager.go index b26d382070..e0b32ffc0c 100644 --- a/pkg/query-service/agentConf/manager.go +++ b/pkg/query-service/agentConf/manager.go @@ -169,7 +169,7 @@ func (m *Manager) OnConfigUpdate(agentId string, hash string, err error) { status := string(Deployed) - message := "deploy successful" + message := "Deployment was successful" defer func() { zap.S().Info(status, zap.String("agentId", agentId), zap.String("agentResponse", message)) @@ -225,6 +225,6 @@ func UpsertLogParsingProcessor(ctx context.Context, version int, rawPipelineData return err } - m.updateDeployStatus(ctx, ElementTypeLogPipelines, version, string(DeployInitiated), "Deployment started", configHash, string(rawPipelineData)) + m.updateDeployStatus(ctx, ElementTypeLogPipelines, version, string(DeployInitiated), "Deployment has started", configHash, string(rawPipelineData)) return nil } diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 5008d6edae..46fcd00676 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -3,6 +3,7 @@ package app import ( "bytes" "context" + "database/sql" "encoding/json" "errors" "fmt" @@ -19,6 +20,7 @@ import ( jsoniter "github.com/json-iterator/go" _ "github.com/mattn/go-sqlite3" "github.com/prometheus/prometheus/promql" + "go.signoz.io/signoz/pkg/query-service/agentConf" "go.signoz.io/signoz/pkg/query-service/app/dashboards" "go.signoz.io/signoz/pkg/query-service/app/explorer" "go.signoz.io/signoz/pkg/query-service/app/logs" @@ -33,6 +35,7 @@ import ( v3 "go.signoz.io/signoz/pkg/query-service/model/v3" querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate" + "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" "go.signoz.io/signoz/pkg/query-service/dao" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" signozio "go.signoz.io/signoz/pkg/query-service/integrations/signozio" @@ -74,6 +77,8 @@ type APIHandler struct { preferDelta bool preferSpanMetrics bool + LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController + // SetupCompleted indicates if SigNoz is ready for general use. 
// at the moment, we mark the app ready when the first user // is registers. @@ -97,6 +102,9 @@ type APIHandlerOpts struct { // feature flags querier FeatureFlags interfaces.FeatureLookup + + // Log parsing pipelines + LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController } // NewAPIHandler returns an APIHandler @@ -108,14 +116,15 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { } aH := &APIHandler{ - reader: opts.Reader, - appDao: opts.AppDao, - skipConfig: opts.SkipConfig, - preferDelta: opts.PerferDelta, - preferSpanMetrics: opts.PreferSpanMetrics, - alertManager: alertManager, - ruleManager: opts.RuleManager, - featureFlags: opts.FeatureFlags, + reader: opts.Reader, + appDao: opts.AppDao, + skipConfig: opts.SkipConfig, + preferDelta: opts.PerferDelta, + preferSpanMetrics: opts.PreferSpanMetrics, + alertManager: alertManager, + ruleManager: opts.RuleManager, + featureFlags: opts.FeatureFlags, + LogsParsingPipelineController: opts.LogsParsingPipelineController, } builderOpts := queryBuilder.QueryBuilderOptions{ @@ -2240,6 +2249,10 @@ func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *AuthMiddleware) subRouter.HandleFunc("/fields", am.ViewAccess(aH.logFields)).Methods(http.MethodGet) subRouter.HandleFunc("/fields", am.EditAccess(aH.logFieldUpdate)).Methods(http.MethodPost) subRouter.HandleFunc("/aggregate", am.ViewAccess(aH.logAggregate)).Methods(http.MethodGet) + + // log pipelines + subRouter.HandleFunc("/pipelines/{version}", am.ViewAccess(aH.listLogsPipelinesHandler)).Methods(http.MethodGet) + subRouter.HandleFunc("/pipelines", am.EditAccess(aH.createLogsPipeline)).Methods(http.MethodPost) } func (aH *APIHandler) logFields(w http.ResponseWriter, r *http.Request) { @@ -2351,6 +2364,131 @@ func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) { aH.WriteJSON(w, r, res) } +const logPipelines = "log_pipelines" + +func parseAgentConfigVersion(r *http.Request) (int, *model.ApiError) { + versionString := mux.Vars(r)["version"] + + if versionString == "latest" { + return -1, nil + } + + version64, err := strconv.ParseInt(versionString, 0, 8) + + if err != nil { + return 0, model.BadRequestStr("invalid version number") + } + + if version64 <= 0 { + return 0, model.BadRequestStr("invalid version number") + } + + return int(version64), nil +} + +func (ah *APIHandler) listLogsPipelinesHandler(w http.ResponseWriter, r *http.Request) { + + version, err := parseAgentConfigVersion(r) + if err != nil { + RespondError(w, err, nil) + return + } + + var payload *logparsingpipeline.PipelinesResponse + var apierr *model.ApiError + + if version != -1 { + payload, apierr = ah.listLogsPipelinesByVersion(context.Background(), version) + } else { + payload, apierr = ah.listLogsPipelines(context.Background()) + } + + if apierr != nil { + RespondError(w, apierr, payload) + return + } + ah.Respond(w, payload) +} + +// listLogsPipelines lists logs piplines for latest version +func (ah *APIHandler) listLogsPipelines(ctx context.Context) (*logparsingpipeline.PipelinesResponse, *model.ApiError) { + // get lateset agent config + lastestConfig, err := agentConf.GetLatestVersion(ctx, logPipelines) + if err != nil { + if err != sql.ErrNoRows { + return nil, model.InternalError(fmt.Errorf("failed to get latest agent config version with error %w", err)) + } else { + return nil, nil + } + } + + payload, err := ah.LogsParsingPipelineController.GetPipelinesByVersion(ctx, lastestConfig.Version) + if err != nil { + return nil, 
model.InternalError(fmt.Errorf("failed to get pipelines with error %w", err)) + } + + // todo(Nitya): make a new API for history pagination + limit := 10 + history, err := agentConf.GetConfigHistory(ctx, logPipelines, limit) + if err != nil { + return nil, model.InternalError(fmt.Errorf("failed to get config history with error %w", err)) + } + payload.History = history + return payload, nil +} + +// listLogsPipelinesByVersion lists pipelines along with config version history +func (ah *APIHandler) listLogsPipelinesByVersion(ctx context.Context, version int) (*logparsingpipeline.PipelinesResponse, *model.ApiError) { + payload, err := ah.LogsParsingPipelineController.GetPipelinesByVersion(ctx, version) + if err != nil { + return nil, model.InternalError(err) + } + + // todo(Nitya): make a new API for history pagination + limit := 10 + history, err := agentConf.GetConfigHistory(ctx, logPipelines, limit) + if err != nil { + return nil, model.InternalError(fmt.Errorf("failed to retrieve agent config history with error %w", err)) + } + + payload.History = history + return payload, nil +} + +func (ah *APIHandler) createLogsPipeline(w http.ResponseWriter, r *http.Request) { + + req := logparsingpipeline.PostablePipelines{} + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + ctx := auth.AttachJwtToContext(context.Background(), r) + + createPipeline := func(ctx context.Context, postable []logparsingpipeline.PostablePipeline) (*logparsingpipeline.PipelinesResponse, error) { + if len(postable) == 0 { + zap.S().Warnf("found no pipelines in the http request, this will delete all the pipelines") + } + + for _, p := range postable { + if err := p.IsValid(); err != nil { + return nil, model.BadRequestStr(err.Error()) + } + } + + return ah.LogsParsingPipelineController.ApplyPipelines(ctx, postable) + } + + res, err := createPipeline(ctx, req.Pipelines) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + ah.Respond(w, res) +} + func (aH *APIHandler) getExplorerQueries(w http.ResponseWriter, r *http.Request) { queries, err := explorer.GetQueries() if err != nil { diff --git a/pkg/query-service/app/logparsingpipeline/controller.go b/pkg/query-service/app/logparsingpipeline/controller.go new file mode 100644 index 0000000000..3a1fb9e160 --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/controller.go @@ -0,0 +1,132 @@ +package logparsingpipeline + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/jmoiron/sqlx" + "go.signoz.io/signoz/pkg/query-service/agentConf" + "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// Controller takes care of deployment cycle of log parsing pipelines. 
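+// It embeds the pipelines Repo for storage and relies on agentConf to version and roll out the generated collector config.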
+type LogParsingPipelineController struct { + Repo +} + +func NewLogParsingPipelinesController(db *sqlx.DB, engine string) (*LogParsingPipelineController, error) { + repo := NewRepo(db) + err := repo.InitDB(engine) + return &LogParsingPipelineController{Repo: repo}, err +} + +// PipelinesResponse is used to prepare http response for pipelines config related requests +type PipelinesResponse struct { + *agentConf.ConfigVersion + + Pipelines []model.Pipeline `json:"pipelines"` + History []agentConf.ConfigVersion `json:"history"` +} + +// ApplyPipelines stores new or changed pipelines and initiates a new config update +func (ic *LogParsingPipelineController) ApplyPipelines(ctx context.Context, postable []PostablePipeline) (*PipelinesResponse, error) { + // get user id from context + userId, err := auth.ExtractUserIdFromContext(ctx) + if err != nil { + return nil, model.InternalError(fmt.Errorf("failed to get userId from context %v", err)) + } + + var pipelines []model.Pipeline + + // scan through postable pipelines, to select the existing pipelines or insert missing ones + for _, r := range postable { + + // note: we process only new and changed pipelines here, deleted pipelines are not expected + // from client. if user deletes a pipelines, the client should not send that pipelines in the update. + // in effect, the new config version will not have that pipelines. + + if r.Id == "" { + // looks like a new or changed pipeline, store it first + inserted, err := ic.insertPipeline(ctx, &r) + if err != nil || inserted == nil { + zap.S().Errorf("failed to insert edited pipeline %s", err.Error()) + return nil, fmt.Errorf("failed to insert edited pipeline") + } else { + pipelines = append(pipelines, *inserted) + } + } else { + selected, err := ic.GetPipeline(ctx, r.Id) + if err != nil || selected == nil { + zap.S().Errorf("failed to find edited pipeline %s", err.Error()) + return nil, fmt.Errorf("failed to find pipeline, invalid request") + } + pipelines = append(pipelines, *selected) + } + + } + + // prepare filter config (processor) from the pipelines + filterConfig, names, err := PreparePipelineProcessor(pipelines) + if err != nil { + zap.S().Errorf("failed to generate processor config from pipelines for deployment %s", err.Error()) + return nil, err + } + + if !agentConf.Ready() { + return nil, fmt.Errorf("agent updater unavailable at the moment. 
Please try in sometime") + } + + // prepare config elements + elements := make([]string, len(pipelines)) + for i, p := range pipelines { + elements[i] = p.Id + } + + // prepare config by calling gen func + cfg, err := agentConf.StartNewVersion(ctx, userId, agentConf.ElementTypeLogPipelines, elements) + if err != nil || cfg == nil { + return nil, err + } + + zap.S().Info("applying drop pipeline config", cfg) + // raw pipeline is needed since filterConfig doesn't contain inactive pipelines and operators + rawPipelineData, _ := json.Marshal(pipelines) + + // queue up the config to push to opamp + err = agentConf.UpsertLogParsingProcessor(ctx, cfg.Version, rawPipelineData, filterConfig, names) + history, _ := agentConf.GetConfigHistory(ctx, agentConf.ElementTypeLogPipelines, 10) + insertedCfg, _ := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, cfg.Version) + + response := &PipelinesResponse{ + ConfigVersion: insertedCfg, + Pipelines: pipelines, + History: history, + } + + if err != nil { + return response, fmt.Errorf("failed to apply pipelines") + } + return response, nil +} + +// GetPipelinesByVersion responds with version info and associated pipelines +func (ic *LogParsingPipelineController) GetPipelinesByVersion(ctx context.Context, version int) (*PipelinesResponse, error) { + pipelines, errors := ic.getPipelinesByVersion(ctx, version) + if errors != nil { + zap.S().Errorf("failed to get pipelines for version %d, %w", version, errors) + return nil, fmt.Errorf("failed to get pipelines for given version") + } + configVersion, err := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, version) + if err != nil || configVersion == nil { + zap.S().Errorf("failed to get config for version %d, %s", version, err.Error()) + return nil, fmt.Errorf("failed to get config for given version") + } + + return &PipelinesResponse{ + ConfigVersion: configVersion, + Pipelines: pipelines, + }, nil +} diff --git a/pkg/query-service/app/logparsingpipeline/db.go b/pkg/query-service/app/logparsingpipeline/db.go new file mode 100644 index 0000000000..ac6bc5ba3d --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/db.go @@ -0,0 +1,198 @@ +package logparsingpipeline + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline/sqlite" + "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// Repo handles DDL and DML ops on ingestion pipeline +type Repo struct { + db *sqlx.DB +} + +const logPipelines = "log_pipelines" + +// NewRepo initiates a new ingestion repo +func NewRepo(db *sqlx.DB) Repo { + return Repo{ + db: db, + } +} + +func (r *Repo) InitDB(engine string) error { + switch engine { + case "sqlite3", "sqlite": + return sqlite.InitDB(r.db) + default: + return fmt.Errorf("unsupported db") + } +} + +// insertPipeline stores a given postable pipeline to database +func (r *Repo) insertPipeline(ctx context.Context, postable *PostablePipeline) (*model.Pipeline, error) { + if err := postable.IsValid(); err != nil { + return nil, errors.Wrap(err, "failed to validate postable pipeline") + } + + rawConfig, err := json.Marshal(postable.Config) + if err != nil { + return nil, errors.Wrap(err, "failed to unmarshal postable pipeline config") + } + + jwt, err := auth.ExtractJwtFromContext(ctx) + if err != nil { + return nil, err + } + + claims, err := auth.ParseJWT(jwt) + if 
err != nil { + return nil, err + } + + insertRow := &model.Pipeline{ + Id: uuid.New().String(), + OrderId: postable.OrderId, + Enabled: postable.Enabled, + Name: postable.Name, + Alias: postable.Alias, + Description: &postable.Description, + Filter: postable.Filter, + Config: postable.Config, + RawConfig: string(rawConfig), + Creator: model.Creator{ + CreatedBy: claims["email"].(string), + CreatedAt: time.Now(), + }, + } + + insertQuery := `INSERT INTO pipelines + (id, order_id, enabled, created_by, created_at, name, alias, description, filter, config_json) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)` + + _, err = r.db.ExecContext(ctx, + insertQuery, + insertRow.Id, + insertRow.OrderId, + insertRow.Enabled, + insertRow.Creator.CreatedBy, + insertRow.Creator.CreatedAt, + insertRow.Name, + insertRow.Alias, + insertRow.Description, + insertRow.Filter, + insertRow.RawConfig) + + if err != nil { + zap.S().Errorf("error in inserting pipeline data: ", zap.Error(err)) + return insertRow, errors.Wrap(err, "failed to insert pipeline") + } + + return insertRow, nil +} + +// getPipelinesByVersion returns pipelines associated with a given version +func (r *Repo) getPipelinesByVersion(ctx context.Context, version int) ([]model.Pipeline, []error) { + var errors []error + pipelines := []model.Pipeline{} + + versionQuery := `SELECT r.id, + r.name, + r.config_json, + r.alias, + r.description, + r.filter, + r.order_id, + r.created_by, + r.created_at, + r.enabled + FROM pipelines r, + agent_config_elements e, + agent_config_versions v + WHERE r.id = e.element_id + AND v.id = e.version_id + AND e.element_type = $1 + AND v.version = $2 + ORDER BY order_id asc` + + err := r.db.SelectContext(ctx, &pipelines, versionQuery, logPipelines, version) + if err != nil { + return nil, []error{fmt.Errorf("failed to get drop pipelines from db: %v", err)} + } + + if len(pipelines) == 0 { + return pipelines, nil + } + + for i := range pipelines { + if err := pipelines[i].ParseRawConfig(); err != nil { + errors = append(errors, err) + } + } + + return pipelines, errors +} + +// GetPipelines returns pipeline and errors (if any) +func (r *Repo) GetPipeline(ctx context.Context, id string) (*model.Pipeline, error) { + pipelines := []model.Pipeline{} + + pipelineQuery := `SELECT id, + name, + config_json, + alias, + description, + filter, + order_id, + created_by, + created_at, + enabled + FROM pipelines + WHERE id = $1` + + err := r.db.SelectContext(ctx, &pipelines, pipelineQuery, id) + if err != nil { + zap.S().Errorf("failed to get ingestion pipeline from db", err) + return nil, model.BadRequestStr("failed to get ingestion pipeline from db") + } + + if len(pipelines) == 0 { + zap.S().Warnf("No row found for ingestion pipeline id", id) + return nil, nil + } + + if len(pipelines) == 1 { + err := pipelines[0].ParseRawConfig() + if err != nil { + zap.S().Errorf("invalid pipeline config found", id, err) + return &pipelines[0], model.InternalError(fmt.Errorf("found an invalid pipeline config ")) + } + return &pipelines[0], nil + } + + return nil, model.InternalError(fmt.Errorf("multiple pipelines with same id")) + +} + +func (r *Repo) DeletePipeline(ctx context.Context, id string) error { + deleteQuery := `DELETE + FROM pipelines + WHERE id = $1` + + _, err := r.db.ExecContext(ctx, deleteQuery, id) + if err != nil { + return model.BadRequest(err) + } + + return nil + +} diff --git a/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go new file mode 100644 
index 0000000000..60f0e4df17 --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go @@ -0,0 +1,77 @@ +package logparsingpipeline + +import ( + "encoding/json" + "fmt" + + "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/model" +) + +const ( + NOOP = "noop" +) + +func PreparePipelineProcessor(pipelines []model.Pipeline) (map[string]interface{}, []string, error) { + processors := map[string]interface{}{} + names := []string{} + for _, v := range pipelines { + if !v.Enabled { + continue + } + + operators := getOperators(v.Config) + if len(operators) == 0 { + continue + } + router := []model.PipelineOperator{ + { + ID: "router_signoz", + Type: "router", + Routes: &[]model.Route{ + { + Output: v.Config[0].ID, + Expr: v.Filter, + }, + }, + Default: NOOP, + }, + } + + v.Config = append(router, operators...) + + // noop operator is needed as the default operator so that logs are not dropped + noop := model.PipelineOperator{ + ID: NOOP, + Type: NOOP, + } + v.Config = append(v.Config, noop) + + processor := model.Processor{ + Operators: v.Config, + } + name := constants.LogsPPLPfx + v.Alias + processors[name] = processor + names = append(names, name) + } + return processors, names, nil +} + +func getOperators(ops []model.PipelineOperator) []model.PipelineOperator { + filteredOp := []model.PipelineOperator{} + for i, operator := range ops { + if operator.Enabled { + if i > 0 { + filteredOp[len(filteredOp)-1].Output = operator.ID + } + filteredOp = append(filteredOp, operator) + } else if i == len(ops)-1 && len(filteredOp) != 0 { + filteredOp[len(filteredOp)-1].Output = "" + } + } + for _, v := range filteredOp { + x, _ := json.Marshal(v) + fmt.Println(string(x)) + } + return filteredOp +} diff --git a/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go b/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go new file mode 100644 index 0000000000..4973467d1b --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go @@ -0,0 +1,175 @@ +package logparsingpipeline + +import ( + "testing" + + . 
"github.com/smartystreets/goconvey/convey" + "go.signoz.io/signoz/pkg/query-service/model" +) + +var prepareProcessorTestData = []struct { + Name string + Operators []model.PipelineOperator + Output []model.PipelineOperator +}{ + { + Name: "Last operator disabled", + Operators: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Output: "t2", + Enabled: true, + }, + { + ID: "t2", + Name: "t2", + Enabled: false, + }, + }, + Output: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Enabled: true, + }, + }, + }, + { + Name: "Operator in middle disabled", + Operators: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Output: "t2", + Enabled: true, + }, + { + ID: "t2", + Name: "t2", + Output: "t3", + Enabled: false, + }, + { + ID: "t3", + Name: "t3", + Enabled: true, + }, + }, + Output: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Output: "t3", + Enabled: true, + }, + { + ID: "t3", + Name: "t3", + Enabled: true, + }, + }, + }, + { + Name: "Single operator disabled", + Operators: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Output: "t2", + Enabled: false, + }, + }, + Output: []model.PipelineOperator{}, + }, + { + Name: "Single operator enabled", + Operators: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Enabled: true, + }, + }, + Output: []model.PipelineOperator{ + { + ID: "t1", + Name: "t1", + Enabled: true, + }, + }, + }, + { + Name: "Empty operator", + Operators: []model.PipelineOperator{}, + Output: []model.PipelineOperator{}, + }, + { + Name: "new test", + Operators: []model.PipelineOperator{ + { + ID: "move_filename", + Output: "move_function", + Enabled: true, + Name: "move_filename", + }, + { + ID: "move_function", + Output: "move_line", + Enabled: false, + Name: "move_function", + }, + { + ID: "move_line", + Output: "move_lwp", + Enabled: true, + Name: "move_line", + }, + { + ID: "move_lwp", + Output: "move_callid", + Enabled: true, + Name: "move_lwp", + }, + { + ID: "move_callid", + Enabled: true, + Name: "move_lwp", + }, + }, + Output: []model.PipelineOperator{ + { + ID: "move_filename", + Output: "move_line", + Enabled: true, + Name: "move_filename", + }, + { + ID: "move_line", + Output: "move_lwp", + Enabled: true, + Name: "move_line", + }, + { + ID: "move_lwp", + Output: "move_callid", + Enabled: true, + Name: "move_lwp", + }, + { + ID: "move_callid", + Enabled: true, + Name: "move_lwp", + }, + }, + }, +} + +func TestPreparePipelineProcessor(t *testing.T) { + for _, test := range prepareProcessorTestData { + Convey(test.Name, t, func() { + res := getOperators(test.Operators) + So(res, ShouldResemble, test.Output) + }) + } +} diff --git a/pkg/query-service/app/logparsingpipeline/postablePipeline.go b/pkg/query-service/app/logparsingpipeline/postablePipeline.go new file mode 100644 index 0000000000..2deda650bd --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/postablePipeline.go @@ -0,0 +1,182 @@ +package logparsingpipeline + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "github.com/antonmedv/expr" + + "go.signoz.io/signoz/pkg/query-service/model" +) + +// PostablePipelines are a list of user defined pielines +type PostablePipelines struct { + Pipelines []PostablePipeline `json:"pipelines"` +} + +// PostablePipeline captures user inputs in setting the pipeline + +type PostablePipeline struct { + Id string `json:"id"` + OrderId int `json:"orderId"` + Name string `json:"name"` + Alias string `json:"alias"` + Description string `json:"description"` + Enabled bool `json:"enabled"` + Filter string 
`json:"filter"` + Config []model.PipelineOperator `json:"config"` +} + +// IsValid checks if postable pipeline has all the required params +func (p *PostablePipeline) IsValid() error { + if p.OrderId == 0 { + return fmt.Errorf("orderId with value > 1 is required") + } + if p.Name == "" { + return fmt.Errorf("pipeline name is required") + } + + if p.Alias == "" { + return fmt.Errorf("pipeline alias is required") + } + + if p.Filter == "" { + return fmt.Errorf("pipeline filter is required") + } + + // check the expression + _, err := expr.Compile(p.Filter, expr.AsBool(), expr.AllowUndefinedVariables()) + if err != nil { + return fmt.Errorf(fmt.Sprintf("filter for pipeline %v is not correct: %v", p.Name, err.Error())) + } + + idUnique := map[string]struct{}{} + outputUnique := map[string]struct{}{} + + l := len(p.Config) + for i, op := range p.Config { + if op.OrderId == 0 { + return fmt.Errorf("orderId with value > 1 is required in operator") + } + if op.ID == "" { + return fmt.Errorf("id of an operator cannot be empty") + } + if op.Type == "" { + return fmt.Errorf("type of an operator cannot be empty") + } + if i != (l-1) && op.Output == "" { + return fmt.Errorf(fmt.Sprintf("Output of operator %s cannot be nil", op.ID)) + } + if i == (l-1) && op.Output != "" { + return fmt.Errorf(fmt.Sprintf("Output of operator %s should be empty", op.ID)) + } + + if _, ok := idUnique[op.ID]; ok { + return fmt.Errorf("duplicate id cannot be present") + } + if _, ok := outputUnique[op.Output]; ok { + return fmt.Errorf("duplicate output cannot be present") + } + + if op.ID == op.Output { + return fmt.Errorf("id and output cannot be same") + } + + err := isValidOperator(op) + if err != nil { + return err + } + + idUnique[op.ID] = struct{}{} + outputUnique[op.Output] = struct{}{} + } + return nil +} + +func isValidOperator(op model.PipelineOperator) error { + if op.ID == "" { + return errors.New("PipelineOperator.ID is required.") + } + + switch op.Type { + case "json_parser": + if op.ParseFrom == "" && op.ParseTo == "" { + return fmt.Errorf(fmt.Sprintf("parse from and parse to of %s json operator cannot be empty", op.ID)) + } + case "grok_parser": + if op.Pattern == "" { + return fmt.Errorf(fmt.Sprintf("pattern of %s grok operator cannot be empty", op.ID)) + } + case "regex_parser": + if op.Regex == "" { + return fmt.Errorf(fmt.Sprintf("regex of %s regex operator cannot be empty", op.ID)) + } + r, err := regexp.Compile(op.Regex) + if err != nil { + return fmt.Errorf(fmt.Sprintf("error compiling regex expression of %s regex operator", op.ID)) + } + namedCaptureGroups := 0 + for _, groupName := range r.SubexpNames() { + if groupName != "" { + namedCaptureGroups++ + } + } + if namedCaptureGroups == 0 { + return fmt.Errorf(fmt.Sprintf("no capture groups in regex expression of %s regex operator", op.ID)) + } + case "copy": + if op.From == "" || op.To == "" { + return fmt.Errorf(fmt.Sprintf("from or to of %s copy operator cannot be empty", op.ID)) + } + case "move": + if op.From == "" || op.To == "" { + return fmt.Errorf(fmt.Sprintf("from or to of %s move operator cannot be empty", op.ID)) + } + case "add": + if op.Field == "" || op.Value == "" { + return fmt.Errorf(fmt.Sprintf("field or value of %s add operator cannot be empty", op.ID)) + } + case "remove": + if op.Field == "" { + return fmt.Errorf(fmt.Sprintf("field of %s remove operator cannot be empty", op.ID)) + } + case "traceParser": + if op.TraceParser == nil { + return fmt.Errorf(fmt.Sprintf("field of %s remove operator cannot be empty", op.ID)) + } + + if 
op.TraceParser.SpanId.ParseFrom == "" && op.TraceParser.TraceId.ParseFrom == "" && op.TraceParser.TraceFlags.ParseFrom == "" { + return fmt.Errorf(fmt.Sprintf("one of trace_id,span_id,parse_from of %s traceParser operator must be present", op.ID)) + } + case "retain": + if len(op.Fields) == 0 { + return fmt.Errorf(fmt.Sprintf("fields of %s retain operator cannot be empty", op.ID)) + } + default: + return fmt.Errorf(fmt.Sprintf("operator type %s not supported for %s, use one of (grok_parser, regex_parser, copy, move, add, remove, traceParser, retain)", op.Type, op.ID)) + } + + if !isValidOtelValue(op.ParseFrom) || + !isValidOtelValue(op.ParseTo) || + !isValidOtelValue(op.From) || + !isValidOtelValue(op.To) || + !isValidOtelValue(op.Field) { + valueErrStr := "value should have prefix of body, attributes, resource" + return fmt.Errorf(fmt.Sprintf("%s for operator Id %s", valueErrStr, op.ID)) + } + return nil +} + +func isValidOtelValue(val string) bool { + if val == "" { + return true + } + if !strings.HasPrefix(val, "body") && + !strings.HasPrefix(val, "attributes") && + !strings.HasPrefix(val, "resource") { + return false + } + return true +} diff --git a/pkg/query-service/app/logparsingpipeline/postablePipeline_test.go b/pkg/query-service/app/logparsingpipeline/postablePipeline_test.go new file mode 100644 index 0000000000..ab9ed4414f --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/postablePipeline_test.go @@ -0,0 +1,239 @@ +package logparsingpipeline + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" + "go.signoz.io/signoz/pkg/query-service/model" +) + +var correctQueriesTest = []struct { + Name string + Pipeline PostablePipeline + IsValid bool +}{ + { + Name: "No orderId", + Pipeline: PostablePipeline{ + Name: "pipeline 1", + Alias: "pipeline1", + Enabled: true, + Filter: "attributes.method == \"GET\"", + Config: []model.PipelineOperator{}, + }, + IsValid: false, + }, + { + Name: "Invalid orderId", + Pipeline: PostablePipeline{ + OrderId: 0, + Name: "pipeline 1", + Alias: "pipeline1", + Enabled: true, + Filter: "attributes.method == \"GET\"", + Config: []model.PipelineOperator{}, + }, + IsValid: false, + }, + { + Name: "Valid orderId", + Pipeline: PostablePipeline{ + OrderId: 1, + Name: "pipeline 1", + Alias: "pipeline1", + Enabled: true, + Filter: "attributes.method == \"GET\"", + Config: []model.PipelineOperator{}, + }, + IsValid: true, + }, + { + Name: "Invalid filter", + Pipeline: PostablePipeline{ + OrderId: 1, + Name: "pipeline 1", + Alias: "pipeline1", + Enabled: true, + Filter: "test filter", + }, + IsValid: false, + }, + { + Name: "Valid filter", + Pipeline: PostablePipeline{ + OrderId: 1, + Name: "pipeline 1", + Alias: "pipeline1", + Enabled: true, + Filter: "attributes.method == \"GET\"", + }, + IsValid: true, + }, +} + +func TestIsValidPostablePipeline(t *testing.T) { + for _, test := range correctQueriesTest { + Convey(test.Name, t, func() { + err := test.Pipeline.IsValid() + if test.IsValid { + So(err, ShouldBeNil) + } else { + So(err, ShouldBeError) + } + }) + } +} + +var operatorTest = []struct { + Name string + Operator model.PipelineOperator + IsValid bool +}{ + { + Name: "Operator - without id", + Operator: model.PipelineOperator{ + Type: "remove", + Field: "attributes.abc", + }, + IsValid: false, + }, + { + Name: "Operator - without type", + Operator: model.PipelineOperator{ + ID: "test", + Field: "attributes.abc", + }, + IsValid: false, + }, + { + Name: "Copy - invalid to and from", + Operator: model.PipelineOperator{ + ID: "copy", 
+ Type: "copy", + From: "date", + To: "attributes", + }, + IsValid: false, + }, + { + Name: "Move - invalid to and from", + Operator: model.PipelineOperator{ + ID: "move", + Type: "move", + From: "attributes", + To: "data", + }, + IsValid: false, + }, + { + Name: "Add - invalid to and from", + Operator: model.PipelineOperator{ + ID: "add", + Type: "add", + Field: "data", + }, + IsValid: false, + }, + { + Name: "Remove - invalid to and from", + Operator: model.PipelineOperator{ + ID: "remove", + Type: "remove", + Field: "data", + }, + IsValid: false, + }, + { + Name: "Add - valid", + Operator: model.PipelineOperator{ + ID: "add", + Type: "add", + Field: "body", + Value: "val", + }, + IsValid: true, + }, + { + Name: "Move - valid", + Operator: model.PipelineOperator{ + ID: "move", + Type: "move", + From: "attributes.x1", + To: "attributes.x2", + }, + IsValid: true, + }, + { + Name: "Copy - valid", + Operator: model.PipelineOperator{ + ID: "copy", + Type: "copy", + From: "resource.x1", + To: "resource.x2", + }, + IsValid: true, + }, + { + Name: "Unknown operator", + Operator: model.PipelineOperator{ + ID: "copy", + Type: "operator", + From: "resource.x1", + To: "resource.x2", + }, + IsValid: false, + }, + { + Name: "Grok - valid", + Operator: model.PipelineOperator{ + ID: "grok", + Type: "grok_parser", + Pattern: "%{COMMONAPACHELOG}", + ParseTo: "attributes", + }, + IsValid: true, + }, + { + Name: "Grok - invalid", + Operator: model.PipelineOperator{ + ID: "grok", + Type: "grok_parser", + Pattern: "%{COMMONAPACHELOG}", + ParseTo: "test", + }, + IsValid: false, + }, + { + Name: "Regex - valid", + Operator: model.PipelineOperator{ + ID: "regex", + Type: "regex_parser", + Regex: "(?P
+
+ + + + + +`; + +exports[`PipelinePage container test should render AddNewPipeline section 1`] = ``; diff --git a/frontend/src/components/DraggableTableRow/tests/utils.test.ts b/frontend/src/components/DraggableTableRow/tests/utils.test.ts new file mode 100644 index 0000000000..80854944c7 --- /dev/null +++ b/frontend/src/components/DraggableTableRow/tests/utils.test.ts @@ -0,0 +1,44 @@ +import { dragHandler, dropHandler } from '../utils'; + +jest.mock('react-dnd', () => ({ + useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), + useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), +})); + +describe('Utils testing of DraggableTableRow component', () => { + test('Should dropHandler return true', () => { + const monitor = { + isOver: jest.fn().mockReturnValueOnce(true), + } as never; + const dropDataTruthy = dropHandler(monitor); + + expect(dropDataTruthy).toEqual({ isOver: true }); + }); + + test('Should dropHandler return false', () => { + const monitor = { + isOver: jest.fn().mockReturnValueOnce(false), + } as never; + const dropDataFalsy = dropHandler(monitor); + + expect(dropDataFalsy).toEqual({ isOver: false }); + }); + + test('Should dragHandler return true', () => { + const monitor = { + isDragging: jest.fn().mockReturnValueOnce(true), + } as never; + const dragDataTruthy = dragHandler(monitor); + + expect(dragDataTruthy).toEqual({ isDragging: true }); + }); + + test('Should dragHandler return false', () => { + const monitor = { + isDragging: jest.fn().mockReturnValueOnce(false), + } as never; + const dragDataFalsy = dragHandler(monitor); + + expect(dragDataFalsy).toEqual({ isDragging: false }); + }); +}); diff --git a/frontend/src/components/DraggableTableRow/utils.ts b/frontend/src/components/DraggableTableRow/utils.ts new file mode 100644 index 0000000000..475145fdee --- /dev/null +++ b/frontend/src/components/DraggableTableRow/utils.ts @@ -0,0 +1,15 @@ +import { DragSourceMonitor, DropTargetMonitor } from 'react-dnd'; + +export function dropHandler(monitor: DropTargetMonitor): { isOver: boolean } { + return { + isOver: monitor.isOver(), + }; +} + +export function dragHandler( + monitor: DragSourceMonitor, +): { isDragging: boolean } { + return { + isDragging: monitor.isDragging(), + }; +} diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts index f911b6be57..468c6042bb 100644 --- a/frontend/src/constants/routes.ts +++ b/frontend/src/constants/routes.ts @@ -32,6 +32,8 @@ const ROUTES = { HOME_PAGE: '/', PASSWORD_RESET: '/password-reset', LIST_LICENSES: '/licenses', + TRACE_EXPLORER: '/trace-explorer', + PIPELINES: '/pipelines', }; export default ROUTES; diff --git a/frontend/src/constants/theme.ts b/frontend/src/constants/theme.ts index 354ea190a7..fcb8dd171a 100644 --- a/frontend/src/constants/theme.ts +++ b/frontend/src/constants/theme.ts @@ -44,6 +44,12 @@ const themeColors = { lightWhite: '#ffffffd9', borderLightGrey: '#d9d9d9', borderDarkGrey: '#424242', + gainsboro: '#DBDBDB', + navyBlue: '#1668DC', + lightSkyBlue: '#8DCFF8', + neroBlack: '#1d1d1d', + snowWhite: '#fafafa', + gamboge: '#D89614', bckgGrey: '#1d1d1d', }; diff --git a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentStage.tsx b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentStage.tsx new file mode 100644 index 0000000000..0c4c432f9e --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentStage.tsx @@ -0,0 +1,14 @@ +import { IconDataSpan } from 
'container/PipelinePage/styles'; + +import { getDeploymentStage, getDeploymentStageIcon } from './utils'; + +function DeploymentStage(deployStatus: string): JSX.Element { + return ( + <> + {getDeploymentStageIcon(deployStatus)} + {getDeploymentStage(deployStatus)} + + ); +} + +export default DeploymentStage; diff --git a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentTime.tsx b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentTime.tsx new file mode 100644 index 0000000000..fad712d421 --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/DeploymentTime.tsx @@ -0,0 +1,9 @@ +import dayjs from 'dayjs'; + +function DeploymentTime(deployTime: string): JSX.Element { + return ( + {dayjs(deployTime).locale('en').format('MMMM DD, YYYY hh:mm A')} + ); +} + +export default DeploymentTime; diff --git a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/index.tsx b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/index.tsx new file mode 100644 index 0000000000..ab70101ea3 --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/index.tsx @@ -0,0 +1,24 @@ +import { Table } from 'antd'; +import { Pipeline } from 'types/api/pipeline/def'; + +import { changeHistoryColumns } from '../../PipelineListsView/config'; +import { HistoryTableWrapper } from '../../styles'; +import { historyPagination } from '../config'; + +function ChangeHistory({ piplineData }: ChangeHistoryProps): JSX.Element { + return ( + + + + ); +} + +interface ChangeHistoryProps { + piplineData: Pipeline; +} + +export default ChangeHistory; diff --git a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/utils.tsx b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/utils.tsx new file mode 100644 index 0000000000..ff3161f7d4 --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/utils.tsx @@ -0,0 +1,39 @@ +import { + CheckCircleFilled, + CloseCircleFilled, + ExclamationCircleFilled, + LoadingOutlined, +} from '@ant-design/icons'; +import { Spin } from 'antd'; + +export function getDeploymentStage(value: string): string { + switch (value) { + case 'IN_PROGRESS': + return 'In Progress'; + case 'DEPLOYED': + return 'Deployed'; + case 'DIRTY': + return 'Dirty'; + case 'FAILED': + return 'Failed'; + default: + return ''; + } +} + +export function getDeploymentStageIcon(value: string): JSX.Element { + switch (value) { + case 'IN_PROGRESS': + return ( + } /> + ); + case 'DEPLOYED': + return ; + case 'DIRTY': + return ; + case 'FAILED': + return ; + default: + return ; + } +} diff --git a/frontend/src/container/PipelinePage/Layouts/Pipeline/CreatePipelineButton.tsx b/frontend/src/container/PipelinePage/Layouts/Pipeline/CreatePipelineButton.tsx new file mode 100644 index 0000000000..05151506df --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/Pipeline/CreatePipelineButton.tsx @@ -0,0 +1,62 @@ +import { EditFilled, PlusOutlined } from '@ant-design/icons'; +import TextToolTip from 'components/TextToolTip'; +import { useCallback, useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { ActionMode, ActionType, Pipeline } from 'types/api/pipeline/def'; + +import { ButtonContainer, CustomButton } from '../../styles'; +import { checkDataLength } from '../utils'; + +function CreatePipelineButton({ + setActionType, + isActionMode, + setActionMode, + piplineData, +}: CreatePipelineButtonProps): JSX.Element { + const { t } = useTranslation(['pipeline']); + + const isAddNewPipelineVisible 
= useMemo( + () => checkDataLength(piplineData?.pipelines), + [piplineData?.pipelines], + ); + const isDisabled = isActionMode === ActionMode.Editing; + + const actionHandler = useCallback( + (action: string, setStateFunc: (action: string) => void) => (): void => + setStateFunc(action), + [], + ); + + return ( + + + {isAddNewPipelineVisible && ( + } + onClick={actionHandler(ActionMode.Editing, setActionMode)} + disabled={isDisabled} + > + {t('enter_edit_mode')} + + )} + {!isAddNewPipelineVisible && ( + } + onClick={actionHandler(ActionType.AddPipeline, setActionType)} + type="primary" + > + {t('new_pipeline')} + + )} + + ); +} + +interface CreatePipelineButtonProps { + setActionType: (actionType: string) => void; + isActionMode: string; + setActionMode: (actionMode: string) => void; + piplineData: Pipeline; +} + +export default CreatePipelineButton; diff --git a/frontend/src/container/PipelinePage/Layouts/Pipeline/PipelinesSearchSection.tsx b/frontend/src/container/PipelinePage/Layouts/Pipeline/PipelinesSearchSection.tsx new file mode 100644 index 0000000000..ae830cae7f --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/Pipeline/PipelinesSearchSection.tsx @@ -0,0 +1,30 @@ +import { Input } from 'antd'; +import React, { Dispatch, SetStateAction, useCallback } from 'react'; +import { useTranslation } from 'react-i18next'; + +function PipelinesSearchSection({ + setPipelineSearchValue, +}: PipelinesSearchSectionProps): JSX.Element { + const { t } = useTranslation(['pipeline']); + + const onSeachHandler = useCallback( + (event: React.SetStateAction) => { + setPipelineSearchValue(event); + }, + [setPipelineSearchValue], + ); + + return ( + + ); +} + +interface PipelinesSearchSectionProps { + setPipelineSearchValue: Dispatch>; +} + +export default PipelinesSearchSection; diff --git a/frontend/src/container/PipelinePage/Layouts/Pipeline/index.tsx b/frontend/src/container/PipelinePage/Layouts/Pipeline/index.tsx new file mode 100644 index 0000000000..da17d90e95 --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/Pipeline/index.tsx @@ -0,0 +1,43 @@ +import { useState } from 'react'; +import { Pipeline } from 'types/api/pipeline/def'; + +import PipelineListsView from '../../PipelineListsView'; +import CreatePipelineButton from './CreatePipelineButton'; +import PipelinesSearchSection from './PipelinesSearchSection'; + +function PipelinePageLayout({ + refetchPipelineLists, + piplineData, +}: PipelinePageLayoutProps): JSX.Element { + const [isActionType, setActionType] = useState(); + const [isActionMode, setActionMode] = useState('viewing-mode'); + const [pipelineSearchValue, setPipelineSearchValue] = useState(''); + + return ( + <> + + + + + ); +} + +interface PipelinePageLayoutProps { + refetchPipelineLists: VoidFunction; + piplineData: Pipeline; +} + +export default PipelinePageLayout; diff --git a/frontend/src/container/PipelinePage/Layouts/config.ts b/frontend/src/container/PipelinePage/Layouts/config.ts new file mode 100644 index 0000000000..46d2d2738e --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/config.ts @@ -0,0 +1,3 @@ +export const historyPagination = { + defaultPageSize: 5, +}; diff --git a/frontend/src/container/PipelinePage/Layouts/utils.ts b/frontend/src/container/PipelinePage/Layouts/utils.ts new file mode 100644 index 0000000000..870c2e4653 --- /dev/null +++ b/frontend/src/container/PipelinePage/Layouts/utils.ts @@ -0,0 +1,4 @@ +import { PipelineData } from 'types/api/pipeline/def'; + +export const checkDataLength = (data: Array): boolean => + 
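+	// returns true when at least one pipeline already exists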
data?.length > 0; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/DescriptionTextArea.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/DescriptionTextArea.tsx new file mode 100644 index 0000000000..6bd456e7f1 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/DescriptionTextArea.tsx @@ -0,0 +1,31 @@ +import { Form, Input } from 'antd'; +import { useTranslation } from 'react-i18next'; + +import { ProcessorFormField } from '../../AddNewProcessor/config'; +import { FormLabelStyle } from '../styles'; + +function DescriptionTextArea({ + fieldData, +}: DescriptionTextAreaProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + {fieldData.fieldName}} + key={fieldData.id} + > + + + ); +} + +interface DescriptionTextAreaProps { + fieldData: ProcessorFormField; +} +export default DescriptionTextArea; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterSearch.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterSearch.tsx new file mode 100644 index 0000000000..5b9863b8dd --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterSearch.tsx @@ -0,0 +1,31 @@ +import { Form, Input } from 'antd'; +import { useTranslation } from 'react-i18next'; + +import { ProcessorFormField } from '../../AddNewProcessor/config'; +import { formValidationRules } from '../../config'; +import { FormLabelStyle } from '../styles'; + +function FilterSearch({ fieldData }: FilterSearchProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + {fieldData.fieldName}} + key={fieldData.id} + rules={formValidationRules} + name={fieldData.name} + > + + + ); +} +interface FilterSearchProps { + fieldData: ProcessorFormField; +} +export default FilterSearch; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/NameInput.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/NameInput.tsx new file mode 100644 index 0000000000..21e25118af --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/NameInput.tsx @@ -0,0 +1,27 @@ +import { Form, Input } from 'antd'; +import { useTranslation } from 'react-i18next'; + +import { ProcessorFormField } from '../../AddNewProcessor/config'; +import { formValidationRules } from '../../config'; +import { FormLabelStyle } from '../styles'; + +function NameInput({ fieldData }: NameInputProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + {fieldData.fieldName}} + key={fieldData.id} + rules={formValidationRules} + name={fieldData.name} + > + + + ); +} + +interface NameInputProps { + fieldData: ProcessorFormField; +} +export default NameInput; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/ProcessorTags.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/ProcessorTags.tsx new file mode 100644 index 0000000000..b1d8b2dfcc --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/ProcessorTags.tsx @@ -0,0 +1,36 @@ +import { Form } from 'antd'; +import TagInput from 'container/PipelinePage/components/TagInput'; +import { useTranslation } from 'react-i18next'; + +import { ProcessorFormField } from '../../AddNewProcessor/config'; +import { 
FormLabelStyle } from '../styles'; + +function ProcessorTags({ + fieldData, + setTagsListData, + tagsListData, +}: ProcessorTagsProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + {fieldData.fieldName}} + key={fieldData.id} + name={fieldData.name} + > + + + ); +} + +interface ProcessorTagsProps { + fieldData: ProcessorFormField; + setTagsListData: (tags: Array) => void; + tagsListData: Array; +} +export default ProcessorTags; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/index.tsx new file mode 100644 index 0000000000..220ff625b2 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/index.tsx @@ -0,0 +1,150 @@ +import { Button, Divider, Form, Modal } from 'antd'; +import React, { useCallback, useEffect, useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { ActionMode, ActionType, PipelineData } from 'types/api/pipeline/def'; +import AppReducer from 'types/reducer/app'; +import { v4 } from 'uuid'; + +import { ModalButtonWrapper, ModalTitle } from '../styles'; +import { getEditedDataSource, getRecordIndex } from '../utils'; +import { renderPipelineForm } from './utils'; + +function AddNewPipeline({ + isActionType, + setActionType, + selectedPipelineData, + setShowSaveButton, + setCurrPipelineData, + currPipelineData, +}: AddNewPipelineProps): JSX.Element { + const [form] = Form.useForm(); + const { t } = useTranslation('pipeline'); + const { user } = useSelector((state) => state.app); + + const isEdit = isActionType === 'edit-pipeline'; + const isAdd = isActionType === 'add-pipeline'; + + useEffect(() => { + if (isEdit) { + form.setFieldsValue(selectedPipelineData); + } + if (isAdd) { + form.resetFields(); + } + }, [form, isEdit, isAdd, selectedPipelineData]); + + const onFinish = (values: PipelineData): void => { + const newPipeLineData: PipelineData = { + id: v4(), + orderId: (currPipelineData?.length || 0) + 1, + createdAt: new Date().toISOString(), + createdBy: user?.name || '', + name: values.name, + alias: values.name.replace(/\s/g, ''), + description: values.description, + filter: values.filter, + config: [], + enabled: true, + }; + + if (isEdit && selectedPipelineData) { + const findRecordIndex = getRecordIndex( + currPipelineData, + selectedPipelineData, + 'id', + ); + const updatedPipelineData: PipelineData = { + ...currPipelineData[findRecordIndex], + ...values, + }; + + const editedPipelineData = getEditedDataSource( + currPipelineData, + selectedPipelineData, + 'id', + updatedPipelineData, + ); + + setCurrPipelineData(editedPipelineData); + } + if (isAdd) { + setCurrPipelineData((prevState) => { + if (prevState) return [...prevState, newPipeLineData]; + return [newPipeLineData]; + }); + } + setActionType(undefined); + }; + + const onCancelModalHandler = (): void => { + setActionType(undefined); + }; + + const modalTitle = useMemo( + (): string => + isEdit + ? 
`${t('edit_pipeline')} : ${selectedPipelineData?.name}` + : t('create_pipeline'), + [isEdit, selectedPipelineData?.name, t], + ); + + const onOkModalHandler = useCallback( + () => setShowSaveButton(ActionMode.Editing), + [setShowSaveButton], + ); + + const isOpen = useMemo(() => isEdit || isAdd, [isAdd, isEdit]); + + return ( + {modalTitle}} + centered + open={isOpen} + width={800} + footer={null} + onCancel={onCancelModalHandler} + > + +
+ {renderPipelineForm()} + + + + + + + + +
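For readers skimming the patch: the {renderPipelineForm()} call above and the renderProcessorForm helper later in this change both follow a config-driven form pattern, where an array of field descriptors is mapped onto field components. A minimal standalone sketch of that idea follows; the descriptor shape and the string-returning stand-ins for React components are illustrative assumptions, not the patch's exact code.

// Minimal config-driven form sketch (illustrative only, not part of the patch).
type FieldDescriptor = {
  id: number;
  name: string;        // form item name, e.g. 'name' or 'filter'
  fieldName: string;   // label shown to the user
  placeholder: string; // i18n key for the placeholder
  component: (field: FieldDescriptor) => string; // stand-in for a React component
};

const textInput = (field: FieldDescriptor): string =>
  `<input name="${field.name}" placeholder="${field.placeholder}" />`;

const textArea = (field: FieldDescriptor): string =>
  `<textarea name="${field.name}" placeholder="${field.placeholder}" />`;

const pipelineFieldsSketch: FieldDescriptor[] = [
  { id: 1, name: 'filter', fieldName: 'Filter', placeholder: 'search_pipeline_placeholder', component: textInput },
  { id: 2, name: 'name', fieldName: 'Name', placeholder: 'pipeline_name_placeholder', component: textInput },
  { id: 3, name: 'description', fieldName: 'Description', placeholder: 'pipeline_description_placeholder', component: textArea },
];

// Analogous to renderPipelineForm(): pick each descriptor's component and render it.
const renderFormSketch = (fields: FieldDescriptor[]): string[] =>
  fields.map((field) => field.component(field));

console.log(renderFormSketch(pipelineFieldsSketch).join('\n'));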
+ ); +} + +interface AddNewPipelineProps { + isActionType: string; + setActionType: (actionType?: ActionType) => void; + selectedPipelineData: PipelineData | undefined; + setShowSaveButton: (actionMode: ActionMode) => void; + setCurrPipelineData: ( + value: React.SetStateAction>, + ) => void; + currPipelineData: Array; +} + +export default AddNewPipeline; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/styles.ts b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/styles.ts new file mode 100644 index 0000000000..61dc2650c8 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/styles.ts @@ -0,0 +1,7 @@ +import styled from 'styled-components'; + +export const FormLabelStyle = styled.span` + font-size: 0.75rem; + font-weight: 400; + line-height: 1.25rem; +`; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/utils.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/utils.tsx new file mode 100644 index 0000000000..631bb34377 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/utils.tsx @@ -0,0 +1,7 @@ +import { pipelineFields } from '../config'; + +export const renderPipelineForm = (): Array => + pipelineFields.map((field) => { + const Component = field.component; + return ; + }); diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/NameInput.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/NameInput.tsx new file mode 100644 index 0000000000..3991715f63 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/NameInput.tsx @@ -0,0 +1,36 @@ +import { Form, Input } from 'antd'; +import { ModalFooterTitle } from 'container/PipelinePage/styles'; +import { useTranslation } from 'react-i18next'; + +import { formValidationRules } from '../../config'; +import { ProcessorFormField } from '../config'; +import { Container, FormWrapper, PipelineIndexIcon } from '../styles'; + +function NameInput({ fieldData }: NameInputProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + + + {Number(fieldData.id) + 1} + + + {fieldData.fieldName}} + key={fieldData.id} + name={fieldData.name} + initialValue={fieldData.initialValue} + rules={fieldData.rules ? 
fieldData.rules : formValidationRules} + > + + + + + ); +} + +interface NameInputProps { + fieldData: ProcessorFormField; +} +export default NameInput; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/ParsingRulesTextArea.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/ParsingRulesTextArea.tsx new file mode 100644 index 0000000000..4d7f8b2ec0 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/ParsingRulesTextArea.tsx @@ -0,0 +1,37 @@ +import { Form, Input } from 'antd'; +import { ModalFooterTitle } from 'container/PipelinePage/styles'; +import { useTranslation } from 'react-i18next'; + +import { ProcessorFormField } from '../config'; +import { Container, FormWrapper, PipelineIndexIcon } from '../styles'; + +function ParsingRulesTextArea({ + fieldData, +}: ParsingRulesTextAreaProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + + + {Number(fieldData.id) + 1} + + + {fieldData.fieldName}} + > + + + + + ); +} + +interface ParsingRulesTextAreaProps { + fieldData: ProcessorFormField; +} +export default ParsingRulesTextArea; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/TypeSelect.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/TypeSelect.tsx new file mode 100644 index 0000000000..e293f288f7 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/TypeSelect.tsx @@ -0,0 +1,44 @@ +import { Select } from 'antd'; +import { useTranslation } from 'react-i18next'; + +import { DEFAULT_PROCESSOR_TYPE, processorTypes } from '../config'; +import { + PipelineIndexIcon, + ProcessorType, + ProcessorTypeContainer, + ProcessorTypeWrapper, + StyledSelect, +} from '../styles'; + +function TypeSelect({ onChange, value }: TypeSelectProps): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + + 1 + + {t('processor_type')} + onChange(value)} + value={value} + > + {processorTypes.map(({ value, label }) => ( + + {label} + + ))} + + + + ); +} + +TypeSelect.defaultProps = { + value: DEFAULT_PROCESSOR_TYPE, +}; + +interface TypeSelectProps { + onChange: (value: string | unknown) => void; + value?: string; +} +export default TypeSelect; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts new file mode 100644 index 0000000000..e7e5677cd8 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts @@ -0,0 +1,226 @@ +type ProcessorType = { + key: string; + value: string; + label: string; + title?: string; + disabled?: boolean; +}; + +export const processorTypes: Array = [ + { key: 'grok_parser', value: 'grok_parser', label: 'Grok' }, + { key: 'json_parser', value: 'json_parser', label: 'Json Parser' }, + { key: 'regex_parser', value: 'regex_parser', label: 'Regex' }, + { key: 'add', value: 'add', label: 'Add' }, + { key: 'remove', value: 'remove', label: 'Remove' }, + { key: 'trace_parser', value: 'trace_parser', label: 'Trace Parser' }, + // { key: 'retain', value: 'retain', label: 'Retain' }, @Chintan - Commented as per Nitya's suggestion + { key: 'move', value: 'move', label: 'Move' }, + { key: 'copy', value: 'copy', label: 'Copy' }, +]; + +export const DEFAULT_PROCESSOR_TYPE = processorTypes[0].value; + +export type ProcessorFormField = { + id: number; + 
fieldName: string; + placeholder: string; + name: string; + rules?: Array<{ [key: string]: boolean }>; + initialValue?: string; +}; + +const commonFields = [ + { + id: 3, + fieldName: 'Parse From', + placeholder: 'processor_parsefrom_placeholder', + name: 'parse_from', // optional + rules: [], + initialValue: 'body', + }, + { + id: 4, + fieldName: 'Parse To', + placeholder: 'processor_parseto_placeholder', + name: 'parse_to', // optional + rules: [], + initialValue: 'attributes', + }, + { + id: 5, + fieldName: 'On Error', + placeholder: 'processor_onerror_placeholder', + name: 'on_error', // optional + rules: [], + initialValue: 'send', + }, +]; + +export const processorFields: { [key: string]: Array } = { + grok_parser: [ + { + id: 1, + fieldName: 'Name of Grok Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Pattern', + placeholder: 'processor_pattern_placeholder', + name: 'pattern', + }, + ...commonFields, + ], + json_parser: [ + { + id: 1, + fieldName: 'Name of Json Parser Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Parse From', + placeholder: 'processor_parsefrom_placeholder', + name: 'parse_from', + initialValue: 'body', + }, + { + id: 3, + fieldName: 'Parse To', + placeholder: 'processor_parseto_placeholder', + name: 'parse_to', + initialValue: 'attributes', + }, + ], + regex_parser: [ + { + id: 1, + fieldName: 'Name of Regex Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Define Regex', + placeholder: 'processor_regex_placeholder', + name: 'regex', + }, + ...commonFields, + ], + add: [ + { + id: 1, + fieldName: 'Name of Add Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Field', + placeholder: 'processor_field_placeholder', + name: 'field', + }, + { + id: 3, + fieldName: 'Value', + placeholder: 'processor_value_placeholder', + name: 'value', + }, + ], + remove: [ + { + id: 1, + fieldName: 'Name of Remove Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Field', + placeholder: 'processor_field_placeholder', + name: 'field', + }, + ], + trace_parser: [ + { + id: 1, + fieldName: 'Name of Trace Parser Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Trace Id Parce From', + placeholder: 'processor_trace_id_placeholder', + name: 'traceId', + }, + { + id: 3, + fieldName: 'Span id Parse From', + placeholder: 'processor_span_id_placeholder', + name: 'spanId', + }, + { + id: 4, + fieldName: 'Trace flags parse from', + placeholder: 'processor_trace_flags_placeholder', + name: 'traceFlags', + }, + ], + retain: [ + { + id: 1, + fieldName: 'Name of Retain Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'Fields', + placeholder: 'processor_fields_placeholder', + name: 'fields', + }, + ], + move: [ + { + id: 1, + fieldName: 'Name of Move Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'From', + placeholder: 'processor_from_placeholder', + name: 'from', + }, + { + id: 3, + fieldName: 'To', + placeholder: 'processor_to_placeholder', + name: 'to', + }, + ], + copy: [ + { + id: 1, + fieldName: 'Name of Copy Processor', + placeholder: 'processor_name_placeholder', + name: 'name', + }, + { + id: 2, + fieldName: 'From', + placeholder: 
'processor_from_placeholder', + name: 'from', + }, + { + id: 3, + fieldName: 'To', + placeholder: 'processor_to_placeholder', + name: 'to', + }, + ], +}; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx new file mode 100644 index 0000000000..c9ab5d8aeb --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx @@ -0,0 +1,195 @@ +import { Button, Divider, Form, Modal } from 'antd'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { + ActionMode, + ActionType, + PipelineData, + ProcessorData, +} from 'types/api/pipeline/def'; + +import { ModalButtonWrapper, ModalTitle } from '../styles'; +import { getEditedDataSource, getRecordIndex } from '../utils'; +import { DEFAULT_PROCESSOR_TYPE } from './config'; +import TypeSelect from './FormFields/TypeSelect'; +import { renderProcessorForm } from './utils'; + +function AddNewProcessor({ + isActionType, + setActionType, + selectedProcessorData, + setShowSaveButton, + expandedPipelineData, + setExpandedPipelineData, +}: AddNewProcessorProps): JSX.Element { + const [form] = Form.useForm(); + const { t } = useTranslation('pipeline'); + const [processorType, setProcessorType] = useState( + DEFAULT_PROCESSOR_TYPE, + ); + + const isEdit = isActionType === 'edit-processor'; + const isAdd = isActionType === 'add-processor'; + + useEffect(() => { + if (isEdit && selectedProcessorData && expandedPipelineData?.config) { + const findRecordIndex = getRecordIndex( + expandedPipelineData?.config, + selectedProcessorData, + 'id', + ); + + const updatedProcessorData = { + ...expandedPipelineData?.config?.[findRecordIndex], + }; + setProcessorType(updatedProcessorData.type); + form.setFieldsValue(updatedProcessorData); + } + if (isAdd) { + form.resetFields(); + } + }, [form, isEdit, isAdd, selectedProcessorData, expandedPipelineData?.config]); + + const handleProcessorType = (value: string | unknown): void => { + const typedValue = String(value) || DEFAULT_PROCESSOR_TYPE; + setProcessorType(typedValue); + }; + + const onFinish = (values: { name: string }): void => { + const totalDataLength = expandedPipelineData?.config?.length || 0; + + const newProcessorData = { + id: values.name.replace(/\s/g, ''), + orderId: Number(totalDataLength || 0) + 1, + type: processorType, + enabled: true, + ...values, + }; + + if (isEdit && selectedProcessorData && expandedPipelineData?.config) { + const findRecordIndex = getRecordIndex( + expandedPipelineData?.config, + selectedProcessorData, + 'id', + ); + + const updatedProcessorData = { + id: values.name.replace(/\s/g, ''), + orderId: expandedPipelineData?.config?.[findRecordIndex].orderId, + type: processorType, + enabled: expandedPipelineData?.config?.[findRecordIndex].enabled, + output: expandedPipelineData?.config?.[findRecordIndex].output, + ...values, + }; + + const editedData = getEditedDataSource( + expandedPipelineData.config, + selectedProcessorData, + 'name', + updatedProcessorData, + ); + + const modifiedProcessorData = { ...expandedPipelineData }; + + modifiedProcessorData.config = editedData; + + setExpandedPipelineData(modifiedProcessorData); + } + if (isAdd && expandedPipelineData) { + const modifiedProcessorData = { + ...expandedPipelineData, + }; + if ( + modifiedProcessorData.config !== undefined && + modifiedProcessorData.config + ) { + modifiedProcessorData.config = [ + 
...modifiedProcessorData.config, + newProcessorData, + ]; + if (totalDataLength > 0) { + modifiedProcessorData.config[totalDataLength - 1].output = + newProcessorData.id; + } + } + setExpandedPipelineData(modifiedProcessorData); + } + setActionType(undefined); + handleProcessorType(DEFAULT_PROCESSOR_TYPE); + }; + + const onCancelModal = (): void => { + setActionType(undefined); + handleProcessorType(DEFAULT_PROCESSOR_TYPE); + }; + + const modalTitle = useMemo( + (): string => + isEdit + ? `${t('edit_processor')} ${selectedProcessorData?.name}` + : t('create_processor'), + [isEdit, selectedProcessorData?.name, t], + ); + + const onOkModalHandler = useCallback( + () => setShowSaveButton(ActionMode.Editing), + [setShowSaveButton], + ); + + const isOpen = useMemo(() => isEdit || isAdd, [isAdd, isEdit]); + + return ( + {modalTitle}} + centered + open={isOpen} + width={800} + footer={null} + onCancel={onCancelModal} + > + +
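The add path above appends the new processor and points the previous last processor's output at the new processor's id; the delete and reorder paths elsewhere in this patch (PipelineExpandView and getProcessorUpdatedRow) re-link the same way, so config behaves like an ordered chain where orderId is the 1-based position and the final entry carries no output. A small self-contained sketch of that re-linking step, using simplified types that are assumptions rather than the patch's own:

// Re-link a processor chain after insert, delete, or reorder (illustrative sketch).
type ProcessorSketch = {
  id: string;
  orderId: number;
  output?: string; // id of the next processor in the chain
};

function relinkProcessors(config: ProcessorSketch[]): ProcessorSketch[] {
  const relinked = config.map((processor, index) => ({ ...processor, orderId: index + 1 }));
  relinked.forEach((processor, index) => {
    if (index < relinked.length - 1) {
      processor.output = relinked[index + 1].id; // point at the next processor
    } else {
      delete processor.output; // the last processor has no output
    }
  });
  return relinked;
}

// Appending a new processor, as the add path above does:
const chain: ProcessorSketch[] = [
  { id: 'parse-json', orderId: 1, output: 'add-env' },
  { id: 'add-env', orderId: 2 },
];
console.log(relinkProcessors([...chain, { id: 'remove-token', orderId: 3 }]));
// parse-json.output -> 'add-env', add-env.output -> 'remove-token', remove-token has no output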
+ + {renderProcessorForm(processorType)} + + + + + + + + +
+ ); +} + +AddNewProcessor.defaultProps = { + selectedProcessorData: undefined, + expandedPipelineData: {}, +}; + +interface AddNewProcessorProps { + isActionType: string; + setActionType: (actionType?: ActionType) => void; + selectedProcessorData?: ProcessorData; + setShowSaveButton: (actionMode: ActionMode) => void; + expandedPipelineData?: PipelineData; + setExpandedPipelineData: (data: PipelineData) => void; +} + +export default AddNewProcessor; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.ts b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.ts new file mode 100644 index 0000000000..585ad6284b --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.ts @@ -0,0 +1,46 @@ +import { Avatar, Select } from 'antd'; +import { themeColors } from 'constants/theme'; +import styled from 'styled-components'; + +export const PipelineIndexIcon = styled(Avatar)` + background-color: ${themeColors.navyBlue}; + height: 1.5rem; + width: 1.5rem; + font-size: 0.875rem; + line-height: 1.375rem; +`; + +export const ProcessorTypeWrapper = styled.div` + display: flex; + gap: 1rem; + align-items: flex-start; + margin-bottom: 1.5rem; +`; + +export const ProcessorTypeContainer = styled.div` + display: flex; + flex-direction: column; + padding-bottom: 0.5rem; + gap: 0.313rem; +`; + +export const Container = styled.div` + display: flex; + flex-direction: row; + align-items: flex-start; + padding: 0rem; + gap: 1rem; + width: 100%; +`; + +export const FormWrapper = styled.div` + width: 100%; +`; + +export const ProcessorType = styled.span` + padding-bottom: 0.5rem; +`; + +export const StyledSelect = styled(Select)` + width: 12.5rem; +`; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/utils.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/utils.tsx new file mode 100644 index 0000000000..b0de303157 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/utils.tsx @@ -0,0 +1,9 @@ +import { processorFields, ProcessorFormField } from './config'; +import NameInput from './FormFields/NameInput'; + +export const renderProcessorForm = ( + processorType: string, +): Array => + processorFields[processorType]?.map((fieldName: ProcessorFormField) => ( + + )); diff --git a/frontend/src/container/PipelinePage/PipelineListsView/ModeAndConfiguration.tsx b/frontend/src/container/PipelinePage/PipelineListsView/ModeAndConfiguration.tsx new file mode 100644 index 0000000000..9c6a0d6a17 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/ModeAndConfiguration.tsx @@ -0,0 +1,24 @@ +import { ActionMode } from 'types/api/pipeline/def'; + +import { ModeAndConfigWrapper } from './styles'; + +function ModeAndConfiguration({ + isActionMode, + verison, +}: ModeAndConfigurationType): JSX.Element { + const actionMode = isActionMode === ActionMode.Editing; + + return ( + + Mode: {actionMode ? 'Editing' : 'Viewing'} +
Configuration Version: {verison}
+
+ ); +} + +export interface ModeAndConfigurationType { + isActionMode: string; + verison: string | number; +} + +export default ModeAndConfiguration; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx new file mode 100644 index 0000000000..89dbc4d87d --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx @@ -0,0 +1,269 @@ +import { PlusCircleOutlined } from '@ant-design/icons'; +import { TableLocale } from 'antd/es/table/interface'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import React, { useCallback, useMemo } from 'react'; +import { DndProvider } from 'react-dnd'; +import { HTML5Backend } from 'react-dnd-html5-backend'; +import { useTranslation } from 'react-i18next'; +import { + ActionMode, + ActionType, + PipelineData, + ProcessorData, +} from 'types/api/pipeline/def'; + +import { tableComponents } from '../config'; +import { ModalFooterTitle } from '../styles'; +import { AlertMessage } from '.'; +import { processorColumns } from './config'; +import { FooterButton, StyledTable } from './styles'; +import DragAction from './TableComponents/DragAction'; +import PipelineActions from './TableComponents/PipelineActions'; +import { + getEditedDataSource, + getProcessorUpdatedRow, + getRecordIndex, + getTableColumn, +} from './utils'; + +function PipelineExpandView({ + handleAlert, + setActionType, + processorEditAction, + isActionMode, + setShowSaveButton, + expandedPipelineData, + setExpandedPipelineData, + prevPipelineData, +}: PipelineExpandViewProps): JSX.Element { + const { t } = useTranslation(['pipeline']); + const isDarkMode = useIsDarkMode(); + const isEditingActionMode = isActionMode === ActionMode.Editing; + + const deleteProcessorHandler = useCallback( + (record: ProcessorData) => (): void => { + setShowSaveButton(ActionMode.Editing); + if (expandedPipelineData && expandedPipelineData?.config) { + const filteredData = expandedPipelineData?.config.filter( + (item: ProcessorData) => item.id !== record.id, + ); + const pipelineData = { ...expandedPipelineData }; + pipelineData.config = filteredData; + pipelineData.config.forEach((item, index) => { + const obj = item; + obj.orderId = index + 1; + }); + for (let i = 0; i < pipelineData.config.length - 1; i += 1) { + pipelineData.config[i].output = pipelineData.config[i + 1].id; + } + delete pipelineData.config[pipelineData.config.length - 1]?.output; + setExpandedPipelineData(pipelineData); + } + }, + [expandedPipelineData, setShowSaveButton, setExpandedPipelineData], + ); + + const processorDeleteAction = useCallback( + (record: ProcessorData) => (): void => { + handleAlert({ + title: `${t('delete_processor')} : ${record.name}?`, + descrition: t('delete_processor_description'), + buttontext: t('delete'), + onOk: deleteProcessorHandler(record), + }); + }, + [handleAlert, deleteProcessorHandler, t], + ); + + const onSwitchProcessorChange = useCallback( + (checked: boolean, record: ProcessorData): void => { + if (expandedPipelineData && expandedPipelineData?.config) { + setShowSaveButton(ActionMode.Editing); + const findRecordIndex = getRecordIndex( + expandedPipelineData?.config, + record, + 'id', + ); + const updateSwitch = { + ...expandedPipelineData?.config[findRecordIndex], + enabled: checked, + }; + const editedData = getEditedDataSource( + expandedPipelineData?.config, + record, + 'id', + updateSwitch, + ); + const modifiedProcessorData = { 
...expandedPipelineData }; + modifiedProcessorData.config = editedData; + + setExpandedPipelineData(modifiedProcessorData); + } + }, + [expandedPipelineData, setExpandedPipelineData, setShowSaveButton], + ); + + const columns = useMemo(() => { + const fieldColumns = getTableColumn(processorColumns); + if (isEditingActionMode) { + fieldColumns.push( + { + title: '', + dataIndex: 'action', + key: 'action', + render: (_value, record): JSX.Element => ( + + ), + }, + { + title: '', + dataIndex: 'enabled', + key: 'enabled', + render: (value, record) => ( + + onSwitchProcessorChange(checked, record) + } + /> + ), + }, + ); + } + return fieldColumns; + }, [ + isEditingActionMode, + processorEditAction, + processorDeleteAction, + onSwitchProcessorChange, + ]); + + const reorderProcessorRow = useCallback( + (updatedRow: ProcessorData[]) => (): void => { + setShowSaveButton(ActionMode.Editing); + if (expandedPipelineData) { + const modifiedProcessorData = { ...expandedPipelineData }; + modifiedProcessorData.config = updatedRow; + setExpandedPipelineData(modifiedProcessorData); + } + }, + [expandedPipelineData, setShowSaveButton, setExpandedPipelineData], + ); + + const onCancelReorderProcessorRow = useCallback( + () => (): void => { + if (expandedPipelineData) setExpandedPipelineData(expandedPipelineData); + }, + [expandedPipelineData, setExpandedPipelineData], + ); + + const moveProcessorRow = useCallback( + (dragIndex: number, hoverIndex: number) => { + if (expandedPipelineData?.config && isEditingActionMode) { + const updatedRow = getProcessorUpdatedRow( + expandedPipelineData?.config, + dragIndex, + hoverIndex, + ); + handleAlert({ + title: t('reorder_processor'), + descrition: t('reorder_processor_description'), + buttontext: t('reorder'), + onOk: reorderProcessorRow(updatedRow), + onCancel: onCancelReorderProcessorRow(), + }); + } + }, + [ + expandedPipelineData?.config, + isEditingActionMode, + handleAlert, + t, + reorderProcessorRow, + onCancelReorderProcessorRow, + ], + ); + + const addNewProcessorHandler = useCallback((): void => { + setActionType(ActionType.AddProcessor); + }, [setActionType]); + + const footer = useCallback((): JSX.Element | undefined => { + if (prevPipelineData.length === 0 || isEditingActionMode) { + return ( + + + {t('add_new_processor')} + + ); + } + return undefined; + }, [isEditingActionMode, prevPipelineData, addNewProcessorHandler, t]); + + const onRowHandler = ( + _data: ProcessorData, + index?: number, + ): React.HTMLAttributes => + ({ + index, + moveRow: moveProcessorRow, + } as React.HTMLAttributes); + + const processorData = useMemo( + () => + expandedPipelineData?.config && + expandedPipelineData?.config.map( + (item: ProcessorData): ProcessorData => ({ + id: item.id, + orderId: item.orderId, + type: item.type, + name: item.name, + enabled: item.enabled, + }), + ), + [expandedPipelineData], + ); + + const getLocales = (): TableLocale => ({ + emptyText: , + }); + + return ( + + + + ); +} + +PipelineExpandView.defaultProps = { + expandedPipelineData: {}, +}; + +interface PipelineExpandViewProps { + handleAlert: (props: AlertMessage) => void; + setActionType: (actionType?: ActionType) => void; + processorEditAction: (record: ProcessorData) => () => void; + isActionMode: string; + setShowSaveButton: (actionMode: ActionMode) => void; + expandedPipelineData?: PipelineData; + setExpandedPipelineData: (data: PipelineData) => void; + prevPipelineData: Array; +} + +export default PipelineExpandView; diff --git 
a/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx new file mode 100644 index 0000000000..e7aa0ecedc --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx @@ -0,0 +1,33 @@ +import { Button } from 'antd'; +import { useTranslation } from 'react-i18next'; + +import { SaveConfigWrapper } from './styles'; + +function SaveConfigButton({ + onSaveConfigurationHandler, + onCancelConfigurationHandler, +}: SaveConfigButtonTypes): JSX.Element { + const { t } = useTranslation('pipeline'); + + return ( + + + + + ); +} +export interface SaveConfigButtonTypes { + onSaveConfigurationHandler: VoidFunction; + onCancelConfigurationHandler: VoidFunction; +} + +export default SaveConfigButton; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/DragAction.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/DragAction.tsx new file mode 100644 index 0000000000..c35c8b53dc --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/DragAction.tsx @@ -0,0 +1,21 @@ +import { HolderOutlined } from '@ant-design/icons'; +import { Switch } from 'antd'; + +import { holdIconStyle } from '../config'; +import { LastActionColumn } from '../styles'; + +function DragAction({ isEnabled, onChange }: DragActionProps): JSX.Element { + return ( + + + + + ); +} + +interface DragActionProps { + isEnabled: boolean; + onChange: (checked: boolean) => void; +} + +export default DragAction; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/PipelineActions.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/PipelineActions.tsx new file mode 100644 index 0000000000..1f86d675e8 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/PipelineActions.tsx @@ -0,0 +1,28 @@ +import { IconListStyle } from '../styles'; +import DeleteAction from './TableActions/DeleteAction'; +import EditAction from './TableActions/EditAction'; +// import ViewAction from './TableActions/ViewAction'; + +function PipelineActions({ + isPipelineAction, + editAction, + deleteAction, +}: PipelineActionsProps): JSX.Element { + return ( + + + {/* */} + + + ); +} + +export interface PipelineActionsProps { + isPipelineAction: boolean; + editAction: VoidFunction; + deleteAction: VoidFunction; +} +export default PipelineActions; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/DeleteAction.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/DeleteAction.tsx new file mode 100644 index 0000000000..27c5189938 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/DeleteAction.tsx @@ -0,0 +1,23 @@ +import { DeleteFilled } from '@ant-design/icons'; + +import { iconStyle, smallIconStyle } from '../../config'; + +function DeleteAction({ + isPipelineAction, + deleteAction, +}: DeleteActionProps): JSX.Element { + if (isPipelineAction) { + return ; + } + return ( + + + + ); +} + +export interface DeleteActionProps { + isPipelineAction: boolean; + deleteAction: VoidFunction; +} +export default DeleteAction; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/EditAction.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/EditAction.tsx new file mode 100644 index 
0000000000..14b53b1fd8 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/EditAction.tsx @@ -0,0 +1,23 @@ +import { EditOutlined } from '@ant-design/icons'; + +import { iconStyle, smallIconStyle } from '../../config'; + +function EditAction({ + isPipelineAction, + editAction, +}: EditActionProps): JSX.Element { + if (isPipelineAction) { + return ; + } + return ( + + + + ); +} + +export interface EditActionProps { + isPipelineAction: boolean; + editAction: VoidFunction; +} +export default EditAction; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/ViewAction.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/ViewAction.tsx new file mode 100644 index 0000000000..0260114a81 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableActions/ViewAction.tsx @@ -0,0 +1,19 @@ +import { CopyFilled, EyeFilled } from '@ant-design/icons'; + +import { iconStyle, smallIconStyle } from '../../config'; + +function ViewAction({ isPipelineAction }: ViewActionProps): JSX.Element { + if (isPipelineAction) { + return ; + } + return ( + + + + ); +} + +export interface ViewActionProps { + isPipelineAction: boolean; +} +export default ViewAction; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableExpandIcon.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableExpandIcon.tsx new file mode 100644 index 0000000000..04d5bee5d5 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/TableExpandIcon.tsx @@ -0,0 +1,28 @@ +import { DownOutlined, RightOutlined } from '@ant-design/icons'; +import React from 'react'; +import { PipelineData } from 'types/api/pipeline/def'; + +function TableExpandIcon({ + expanded, + onExpand, + record, +}: TableExpandIconProps): JSX.Element { + const handleOnExpand = ( + e: React.MouseEvent, + ): void => { + onExpand(record, e); + }; + + if (expanded) { + return ; + } + return ; +} + +interface TableExpandIconProps { + expanded: boolean; + onExpand: (record: PipelineData, e: React.MouseEvent) => void; + record: PipelineData; +} + +export default TableExpandIcon; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/Tags.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/Tags.tsx new file mode 100644 index 0000000000..2f373b2051 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/Tags.tsx @@ -0,0 +1,19 @@ +import { Tag } from 'antd'; + +function Tags({ tags }: TagsProps): JSX.Element { + return ( + + {tags?.map((tag) => ( + + {tag} + + ))} + + ); +} + +interface TagsProps { + tags: Array; +} + +export default Tags; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/index.tsx new file mode 100644 index 0000000000..4d351c7a41 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/TableComponents/index.tsx @@ -0,0 +1,41 @@ +import dayjs from 'dayjs'; +import React from 'react'; +import { PipelineData, ProcessorData } from 'types/api/pipeline/def'; + +import { PipelineIndexIcon } from '../AddNewProcessor/styles'; +import { ColumnDataStyle, ListDataStyle, ProcessorIndexIcon } from '../styles'; + +const componentMap: ComponentMap = { + orderId: ({ record }) => {record}, + createdAt: ({ record }) => ( + + 
{dayjs(record).locale('en').format('MMMM DD, YYYY hh:mm A')} + + ), + id: ({ record }) => {record}, + name: ({ record }) => {record}, +}; + +function TableComponents({ + columnKey, + record, +}: TableComponentsProps): JSX.Element { + const Component = + componentMap[columnKey] ?? + (({ record }): JSX.Element => {record}); + + return ; +} + +type ComponentMap = { + [key: string]: React.FC<{ record: Record }>; +}; + +export type Record = PipelineData['orderId'] & ProcessorData; + +interface TableComponentsProps { + columnKey: string; + record: Record; +} + +export default TableComponents; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/config.ts b/frontend/src/container/PipelinePage/PipelineListsView/config.ts new file mode 100644 index 0000000000..44f6bcc0d7 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/config.ts @@ -0,0 +1,131 @@ +import { ColumnGroupType, ColumnType } from 'antd/lib/table/interface'; +import { + HistoryData, + PipelineData, + ProcessorData, +} from 'types/api/pipeline/def'; + +import DeploymentStage from '../Layouts/ChangeHistory/DeploymentStage'; +import DeploymentTime from '../Layouts/ChangeHistory/DeploymentTime'; +import DescriptionTextArea from './AddNewPipeline/FormFields/DescriptionTextArea'; +import NameInput from './AddNewPipeline/FormFields/NameInput'; + +export const pipelineFields = [ + { + id: 1, + fieldName: 'Filter', + placeholder: 'search_pipeline_placeholder', + name: 'filter', + component: NameInput, + }, + { + id: 2, + fieldName: 'Name', + placeholder: 'pipeline_name_placeholder', + name: 'name', + component: NameInput, + }, + { + id: 4, + fieldName: 'Description', + placeholder: 'pipeline_description_placeholder', + name: 'description', + component: DescriptionTextArea, + }, +]; + +export const tagInputStyle: React.CSSProperties = { + width: 78, + verticalAlign: 'top', + flex: 1, +}; + +export const pipelineColumns: Array< + ColumnType | ColumnGroupType +> = [ + { + key: 'orderId', + title: '', + dataIndex: 'orderId', + }, + { + key: 'name', + title: 'Pipeline Name', + dataIndex: 'name', + }, + { + key: 'filter', + title: 'Filters', + dataIndex: 'filter', + }, + + { + key: 'createdAt', + title: 'Last Edited', + dataIndex: 'createdAt', + }, + { + key: 'createdBy', + title: 'Edited By', + dataIndex: 'createdBy', + }, +]; + +export const processorColumns: Array< + ColumnType | ColumnGroupType +> = [ + { + key: 'id', + title: '', + dataIndex: 'orderId', + width: 150, + }, + { + key: 'name', + title: '', + dataIndex: 'name', + }, +]; + +export const changeHistoryColumns: Array< + ColumnType | ColumnGroupType +> = [ + { + key: 'version', + title: 'Version', + dataIndex: 'version', + }, + { + title: 'Deployment Stage', + key: 'deployStatus', + dataIndex: 'deployStatus', + render: DeploymentStage, + }, + { + key: 'deployResult', + title: 'Last Deploy Message', + dataIndex: 'deployResult', + ellipsis: true, + }, + { + key: 'createdAt', + title: 'Last Deployed Time', + dataIndex: 'createdAt', + render: DeploymentTime, + }, + { + key: 'createdByName', + title: 'Edited by', + dataIndex: 'createdByName', + }, +]; + +export const formValidationRules = [ + { + required: true, + }, +]; + +export const iconStyle = { fontSize: '1.5rem' }; +export const smallIconStyle = { fontSize: '1rem' }; +export const holdIconStyle = { ...iconStyle, cursor: 'move' }; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/index.tsx new file mode 100644 index 
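TableComponents above looks up a cell renderer in componentMap by column key and falls back to a plain renderer when the key is unknown. A compact sketch of that registry-with-fallback lookup, using string-returning functions in place of React components (names and formatting are illustrative only):

// Column-key to renderer registry with a default fallback (illustrative sketch).
type CellRenderer = (value: string) => string;

const rendererRegistry: Record<string, CellRenderer> = {
  orderId: (value) => `[#${value}]`,
  createdAt: (value) => new Date(value).toUTCString(),
  name: (value) => value.toUpperCase(),
};

const defaultRenderer: CellRenderer = (value) => value;

function renderCell(columnKey: string, value: string): string {
  const renderer = rendererRegistry[columnKey] ?? defaultRenderer;
  return renderer(value);
}

console.log(renderCell('orderId', '3'));                            // "[#3]"
console.log(renderCell('createdAt', '2023-02-09T12:00:00Z'));       // formatted date
console.log(renderCell('filter', 'attributes.source == "nginx"'));  // falls back unchanged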
0000000000..9ae3cf6ab9 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/index.tsx @@ -0,0 +1,467 @@ +import { ExclamationCircleOutlined, PlusOutlined } from '@ant-design/icons'; +import { Modal, Table } from 'antd'; +import { ExpandableConfig } from 'antd/es/table/interface'; +import savePipeline from 'api/pipeline/post'; +import { useNotifications } from 'hooks/useNotifications'; +import { cloneDeep } from 'lodash-es'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; +import { DndProvider } from 'react-dnd'; +import { HTML5Backend } from 'react-dnd-html5-backend'; +import { useTranslation } from 'react-i18next'; +import { + ActionMode, + ActionType, + Pipeline, + PipelineData, + ProcessorData, +} from 'types/api/pipeline/def'; +import { v4 } from 'uuid'; + +import { tableComponents } from '../config'; +import AddNewPipeline from './AddNewPipeline'; +import AddNewProcessor from './AddNewProcessor'; +import { pipelineColumns } from './config'; +import ModeAndConfiguration from './ModeAndConfiguration'; +import PipelineExpanView from './PipelineExpandView'; +import SaveConfigButton from './SaveConfigButton'; +import { + AlertContentWrapper, + AlertModalTitle, + Container, + FooterButton, +} from './styles'; +import DragAction from './TableComponents/DragAction'; +import PipelineActions from './TableComponents/PipelineActions'; +import TableExpandIcon from './TableComponents/TableExpandIcon'; +import { + getDataOnSearch, + getEditedDataSource, + getElementFromArray, + getRecordIndex, + getTableColumn, + getUpdatedRow, +} from './utils'; + +function PipelineListsView({ + isActionType, + setActionType, + isActionMode, + setActionMode, + piplineData, + refetchPipelineLists, + pipelineSearchValue, +}: PipelineListsViewProps): JSX.Element { + const { t } = useTranslation(['pipeline', 'common']); + const [modal, contextHolder] = Modal.useModal(); + const { notifications } = useNotifications(); + const [prevPipelineData, setPrevPipelineData] = useState>( + cloneDeep(piplineData?.pipelines), + ); + const [currPipelineData, setCurrPipelineData] = useState>( + cloneDeep(piplineData?.pipelines), + ); + const [ + expandedPipelineData, + setExpandedPipelineData, + ] = useState(); + const [ + selectedProcessorData, + setSelectedProcessorData, + ] = useState(); + const [ + selectedPipelineData, + setSelectedPipelineData, + ] = useState(); + const [expandedRowKeys, setExpandedRowKeys] = useState>(); + const [showSaveButton, setShowSaveButton] = useState(); + const isEditingActionMode = isActionMode === ActionMode.Editing; + + useEffect(() => { + if (pipelineSearchValue === '') setCurrPipelineData(piplineData?.pipelines); + if (pipelineSearchValue !== '') { + const filterData = piplineData?.pipelines.filter((data: PipelineData) => + getDataOnSearch(data as never, pipelineSearchValue), + ); + setCurrPipelineData(filterData); + } + }, [pipelineSearchValue, piplineData?.pipelines]); + + const handleAlert = useCallback( + ({ title, descrition, buttontext, onCancel, onOk }: AlertMessage) => { + modal.confirm({ + title: {title}, + icon: , + content: {descrition}, + okText: {buttontext}, + cancelText: {t('cancel')}, + onOk, + onCancel, + }); + }, + [modal, t], + ); + + const pipelineEditAction = useCallback( + (record: PipelineData) => (): void => { + setActionType(ActionType.EditPipeline); + setSelectedPipelineData(record); + }, + [setActionType], + ); + + const pipelineDeleteHandler = useCallback( + (record: PipelineData) => (): void => { + 
setShowSaveButton(ActionMode.Editing); + const filteredData = getElementFromArray(currPipelineData, record, 'id'); + filteredData.forEach((item, index) => { + const obj = item; + obj.orderId = index + 1; + }); + setCurrPipelineData(filteredData); + }, + [currPipelineData], + ); + + const pipelineDeleteAction = useCallback( + (record: PipelineData) => (): void => { + handleAlert({ + title: `${t('delete_pipeline')} : ${record.name}?`, + descrition: t('delete_pipeline_description'), + buttontext: t('delete'), + onOk: pipelineDeleteHandler(record), + }); + }, + [handleAlert, pipelineDeleteHandler, t], + ); + + const processorEditAction = useCallback( + (record: ProcessorData) => (): void => { + setActionType(ActionType.EditProcessor); + setSelectedProcessorData(record); + }, + [setActionType], + ); + + const onSwitchPipelineChange = useCallback( + (checked: boolean, record: PipelineData): void => { + setShowSaveButton(ActionMode.Editing); + const findRecordIndex = getRecordIndex(currPipelineData, record, 'id'); + const updateSwitch = { + ...currPipelineData[findRecordIndex], + enabled: checked, + }; + const editedPipelineData = getEditedDataSource( + currPipelineData, + record, + 'id', + updateSwitch, + ); + setCurrPipelineData(editedPipelineData); + }, + [currPipelineData], + ); + + const columns = useMemo(() => { + const fieldColumns = getTableColumn(pipelineColumns); + if (isEditingActionMode) { + fieldColumns.push( + { + title: 'Actions', + dataIndex: 'smartAction', + key: 'smartAction', + align: 'center', + render: (_value, record): JSX.Element => ( + + ), + }, + { + title: '', + dataIndex: 'enabled', + key: 'enabled', + render: (value, record) => ( + + onSwitchPipelineChange(checked, record) + } + /> + ), + }, + ); + } + return fieldColumns; + }, [ + isEditingActionMode, + pipelineEditAction, + pipelineDeleteAction, + onSwitchPipelineChange, + ]); + + const updatePipelineSequence = useCallback( + (updatedRow: PipelineData[]) => (): void => { + setShowSaveButton(ActionMode.Editing); + setCurrPipelineData(updatedRow); + }, + [], + ); + + const onCancelPipelineSequence = useCallback( + (rawData: PipelineData[]) => (): void => { + setCurrPipelineData(rawData); + }, + [], + ); + + const movePipelineRow = useCallback( + (dragIndex: number, hoverIndex: number) => { + if (currPipelineData && isEditingActionMode) { + const rawData = currPipelineData; + const updatedRow = getUpdatedRow(currPipelineData, dragIndex, hoverIndex); + updatedRow.forEach((item, index) => { + const obj = item; + obj.orderId = index + 1; + }); + handleAlert({ + title: t('reorder_pipeline'), + descrition: t('reorder_pipeline_description'), + buttontext: t('reorder'), + onOk: updatePipelineSequence(updatedRow), + onCancel: onCancelPipelineSequence(rawData), + }); + } + }, + [ + currPipelineData, + isEditingActionMode, + handleAlert, + t, + updatePipelineSequence, + onCancelPipelineSequence, + ], + ); + + const expandedRowView = useCallback( + (): JSX.Element => ( + + ), + [ + handleAlert, + processorEditAction, + isActionMode, + expandedPipelineData, + setActionType, + prevPipelineData, + ], + ); + + const onExpand = useCallback( + (expanded: boolean, record: PipelineData): void => { + const keys = []; + if (expanded && record.id) { + keys.push(record?.id); + } + setExpandedRowKeys(keys); + setExpandedPipelineData(record); + }, + [], + ); + + const getExpandIcon = ( + expanded: boolean, + onExpand: (record: PipelineData, e: React.MouseEvent) => void, + record: PipelineData, + ): JSX.Element => ( + + ); + + const 
addNewPipelineHandler = useCallback((): void => { + setActionType(ActionType.AddPipeline); + }, [setActionType]); + + const footer = useCallback((): JSX.Element | undefined => { + if (isEditingActionMode) { + return ( + } + > + {t('add_new_pipeline')} + + ); + } + return undefined; + }, [isEditingActionMode, addNewPipelineHandler, t]); + + const onSaveConfigurationHandler = useCallback(async () => { + const modifiedPipelineData = currPipelineData.map((item: PipelineData) => { + const pipelineData = item; + if ( + expandedPipelineData !== undefined && + item.id === expandedPipelineData?.id + ) { + pipelineData.config = expandedPipelineData?.config; + } + pipelineData.config = item.config; + return pipelineData; + }); + modifiedPipelineData.forEach((item: PipelineData) => { + const pipelineData = item; + delete pipelineData?.id; + return pipelineData; + }); + const response = await savePipeline({ + data: { pipelines: modifiedPipelineData }, + }); + if (response.statusCode === 200) { + refetchPipelineLists(); + setActionMode(ActionMode.Viewing); + setShowSaveButton(undefined); + setCurrPipelineData(response.payload?.pipelines); + setPrevPipelineData(response.payload?.pipelines); + } else { + modifiedPipelineData.forEach((item: PipelineData) => { + const pipelineData = item; + pipelineData.id = v4(); + return pipelineData; + }); + setActionMode(ActionMode.Editing); + setShowSaveButton(ActionMode.Editing); + notifications.error({ + message: 'Error', + description: response.error || t('something_went_wrong'), + }); + setCurrPipelineData(modifiedPipelineData); + setPrevPipelineData(modifiedPipelineData); + } + }, [ + currPipelineData, + expandedPipelineData, + notifications, + refetchPipelineLists, + setActionMode, + t, + ]); + + const onCancelConfigurationHandler = useCallback((): void => { + setActionMode(ActionMode.Viewing); + setShowSaveButton(undefined); + prevPipelineData.forEach((item, index) => { + const obj = item; + obj.orderId = index + 1; + if (obj.config) { + obj.config?.forEach((configItem, index) => { + const config = configItem; + config.orderId = index + 1; + }); + for (let i = 0; i < obj.config.length - 1; i += 1) { + obj.config[i].output = obj.config[i + 1].id; + } + } + }); + setCurrPipelineData(prevPipelineData); + setExpandedRowKeys([]); + }, [prevPipelineData, setActionMode]); + + const onRowHandler = ( + _data: PipelineData, + index?: number, + ): React.HTMLAttributes => + ({ + index, + moveRow: movePipelineRow, + } as React.HTMLAttributes); + + const expandableConfig: ExpandableConfig = { + expandedRowKeys, + onExpand, + expandIcon: ({ expanded, onExpand, record }: ExpandRowConfig) => + getExpandIcon(expanded, onExpand, record), + }; + + return ( + <> + {contextHolder} + + + + + +
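Edits in this view (toggling enabled, replacing an edited pipeline or processor) are written immutably through the generic getRecordIndex and getEditedDataSource helpers imported from ./utils, which appear later in this patch. A self-contained sketch of that find-then-replace pattern, with simplified example data that is not taken from the patch:

// Find-by-key and replace-by-key helpers mirroring the utils used in this view (sketch).
function getRecordIndexSketch<T>(arr: T[], target: T, key: keyof T): number {
  return arr.findIndex((item) => item[key] === target[key]);
}

function getEditedDataSourceSketch<T>(arr: T[], target: T, key: keyof T, edited: T): T[] {
  return arr.map((item) => (item[key] === target[key] ? edited : item));
}

type PipelineSketch = { id: string; name: string; enabled: boolean };

const pipelines: PipelineSketch[] = [
  { id: 'p1', name: 'nginx access logs', enabled: true },
  { id: 'p2', name: 'app errors', enabled: true },
];

// Toggle enabled on one record without mutating the original array:
const target = pipelines[1];
const index = getRecordIndexSketch(pipelines, target, 'id');
const updated = { ...pipelines[index], enabled: false };
console.log(getEditedDataSourceSketch(pipelines, target, 'id', updated));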
+ + {showSaveButton && ( + + )} + + + ); +} + +interface PipelineListsViewProps { + isActionType: string; + setActionType: (actionType?: ActionType) => void; + isActionMode: string; + setActionMode: (actionMode: ActionMode) => void; + piplineData: Pipeline; + refetchPipelineLists: VoidFunction; + pipelineSearchValue: string; +} + +interface ExpandRowConfig { + expanded: boolean; + onExpand: (record: PipelineData, e: React.MouseEvent) => void; + record: PipelineData; +} + +export interface AlertMessage { + title: string; + descrition: string; + buttontext: string; + onOk: VoidFunction; + onCancel?: VoidFunction; +} + +export default PipelineListsView; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/styles.ts b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts new file mode 100644 index 0000000000..0b3ddbff3d --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts @@ -0,0 +1,113 @@ +import { Avatar, Button, Table, Typography } from 'antd'; +import { TableProps } from 'antd/lib/table'; +import { themeColors } from 'constants/theme'; +import { StyledCSS } from 'container/GantChart/Trace/styles'; +import styled from 'styled-components'; + +export const FooterButton = styled(Button)` + display: flex; + gap: 0.5rem; + margin-left: 6.2rem; + align-items: center; + font-weight: 400; + font-size: 0.875rem; + line-height: 1.25rem; +`; + +export const IconListStyle = styled.div` + display: flex; + gap: 1rem; + justify-content: flex-end; +`; + +export const ColumnDataStyle = styled.span` + font-size: 0.75rem; +`; + +export const ListDataStyle = styled.div` + margin: 0.125rem; + padding: 0.313rem; + border: none; + font-style: normal; + font-weight: 400; + font-size: 0.75rem; + line-height: 1.25rem; +`; + +export const ProcessorIndexIcon = styled(Avatar)` + background-color: ${themeColors.navyBlue}; + height: 1rem; + width: 1rem; + font-size: 0.75rem; + line-height: 0.813rem; + font-weight: 400; +`; + +export const StyledTable: React.FC< + // eslint-disable-next-line @typescript-eslint/no-explicit-any + TableProps & { isDarkMode: boolean } +> = styled(Table)` + .ant-table-tbody > tr > td { + border: none; + } + + .ant-table-tbody > tr:last-child > td { + border: none; + } + .ant-table-content { + background: ${({ isDarkMode }: { isDarkMode: boolean }): StyledCSS => + isDarkMode ? 
themeColors.neroBlack : themeColors.snowWhite}; + } +`; + +export const AlertContentWrapper = styled.div` + font-weight: 400; + font-style: normal; + font-size: 0.75rem; + margin-bottom: 0.5rem; +`; + +export const AlertModalTitle = styled.h1` + font-weight: 600; + font-size: 0.875rem; + line-height: 1rem; +`; + +export const Container = styled.div` + margin-top: 3rem; +`; + +export const LastActionColumn = styled.div` + display: flex; + justify-content: center; + gap: 1.25rem; + align-items: center; +`; + +export const ModalTitle = styled(Typography.Title)` + font-style: normal; + font-weight: 600; + font-size: 1.125rem; + line-height: 1.5rem; +`; + +export const ModalButtonWrapper = styled.div` + display: flex; + flex-direction: row-reverse; + gap: 0.625rem; +`; + +export const ModeAndConfigWrapper = styled.div` + display: flex; + gap: 0.5rem; + justify-content: flex-end; + color: ${themeColors.gamboge}; + margin: 0.125rem; + padding: 0.313rem; +`; + +export const SaveConfigWrapper = styled.div` + display: flex; + gap: 0.938rem; + margin-top: 1.25rem; +`; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx b/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx new file mode 100644 index 0000000000..306f6d1b4e --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx @@ -0,0 +1,95 @@ +import { ColumnType } from 'antd/lib/table/interface'; +import dayjs from 'dayjs'; +import update from 'react-addons-update'; +import { ProcessorData } from 'types/api/pipeline/def'; + +import TableComponents, { Record } from './TableComponents'; + +export function getElementFromArray( + arr: Array, + target: T, + key: keyof T, +): Array { + return arr.filter((data) => data[key] !== target?.[key]); +} + +export function getRecordIndex( + arr: Array, + target: T, + key: keyof T, +): number { + return arr?.findIndex((item) => item[key] === target?.[key]); +} + +export function getUpdatedRow( + data: Array, + dragIndex: number, + hoverIndex: number, +): Array { + return update(data, { + $splice: [ + [dragIndex, 1], + [hoverIndex, 0, data[dragIndex]], + ], + }); +} + +export function getTableColumn( + columnData: Array>, +): Array> { + return columnData.map(({ title, key, dataIndex, ellipsis, width }) => ({ + title, + dataIndex, + key, + align: key === 'id' ? 'right' : 'left', + ellipsis, + width, + render: (record: Record): JSX.Element => ( + + ), + })); +} + +export function getEditedDataSource( + arr: Array, + target: T, + key: keyof T, + editedArr: T, +): Array { + return arr?.map((data) => (data[key] === target?.[key] ? editedArr : data)); +} + +export function getDataOnSearch( + data: { + [key: string]: never; + }, + searchValue: string, +): boolean { + return Object.keys(data).some((key) => + key === 'createdAt' + ? 
dayjs(data[key]) + .locale('en') + .format('MMMM DD, YYYY hh:mm A') + .includes(searchValue) + : String(data[key]).toLowerCase().includes(searchValue.toLowerCase()), + ); +} + +export function getProcessorUpdatedRow( + processorData: Array, + dragIndex: number, + hoverIndex: number, +): Array { + const data = processorData; + const item = data.splice(dragIndex, 1)[0]; + data.splice(hoverIndex, 0, item); + data.forEach((item, index) => { + const obj = item; + obj.orderId = index + 1; + }); + for (let i = 0; i < data.length - 1; i += 1) { + data[i].output = data[i + 1].id; + } + delete data[data.length - 1].output; + return data; +} diff --git a/frontend/src/container/PipelinePage/components/TagInput.tsx b/frontend/src/container/PipelinePage/components/TagInput.tsx new file mode 100644 index 0000000000..e0583b90bf --- /dev/null +++ b/frontend/src/container/PipelinePage/components/TagInput.tsx @@ -0,0 +1,157 @@ +import { + CloseCircleFilled, + ExclamationCircleOutlined, +} from '@ant-design/icons'; +import { Button, Input, InputRef, message, Modal, Tag, Tooltip } from 'antd'; +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { useTranslation } from 'react-i18next'; + +import { tagInputStyle } from '../PipelineListsView/config'; +import { TagInputWrapper } from './styles'; + +function TagInput({ + setTagsListData, + tagsListData, + placeHolder, +}: TagInputProps): JSX.Element { + const [inputVisible, setInputVisible] = useState(false); + const [inputValue, setInputValue] = useState(''); + const [editInputIndex, setEditInputIndex] = useState(-1); + const [editInputValue, setEditInputValue] = useState(''); + const inputRef = useRef(null); + const editInputRef = useRef(null); + const { t } = useTranslation(['alerts']); + + useEffect(() => { + if (inputVisible) { + inputRef.current?.focus(); + } + }, [inputVisible]); + + useEffect(() => { + editInputRef.current?.focus(); + }, [inputValue]); + + const handleClose = (removedTag: string) => (): void => { + const newTags = tagsListData?.filter((tag) => tag !== removedTag); + setTagsListData(newTags); + }; + + const handleInputChange = (e: React.ChangeEvent): void => { + setInputValue(e.target.value); + }; + + const handleInputConfirm = (): void => { + if (inputValue && tagsListData?.indexOf(inputValue) === -1) { + setTagsListData([...tagsListData, inputValue]); + } + setInputVisible(false); + setInputValue(''); + }; + + const handleEditInputChange = ( + e: React.ChangeEvent, + ): void => { + setEditInputValue(e.target.value); + }; + + const handleEditInputConfirm = (): void => { + const newTags = [...tagsListData]; + newTags[editInputIndex] = editInputValue; + setTagsListData(newTags); + setEditInputIndex(-1); + setInputValue(''); + }; + + const handleClearAll = (): void => { + Modal.confirm({ + title: 'Confirm', + icon: , + content: t('remove_label_confirm'), + onOk() { + setTagsListData([]); + message.success(t('remove_label_success')); + }, + okText: t('button_yes'), + cancelText: t('button_no'), + }); + }; + + const showAllData = tagsListData?.map((tag: string, index: number) => { + if (editInputIndex === index) { + return ( + + ); + } + const isLongTag = tag.length > 20; + const tagElem = ( + + { + setEditInputIndex(index); + setEditInputValue(tag); + e.preventDefault(); + }} + > + {isLongTag ? `${tag.slice(0, 20)}...` : tag} + + + ); + return isLongTag ? 
( + + {tagElem} + + ) : ( + tagElem + ); + }); + + const isButtonVisible = useMemo( + () => tagsListData?.length || inputValue.length || inputValue, + [inputValue, tagsListData?.length], + ); + + return ( + + { + e.preventDefault(); + handleInputConfirm(); + }} + placeholder={placeHolder} + prefix={showAllData} + /> + + {isButtonVisible ? ( + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/DeleteAction.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/DeleteAction.test.tsx.snap new file mode 100644 index 0000000000..7f01c2f6cd --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/DeleteAction.test.tsx.snap @@ -0,0 +1,27 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render DeleteAction section 1`] = ` + + + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/DragAction.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/DragAction.test.tsx.snap new file mode 100644 index 0000000000..a9574993a0 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/DragAction.test.tsx.snap @@ -0,0 +1,66 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render DragAction section 1`] = ` + + .c0 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-box-pack: center; + -webkit-justify-content: center; + -ms-flex-pack: center; + justify-content: center; + gap: 1.25rem; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; +} + +
+ + + + +
+
+`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/EditAction.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/EditAction.test.tsx.snap new file mode 100644 index 0000000000..1bf7485d94 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/EditAction.test.tsx.snap @@ -0,0 +1,27 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render EditAction section 1`] = ` + + + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineActions.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineActions.test.tsx.snap new file mode 100644 index 0000000000..e96d3ac9a1 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineActions.test.tsx.snap @@ -0,0 +1,64 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render PipelineActions section 1`] = ` + + .c0 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + gap: 1rem; + -webkit-box-pack: end; + -webkit-justify-content: flex-end; + -ms-flex-pack: end; + justify-content: flex-end; +} + +
+ + + + + + +
+
+`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap new file mode 100644 index 0000000000..ab46e3dd8e --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap @@ -0,0 +1,141 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render AddNewPipeline section 1`] = ``; + +exports[`PipelinePage should render PipelineExpandView section 1`] = ` + + .c2 { + margin: 0.125rem; + padding: 0.313rem; + border: none; + font-style: normal; + font-weight: 400; + font-size: 0.75rem; + line-height: 1.25rem; +} + +.c1 { + background-color: #1668DC; + height: 1rem; + width: 1rem; + font-size: 0.75rem; + line-height: 0.813rem; + font-weight: 400; +} + +.c0 .ant-table-tbody > tr > td { + border: none; +} + +.c0 .ant-table-tbody > tr:last-child > td { + border: none; +} + +.c0 .ant-table-content { + background: #1d1d1d; +} + +
+
+
+
+
+
+
+ + + + + + + + + + + + + +
+ + + 1 + + + +
+ grok use common asd +
+
+ + + 2 + + + +
+ rename auth +
+
+ + + + + + +
+`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap new file mode 100644 index 0000000000..fc3092c4f6 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap @@ -0,0 +1,326 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render AddNewPipeline section 1`] = ``; + +exports[`PipelinePage container test should render PipelinePageLayout section 1`] = ` + + .c0.c0.c0 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-box-pack: end; + -webkit-justify-content: flex-end; + -ms-flex-pack: end; + justify-content: flex-end; + margin-bottom: 2rem; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; +} + +.c1.c1.c1 { + margin-left: 1rem; +} + +
+ + + + +
+ + + + + + + + + + + + + + + + + + .c0 { + margin-top: 3rem; +} + +.c1 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + gap: 0.5rem; + -webkit-box-pack: end; + -webkit-justify-content: flex-end; + -ms-flex-pack: end; + justify-content: flex-end; + color: #D89614; + margin: 0.125rem; + padding: 0.313rem; +} + +
+
+ Mode: + + Viewing + +
+ Configuration Version: 1 +
+
+
+
+
+
+
+
+ + + + + + + + + + + + + + + + + +
+ + + Pipeline Name + + Filters + + Last Edited + + Edited By +
+
+
+ + + + + + + + + +
+
+ No data +
+
+
+
+
+ +
+
+
+
+ +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinesSearchSection.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinesSearchSection.test.tsx.snap new file mode 100644 index 0000000000..17d488d290 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinesSearchSection.test.tsx.snap @@ -0,0 +1,81 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render PipelinesSearchSection section 1`] = ` + + + + + + + + + + + + + + + + + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/TableExpandIcon.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/TableExpandIcon.test.tsx.snap new file mode 100644 index 0000000000..a3ce5e3110 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/TableExpandIcon.test.tsx.snap @@ -0,0 +1,26 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render TableExpandIcon section 1`] = ` + + + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/TagInput.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/TagInput.test.tsx.snap new file mode 100644 index 0000000000..6a97ec9ad6 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/TagInput.test.tsx.snap @@ -0,0 +1,32 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Pipeline Page should render TagInput section 1`] = ` + + .c0 { + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + width: 100%; +} + +
+ + + + +
+
+`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/Tags.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/Tags.test.tsx.snap new file mode 100644 index 0000000000..1fa7dd1d55 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/Tags.test.tsx.snap @@ -0,0 +1,18 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render Tags section 1`] = ` + + + + server + + + app + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/ViewAction.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/ViewAction.test.tsx.snap new file mode 100644 index 0000000000..c318633144 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/ViewAction.test.tsx.snap @@ -0,0 +1,26 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`PipelinePage container test should render ViewAction section 1`] = ` + + + + + +`; diff --git a/frontend/src/container/PipelinePage/tests/utils.test.ts b/frontend/src/container/PipelinePage/tests/utils.test.ts new file mode 100644 index 0000000000..f433c422b9 --- /dev/null +++ b/frontend/src/container/PipelinePage/tests/utils.test.ts @@ -0,0 +1,88 @@ +import { pipelineMockData } from '../mocks/pipeline'; +import { + processorFields, + processorTypes, +} from '../PipelineListsView/AddNewProcessor/config'; +import { pipelineFields, processorColumns } from '../PipelineListsView/config'; +import { + getEditedDataSource, + getElementFromArray, + getRecordIndex, + getTableColumn, +} from '../PipelineListsView/utils'; + +describe('Utils testing of Pipeline Page', () => { + test('it should be check form field of add pipeline', () => { + expect(pipelineFields.length).toBe(3); + expect(pipelineFields.length).toBeGreaterThan(1); + }); + + test('it should be check processor types field of add pipeline', () => { + expect(processorTypes.length).toBeGreaterThan(1); + }); + + test('it should check form field of add processor', () => { + Object.keys(processorFields).forEach((key) => { + expect(processorFields[key].length).toBeGreaterThan(1); + }); + }); + + test('it should be check data length of pipeline', () => { + expect(pipelineMockData.length).toBe(2); + expect(pipelineMockData.length).toBeGreaterThan(0); + }); + + test('it should be return filtered data and perform deletion', () => { + const filterData = getElementFromArray( + pipelineMockData, + pipelineMockData[0], + 'id', + ); + expect(pipelineMockData).not.toEqual(filterData); + expect(pipelineMockData[0]).not.toEqual(filterData); + }); + + test('it should be return index data and perform deletion', () => { + const findRecordIndex = getRecordIndex( + pipelineMockData, + pipelineMockData[0], + 'id', + ); + expect(pipelineMockData).not.toEqual(findRecordIndex); + expect(pipelineMockData[0]).not.toEqual(findRecordIndex); + }); + + test('it should be return modified column data', () => { + const columnData = getTableColumn(processorColumns); + expect(processorColumns).not.toEqual(columnData); + expect(processorColumns.length).toEqual(columnData.length); + }); + + test('it should be return modified column data', () => { + const findRecordIndex = getRecordIndex( + pipelineMockData, + pipelineMockData[0], + 'name', + ); + const updatedPipelineData = { + ...pipelineMockData[findRecordIndex], + name: 'updated name', + description: 'changed description', + filter: 'value == test', + tags: ['test'], + }; + const editedData = getEditedDataSource( + pipelineMockData, + pipelineMockData[0], 
+ 'name', + updatedPipelineData, + ); + expect(pipelineMockData).not.toEqual(editedData); + expect(pipelineMockData.length).toEqual(editedData.length); + expect(pipelineMockData[0].name).not.toEqual(editedData[0].name); + expect(pipelineMockData[0].description).not.toEqual( + editedData[0].description, + ); + expect(pipelineMockData[0].tags).not.toEqual(editedData[0].tags); + }); +}); diff --git a/frontend/src/container/SideNav/config.ts b/frontend/src/container/SideNav/config.ts index 0246bc0e8e..dd1a2a1201 100644 --- a/frontend/src/container/SideNav/config.ts +++ b/frontend/src/container/SideNav/config.ts @@ -1,8 +1,16 @@ import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; +import { themeColors } from 'constants/theme'; export const styles = { background: '#1f1f1f' }; +export const subMenuStyles = { + background: '#1f1f1f', + margin: '0rem', + width: '100%', + color: themeColors.gainsboro, +}; + export const routeConfig: Record = { [ROUTES.SERVICE_METRICS]: [QueryParams.resourceAttributes], [ROUTES.SERVICE_MAP]: [QueryParams.resourceAttributes], @@ -36,4 +44,6 @@ export const routeConfig: Record = { [ROUTES.UN_AUTHORIZED]: [QueryParams.resourceAttributes], [ROUTES.USAGE_EXPLORER]: [QueryParams.resourceAttributes], [ROUTES.VERSION]: [QueryParams.resourceAttributes], + [ROUTES.TRACE_EXPLORER]: [QueryParams.resourceAttributes], + [ROUTES.PIPELINES]: [QueryParams.resourceAttributes], }; diff --git a/frontend/src/container/SideNav/index.tsx b/frontend/src/container/SideNav/index.tsx index 2fe53d4d44..0a4942f115 100644 --- a/frontend/src/container/SideNav/index.tsx +++ b/frontend/src/container/SideNav/index.tsx @@ -53,14 +53,14 @@ function SideNav(): JSX.Element { }, [collapsed, dispatch]); const onClickHandler = useCallback( - (to: string) => { + (key: string) => { const params = new URLSearchParams(search); - const availableParams = routeConfig[to]; + const availableParams = routeConfig[key]; const queryString = getQueryString(availableParams || [], params); - if (pathname !== to) { - history.push(`${to}?${queryString.join('&')}`); + if (pathname !== key) { + history.push(`${key}?${queryString.join('&')}`); } }, [pathname, search], diff --git a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx index 3da53f194c..b7348e3d50 100644 --- a/frontend/src/container/SideNav/menuItems.tsx +++ b/frontend/src/container/SideNav/menuItems.tsx @@ -68,6 +68,11 @@ const menus: SidebarMenu[] = [ // label: 'Views', // }, // ], + // { + // key: ROUTES.PIPELINES, + // label: 'Pipelines', + // }, + // ], }, { key: ROUTES.ALL_DASHBOARD, diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx index f3bcfe560f..ca127b388f 100644 --- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx +++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx @@ -21,6 +21,7 @@ const breadcrumbNameMap = { [ROUTES.ALL_DASHBOARD]: 'Dashboard', [ROUTES.LOGS]: 'Logs', [ROUTES.LOGS_EXPLORER]: 'Logs Explorer', + [ROUTES.PIPELINES]: 'Pipelines', }; function ShowBreadcrumbs(props: RouteComponentProps): JSX.Element { diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts index d327476e08..ef31201da7 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/config.ts +++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts @@ -82,4 +82,5 @@ export const routesToSkip = [ ROUTES.ALERTS_NEW, ROUTES.EDIT_ALERTS, 
ROUTES.LIST_ALL_ALERT, + ROUTES.PIPELINES, ]; diff --git a/frontend/src/pages/Pipelines/index.tsx b/frontend/src/pages/Pipelines/index.tsx new file mode 100644 index 0000000000..8828ad4ab7 --- /dev/null +++ b/frontend/src/pages/Pipelines/index.tsx @@ -0,0 +1,64 @@ +import type { TabsProps } from 'antd'; +import { Tabs } from 'antd'; +import getPipeline from 'api/pipeline/get'; +import Spinner from 'components/Spinner'; +import ChangeHistory from 'container/PipelinePage/Layouts/ChangeHistory'; +import PipelinePage from 'container/PipelinePage/Layouts/Pipeline'; +import { useNotifications } from 'hooks/useNotifications'; +import { useEffect, useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useQuery } from 'react-query'; +import { Pipeline } from 'types/api/pipeline/def'; + +function Pipelines(): JSX.Element { + const { t } = useTranslation('common'); + const { notifications } = useNotifications(); + const { + isLoading, + data: piplineData, + isError, + refetch: refetchPipelineLists, + } = useQuery(['version', 'latest', 'pipeline'], { + queryFn: () => + getPipeline({ + version: 'latest', + }), + }); + + const tabItems: TabsProps['items'] = useMemo( + () => [ + { + key: 'pipelines', + label: `Pipelines`, + children: ( + + ), + }, + { + key: 'change-history', + label: `Change History`, + children: , + }, + ], + [piplineData?.payload, refetchPipelineLists], + ); + + useEffect(() => { + if (piplineData?.error && isError) { + notifications.error({ + message: piplineData?.error || t('something_went_wrong'), + }); + } + }, [isError, notifications, piplineData?.error, t]); + + if (isLoading) { + return ; + } + + return ; +} + +export default Pipelines; diff --git a/frontend/src/types/api/pipeline/def.ts b/frontend/src/types/api/pipeline/def.ts new file mode 100644 index 0000000000..5008cace6c --- /dev/null +++ b/frontend/src/types/api/pipeline/def.ts @@ -0,0 +1,76 @@ +export interface ProcessorData { + type: string; + id?: string; + orderId: number; + name: string; + enabled?: boolean; + output?: string; + parse_to?: string; + pattern?: string; + parse_from?: string; + from?: string; + to?: string; + regex?: string; + on_error?: string; + field?: string; + value?: string; +} + +export interface PipelineData { + alias: string; + config?: Array; + createdAt: string; + description?: string; + createdBy: string; + enabled: boolean; + filter: string; + id?: string; + name: string; + orderId: number; + tags?: Array; // Tags data is missing in API response +} + +export interface HistoryData { + active: boolean; + createdAt: string; + createdBy: string; + createdByName: string; + deployStatus: string; + deployResult: string; + disabled: boolean; + elementType: string; + id: string; + isValid: boolean; + lastConf: string; + lastHash: string; + version: number; +} + +export interface Pipeline { + active: boolean; + createdBy: string; + deployResult: string; + deployStatus: string; + disabled: boolean; + elementType: string; + history: Array; + id: string; + is_valid: boolean; + lastConf: string; + lastHash: string; + pipelines: Array; + version: string | number; +} + +export enum ActionType { + AddPipeline = 'add-pipeline', + EditPipeline = 'edit-pipeline', + AddProcessor = 'add-processor', + EditProcessor = 'edit-processor', +} + +export enum ActionMode { + Viewing = 'viewing-mode', + Editing = 'editing-mode', + Deploying = 'deploying-mode', +} diff --git a/frontend/src/types/api/pipeline/get.ts b/frontend/src/types/api/pipeline/get.ts new file mode 100644 index 
0000000000..969770cd06 --- /dev/null +++ b/frontend/src/types/api/pipeline/get.ts @@ -0,0 +1,3 @@ +export type Props = { + version: string | number; +}; diff --git a/frontend/src/types/api/pipeline/post.ts b/frontend/src/types/api/pipeline/post.ts new file mode 100644 index 0000000000..b4015635b5 --- /dev/null +++ b/frontend/src/types/api/pipeline/post.ts @@ -0,0 +1,5 @@ +import { PipelineData } from './def'; + +export interface Props { + data: { pipelines: Array }; +} diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts index 452d495fec..db2a7eb049 100644 --- a/frontend/src/utils/permission/index.ts +++ b/frontend/src/utils/permission/index.ts @@ -17,7 +17,8 @@ export type ComponentTypes = | 'new_dashboard' | 'new_alert_action' | 'edit_widget' - | 'add_panel'; + | 'add_panel' + | 'page_pipelines'; export const componentPermission: Record = { current_org_settings: ['ADMIN'], @@ -36,6 +37,7 @@ export const componentPermission: Record = { new_alert_action: ['ADMIN'], edit_widget: ['ADMIN', 'EDITOR'], add_panel: ['ADMIN', 'EDITOR'], + page_pipelines: ['ADMIN', 'EDITOR'], }; export const routePermission: Record = { @@ -72,4 +74,6 @@ export const routePermission: Record = { LOGS: ['ADMIN', 'EDITOR', 'VIEWER'], LOGS_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'], LIST_LICENSES: ['ADMIN'], + TRACE_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'], + PIPELINES: ['ADMIN', 'EDITOR', 'VIEWER'], }; diff --git a/frontend/webpack.config.js b/frontend/webpack.config.js index e201a202c8..f03b264435 100644 --- a/frontend/webpack.config.js +++ b/frontend/webpack.config.js @@ -26,6 +26,9 @@ if (process.env.BUNDLE_ANALYSER === 'true') { plugins.push(new BundleAnalyzerPlugin({ analyzerMode: 'server' })); } +/** + * @type {import('webpack').Configuration} + */ const config = { mode: 'development', devtool: 'source-map', @@ -51,6 +54,7 @@ const config = { resolve: { extensions: ['.ts', '.tsx', '.js', '.jsx'], plugins: [new TsconfigPathsPlugin({})], + fallback: { 'process/browser': require.resolve('process/browser') }, }, module: { rules: [ diff --git a/frontend/webpack.config.prod.js b/frontend/webpack.config.prod.js index 78dbb67875..13fad223d5 100644 --- a/frontend/webpack.config.prod.js +++ b/frontend/webpack.config.prod.js @@ -53,6 +53,7 @@ const config = { resolve: { extensions: ['.ts', '.tsx', '.js', '.jsx'], plugins: [new TsconfigPathsPlugin({})], + fallback: { 'process/browser': require.resolve('process/browser') }, }, cache: { type: 'filesystem', diff --git a/frontend/yarn.lock b/frontend/yarn.lock index f1f6cfbe71..769b65d920 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -1850,6 +1850,21 @@ rc-trigger "^5.3.4" rc-util "^5.24.4" +"@react-dnd/asap@^5.0.1": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@react-dnd/asap/-/asap-5.0.2.tgz#1f81f124c1cd6f39511c11a881cfb0f715343488" + integrity sha512-WLyfoHvxhs0V9U+GTsGilGgf2QsPl6ZZ44fnv0/b8T3nQyvzxidxsg/ZltbWssbsRDlYW8UKSQMTGotuTotZ6A== + +"@react-dnd/invariant@^4.0.1": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@react-dnd/invariant/-/invariant-4.0.2.tgz#b92edffca10a26466643349fac7cdfb8799769df" + integrity sha512-xKCTqAK/FFauOM9Ta2pswIyT3D8AQlfrYdOi/toTPEhqCuAs1v5tcJ3Y08Izh1cJ5Jchwy9SeAXmMg6zrKs2iw== + +"@react-dnd/shallowequal@^4.0.1": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@react-dnd/shallowequal/-/shallowequal-4.0.2.tgz#d1b4befa423f692fa4abf1c79209702e7d8ae4b4" + integrity sha512-/RVXdLvJxLg4QKvMoM5WlwNR9ViO9z8B/qPcc+C0Sa/teJY7QG7kJ441DwzOjMYEY7GmU4dj5EcGHIkKZiQZCA== + 
"@sideway/address@^4.1.3": version "4.1.4" resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz" @@ -2314,6 +2329,13 @@ resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz" integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== +"@types/react-addons-update@0.14.21": + version "0.14.21" + resolved "https://registry.yarnpkg.com/@types/react-addons-update/-/react-addons-update-0.14.21.tgz#00feaa412c376cba2dd37a5adc347352ba15f4ed" + integrity sha512-HOxr0Hd8C1L4uw8DHyv2etqMVIj78oLEpe567/HgjoE+1Lc+PUsTGXTrkr1BDvFqsu5r49mSlgI5evwrk9eutA== + dependencies: + "@types/react" "*" + "@types/react-dom@18.0.10", "@types/react-dom@^18.0.0": version "18.0.10" resolved "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.0.10.tgz" @@ -5192,6 +5214,15 @@ dir-glob@^3.0.1: dependencies: path-type "^4.0.0" +dnd-core@^16.0.1: + version "16.0.1" + resolved "https://registry.yarnpkg.com/dnd-core/-/dnd-core-16.0.1.tgz#a1c213ed08961f6bd1959a28bb76f1a868360d19" + integrity sha512-HK294sl7tbw6F6IeuK16YSBUoorvHpY8RHO+9yFfaJyCDVb6n7PRcezrOEOa2SBCqiYpemh5Jx20ZcjKdFAVng== + dependencies: + "@react-dnd/asap" "^5.0.1" + "@react-dnd/invariant" "^4.0.1" + redux "^4.2.0" + dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz" @@ -10523,6 +10554,31 @@ rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: rc-resize-observer "^1.0.0" rc-util "^5.15.0" +react-addons-update@15.6.3: + version "15.6.3" + resolved "https://registry.yarnpkg.com/react-addons-update/-/react-addons-update-15.6.3.tgz#c449c309154024d04087b206d0400e020547b313" + integrity sha512-wBkjgx5cR0XTjZEz5jl2kScChrjI9T7rWVdaM0dLiIdHSgeHycLRdHPPiTgKk7vK18Od4rXmLJv91qofBXlE0A== + dependencies: + object-assign "^4.1.0" + +react-dnd-html5-backend@16.0.1: + version "16.0.1" + resolved "https://registry.yarnpkg.com/react-dnd-html5-backend/-/react-dnd-html5-backend-16.0.1.tgz#87faef15845d512a23b3c08d29ecfd34871688b6" + integrity sha512-Wu3dw5aDJmOGw8WjH1I1/yTH+vlXEL4vmjk5p+MHxP8HuHJS1lAGeIdG/hze1AvNeXWo/JgULV87LyQOr+r5jw== + dependencies: + dnd-core "^16.0.1" + +react-dnd@16.0.1: + version "16.0.1" + resolved "https://registry.yarnpkg.com/react-dnd/-/react-dnd-16.0.1.tgz#2442a3ec67892c60d40a1559eef45498ba26fa37" + integrity sha512-QeoM/i73HHu2XF9aKksIUuamHPDvRglEwdHL4jsp784BgUuWcg6mzfxT0QDdQz8Wj0qyRKx2eMg8iZtWvU4E2Q== + dependencies: + "@react-dnd/invariant" "^4.0.1" + "@react-dnd/shallowequal" "^4.0.1" + dnd-core "^16.0.1" + fast-deep-equal "^3.1.3" + hoist-non-react-statics "^3.3.2" + react-dom@17.0.2: version "17.0.2" resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" @@ -10819,9 +10875,9 @@ redux-thunk@^2.3.0: resolved "https://registry.npmjs.org/redux-thunk/-/redux-thunk-2.4.2.tgz" integrity sha512-+P3TjtnP0k/FEjcBL5FZpoovtvrTNT/UXd4/sluaSyrURlSlhLSzEdfsTBW7WsKB6yPvgd7q/iZPICFjW4o57Q== -redux@^4.0.0, redux@^4.0.5: +redux@^4.0.0, redux@^4.0.5, redux@^4.2.0: version "4.2.1" - resolved "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz" + resolved "https://registry.yarnpkg.com/redux/-/redux-4.2.1.tgz#c08f4306826c49b5e9dc901dee0452ea8fce6197" integrity sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w== dependencies: "@babel/runtime" "^7.9.2" From 68ab022836ce827c8afebc01a2ed8c788144994a Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Wed, 2 Aug 2023 13:06:41 +0530 Subject: [PATCH 10/19] feat: limit offset logic updated for logs list view (#3243) --- 
.../app/logs/v3/query_builder.go | 26 ++++- .../app/logs/v3/query_builder_test.go | 106 ++++++++++++++++++ 2 files changed, 128 insertions(+), 4 deletions(-) diff --git a/pkg/query-service/app/logs/v3/query_builder.go b/pkg/query-service/app/logs/v3/query_builder.go index 4c6ee73bad..2c98280b8f 100644 --- a/pkg/query-service/app/logs/v3/query_builder.go +++ b/pkg/query-service/app/logs/v3/query_builder.go @@ -413,6 +413,13 @@ type Options struct { IsLivetailQuery bool } +func isOrderByTs(orderBy []v3.OrderBy) bool { + if len(orderBy) == 1 && orderBy[0].Key == constants.TIMESTAMP { + return true + } + return false +} + func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options Options) (string, error) { if options.IsLivetailQuery { query, err := buildLogsLiveTailQuery(mq) @@ -446,12 +453,23 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan } if panelType == v3.PanelTypeList { + // check if limit exceeded + if mq.Limit > 0 && mq.Offset >= mq.Limit { + return "", fmt.Errorf("max limit exceeded") + } + if mq.PageSize > 0 { - if mq.Limit > 0 && mq.Offset > mq.Limit { - return "", fmt.Errorf("max limit exceeded") + if mq.Limit > 0 && mq.Offset+mq.PageSize > mq.Limit { + query = addLimitToQuery(query, mq.Limit-mq.Offset) + } else { + query = addLimitToQuery(query, mq.PageSize) } - query = addLimitToQuery(query, mq.PageSize) - query = addOffsetToQuery(query, mq.Offset) + + // add offset to the query only if it is not orderd by timestamp. + if !isOrderByTs(mq.OrderBy) { + query = addOffsetToQuery(query, mq.Offset) + } + } else { query = addLimitToQuery(query, mq.Limit) } diff --git a/pkg/query-service/app/logs/v3/query_builder_test.go b/pkg/query-service/app/logs/v3/query_builder_test.go index 361c1cefa7..95671dbc90 100644 --- a/pkg/query-service/app/logs/v3/query_builder_test.go +++ b/pkg/query-service/app/logs/v3/query_builder_test.go @@ -1185,3 +1185,109 @@ func TestPrepareLogsQuery(t *testing.T) { }) } } + +var testPrepLogsQueryLimitOffsetData = []struct { + Name string + PanelType v3.PanelType + Start int64 + End int64 + Step int64 + BuilderQuery *v3.BuilderQuery + GroupByTags []v3.AttributeKey + TableName string + AggregateOperator v3.AggregateOperator + ExpectedQuery string + Options Options +}{ + { + Name: "Test limit less than pageSize - order by ts", + PanelType: v3.PanelTypeList, + Start: 1680518666000000000, + End: 1691618704365000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, + OrderBy: []v3.OrderBy{{ColumnName: constants.TIMESTAMP, Order: "desc", Key: constants.TIMESTAMP, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}}, + Limit: 1, + Offset: 0, + PageSize: 5, + }, + TableName: "logs", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680518666000000000 AND timestamp <= 1691618704365000000) order 
by timestamp desc LIMIT 1", + }, + { + Name: "Test limit greater than pageSize - order by ts", + PanelType: v3.PanelTypeList, + Start: 1680518666000000000, + End: 1691618704365000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "id", Type: v3.AttributeKeyTypeUnspecified, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Operator: v3.FilterOperatorLessThan, Value: "2TNh4vp2TpiWyLt3SzuadLJF2s4"}, + }}, + OrderBy: []v3.OrderBy{{ColumnName: constants.TIMESTAMP, Order: "desc", Key: constants.TIMESTAMP, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}}, + Limit: 100, + Offset: 10, + PageSize: 10, + }, + TableName: "logs", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680518666000000000 AND timestamp <= 1691618704365000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10", + }, + { + Name: "Test limit less than pageSize - order by custom", + PanelType: v3.PanelTypeList, + Start: 1680518666000000000, + End: 1691618704365000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, + OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "desc", Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + Limit: 1, + Offset: 0, + PageSize: 5, + }, + TableName: "logs", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680518666000000000 AND timestamp <= 1691618704365000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0", + }, + { + Name: "Test limit greater than pageSize - order by custom", + PanelType: v3.PanelTypeList, + Start: 1680518666000000000, + End: 1691618704365000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "id", Type: v3.AttributeKeyTypeUnspecified, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Operator: v3.FilterOperatorLessThan, Value: "2TNh4vp2TpiWyLt3SzuadLJF2s4"}, + }}, + OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "desc", Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + 
Limit: 100, + Offset: 50, + PageSize: 50, + }, + TableName: "logs", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680518666000000000 AND timestamp <= 1691618704365000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50", + }, +} + +func TestPrepareLogsQueryLimitOffset(t *testing.T) { + for _, tt := range testPrepLogsQueryLimitOffsetData { + Convey("TestBuildLogsQuery", t, func() { + query, err := PrepareLogsQuery(tt.Start, tt.End, "", tt.PanelType, tt.BuilderQuery, tt.Options) + So(err, ShouldBeNil) + So(query, ShouldEqual, tt.ExpectedQuery) + + }) + } +} From 562621a1171a5cbdb7d11a4e24f0c8fe2199b1da Mon Sep 17 00:00:00 2001 From: Rajat Dabade Date: Wed, 2 Aug 2023 15:00:58 +0530 Subject: [PATCH 11/19] Service layer to metrics using `USE_SPAN_METRIC` feature flag (#3196) * refactor: remove the dependency of services using redux * refactor: separated columns and unit test case * refactor: move the constant to other file * refactor: updated test case * refactor: removed the duplicate enum * fix: removed the inline function * fix: removed the inline function * refactor: removed the magic string * fix: change the name from matrics to metrics * fix: one on one mapping of props * refactor: created a hook for getting services through api call * fix: linter error * refactor: renamed the file according to functionality * refactor: renamed more files according to functionality * refactor: generic querybuilderWithFormula * refactor: added generic datasource * refactor: dynamic disabled in getQueryBuilderQueriesWithFormula * refactor: generic legend for building query with formulas * feat: added new TopOperationMetrics component for key operation * refactor: added feature flag for key operation * refactor: shifted types and fixed typos * refactor: separated types and renamed file * refactor: one on one mapping * refactor: removed unwanted interfaces and renamed files * refactor: separated types * chore: done with basic structure and moving up the files * chore: moved some files to proper places * feat: added the support for metrics in service layer * refactor: shifted SkipOnBoardingModal logic to parent * refactor: created object to send as an argument for getQueryRangeRequestData * refactor: changes from columns to getColumns * refactor: updated the utils function getServiceListFromQuery * refactor: added memo to getQueryRangeRequestData in service metrics application * refactor: separated constants from ServiceMetricsQuery.ts * refactor: separated mock data and updated test case * refactor: added useMemo on getColumns * refactor: made use of useErrorNotification to show errors * refactor: handled the error case * refactor: one on one mapping * chore: useGetQueriesRange hooks type is updated * refactor: review changes * chore: update type for columnconstants * chore: reverted back the changes lost in merge conflicts --------- Co-authored-by: Vishal Sharma Co-authored-by: Palash Gupta Co-authored-by: Srikanth
Chekuri --- frontend/src/constants/theme.ts | 1 + .../MetricsPageQueries/DBCallQueries.ts | 19 +- .../MetricsPageQueries/ExternalQueries.ts | 83 +++---- .../MetricsPageQueriesFactory.ts | 16 +- .../MetricsPageQueries/OverviewQueries.ts | 8 +- .../MetricsPageQueries/TopOperationQueries.ts | 8 +- .../Tabs/Overview/TopOperationMetrics.tsx | 15 +- .../MetricsApplication/Tabs/types.ts | 4 +- .../container/MetricsApplication/constant.ts | 1 + .../src/container/MetricsApplication/types.ts | 12 + .../Columns/ColumnContants.ts | 24 ++ .../Columns/GetColumnSearchProps.tsx | 34 +++ .../Columns/ServiceColumn.ts | 54 +++++ .../Filter/FilterDropdown.tsx | 41 ++++ .../ServiceMetrics/ServiceMetricTable.tsx | 67 ++++++ .../ServiceMetricsApplication.tsx | 36 +++ .../ServiceMetrics/ServiceMetricsQuery.ts | 208 ++++++++++++++++++ .../ServiceMetrics/index.tsx | 59 +++++ .../ServiceTraces/Service.test.tsx | 50 +++++ .../ServiceTraces/ServiceTracesTable.tsx | 26 +++ .../ServiceTraces/__mocks__/getServices.ts | 22 ++ .../ServiceTraces/index.tsx | 60 +++++ .../SkipOnBoardModal/index.tsx | 48 ++++ .../container/ServiceApplication/index.tsx | 19 ++ .../container/ServiceApplication/styles.ts | 15 ++ .../src/container/ServiceApplication/types.ts | 34 +++ .../src/container/ServiceApplication/utils.ts | 99 +++++++++ .../ServiceTable/Columns/ColumnContants.ts | 4 +- .../hooks/queryBuilder/useGetQueriesRange.ts | 35 +++ .../src/hooks/useGetTopLevelOperations.ts | 16 ++ frontend/src/pages/Services/index.tsx | 54 +---- frontend/src/types/api/metrics/getService.ts | 7 + 32 files changed, 1034 insertions(+), 145 deletions(-) create mode 100644 frontend/src/container/ServiceApplication/Columns/ColumnContants.ts create mode 100644 frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx create mode 100644 frontend/src/container/ServiceApplication/Columns/ServiceColumn.ts create mode 100644 frontend/src/container/ServiceApplication/Filter/FilterDropdown.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsApplication.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts create mode 100644 frontend/src/container/ServiceApplication/ServiceMetrics/index.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceTraces/__mocks__/getServices.ts create mode 100644 frontend/src/container/ServiceApplication/ServiceTraces/index.tsx create mode 100644 frontend/src/container/ServiceApplication/SkipOnBoardModal/index.tsx create mode 100644 frontend/src/container/ServiceApplication/index.tsx create mode 100644 frontend/src/container/ServiceApplication/styles.ts create mode 100644 frontend/src/container/ServiceApplication/types.ts create mode 100644 frontend/src/container/ServiceApplication/utils.ts create mode 100644 frontend/src/hooks/queryBuilder/useGetQueriesRange.ts create mode 100644 frontend/src/hooks/useGetTopLevelOperations.ts diff --git a/frontend/src/constants/theme.ts b/frontend/src/constants/theme.ts index fcb8dd171a..18b7db2b18 100644 --- a/frontend/src/constants/theme.ts +++ b/frontend/src/constants/theme.ts @@ -51,6 +51,7 @@ const themeColors = { snowWhite: '#fafafa', gamboge: '#D89614', bckgGrey: '#1d1d1d', + lightBlue: 
'#177ddc', }; export { themeColors }; diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts index 72b9703cdd..9036b81abb 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts @@ -8,7 +8,7 @@ import { } from 'types/common/queryBuilder'; import { DataType, FORMULA, MetricsType, WidgetKeys } from '../constant'; -import { IServiceName } from '../Tabs/types'; +import { DatabaseCallProps, DatabaseCallsRPSProps } from '../types'; import { getQueryBuilderQueries, getQueryBuilderQuerieswithFormula, @@ -103,8 +103,8 @@ export const databaseCallsAvgDuration = ({ const legends = ['', '']; const disabled = [true, true]; - const legendFormula = 'Average Duration'; - const expression = FORMULA.DATABASE_CALLS_AVG_DURATION; + const legendFormulas = ['Average Duration']; + const expressions = [FORMULA.DATABASE_CALLS_AVG_DURATION]; const aggregateOperators = [ MetricAggregateOperator.SUM, MetricAggregateOperator.SUM, @@ -116,18 +116,9 @@ export const databaseCallsAvgDuration = ({ additionalItems, legends, disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, dataSource, }); }; - -interface DatabaseCallsRPSProps extends DatabaseCallProps { - legend: '{{db_system}}'; -} - -interface DatabaseCallProps { - servicename: IServiceName['servicename']; - tagFilterItems: TagFilterItem[]; -} diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts index b140882c46..18038ecf0d 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts @@ -83,22 +83,18 @@ export const externalCallErrorPercent = ({ }, ...tagFilterItems, ]; + const legendFormulas = [legend]; + const expressions = [FORMULA.ERROR_PERCENTAGE]; + const disabled = [true, true]; + const autocompleteData = [autocompleteDataA, autocompleteDataB]; - const legendFormula = legend; - const expression = FORMULA.ERROR_PERCENTAGE; - const autocompleteData: BaseAutocompleteData[] = [ - autocompleteDataA, - autocompleteDataB, + const additionalItems = [additionalItemsA, additionalItemsB]; + + const aggregateOperators = [ + MetricAggregateOperator.SUM, + MetricAggregateOperator.SUM, ]; - - const additionalItems: TagFilterItem[][] = [ - additionalItemsA, - additionalItemsB, - ]; - - const legends = Array(2).fill(legend); - const aggregateOperators = Array(2).fill(MetricAggregateOperator.SUM); - const disabled = Array(2).fill(true); + const legends = [legend, legend]; const dataSource = DataSource.METRICS; return getQueryBuilderQuerieswithFormula({ @@ -107,8 +103,8 @@ export const externalCallErrorPercent = ({ legends, groupBy, disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, dataSource, }); @@ -130,11 +126,10 @@ export const externalCallDuration = ({ key: WidgetKeys.SignozExternalCallLatencyCount, type: null, }; - - const expression = FORMULA.DATABASE_CALLS_AVG_DURATION; - const legendFormula = 'Average Duration'; + const expressions = [FORMULA.DATABASE_CALLS_AVG_DURATION]; + const legendFormulas = ['Average Duration']; const legend = ''; - const disabled = Array(2).fill(true); + const disabled = [true, true]; const 
additionalItemsA: TagFilterItem[] = [ { id: '', @@ -150,28 +145,25 @@ export const externalCallDuration = ({ ...tagFilterItems, ]; - const autocompleteData: BaseAutocompleteData[] = [ - autocompleteDataA, - autocompleteDataB, - ]; + const autocompleteData = [autocompleteDataA, autocompleteDataB]; - const additionalItems: TagFilterItem[][] = [ - additionalItemsA, - additionalItemsA, + const additionalItems = [additionalItemsA, additionalItemsA]; + const legends = [legend, legend]; + const aggregateOperators = [ + MetricAggregateOperator.SUM, + MetricAggregateOperator.SUM, ]; - - const legends = Array(2).fill(legend); - const aggregateOperators = Array(2).fill(MetricAggregateOperator.SUM); + const dataSource = DataSource.METRICS; return getQueryBuilderQuerieswithFormula({ autocompleteData, additionalItems, legends, disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, - dataSource: DataSource.METRICS, + dataSource, }); }; @@ -234,8 +226,8 @@ export const externalCallDurationByAddress = ({ key: WidgetKeys.SignozExternalCallLatencyCount, type: null, }; - const expression = FORMULA.DATABASE_CALLS_AVG_DURATION; - const legendFormula = legend; + const expressions = [FORMULA.DATABASE_CALLS_AVG_DURATION]; + const legendFormulas = [legend]; const disabled = [true, true]; const additionalItemsA: TagFilterItem[] = [ { @@ -252,18 +244,13 @@ export const externalCallDurationByAddress = ({ ...tagFilterItems, ]; - const autocompleteData: BaseAutocompleteData[] = [ - autocompleteDataA, - autocompleteDataB, + const autocompleteData = [autocompleteDataA, autocompleteDataB]; + const additionalItems = [additionalItemsA, additionalItemsA]; + const legends = [legend, legend]; + const aggregateOperators = [ + MetricAggregateOperator.SUM, + MetricAggregateOperator.SUM, ]; - - const additionalItems: TagFilterItem[][] = [ - additionalItemsA, - additionalItemsA, - ]; - - const legends = Array(2).fill(legend); - const aggregateOperators = Array(2).fill(MetricAggregateOperator.SUM_RATE); const dataSource = DataSource.METRICS; return getQueryBuilderQuerieswithFormula({ @@ -272,8 +259,8 @@ export const externalCallDurationByAddress = ({ legends, groupBy, disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, dataSource, }); diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts index 2412dfce47..efe00eec4d 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts @@ -67,18 +67,16 @@ export const getQueryBuilderQuerieswithFormula = ({ legends, groupBy = [], disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, dataSource, }: BuilderQuerieswithFormulaProps): QueryBuilderData => ({ - queryFormulas: [ - { - ...initialFormulaBuilderFormValues, - expression, - legend: legendFormula, - }, - ], + queryFormulas: expressions.map((expression, index) => ({ + ...initialFormulaBuilderFormValues, + expression, + legend: legendFormulas[index], + })), queryData: autocompleteData.map((_, index) => ({ ...initialQueryBuilderFormValuesMap.metrics, aggregateOperator: aggregateOperators[index], diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts 
b/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts index e1137f4cfc..77df3e30f6 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts @@ -224,8 +224,8 @@ export const errorPercentage = ({ const additionalItems = [additionalItemsA, additionalItemsB]; const legends = [GraphTitle.ERROR_PERCENTAGE]; const disabled = [true, true]; - const expression = FORMULA.ERROR_PERCENTAGE; - const legendFormula = GraphTitle.ERROR_PERCENTAGE; + const expressions = [FORMULA.ERROR_PERCENTAGE]; + const legendFormulas = [GraphTitle.ERROR_PERCENTAGE]; const aggregateOperators = [ MetricAggregateOperator.SUM_RATE, MetricAggregateOperator.SUM_RATE, @@ -237,8 +237,8 @@ export const errorPercentage = ({ additionalItems, legends, disabled, - expression, - legendFormula, + expressions, + legendFormulas, aggregateOperators, dataSource, }); diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/TopOperationQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/TopOperationQueries.ts index 6f75d9666d..42aac24d9f 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/TopOperationQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/TopOperationQueries.ts @@ -124,8 +124,8 @@ export const topOperationQueries = ({ MetricAggregateOperator.SUM_RATE, MetricAggregateOperator.SUM_RATE, ]; - const expression = 'D*100/E'; - const legendFormula = GraphTitle.ERROR_PERCENTAGE; + const expressions = ['D*100/E']; + const legendFormulas = [GraphTitle.ERROR_PERCENTAGE]; const dataSource = DataSource.METRICS; return getQueryBuilderQuerieswithFormula({ @@ -134,8 +134,8 @@ export const topOperationQueries = ({ disabled, legends, aggregateOperators, - expression, - legendFormula, + expressions, + legendFormulas, dataSource, groupBy, }); diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/TopOperationMetrics.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview/TopOperationMetrics.tsx index d205bfbd83..8a044fddba 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview/TopOperationMetrics.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview/TopOperationMetrics.tsx @@ -4,12 +4,13 @@ import { topOperationQueries } from 'container/MetricsApplication/MetricsPageQue import { QueryTable } from 'container/QueryTable'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useStepInterval } from 'hooks/queryBuilder/useStepInterval'; +import { useNotifications } from 'hooks/useNotifications'; import useResourceAttribute from 'hooks/useResourceAttribute'; import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils'; import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables'; import { RowData } from 'lib/query/createTableColumnsFromQuery'; import { isEmpty } from 'lodash-es'; -import { ReactNode, useMemo, useState } from 'react'; +import { ReactNode, useMemo } from 'react'; import { useSelector } from 'react-redux'; import { useParams } from 'react-router-dom'; import { AppState } from 'store/reducers'; @@ -18,18 +19,19 @@ import { GlobalReducer } from 'types/reducer/globalTime'; import { v4 as uuid } from 'uuid'; import { IServiceName } from '../types'; -import { title } from './config'; import ColumnWithLink from './TableRenderer/ColumnWithLink'; import { getTableColumnRenderer } from 
'./TableRenderer/TableColumnRenderer'; function TopOperationMetrics(): JSX.Element { const { servicename } = useParams(); - const [errorMessage, setErrorMessage] = useState(''); + const { notifications } = useNotifications(); + const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector< AppState, GlobalReducer >((state) => state.globalTime); + const { queries } = useResourceAttribute(); const selectedTraceTags = JSON.stringify( @@ -80,7 +82,7 @@ function TopOperationMetrics(): JSX.Element { enabled: !isEmptyWidget, refetchOnMount: false, onError: (error) => { - setErrorMessage(error.message); + notifications.error({ message: error.message }); }, }, ); @@ -104,13 +106,8 @@ function TopOperationMetrics(): JSX.Element { [servicename, minTime, maxTime, selectedTraceTags], ); - if (errorMessage) { - return
{errorMessage}
; - } - return ( = { + [ColumnKey.Application]: 'Application', + [ColumnKey.P99]: 'P99 latency', + [ColumnKey.ErrorRate]: 'Error Rate (% of total)', + [ColumnKey.Operations]: 'Operations Per Second', +}; + +export enum ColumnWidth { + Application = 200, + P99 = 150, + ErrorRate = 150, + Operations = 150, +} + +export const SORTING_ORDER = 'descend'; + +export const SEARCH_PLACEHOLDER = 'Search by service'; diff --git a/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx new file mode 100644 index 0000000000..4257dc57ec --- /dev/null +++ b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx @@ -0,0 +1,34 @@ +import { SearchOutlined } from '@ant-design/icons'; +import type { ColumnType } from 'antd/es/table'; +import ROUTES from 'constants/routes'; +import { routeConfig } from 'container/SideNav/config'; +import { getQueryString } from 'container/SideNav/helper'; +import { Link } from 'react-router-dom'; +import { ServicesList } from 'types/api/metrics/getService'; + +import { filterDropdown } from '../Filter/FilterDropdown'; +import { Name } from '../styles'; + +export const getColumnSearchProps = ( + dataIndex: keyof ServicesList, + search: string, +): ColumnType => ({ + filterDropdown, + filterIcon: , + onFilter: (value: string | number | boolean, record: ServicesList): boolean => + record[dataIndex] + .toString() + .toLowerCase() + .includes(value.toString().toLowerCase()), + render: (metrics: string): JSX.Element => { + const urlParams = new URLSearchParams(search); + const avialableParams = routeConfig[ROUTES.SERVICE_METRICS]; + const queryString = getQueryString(avialableParams, urlParams); + + return ( + + {metrics} + + ); + }, +}); diff --git a/frontend/src/container/ServiceApplication/Columns/ServiceColumn.ts b/frontend/src/container/ServiceApplication/Columns/ServiceColumn.ts new file mode 100644 index 0000000000..b290e8409c --- /dev/null +++ b/frontend/src/container/ServiceApplication/Columns/ServiceColumn.ts @@ -0,0 +1,54 @@ +import type { ColumnsType } from 'antd/es/table'; +import { ServicesList } from 'types/api/metrics/getService'; + +import { + ColumnKey, + ColumnTitle, + ColumnWidth, + SORTING_ORDER, +} from './ColumnContants'; +import { getColumnSearchProps } from './GetColumnSearchProps'; + +export const getColumns = ( + search: string, + isMetricData: boolean, +): ColumnsType => [ + { + title: ColumnTitle[ColumnKey.Application], + dataIndex: ColumnKey.Application, + width: ColumnWidth.Application, + key: ColumnKey.Application, + ...getColumnSearchProps('serviceName', search), + }, + { + title: `${ColumnTitle[ColumnKey.P99]}${ + isMetricData ? ' (in ns)' : ' (in ms)' + }`, + dataIndex: ColumnKey.P99, + key: ColumnKey.P99, + width: ColumnWidth.P99, + defaultSortOrder: SORTING_ORDER, + sorter: (a: ServicesList, b: ServicesList): number => a.p99 - b.p99, + render: (value: number): string => { + if (Number.isNaN(value)) return '0.00'; + return isMetricData ? 
value.toFixed(2) : (value / 1000000).toFixed(2); + }, + }, + { + title: ColumnTitle[ColumnKey.ErrorRate], + dataIndex: ColumnKey.ErrorRate, + key: ColumnKey.ErrorRate, + width: 150, + sorter: (a: ServicesList, b: ServicesList): number => + a.errorRate - b.errorRate, + render: (value: number): string => value.toFixed(2), + }, + { + title: ColumnTitle[ColumnKey.Operations], + dataIndex: ColumnKey.Operations, + key: ColumnKey.Operations, + width: ColumnWidth.Operations, + sorter: (a: ServicesList, b: ServicesList): number => a.callRate - b.callRate, + render: (value: number): string => value.toFixed(2), + }, +]; diff --git a/frontend/src/container/ServiceApplication/Filter/FilterDropdown.tsx b/frontend/src/container/ServiceApplication/Filter/FilterDropdown.tsx new file mode 100644 index 0000000000..1dc4a12d89 --- /dev/null +++ b/frontend/src/container/ServiceApplication/Filter/FilterDropdown.tsx @@ -0,0 +1,41 @@ +import { SearchOutlined } from '@ant-design/icons'; +import { Button, Card, Input, Space } from 'antd'; +import type { FilterDropdownProps } from 'antd/es/table/interface'; + +import { SEARCH_PLACEHOLDER } from '../Columns/ColumnContants'; + +export const filterDropdown = ({ + setSelectedKeys, + selectedKeys, + confirm, +}: FilterDropdownProps): JSX.Element => { + const handleSearch = (): void => { + confirm(); + }; + + const selectedKeysHandler = (e: React.ChangeEvent): void => { + setSelectedKeys(e.target.value ? [e.target.value] : []); + }; + + return ( + + + + + + + ); +}; diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx new file mode 100644 index 0000000000..154cc4ab11 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx @@ -0,0 +1,67 @@ +import { ResizeTable } from 'components/ResizeTable'; +import { useGetQueriesRange } from 'hooks/queryBuilder/useGetQueriesRange'; +import { useNotifications } from 'hooks/useNotifications'; +import { useMemo } from 'react'; +import { useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { AppState } from 'store/reducers'; +import { ServicesList } from 'types/api/metrics/getService'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import { getColumns } from '../Columns/ServiceColumn'; +import { ServiceMetricsTableProps } from '../types'; +import { getServiceListFromQuery } from '../utils'; + +function ServiceMetricTable({ + topLevelOperations, + queryRangeRequestData, +}: ServiceMetricsTableProps): JSX.Element { + const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const { notifications } = useNotifications(); + + const queries = useGetQueriesRange(queryRangeRequestData, { + queryKey: [ + `GetMetricsQueryRange-${queryRangeRequestData[0].selectedTime}-${globalSelectedInterval}`, + maxTime, + minTime, + globalSelectedInterval, + ], + keepPreviousData: true, + enabled: true, + refetchOnMount: false, + onError: (error) => { + notifications.error({ + message: error.message, + }); + }, + }); + + const isLoading = queries.some((query) => query.isLoading); + const services: ServicesList[] = useMemo( + () => + getServiceListFromQuery({ + queries, + topLevelOperations, + isLoading, + }), + [isLoading, queries, topLevelOperations], + ); + + const { search } = useLocation(); + const tableColumns = useMemo(() => getColumns(search, true), 
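// Editor's note: condensed sketch of the column pattern built in ServiceColumn.ts and
// GetColumnSearchProps.tsx — a searchable Application column plus a sortable P99 column
// whose unit depends on the data source. ServiceRow is a stand-in for ServicesList; the
// real code also wires a custom filterDropdown and renders the name as a Link to the
// service page.
import { SearchOutlined } from '@ant-design/icons';
import type { ColumnsType } from 'antd/es/table';

interface ServiceRow {
	serviceName: string;
	p99: number;
}

export function buildColumns(isMetricData: boolean): ColumnsType<ServiceRow> {
	return [
		{
			title: 'Application',
			dataIndex: 'serviceName',
			key: 'serviceName',
			filterIcon: <SearchOutlined />,
			// case-insensitive match against the value typed into the filter dropdown
			onFilter: (value, record): boolean =>
				record.serviceName.toLowerCase().includes(String(value).toLowerCase()),
		},
		{
			title: `P99 latency${isMetricData ? ' (in ns)' : ' (in ms)'}`,
			dataIndex: 'p99',
			key: 'p99',
			defaultSortOrder: 'descend',
			sorter: (a, b): number => a.p99 - b.p99,
			// metric-based p99 is shown as-is; trace-based p99 arrives in nanoseconds and is
			// converted to milliseconds, mirroring the render in the patch above
			render: (value: number): string =>
				isMetricData ? value.toFixed(2) : (value / 1_000_000).toFixed(2),
		},
	];
}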
[search]); + + return ( + + ); +} + +export default ServiceMetricTable; diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsApplication.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsApplication.tsx new file mode 100644 index 0000000000..4d5889b909 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsApplication.tsx @@ -0,0 +1,36 @@ +import { useMemo } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import { ServiceMetricsProps } from '../types'; +import { getQueryRangeRequestData } from '../utils'; +import ServiceMetricTable from './ServiceMetricTable'; + +function ServiceMetricsApplication({ + topLevelOperations, +}: ServiceMetricsProps): JSX.Element { + const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const queryRangeRequestData = useMemo( + () => + getQueryRangeRequestData({ + topLevelOperations, + minTime, + maxTime, + globalSelectedInterval, + }), + [globalSelectedInterval, maxTime, minTime, topLevelOperations], + ); + return ( + + ); +} + +export default ServiceMetricsApplication; diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts new file mode 100644 index 0000000000..7214a7c912 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts @@ -0,0 +1,208 @@ +import { ServiceDataProps } from 'api/metrics/getTopLevelOperations'; +import { OPERATORS } from 'constants/queryBuilder'; +import { + DataType, + KeyOperationTableHeader, + MetricsType, + WidgetKeys, +} from 'container/MetricsApplication/constant'; +import { getQueryBuilderQuerieswithFormula } from 'container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory'; +import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData'; +import { + DataSource, + MetricAggregateOperator, + QueryBuilderData, +} from 'types/common/queryBuilder'; + +export const serviceMetricsQuery = ( + topLevelOperation: [keyof ServiceDataProps, string[]], +): QueryBuilderData => { + const p99AutoCompleteData: BaseAutocompleteData = { + dataType: DataType.FLOAT64, + isColumn: true, + key: WidgetKeys.Signoz_latency_bucket, + type: null, + }; + + const errorRateAutoCompleteData: BaseAutocompleteData = { + dataType: DataType.FLOAT64, + isColumn: true, + key: WidgetKeys.SignozCallsTotal, + type: null, + }; + + const operationPrSecondAutoCompleteData: BaseAutocompleteData = { + dataType: DataType.FLOAT64, + isColumn: true, + key: WidgetKeys.SignozCallsTotal, + type: null, + }; + + const autocompleteData = [ + p99AutoCompleteData, + errorRateAutoCompleteData, + errorRateAutoCompleteData, + operationPrSecondAutoCompleteData, + ]; + + const p99AdditionalItems: TagFilterItem[] = [ + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Service_name, + type: MetricsType.Resource, + }, + op: OPERATORS.IN, + value: [topLevelOperation[0].toString()], + }, + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Operation, + type: MetricsType.Tag, + }, + op: OPERATORS.IN, + value: [...topLevelOperation[1]], + }, + ]; + + const 
errorRateAdditionalItemsA: TagFilterItem[] = [ + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Service_name, + type: MetricsType.Resource, + }, + op: OPERATORS.IN, + value: [topLevelOperation[0].toString()], + }, + { + id: '', + key: { + dataType: DataType.INT64, + isColumn: false, + key: WidgetKeys.StatusCode, + type: MetricsType.Tag, + }, + op: OPERATORS.IN, + value: ['STATUS_CODE_ERROR'], + }, + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Operation, + type: MetricsType.Tag, + }, + op: OPERATORS.IN, + value: [...topLevelOperation[1]], + }, + ]; + + const errorRateAdditionalItemsB: TagFilterItem[] = [ + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Service_name, + type: MetricsType.Resource, + }, + op: OPERATORS.IN, + value: [topLevelOperation[0].toString()], + }, + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Operation, + type: MetricsType.Tag, + }, + op: OPERATORS.IN, + value: [...topLevelOperation[1]], + }, + ]; + + const operationPrSecondAdditionalItems: TagFilterItem[] = [ + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Service_name, + type: MetricsType.Resource, + }, + op: OPERATORS.IN, + value: [topLevelOperation[0].toString()], + }, + { + id: '', + key: { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Operation, + type: MetricsType.Tag, + }, + op: OPERATORS.IN, + value: [...topLevelOperation[1]], + }, + ]; + + const additionalItems = [ + p99AdditionalItems, + errorRateAdditionalItemsA, + errorRateAdditionalItemsB, + operationPrSecondAdditionalItems, + ]; + + const aggregateOperators = [ + MetricAggregateOperator.HIST_QUANTILE_99, + MetricAggregateOperator.SUM_RATE, + MetricAggregateOperator.SUM_RATE, + MetricAggregateOperator.SUM_RATE, + ]; + + const disabled = [false, true, true, false]; + const legends = [ + KeyOperationTableHeader.P99, + KeyOperationTableHeader.ERROR_RATE, + KeyOperationTableHeader.ERROR_RATE, + KeyOperationTableHeader.OPERATION_PR_SECOND, + ]; + + const expressions = ['B*100/C']; + + const legendFormulas = ['Error Rate']; + + const groupBy: BaseAutocompleteData[] = [ + { + dataType: DataType.STRING, + isColumn: false, + key: WidgetKeys.Service_name, + type: MetricsType.Tag, + }, + ]; + + const dataSource = DataSource.METRICS; + + return getQueryBuilderQuerieswithFormula({ + autocompleteData, + additionalItems, + disabled, + legends, + aggregateOperators, + expressions, + legendFormulas, + groupBy, + dataSource, + }); +}; diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/index.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/index.tsx new file mode 100644 index 0000000000..740168f96c --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/index.tsx @@ -0,0 +1,59 @@ +import localStorageGet from 'api/browser/localstorage/get'; +import localStorageSet from 'api/browser/localstorage/set'; +import Spinner from 'components/Spinner'; +import { SKIP_ONBOARDING } from 'constants/onboarding'; +import useGetTopLevelOperations from 'hooks/useGetTopLevelOperations'; +import useResourceAttribute from 'hooks/useResourceAttribute'; +import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils'; +import { useMemo, useState } from 'react'; +import { QueryKey } from 'react-query'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { 
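// Editor's note: sketch of what the 'B*100/C' formula in serviceMetricsQuery expresses.
// Query B is the call rate restricted to STATUS_CODE_ERROR for the selected service and
// operations, and query C is the total call rate under the same service/operation filter,
// so the formula yields the error percentage shown in the Error Rate column. This is only
// the arithmetic, not the query-builder wiring (which passes one entry per query in each
// of the parallel arrays above).
export function errorRatePercent(errorCallRate: number, totalCallRate: number): number {
	// guard against division by zero when a service received no calls in the window
	if (totalCallRate === 0) {
		return 0;
	}
	return (errorCallRate * 100) / totalCallRate;
}

// e.g. errorRatePercent(3, 120) === 2.5, i.e. 2.5% of calls errored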
GlobalReducer } from 'types/reducer/globalTime'; +import { Tags } from 'types/reducer/trace'; + +import SkipOnBoardingModal from '../SkipOnBoardModal'; +import ServiceMetricsApplication from './ServiceMetricsApplication'; + +function ServicesUsingMetrics(): JSX.Element { + const { maxTime, minTime, selectedTime: globalSelectedInterval } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + const { queries } = useResourceAttribute(); + const selectedTags = useMemo( + () => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [], + [queries], + ); + + const queryKey: QueryKey = [ + minTime, + maxTime, + selectedTags, + globalSelectedInterval, + ]; + const { data, isLoading, isError } = useGetTopLevelOperations(queryKey); + + const [skipOnboarding, setSkipOnboarding] = useState( + localStorageGet(SKIP_ONBOARDING) === 'true', + ); + + const onContinueClick = (): void => { + localStorageSet(SKIP_ONBOARDING, 'true'); + setSkipOnboarding(true); + }; + + const topLevelOperations = Object.entries(data || {}); + + if (isLoading === false && !skipOnboarding && isError === true) { + return ; + } + + if (isLoading) { + return ; + } + + return ; +} + +export default ServicesUsingMetrics; diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx new file mode 100644 index 0000000000..2e94296d8d --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx @@ -0,0 +1,50 @@ +import { render, screen, waitFor } from '@testing-library/react'; +import ROUTES from 'constants/routes'; +import { BrowserRouter } from 'react-router-dom'; + +import { services } from './__mocks__/getServices'; +import ServiceTraceTable from './ServiceTracesTable'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.APPLICATION}/`, + }), +})); + +describe('Metrics Component', () => { + it('renders without errors', async () => { + render( + + + , + ); + + await waitFor(() => { + expect(screen.getByText(/application/i)).toBeInTheDocument(); + expect(screen.getByText(/p99 latency \(in ms\)/i)).toBeInTheDocument(); + expect(screen.getByText(/error rate \(% of total\)/i)).toBeInTheDocument(); + expect(screen.getByText(/operations per second/i)).toBeInTheDocument(); + }); + }); + + it('renders if the data is loaded in the table', async () => { + render( + + + , + ); + + expect(screen.getByText('frontend')).toBeInTheDocument(); + }); + + it('renders no data when required conditions are met', async () => { + render( + + + , + ); + + expect(screen.getByText('No data')).toBeInTheDocument(); + }); +}); diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx new file mode 100644 index 0000000000..b5c4f6e7a1 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx @@ -0,0 +1,26 @@ +import { ResizeTable } from 'components/ResizeTable'; +import { useMemo } from 'react'; +import { useLocation } from 'react-router-dom'; + +import { getColumns } from '../Columns/ServiceColumn'; +import ServiceTableProps from '../types'; + +function ServiceTraceTable({ + services, + loading, +}: ServiceTableProps): JSX.Element { + const { search } = useLocation(); + + const tableColumns = useMemo(() => 
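// Editor's note: sketch of the data shape flowing from useGetTopLevelOperations into the
// metrics table — the API returns a map of service name to its top-level operation names,
// and ServicesUsingMetrics turns it into entries so each service/operation set can drive a
// query-range request. ServiceDataProps is assumed here to be Record<string, string[]>.
type ServiceDataProps = Record<string, string[]>;

export function toTopLevelOperationEntries(
	data: ServiceDataProps | undefined,
): [keyof ServiceDataProps, string[]][] {
	// mirrors `Object.entries(data || {})` in ServicesUsingMetrics
	return Object.entries(data || {});
}

// e.g. toTopLevelOperationEntries({ frontend: ['HTTP GET /dispatch'] })
//   -> [['frontend', ['HTTP GET /dispatch']]]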
getColumns(search, false), [search]); + + return ( + + ); +} + +export default ServiceTraceTable; diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/__mocks__/getServices.ts b/frontend/src/container/ServiceApplication/ServiceTraces/__mocks__/getServices.ts new file mode 100644 index 0000000000..c7ffdf0d46 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceTraces/__mocks__/getServices.ts @@ -0,0 +1,22 @@ +import { ServicesList } from 'types/api/metrics/getService'; + +export const services: ServicesList[] = [ + { + serviceName: 'frontend', + p99: 1261498140, + avgDuration: 768497850.9803921, + numCalls: 255, + callRate: 0.9444444444444444, + numErrors: 0, + errorRate: 0, + }, + { + serviceName: 'customer', + p99: 890150740.0000001, + avgDuration: 369612035.2941176, + numCalls: 255, + callRate: 0.9444444444444444, + numErrors: 0, + errorRate: 0, + }, +]; diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/index.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/index.tsx new file mode 100644 index 0000000000..370697af00 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceTraces/index.tsx @@ -0,0 +1,60 @@ +import localStorageGet from 'api/browser/localstorage/get'; +import localStorageSet from 'api/browser/localstorage/set'; +import { SKIP_ONBOARDING } from 'constants/onboarding'; +import useErrorNotification from 'hooks/useErrorNotification'; +import { useQueryService } from 'hooks/useQueryService'; +import useResourceAttribute from 'hooks/useResourceAttribute'; +import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils'; +import { useMemo, useState } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; +import { Tags } from 'types/reducer/trace'; + +import SkipOnBoardingModal from '../SkipOnBoardModal'; +import ServiceTraceTable from './ServiceTracesTable'; + +function ServiceTraces(): JSX.Element { + const { maxTime, minTime, selectedTime } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + const { queries } = useResourceAttribute(); + const selectedTags = useMemo( + () => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [], + [queries], + ); + + const { data, error, isLoading, isError } = useQueryService({ + minTime, + maxTime, + selectedTime, + selectedTags, + }); + + useErrorNotification(error); + + const services = data || []; + + const [skipOnboarding, setSkipOnboarding] = useState( + localStorageGet(SKIP_ONBOARDING) === 'true', + ); + + const onContinueClick = (): void => { + localStorageSet(SKIP_ONBOARDING, 'true'); + setSkipOnboarding(true); + }; + + if ( + services.length === 0 && + isLoading === false && + !skipOnboarding && + isError === true + ) { + return ; + } + + return ; +} + +export default ServiceTraces; diff --git a/frontend/src/container/ServiceApplication/SkipOnBoardModal/index.tsx b/frontend/src/container/ServiceApplication/SkipOnBoardModal/index.tsx new file mode 100644 index 0000000000..aedc3d4e43 --- /dev/null +++ b/frontend/src/container/ServiceApplication/SkipOnBoardModal/index.tsx @@ -0,0 +1,48 @@ +import { Button, Typography } from 'antd'; +import Modal from 'components/Modal'; + +function SkipOnBoardingModal({ onContinueClick }: Props): JSX.Element { + return ( + + Continue without instrumentation + , + ]} + > + <> +
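// Editor's note: sketch of the "skip onboarding" gate shared by ServiceTraces and
// ServicesUsingMetrics — the modal is rendered only when loading has finished, the request
// errored (and, for traces, the service list is empty), and the user has not previously
// chosen to skip. The patch uses the localStorageGet/localStorageSet wrappers and the
// SKIP_ONBOARDING constant; the raw localStorage calls and the key value used here are
// simplifications.
import { useState } from 'react';

const SKIP_ONBOARDING = 'SKIP_ONBOARDING';

export function useSkipOnboarding(): {
	skipOnboarding: boolean;
	onContinueClick: () => void;
} {
	const [skipOnboarding, setSkipOnboarding] = useState<boolean>(
		localStorage.getItem(SKIP_ONBOARDING) === 'true',
	);

	const onContinueClick = (): void => {
		// persist the choice so the modal is not shown again on the next visit
		localStorage.setItem(SKIP_ONBOARDING, 'true');
		setSkipOnboarding(true);
	};

	return { skipOnboarding, onContinueClick };
}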