From d150cfa46ca07244afcc1733cc30bd4b98312bba Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Mon, 6 Jun 2022 10:48:12 +0530 Subject: [PATCH 01/42] fix: using legacy_createStore instead of createStore as it seem it is depecreated --- frontend/src/store/index.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frontend/src/store/index.ts b/frontend/src/store/index.ts index 39f43fdf85..3ff983b82e 100644 --- a/frontend/src/store/index.ts +++ b/frontend/src/store/index.ts @@ -1,4 +1,8 @@ -import { applyMiddleware, compose, createStore } from 'redux'; +import { + applyMiddleware, + compose, + legacy_createStore as createStore, +} from 'redux'; import thunk, { ThunkMiddleware } from 'redux-thunk'; import AppActions from 'types/actions'; From 0c06c5ee0ed759aa939faa97edc651db9ae5f31e Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Mon, 6 Jun 2022 17:06:16 +0530 Subject: [PATCH 02/42] fix: trace detail is updated --- .../TraceDetail/SelectedSpanDetails/index.tsx | 20 ++++++++++++------- .../TraceDetail/SelectedSpanDetails/styles.ts | 17 +++++++++++++--- 2 files changed, 27 insertions(+), 10 deletions(-) diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx index 87a953fd6e..dc16a2b7e9 100644 --- a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx +++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx @@ -1,12 +1,13 @@ -import { Space, Tabs, Typography } from 'antd'; +import { Space, Tabs, Tooltip, Typography } from 'antd'; import { StyledSpace } from 'components/Styled'; import useThemeMode from 'hooks/useThemeMode'; -import React from 'react'; +import React, { useMemo } from 'react'; import { ITraceTree } from 'types/api/trace/getTraceItem'; import ErrorTag from './ErrorTag'; import { CardContainer, + // CustomSpace, CustomSubText, CustomSubTitle, CustomText, @@ -19,11 +20,14 @@ const { TabPane } = Tabs; function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element { const { tree } = props; const { isDarkMode } = useThemeMode(); + + const OverLayComponent = useMemo(() => tree?.name, [tree?.name]); + if (!tree) { return
; } - const { name, tags, serviceName } = tree; + const { tags, serviceName } = tree; return ( @@ -37,10 +41,12 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element { Service {serviceName} - - Operation - {name} - + {/* */} + Operation + + {tree.name} + + {/* */} diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts b/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts index dc5bdc03e9..211b07b190 100644 --- a/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts +++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts @@ -1,7 +1,7 @@ -import { Typography } from 'antd'; +import { Space, Typography } from 'antd'; import styled, { css } from 'styled-components'; -const { Text, Title, Paragraph } = Typography; +const { Title, Paragraph } = Typography; export const CustomTitle = styled(Title)` &&& { @@ -9,7 +9,7 @@ export const CustomTitle = styled(Title)` } `; -export const CustomText = styled(Text)` +export const CustomText = styled(Paragraph)` &&& { color: #2d9cdb; } @@ -44,6 +44,17 @@ export const CardContainer = styled.div` width: 100%; flex: 1; overflow-y: auto; + overflow-x: hidden; + white-space: nowrap; + text-overflow: ellipsis; +`; + +export const CustomSpace = styled(Space)` + &&& { + .ant-space-item { + width: 100%; + } + } `; const removeMargin = css` From 8a0bcf6cd98a2e9a711978c019a42dc1d18c69c4 Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Tue, 7 Jun 2022 15:56:16 +0530 Subject: [PATCH 03/42] feat: operation name is now ellipsed --- .../TraceDetail/SelectedSpanDetails/index.tsx | 3 +-- .../TraceDetail/SelectedSpanDetails/styles.ts | 13 ++++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx index dc16a2b7e9..7d1a374b42 100644 --- a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx +++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx @@ -7,7 +7,6 @@ import { ITraceTree } from 'types/api/trace/getTraceItem'; import ErrorTag from './ErrorTag'; import { CardContainer, - // CustomSpace, CustomSubText, CustomSubTitle, CustomText, @@ -32,7 +31,7 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element { return ( diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts b/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts index 211b07b190..d8bae86ba7 100644 --- a/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts +++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/styles.ts @@ -17,7 +17,6 @@ export const CustomText = styled(Paragraph)` export const CustomSubTitle = styled(Title)` &&& { - /* color: #bdbdbd; */ font-size: 14px; margin-bottom: 8px; } @@ -71,9 +70,21 @@ const selectedSpanDetailsContainer = css` const spanEventsTabsContainer = css` margin-top: 1rem; `; + +const overflow = css` + width: 95%; + + > div.ant-space-item:nth-child(4) { + overflow-x: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } +`; + export const styles = { removeMargin, removePadding, selectedSpanDetailsContainer, spanEventsTabsContainer, + overflow, }; From d8775c91d7c026aff7e86c1274444fd44046969d Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Mon, 13 Jun 2022 18:38:06 +0530 Subject: [PATCH 04/42] feat: metrics is renamed to services in sidebar --- frontend/src/container/SideNav/menuItems.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/frontend/src/container/SideNav/menuItems.ts b/frontend/src/container/SideNav/menuItems.ts index 921cef71d5..66a2a1fe77 100644 --- a/frontend/src/container/SideNav/menuItems.ts +++ b/frontend/src/container/SideNav/menuItems.ts @@ -15,7 +15,7 @@ const menus: SidebarMenu[] = [ { Icon: BarChartOutlined, to: ROUTES.APPLICATION, - name: 'Metrics', + name: 'Services', }, { Icon: AlignLeftOutlined, From c16ae790d4ee438ed91782fb5e9b905da1401065 Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Mon, 13 Jun 2022 19:05:17 +0530 Subject: [PATCH 05/42] feat: rule id is passed as params --- frontend/src/constants/routes.ts | 2 +- .../container/ListAlertRules/ListAlert.tsx | 7 +--- frontend/src/pages/EditRules/index.tsx | 32 +++++++++++++------ 3 files changed, 25 insertions(+), 16 deletions(-) diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts index 636a3b0758..506474af97 100644 --- a/frontend/src/constants/routes.ts +++ b/frontend/src/constants/routes.ts @@ -12,7 +12,7 @@ const ROUTES = { ALL_DASHBOARD: '/dashboard', DASHBOARD: '/dashboard/:dashboardId', DASHBOARD_WIDGET: '/dashboard/:dashboardId/:widgetId', - EDIT_ALERTS: '/alerts/edit/:ruleId', + EDIT_ALERTS: '/alerts/edit', LIST_ALL_ALERT: '/alerts', ALERTS_NEW: '/alerts/new', ALL_CHANNELS: '/settings/channels', diff --git a/frontend/src/container/ListAlertRules/ListAlert.tsx b/frontend/src/container/ListAlertRules/ListAlert.tsx index 8ec1fa9987..b851b0829a 100644 --- a/frontend/src/container/ListAlertRules/ListAlert.tsx +++ b/frontend/src/container/ListAlertRules/ListAlert.tsx @@ -11,7 +11,6 @@ import React, { useCallback, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { UseQueryResult } from 'react-query'; import { useSelector } from 'react-redux'; -import { generatePath } from 'react-router-dom'; import { AppState } from 'store/reducers'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { Alerts } from 'types/api/alerts/getAll'; @@ -51,11 +50,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element { const [notifications, Element] = notification.useNotification(); const onEditHandler = (id: string): void => { - history.push( - generatePath(ROUTES.EDIT_ALERTS, { - ruleId: id, - }), - ); + history.push(`${ROUTES.EDIT_ALERTS}?ruleId=${id}`); }; const columns: ColumnsType = [ diff --git a/frontend/src/pages/EditRules/index.tsx b/frontend/src/pages/EditRules/index.tsx index 9b80ff7024..1a65e668af 100644 --- a/frontend/src/pages/EditRules/index.tsx +++ b/frontend/src/pages/EditRules/index.tsx @@ -1,23 +1,41 @@ +import { notification } from 'antd'; import get from 'api/alerts/get'; import Spinner from 'components/Spinner'; +import ROUTES from 'constants/routes'; import EditRulesContainer from 'container/EditRules'; -import React from 'react'; +import history from 'lib/history'; +import React, { useEffect } from 'react'; import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; -import { useParams } from 'react-router-dom'; +import { useLocation } from 'react-router-dom'; function EditRules(): JSX.Element { - const { ruleId } = useParams(); + const { search } = useLocation(); + const params = new URLSearchParams(search); + const ruleId = params.get('ruleId'); + const { t } = useTranslation('common'); + const isValidRuleId = ruleId !== null && String(ruleId).length !== 0; + const { isLoading, data, isError } = useQuery(['ruleId', ruleId], { queryFn: () => get({ - id: parseInt(ruleId, 10), + id: parseInt(ruleId 
|| '', 10), }), + enabled: isValidRuleId, }); - if (isError) { + useEffect(() => { + if (!isValidRuleId) { + notification.error({ + message: 'Rule Id is required', + }); + history.replace(ROUTES.LIST_ALL_ALERT); + } + }, [isValidRuleId, ruleId]); + + if ((isError && !isValidRuleId) || ruleId == null) { return
<Typography>{data?.error || t('something_went_wrong')}</Typography>
; } @@ -28,8 +46,4 @@ function EditRules(): JSX.Element { return ; } -interface EditRulesParam { - ruleId: string; -} - export default EditRules; From 24e84bac2a52f4e6a8c8c6768bf0473d92dfc109 Mon Sep 17 00:00:00 2001 From: Ankit Nayan Date: Tue, 14 Jun 2022 20:50:13 +0530 Subject: [PATCH 06/42] Create codeball.yml --- .github/workflows/codeball.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .github/workflows/codeball.yml diff --git a/.github/workflows/codeball.yml b/.github/workflows/codeball.yml new file mode 100644 index 0000000000..ed69d5c10e --- /dev/null +++ b/.github/workflows/codeball.yml @@ -0,0 +1,17 @@ +name: Codeball +on: [pull_request] + +jobs: + codeball_job: + runs-on: ubuntu-latest + name: Codeball + steps: + # Run Codeball on all new Pull Requests šŸš€ + # For customizations and more documentation, see https://github.com/sturdy-dev/codeball-action + - name: Codeball + uses: sturdy-dev/codeball-action@v2 + with: + approvePullRequests: "true" + labelPullRequestsWhenApproved: "true" + labelPullRequestsWhenReviewNeeded: "false" + failJobsWhenReviewNeeded: "false" From 0a5eff2255279eaf2d567ffb883e8b8529691461 Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Wed, 15 Jun 2022 01:34:56 +0530 Subject: [PATCH 07/42] feat: alerts breadcrumb is added --- frontend/src/container/TopNav/Breadcrumbs/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx index 25ff730711..33d42ac5b0 100644 --- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx +++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx @@ -16,6 +16,7 @@ const breadcrumbNameMap = { [ROUTES.ORG_SETTINGS]: 'Organization Settings', [ROUTES.MY_SETTINGS]: 'My Settings', [ROUTES.ERROR_DETAIL]: 'Errors', + [ROUTES.LIST_ALL_ALERT]: 'Alerts', }; function ShowBreadcrumbs(props: RouteComponentProps): JSX.Element { From ba5e3dcfd3f8c499e4fc81478c6eb422eb370957 Mon Sep 17 00:00:00 2001 From: zedongh <248348907@qq.com> Date: Tue, 14 Jun 2022 20:15:13 +0800 Subject: [PATCH 08/42] fix: getMinMax with 'GLOBAL_TIME' and 'custom' need pass globalTimefeat (#1269) --- .../src/container/GridGraphLayout/Graph/FullView/index.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx b/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx index e4b4bc8183..3111e33e87 100644 --- a/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx +++ b/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx @@ -57,7 +57,10 @@ function FullView({ time: timePreferenceType, ): { min: string | number; max: string | number } => { if (time === 'GLOBAL_TIME') { - const minMax = GetMinMax(globalSelectedTime); + const minMax = GetMinMax(globalSelectedTime, [ + minTime / 1000000, + maxTime / 1000000, + ]); return { min: convertToNanoSecondsToSecond(minMax.minTime / 1000), max: convertToNanoSecondsToSecond(minMax.maxTime / 1000), From 988557284222d3b6d12e525a8ca62235e35bd199 Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Fri, 17 Jun 2022 08:07:33 +0530 Subject: [PATCH 09/42] feat: nginx uri issue is handled by increasing buffers --- deploy/docker/common/nginx-config.conf | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/deploy/docker/common/nginx-config.conf b/deploy/docker/common/nginx-config.conf index 705656bb6e..02fc9a8e69 100644 --- a/deploy/docker/common/nginx-config.conf +++ 
b/deploy/docker/common/nginx-config.conf @@ -11,6 +11,16 @@ server { gzip_buffers 16 8k; gzip_http_version 1.1; + # to handle uri issue 414 from nginx + fastcgi_buffers 8 16k; + fastcgi_buffer_size 32k; + + client_max_body_size 24M; + client_body_buffer_size 128k; + + client_header_buffer_size 5120k; + large_client_header_buffers 16 5120k; + location / { if ( $uri = '/index.html' ) { add_header Cache-Control no-store always; From 43e4f637d1c7eac64e52531c50fdc4a711bacb20 Mon Sep 17 00:00:00 2001 From: Palash gupta Date: Fri, 17 Jun 2022 12:31:48 +0530 Subject: [PATCH 10/42] fix: remove fastcgi --- deploy/docker/common/nginx-config.conf | 3 --- 1 file changed, 3 deletions(-) diff --git a/deploy/docker/common/nginx-config.conf b/deploy/docker/common/nginx-config.conf index 02fc9a8e69..a26c6662ca 100644 --- a/deploy/docker/common/nginx-config.conf +++ b/deploy/docker/common/nginx-config.conf @@ -12,9 +12,6 @@ server { gzip_http_version 1.1; # to handle uri issue 414 from nginx - fastcgi_buffers 8 16k; - fastcgi_buffer_size 32k; - client_max_body_size 24M; client_body_buffer_size 128k; From 8871d53ae05bab6f8fc45ac72f516a59fc68ffa1 Mon Sep 17 00:00:00 2001 From: Palash Date: Tue, 21 Jun 2022 15:12:03 +0530 Subject: [PATCH 11/42] nginx config is updated --- deploy/docker/common/nginx-config.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/deploy/docker/common/nginx-config.conf b/deploy/docker/common/nginx-config.conf index a26c6662ca..e61719198b 100644 --- a/deploy/docker/common/nginx-config.conf +++ b/deploy/docker/common/nginx-config.conf @@ -13,7 +13,6 @@ server { # to handle uri issue 414 from nginx client_max_body_size 24M; - client_body_buffer_size 128k; client_header_buffer_size 5120k; large_client_header_buffers 16 5120k; From 7f64dfd023365785db5b5c31ab54554f7de73dd6 Mon Sep 17 00:00:00 2001 From: Palash Date: Tue, 21 Jun 2022 16:05:18 +0530 Subject: [PATCH 12/42] chore: nginx config is updated --- deploy/docker/common/nginx-config.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/deploy/docker/common/nginx-config.conf b/deploy/docker/common/nginx-config.conf index e61719198b..da7f1b63f1 100644 --- a/deploy/docker/common/nginx-config.conf +++ b/deploy/docker/common/nginx-config.conf @@ -14,7 +14,6 @@ server { # to handle uri issue 414 from nginx client_max_body_size 24M; - client_header_buffer_size 5120k; large_client_header_buffers 16 5120k; location / { From fdca72b9b24ea316301f0f07758b2f97382b449f Mon Sep 17 00:00:00 2001 From: Palash Date: Tue, 21 Jun 2022 16:09:17 +0530 Subject: [PATCH 13/42] chore: nginx config is updated --- deploy/docker/common/nginx-config.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/docker/common/nginx-config.conf b/deploy/docker/common/nginx-config.conf index da7f1b63f1..99615f1f60 100644 --- a/deploy/docker/common/nginx-config.conf +++ b/deploy/docker/common/nginx-config.conf @@ -14,7 +14,7 @@ server { # to handle uri issue 414 from nginx client_max_body_size 24M; - large_client_header_buffers 16 5120k; + large_client_header_buffers 8 16k; location / { if ( $uri = '/index.html' ) { From bcb5256de0577cd9225e6ea05f134f866154b0fe Mon Sep 17 00:00:00 2001 From: Palash Date: Tue, 21 Jun 2022 17:25:59 +0530 Subject: [PATCH 14/42] Update CODEOWNERS (#1265) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index afd70250c4..b717ca9eee 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,5 +2,5 @@ # Owners are automatically requested for 
review for PRs that changes code # that they own. * @ankitnayan -/frontend/ @palash-signoz @pranshuchittora +/frontend/ @palashgdev @pranshuchittora /deploy/ @prashant-shahi From 3603e497a6570fb114ef1c9a1afd4befe856903d Mon Sep 17 00:00:00 2001 From: Palash Date: Wed, 22 Jun 2022 22:49:01 +0530 Subject: [PATCH 15/42] chore: error state is updated --- frontend/src/pages/EditRules/index.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frontend/src/pages/EditRules/index.tsx b/frontend/src/pages/EditRules/index.tsx index 1a65e668af..09cda600ab 100644 --- a/frontend/src/pages/EditRules/index.tsx +++ b/frontend/src/pages/EditRules/index.tsx @@ -35,7 +35,11 @@ function EditRules(): JSX.Element { } }, [isValidRuleId, ruleId]); - if ((isError && !isValidRuleId) || ruleId == null) { + if ( + (isError && !isValidRuleId) || + ruleId == null || + (data?.payload?.data === undefined && !isLoading) + ) { return
<Typography>{data?.error || t('something_went_wrong')}</Typography>
; } From 241121ebecb74c02855f5b9747a7c6d0acee1c85 Mon Sep 17 00:00:00 2001 From: Palash Date: Wed, 22 Jun 2022 23:46:30 +0530 Subject: [PATCH 16/42] chore: serivce name now ellipsed --- .../TraceDetail/SelectedSpanDetails/index.tsx | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx index 7d1a374b42..50f2aa9537 100644 --- a/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx +++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/index.tsx @@ -1,4 +1,4 @@ -import { Space, Tabs, Tooltip, Typography } from 'antd'; +import { Tabs, Tooltip, Typography } from 'antd'; import { StyledSpace } from 'components/Styled'; import useThemeMode from 'hooks/useThemeMode'; import React, { useMemo } from 'react'; @@ -20,13 +20,16 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element { const { tree } = props; const { isDarkMode } = useThemeMode(); - const OverLayComponent = useMemo(() => tree?.name, [tree?.name]); + const OverLayComponentName = useMemo(() => tree?.name, [tree?.name]); + const OverLayComponentServiceName = useMemo(() => tree?.serviceName, [ + tree?.serviceName, + ]); if (!tree) { return
; } - const { tags, serviceName } = tree; + const { tags } = tree; return ( @@ -36,17 +39,18 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element { style={{ marginLeft: '0.5rem' }} > Details for selected Span - - Service - {serviceName} - - {/* */} + + Service + + {tree.serviceName} + + Operation - + {tree.name} - {/* */} + {tags.length !== 0 ? ( From 224ec8d0d9d3ce0b9422c6c35dd378d3d5cd6449 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 01:04:38 +0530 Subject: [PATCH 17/42] feat: search filter is added in the trace filter --- .../Panel/PanelBody/CommonCheckBox/index.tsx | 51 +++++++++++++++++-- frontend/src/store/reducers/trace.ts | 21 ++++++++ frontend/src/types/actions/trace.ts | 10 +++- frontend/src/types/reducer/trace.ts | 1 + 4 files changed, 79 insertions(+), 4 deletions(-) diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx index 0681e7e5d5..72d7439ade 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx @@ -1,12 +1,19 @@ -import React from 'react'; -import { useSelector } from 'react-redux'; +import { Button, Input } from 'antd'; +import React, { useState } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { Dispatch } from 'redux'; import { AppState } from 'store/reducers'; +import { INITIAL_FILTER_VALUE } from 'store/reducers/trace'; +import AppActions from 'types/actions'; +import { UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE } from 'types/actions/trace'; import { TraceFilterEnum, TraceReducer } from 'types/reducer/trace'; import CheckBoxComponent from '../Common/Checkbox'; +const { Search } = Input; + function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { - const { filter } = useSelector( + const { filter, filterDisplayValue } = useSelector( (state) => state.traces, ); @@ -15,9 +22,34 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { const status = filter.get(name) || {}; const statusObj = Object.keys(status); + const numberOfFilters = filterDisplayValue.get(name) || 0; + const dispatch = useDispatch>(); + const [searchFilter, setSearchFilter] = useState(''); + + const onClickMoreHandler = (): void => { + const newFilterDisplayValue = new Map(filterDisplayValue); + const preValue = + (newFilterDisplayValue.get(name) || 0) + INITIAL_FILTER_VALUE; + + newFilterDisplayValue.set(name, preValue); + + dispatch({ + type: UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE, + payload: newFilterDisplayValue, + }); + }; return ( <> + setSearchFilter(e.target.value)} + style={{ + padding: '0 3%', + }} + placeholder="Filter Values" + /> + {statusObj .sort((a, b) => { const countA = +status[a]; @@ -28,6 +60,13 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { } return countA - countB; }) + .filter((_, index) => index < numberOfFilters) + .filter((filter) => { + if (searchFilter.length === 0) { + return true; + } + return filter.includes(searchFilter); + }) .map((e) => ( ))} + + {numberOfFilters && statusObj.length > numberOfFilters && ( + + )} ); } diff --git a/frontend/src/store/reducers/trace.ts b/frontend/src/store/reducers/trace.ts index b99ee7dd3a..4023d375ca 100644 --- a/frontend/src/store/reducers/trace.ts +++ b/frontend/src/store/reducers/trace.ts @@ -11,6 +11,7 @@ import { UPDATE_SELECTED_TAGS, UPDATE_SPAN_ORDER, UPDATE_SPAN_ORDER_PARAMS, + 
UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE, UPDATE_SPANS_AGGREGATE, UPDATE_SPANS_AGGREGATE_PAGE_NUMBER, UPDATE_SPANS_AGGREGATE_PAGE_SIZE, @@ -23,6 +24,8 @@ import { } from 'types/actions/trace'; import { TraceFilterEnum, TraceReducer } from 'types/reducer/trace'; +export const INITIAL_FILTER_VALUE = 4; + const initialValue: TraceReducer = { filter: new Map(), filterToFetchData: ['duration', 'status', 'serviceName'], @@ -53,6 +56,17 @@ const initialValue: TraceReducer = { loading: true, payload: { items: {} }, }, + filterDisplayValue: new Map([ + ['component', INITIAL_FILTER_VALUE], + ['duration', INITIAL_FILTER_VALUE], + ['httpCode', INITIAL_FILTER_VALUE], + ['httpHost', INITIAL_FILTER_VALUE], + ['httpMethod', INITIAL_FILTER_VALUE], + ['httpUrl', INITIAL_FILTER_VALUE], + ['operation', INITIAL_FILTER_VALUE], + ['serviceName', INITIAL_FILTER_VALUE], + ['status', INITIAL_FILTER_VALUE], + ]), }; const traceReducer = ( @@ -251,6 +265,13 @@ const traceReducer = ( }; } + case UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE: { + return { + ...state, + filterDisplayValue: action.payload, + }; + } + default: return state; } diff --git a/frontend/src/types/actions/trace.ts b/frontend/src/types/actions/trace.ts index da97d05129..f043926142 100644 --- a/frontend/src/types/actions/trace.ts +++ b/frontend/src/types/actions/trace.ts @@ -31,6 +31,8 @@ export const UPDATE_SPANS_AGGREGATE_PAGE_NUMBER = export const UPDATE_SPANS_AGGREGATE_PAGE_SIZE = 'UPDATE_SPANS_AGGREGATE_PAGE_SIZE'; export const UPDATE_SPAN_ORDER_PARAMS = 'UPDATE_SPAN_ORDER_PARAMS'; +export const UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE = + 'UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE'; export interface UpdateFilter { type: typeof UPDATE_TRACE_FILTER; @@ -187,6 +189,11 @@ export interface UpdateSpanParams { }; } +export interface UpdateTraceFilterDisplayValue { + type: typeof UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE; + payload: TraceReducer['filterDisplayValue']; +} + export type TraceActions = | UpdateFilter | GetTraceFilter @@ -208,4 +215,5 @@ export type TraceActions = | UpdateSpanOrder | UpdateSpansAggregatePageNumber | UpdateSpanSize - | UpdateSpanParams; + | UpdateSpanParams + | UpdateTraceFilterDisplayValue; diff --git a/frontend/src/types/reducer/trace.ts b/frontend/src/types/reducer/trace.ts index babeb344c6..fc1c08f4fc 100644 --- a/frontend/src/types/reducer/trace.ts +++ b/frontend/src/types/reducer/trace.ts @@ -32,6 +32,7 @@ export interface TraceReducer { payload: PayloadProps; }; yAxisUnit: string | undefined; + filterDisplayValue: Map; } interface SpansAggregateData { From 729c7fce7b84d67f8433d984ae87152306e3ef43 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 01:08:51 +0530 Subject: [PATCH 18/42] chore: initial value is made 8 --- frontend/src/store/reducers/trace.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/store/reducers/trace.ts b/frontend/src/store/reducers/trace.ts index 4023d375ca..3f7672cdd5 100644 --- a/frontend/src/store/reducers/trace.ts +++ b/frontend/src/store/reducers/trace.ts @@ -24,7 +24,7 @@ import { } from 'types/actions/trace'; import { TraceFilterEnum, TraceReducer } from 'types/reducer/trace'; -export const INITIAL_FILTER_VALUE = 4; +export const INITIAL_FILTER_VALUE = 8; const initialValue: TraceReducer = { filter: new Map(), From d7d0d70aa5e281834f87087f996e5a95e37aede4 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 01:12:12 +0530 Subject: [PATCH 19/42] chore: search filter is made conditional as filters need to be present --- 
.../Panel/PanelBody/CommonCheckBox/index.tsx | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx index 72d7439ade..29255ae549 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx @@ -41,14 +41,16 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { return ( <> - setSearchFilter(e.target.value)} - style={{ - padding: '0 3%', - }} - placeholder="Filter Values" - /> + {statusObj.length > 0 && ( + setSearchFilter(e.target.value)} + style={{ + padding: '0 3%', + }} + placeholder="Filter Values" + /> + )} {statusObj .sort((a, b) => { From 84b876170df0facb447e2b5409969068191ec716 Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Thu, 23 Jun 2022 10:15:21 +0530 Subject: [PATCH 20/42] chore: clickhouse version bump (#1280) * chore: clickhouse version bump --- .../clickhouse-setup/clickhouse-config.xml | 1474 +++++++++++++---- .../clickhouse-setup/clickhouse-storage.xml | 28 + .../clickhouse-setup/clickhouse-users.xml | 123 ++ .../clickhouse-setup/docker-compose.yaml | 4 +- .../clickhouse-setup/clickhouse-config.xml | 1474 +++++++++++++---- .../clickhouse-setup/clickhouse-storage.xml | 28 + .../clickhouse-setup/clickhouse-users.xml | 123 ++ deploy/docker/clickhouse-setup/config.xml | 1304 +++++++++++++++ .../clickhouse-setup/docker-compose.arm.yaml | 133 -- .../clickhouse-setup/docker-compose.yaml | 4 +- deploy/docker/clickhouse-setup/users.xml | 123 ++ .../tests/test-deploy/clickhouse-config.xml | 1473 ++++++++++++---- .../tests/test-deploy/clickhouse-storage.xml | 28 + .../tests/test-deploy/clickhouse-users.xml | 123 ++ .../tests/test-deploy/docker-compose.arm.yaml | 99 -- .../tests/test-deploy/docker-compose.yaml | 8 +- 16 files changed, 5245 insertions(+), 1304 deletions(-) create mode 100644 deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml create mode 100644 deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml create mode 100644 deploy/docker/clickhouse-setup/clickhouse-storage.xml create mode 100644 deploy/docker/clickhouse-setup/clickhouse-users.xml create mode 100644 deploy/docker/clickhouse-setup/config.xml delete mode 100644 deploy/docker/clickhouse-setup/docker-compose.arm.yaml create mode 100644 deploy/docker/clickhouse-setup/users.xml create mode 100644 pkg/query-service/tests/test-deploy/clickhouse-storage.xml create mode 100644 pkg/query-service/tests/test-deploy/clickhouse-users.xml delete mode 100644 pkg/query-service/tests/test-deploy/docker-compose.arm.yaml diff --git a/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml b/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml index 7a5f40d299..3bb26a3a36 100644 --- a/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml +++ b/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml @@ -1,137 +1,567 @@ - + + - information - 1 + + trace + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + 8123 + + 9000 - - + + 9004 - - - - - /etc/clickhouse-server/server.crt - /etc/clickhouse-server/server.key - - /etc/clickhouse-server/dhparam.pem - none - true - true - sslv2,sslv3 - true - + + 9005 - - true - true - sslv2,sslv3 - true - - - - RejectCertificateHandler - - - + + - - + + + + - 
- + + 9009 - -9009 + + + If not specified, then it is determined analogous to 'hostname -f' command. + This setting could be used to switch replication to another network interface + (the server may be connected to multiple networks via multiple addresses) + --> + + example.clickhouse.com + --> - -:: - - + + - - + + -4096 -3 - -100 + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + correct maximum value. --> - 8589934592 + + Note: uncompressed cache can be pointless for lz4, because memory bandwidth + is slower than multi-core decompression on some server configurations. + Enabling it can sometimes paradoxically make queries slower. + --> + 8589934592 - 5368709120 + --> + 5368709120 - - /var/lib/clickhouse/ + + 1000 - - /var/lib/clickhouse/tmp/ + + 134217728 - - users.xml + + 10000 - - default + + /var/lib/clickhouse/ - - default + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + /var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default - + Example: Zulu is an alias for UTC. + --> + - + --> + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. + ]]> + - + https://clickhouse.com/docs/en/operations/table_engines/distributed/ + --> + + + + + + + + + + + localhost + 9000 + + + + + + + + false + + 127.0.0.1 + 9000 + + + 127.0.0.2 + 9000 + + + 127.0.0.3 + 9000 + + + + + + + + localhost + 9000 + + + + + localhost + 9000 + + + + + + + 127.0.0.1 + 9000 + + + + + 127.0.0.2 + 9000 + + + + + + true + + 127.0.0.1 + 9000 + + + + true + + 127.0.0.2 + 9000 + + + + + + + localhost + 9440 + 1 + + + + localhost 9000 - + + + localhost + 1 + + + + + + + + + + Values for substitutions are specified in /clickhouse/name_of_substitution elements in that file. + --> - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/ + --> + + - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables + --> + - - 3600 + + 3600 - - 3600 + + 3600 - - 60 + + 60 - + + --> + + + - + true + true + true + true + + --> + + + + system + query_log
+ + toYYYYMM(event_date) + + + - system - query_log
- - 7500 -
+ + 7500 +
+ + + system + trace_log
- + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + system part_log
- + toYYYYMM(event_date) 7500
---> + + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
- + + + system + crash_log
- - + + 1000 +
- - + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + - *_dictionary.xml + https://clickhouse.com/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts + --> + *_dictionary.xml + + + *_function.xml - + --> - + + + + + + + + + + + + + + + + + + + + + + + + + Works only if ZooKeeper is enabled. Comment it if such functionality isn't required. --> /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + @@ -304,239 +1149,156 @@ 5 ---> + --> - + --> + + - - - + + - ^carbon\. + click_cost any 0 + 3600 + + + 86400 60 - - 7776000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - ^collectd\. - any - - 0 - 10 - - - 43200 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^high\. - any - - 0 - 10 - - - 172800 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^medium\. - any - - 0 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^low\. - any - - 0 - 600 - - - 15552000 - 1800 - - - 31536000 - 3600 - - - 63072000 - 21600 - - - 126144000 - 43200 - - - 252288000 - 86400 - - - 315360000 - 604800 - - - - any + max 0 60 - 864000 - 900 + 3600 + 300 - 1728000 - 1800 - - - 3456000 + 86400 3600 - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - + - /var/lib/clickhouse/format_schemas/ -
+ --> + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + + diff --git a/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml b/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml new file mode 100644 index 0000000000..aab0c15da7 --- /dev/null +++ b/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml @@ -0,0 +1,28 @@ + + + + + + 10485760 + + + s3 + https://BUCKET-NAME.s3.amazonaws.com/data/ + ACCESS-KEY-ID + SECRET-ACCESS-KEY + + + + + + + default + + + s3 + + + + + + diff --git a/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml b/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml new file mode 100644 index 0000000000..f18562071d --- /dev/null +++ b/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index bc58e956b5..45b4137d11 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -2,12 +2,14 @@ version: "3.9" services: clickhouse: - image: yandex/clickhouse-server:21.12.3.32 + image: clickhouse/clickhouse-server:22.4.5-alpine # ports: # - "9000:9000" # - "8123:8123" volumes: - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml - ./data/clickhouse/:/var/lib/clickhouse/ deploy: restart_policy: diff --git a/deploy/docker/clickhouse-setup/clickhouse-config.xml b/deploy/docker/clickhouse-setup/clickhouse-config.xml index 7a5f40d299..3bb26a3a36 100644 --- a/deploy/docker/clickhouse-setup/clickhouse-config.xml +++ b/deploy/docker/clickhouse-setup/clickhouse-config.xml @@ -1,137 +1,567 @@ - + + - information - 1 + + trace + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + 8123 + + 9000 - - + + 9004 - - - - - /etc/clickhouse-server/server.crt - /etc/clickhouse-server/server.key - - /etc/clickhouse-server/dhparam.pem - none - true - true - sslv2,sslv3 - true - + + 9005 - - true - true - sslv2,sslv3 - true - - - - RejectCertificateHandler - - - + + - - + + + + - - + + 9009 - -9009 + + + If not specified, then it is determined analogous to 'hostname -f' command. 
+ This setting could be used to switch replication to another network interface + (the server may be connected to multiple networks via multiple addresses) + --> + + example.clickhouse.com + --> - -:: - - + + - - + + -4096 -3 - -100 + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + correct maximum value. --> - 8589934592 + + Note: uncompressed cache can be pointless for lz4, because memory bandwidth + is slower than multi-core decompression on some server configurations. + Enabling it can sometimes paradoxically make queries slower. + --> + 8589934592 - 5368709120 + --> + 5368709120 - - /var/lib/clickhouse/ + + 1000 - - /var/lib/clickhouse/tmp/ + + 134217728 - - users.xml + + 10000 - - default + + /var/lib/clickhouse/ - - default + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + /var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default - + Example: Zulu is an alias for UTC. + --> + - + --> + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. + ]]> + - + https://clickhouse.com/docs/en/operations/table_engines/distributed/ + --> + + + + + + + + + + + localhost + 9000 + + + + + + + + false + + 127.0.0.1 + 9000 + + + 127.0.0.2 + 9000 + + + 127.0.0.3 + 9000 + + + + + + + + localhost + 9000 + + + + + localhost + 9000 + + + + + + + 127.0.0.1 + 9000 + + + + + 127.0.0.2 + 9000 + + + + + + true + + 127.0.0.1 + 9000 + + + + true + + 127.0.0.2 + 9000 + + + + + + + localhost + 9440 + 1 + + + + localhost 9000 - + + + localhost + 1 + + + + + + + + + + Values for substitutions are specified in /clickhouse/name_of_substitution elements in that file. + --> - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/ + --> + + - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables + --> + - - 3600 + + 3600 - - 3600 + + 3600 - - 60 + + 60 - + + --> + + + - + true + true + true + true + + --> + + + + system + query_log
+ + toYYYYMM(event_date) + + + - system - query_log
- - 7500 -
+ + 7500 +
+ + + system + trace_log
- + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + system part_log
- + toYYYYMM(event_date) 7500
---> + + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
- + + + system + crash_log
- - + + 1000 +
- - + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + - *_dictionary.xml + https://clickhouse.com/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts + --> + *_dictionary.xml + + + *_function.xml - + --> - + + + + + + + + + + + + + + + + + + + + + + + + + Works only if ZooKeeper is enabled. Comment it if such functionality isn't required. --> /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + @@ -304,239 +1149,156 @@ 5 ---> + --> - + --> + + - - - + + - ^carbon\. + click_cost any 0 + 3600 + + + 86400 60 - - 7776000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - ^collectd\. - any - - 0 - 10 - - - 43200 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^high\. - any - - 0 - 10 - - - 172800 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^medium\. - any - - 0 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^low\. - any - - 0 - 600 - - - 15552000 - 1800 - - - 31536000 - 3600 - - - 63072000 - 21600 - - - 126144000 - 43200 - - - 252288000 - 86400 - - - 315360000 - 604800 - - - - any + max 0 60 - 864000 - 900 + 3600 + 300 - 1728000 - 1800 - - - 3456000 + 86400 3600 - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - + - /var/lib/clickhouse/format_schemas/ -
+ --> + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + + diff --git a/deploy/docker/clickhouse-setup/clickhouse-storage.xml b/deploy/docker/clickhouse-setup/clickhouse-storage.xml new file mode 100644 index 0000000000..aab0c15da7 --- /dev/null +++ b/deploy/docker/clickhouse-setup/clickhouse-storage.xml @@ -0,0 +1,28 @@ + + + + + + 10485760 + + + s3 + https://BUCKET-NAME.s3.amazonaws.com/data/ + ACCESS-KEY-ID + SECRET-ACCESS-KEY + + + + + + + default + + + s3 + + + + + + diff --git a/deploy/docker/clickhouse-setup/clickhouse-users.xml b/deploy/docker/clickhouse-setup/clickhouse-users.xml new file mode 100644 index 0000000000..f18562071d --- /dev/null +++ b/deploy/docker/clickhouse-setup/clickhouse-users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/deploy/docker/clickhouse-setup/config.xml b/deploy/docker/clickhouse-setup/config.xml new file mode 100644 index 0000000000..3bb26a3a36 --- /dev/null +++ b/deploy/docker/clickhouse-setup/config.xml @@ -0,0 +1,1304 @@ + + + + + + trace + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + + + + 8123 + + + 9000 + + + 9004 + + + 9005 + + + + + + + + + + + + 9009 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + + + + + + 8589934592 + + + 5368709120 + + + + 1000 + + + 134217728 + + + 10000 + + + /var/lib/clickhouse/ + + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + /var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default + + + + + + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. 
+ ]]> + + + + + + + + + + + + + + + + localhost + 9000 + + + + + + + + false + + 127.0.0.1 + 9000 + + + 127.0.0.2 + 9000 + + + 127.0.0.3 + 9000 + + + + + + + + localhost + 9000 + + + + + localhost + 9000 + + + + + + + 127.0.0.1 + 9000 + + + + + 127.0.0.2 + 9000 + + + + + + true + + 127.0.0.1 + 9000 + + + + true + + 127.0.0.2 + 9000 + + + + + + + localhost + 9440 + 1 + + + + + + + localhost + 9000 + + + + + localhost + 1 + + + + + + + + + + + + + + + + + + + + + + + + 3600 + + + + 3600 + + + 60 + + + + + + + + + + + + + system + query_log
+ + toYYYYMM(event_date) + + + + + + 7500 +
+ + + + system + trace_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + part_log
+ toYYYYMM(event_date) + 7500 +
+ + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
+ + + + + system + crash_log
+ + + 1000 +
+ + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + + + + *_dictionary.xml + + + *_function.xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + click_cost + any + + 0 + 3600 + + + 86400 + 60 + + + + max + + 0 + 60 + + + 3600 + 300 + + + 86400 + 3600 + + + + + + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + +
diff --git a/deploy/docker/clickhouse-setup/docker-compose.arm.yaml b/deploy/docker/clickhouse-setup/docker-compose.arm.yaml deleted file mode 100644 index c30bf82064..0000000000 --- a/deploy/docker/clickhouse-setup/docker-compose.arm.yaml +++ /dev/null @@ -1,133 +0,0 @@ -version: "2.4" - -services: - clickhouse: - image: altinity/clickhouse-server:21.12.3.32.altinitydev.arm - # ports: - # - "9000:9000" - # - "8123:8123" - volumes: - - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml - - ./data/clickhouse/:/var/lib/clickhouse/ - restart: on-failure - logging: - options: - max-size: 50m - max-file: "3" - healthcheck: - # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" - test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"] - interval: 30s - timeout: 5s - retries: 3 - - alertmanager: - image: signoz/alertmanager:0.23.0-0.1 - volumes: - - ./data/alertmanager:/data - depends_on: - query-service: - condition: service_healthy - restart: on-failure - command: - - --queryService.url=http://query-service:8085 - - --storage.path=/data - -# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` - - - query-service: - image: signoz/query-service:0.8.2 - container_name: query-service - command: ["-config=/root/config/prometheus.yml"] - # ports: - # - "6060:6060" # pprof port - # - "8080:8080" # query-service port - volumes: - - ./prometheus.yml:/root/config/prometheus.yml - - ../dashboards:/root/config/dashboards - - ./data/signoz/:/var/lib/signoz/ - environment: - - ClickHouseUrl=tcp://clickhouse:9000/?database=signoz_traces - - STORAGE=clickhouse - - GODEBUG=netdns=go - - TELEMETRY_ENABLED=true - - DEPLOYMENT_TYPE=docker-standalone-arm - restart: on-failure - healthcheck: - test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/version"] - interval: 30s - timeout: 5s - retries: 3 - depends_on: - clickhouse: - condition: service_healthy - - frontend: - image: signoz/frontend:0.8.2 - container_name: frontend - restart: on-failure - depends_on: - - alertmanager - - query-service - ports: - - "3301:3301" - volumes: - - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf - - otel-collector: - image: signoz/otelcontribcol:0.45.1-0.3 - command: ["--config=/etc/otel-collector-config.yaml"] - volumes: - - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - ports: - - "4317:4317" # OTLP gRPC receiver - - "4318:4318" # OTLP HTTP receiver - # - "8889:8889" # Prometheus metrics exposed by the agent - # - "13133:13133" # health_check - # - "14268:14268" # Jaeger receiver - # - "55678:55678" # OpenCensus receiver - # - "55679:55679" # zpages extension - # - "55680:55680" # OTLP gRPC legacy receiver - # - "55681:55681" # OTLP HTTP legacy receiver - mem_limit: 2000m - restart: on-failure - depends_on: - clickhouse: - condition: service_healthy - - otel-collector-metrics: - image: signoz/otelcontribcol:0.45.1-0.3 - command: ["--config=/etc/otel-collector-metrics-config.yaml"] - volumes: - - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - restart: on-failure - depends_on: - clickhouse: - condition: service_healthy - - hotrod: - image: jaegertracing/example-hotrod:1.30 - container_name: hotrod - logging: - options: - max-size: 50m - max-file: "3" - command: ["all"] - environment: - - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces - - load-hotrod: - image: 
"grubykarol/locust:1.2.3-python3.9-alpine3.12" - container_name: load-hotrod - hostname: load-hotrod - environment: - ATTACKED_HOST: http://hotrod:8080 - LOCUST_MODE: standalone - NO_PROXY: standalone - TASK_DELAY_FROM: 5 - TASK_DELAY_TO: 30 - QUIET_MODE: "${QUIET_MODE:-false}" - LOCUST_OPTS: "--headless -u 10 -r 1" - volumes: - - ../common/locust-scripts:/locust diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 44a0e25554..166a2096e3 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -2,12 +2,14 @@ version: "2.4" services: clickhouse: - image: yandex/clickhouse-server:21.12.3.32 + image: clickhouse/clickhouse-server:22.4.5-alpine # ports: # - "9000:9000" # - "8123:8123" volumes: - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml - ./data/clickhouse/:/var/lib/clickhouse/ restart: on-failure logging: diff --git a/deploy/docker/clickhouse-setup/users.xml b/deploy/docker/clickhouse-setup/users.xml new file mode 100644 index 0000000000..f18562071d --- /dev/null +++ b/deploy/docker/clickhouse-setup/users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/pkg/query-service/tests/test-deploy/clickhouse-config.xml b/pkg/query-service/tests/test-deploy/clickhouse-config.xml index 06ddb2b723..3bb26a3a36 100644 --- a/pkg/query-service/tests/test-deploy/clickhouse-config.xml +++ b/pkg/query-service/tests/test-deploy/clickhouse-config.xml @@ -1,136 +1,567 @@ - + + - information - 1 + + trace + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + 8123 + + 9000 - - + + 9004 - - - - - /etc/clickhouse-server/server.crt - /etc/clickhouse-server/server.key - - /etc/clickhouse-server/dhparam.pem - none - true - true - sslv2,sslv3 - true - + + 9005 - - true - true - sslv2,sslv3 - true - - - - RejectCertificateHandler - - - + + - - - - - - - s3 - http://172.17.0.1:9100/test// - ash - password - - - - - - - default - - - s3 - - - - - + + + + - - + + 9009 - -9009 + + + If not specified, then it is determined analogous to 'hostname -f' command. + This setting could be used to switch replication to another network interface + (the server may be connected to multiple networks via multiple addresses) + --> + + example.clickhouse.com + --> - -:: - - + + - - + + -4096 -3 - -100 + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + correct maximum value. --> - 8589934592 + + Note: uncompressed cache can be pointless for lz4, because memory bandwidth + is slower than multi-core decompression on some server configurations. + Enabling it can sometimes paradoxically make queries slower. 
+ --> + 8589934592 - 5368709120 + --> + 5368709120 - - /var/lib/clickhouse/ + + 1000 - - /var/lib/clickhouse/tmp/ + + 134217728 - - users.xml + + 10000 - - default + + /var/lib/clickhouse/ - - default + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + /var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default - + Example: Zulu is an alias for UTC. + --> + - + --> + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. + ]]> + - + https://clickhouse.com/docs/en/operations/table_engines/distributed/ + --> + + + + + + + + + + + localhost + 9000 + + + + + + + + false + + 127.0.0.1 + 9000 + + + 127.0.0.2 + 9000 + + + 127.0.0.3 + 9000 + + + + + + + + localhost + 9000 + + + + + localhost + 9000 + + + + + + + 127.0.0.1 + 9000 + + + + + 127.0.0.2 + 9000 + + + + + + true + + 127.0.0.1 + 9000 + + + + true + + 127.0.0.2 + 9000 + + + + + + + localhost + 9440 + 1 + + + + localhost 9000 - + + + localhost + 1 + + + + + + + + + + Values for substitutions are specified in /clickhouse/name_of_substitution elements in that file. + --> - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/ + --> + + - + See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables + --> + - - 3600 + + 3600 - - 3600 + + 3600 - - 60 + + 60 - + + --> + + + - + true + true + true + true + + --> + + + + system + query_log
+ + toYYYYMM(event_date) + + + - system - query_log
- - 7500 -
+ + 7500 +
+ + + system + trace_log
- + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + system part_log
- + toYYYYMM(event_date) 7500
---> + + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
- + + + system + crash_log
- - + + 1000 +
- - + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + - *_dictionary.xml + https://clickhouse.com/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts + --> + *_dictionary.xml + + + *_function.xml - + --> - + + + + + + + + + + + + + + + + + + + + + + + + + Works only if ZooKeeper is enabled. Comment it if such functionality isn't required. --> /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + @@ -303,239 +1149,156 @@ 5 ---> + --> - + --> + + - - - + + - ^carbon\. + click_cost any 0 + 3600 + + + 86400 60 - - 7776000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - ^collectd\. - any - - 0 - 10 - - - 43200 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^high\. - any - - 0 - 10 - - - 172800 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^medium\. - any - - 0 - 60 - - - 864000 - 900 - - - 1728000 - 1800 - - - 3456000 - 3600 - - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - - - - ^low\. - any - - 0 - 600 - - - 15552000 - 1800 - - - 31536000 - 3600 - - - 63072000 - 21600 - - - 126144000 - 43200 - - - 252288000 - 86400 - - - 315360000 - 604800 - - - - any + max 0 60 - 864000 - 900 + 3600 + 300 - 1728000 - 1800 - - - 3456000 + 86400 3600 - - 10368000 - 21600 - - - 34560000 - 43200 - - - 63072000 - 86400 - - - 94608000 - 604800 - - + - /var/lib/clickhouse/format_schemas/ -
+ --> + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + + diff --git a/pkg/query-service/tests/test-deploy/clickhouse-storage.xml b/pkg/query-service/tests/test-deploy/clickhouse-storage.xml new file mode 100644 index 0000000000..eaf1e7e99d --- /dev/null +++ b/pkg/query-service/tests/test-deploy/clickhouse-storage.xml @@ -0,0 +1,28 @@ + + + + + + 10485760 + + + s3 + http://172.17.0.1:9100/test// + ash + password + + + + + + + default + + + s3 + + + + + + diff --git a/pkg/query-service/tests/test-deploy/clickhouse-users.xml b/pkg/query-service/tests/test-deploy/clickhouse-users.xml new file mode 100644 index 0000000000..f18562071d --- /dev/null +++ b/pkg/query-service/tests/test-deploy/clickhouse-users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/pkg/query-service/tests/test-deploy/docker-compose.arm.yaml b/pkg/query-service/tests/test-deploy/docker-compose.arm.yaml deleted file mode 100644 index 48a1449fe4..0000000000 --- a/pkg/query-service/tests/test-deploy/docker-compose.arm.yaml +++ /dev/null @@ -1,99 +0,0 @@ -version: "2.4" - -services: - clickhouse: - image: altinity/clickhouse-server:21.12.3.32.altinitydev.arm - volumes: - - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml - restart: on-failure - logging: - options: - max-size: 50m - max-file: "3" - healthcheck: - # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" - test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"] - interval: 30s - timeout: 5s - retries: 3 - - alertmanager: - image: signoz/alertmanager:0.23.0-0.1 - depends_on: - - query-service - restart: on-failure - command: - - --queryService.url=http://query-service:8085 - - --storage.path=/data - - query-service: - image: signoz/query-service:latest - container_name: query-service - command: ["-config=/root/config/prometheus.yml"] - volumes: - - ./prometheus.yml:/root/config/prometheus.yml - - ../dashboards:/root/config/dashboards - - ./data:/var/lib/signoz - ports: - - "8180:8080" - environment: - - ClickHouseUrl=tcp://clickhouse:9000/?database=signoz_traces - - STORAGE=clickhouse - - GODEBUG=netdns=go - - TELEMETRY_ENABLED=true - healthcheck: - test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/version"] - interval: 30s - timeout: 5s - retries: 3 - depends_on: - clickhouse: - condition: service_healthy - - otel-collector: - image: signoz/otelcontribcol:0.45.1-0.3 - command: ["--config=/etc/otel-collector-config.yaml"] - volumes: - - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - ports: - - "4317:4317" # OTLP GRPC receiver - mem_limit: 2000m - restart: always - depends_on: - clickhouse: - condition: service_healthy - - otel-collector-metrics: - image: signoz/otelcontribcol:0.45.1-0.3 - command: ["--config=/etc/otel-collector-metrics-config.yaml"] - volumes: - - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - depends_on: - clickhouse: - condition: service_healthy - - hotrod: - image: jaegertracing/example-hotrod:1.30 - container_name: hotrod - logging: - options: - max-size: 50m - max-file: "3" - command: 
["all"] - environment: - - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces - - load-hotrod: - image: "grubykarol/locust:1.2.3-python3.9-alpine3.12" - container_name: load-hotrod - hostname: load-hotrod - environment: - ATTACKED_HOST: http://hotrod:8080 - LOCUST_MODE: standalone - NO_PROXY: standalone - TASK_DELAY_FROM: 5 - TASK_DELAY_TO: 30 - QUIET_MODE: "${QUIET_MODE:-false}" - LOCUST_OPTS: "--headless -u 10 -r 1" - volumes: - - ../../../../deploy/docker/common/locust-scripts:/locust diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 4a3ad41870..b1c6a39d7e 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -2,9 +2,11 @@ version: "2.4" services: clickhouse: - image: yandex/clickhouse-server:21.12.3.32 + image: clickhouse/clickhouse-server:22.4.5-alpine volumes: - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml restart: on-failure logging: options: @@ -17,8 +19,8 @@ services: timeout: 5s retries: 3 ports: - - "9000:9000" - - "8123:8123" + - "9000:9000" + - "8123:8123" alertmanager: image: signoz/alertmanager:0.23.0-0.1 From d21ab7b82d1cb46ce819834659898de64f266133 Mon Sep 17 00:00:00 2001 From: rw4nn <86979911+rw4nn@users.noreply.github.com> Date: Thu, 23 Jun 2022 06:55:55 +0200 Subject: [PATCH 21/42] fix(FE): escape regular expression to filter dashboards with special characters (#1279) * fix(FE): escape reg exp to filter dashboards * test(FE): add type and use uuid v4 --- .../SearchFilter/__tests__/utils.test.ts | 58 +++++++++++++++++++ .../ListOfDashboard/SearchFilter/utils.ts | 4 +- 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 frontend/src/container/ListOfDashboard/SearchFilter/__tests__/utils.test.ts diff --git a/frontend/src/container/ListOfDashboard/SearchFilter/__tests__/utils.test.ts b/frontend/src/container/ListOfDashboard/SearchFilter/__tests__/utils.test.ts new file mode 100644 index 0000000000..db9825b677 --- /dev/null +++ b/frontend/src/container/ListOfDashboard/SearchFilter/__tests__/utils.test.ts @@ -0,0 +1,58 @@ +import { Dashboard } from 'types/api/dashboard/getAll'; +import { v4 as uuid } from 'uuid'; + +import { TOperator } from '../types'; +import { executeSearchQueries } from '../utils'; + +describe('executeSearchQueries', () => { + const firstDashboard: Dashboard = { + id: 11111, + uuid: uuid(), + created_at: '', + updated_at: '', + data: { + title: 'first dashboard', + }, + }; + const secondDashboard: Dashboard = { + id: 22222, + uuid: uuid(), + created_at: '', + updated_at: '', + data: { + title: 'second dashboard', + }, + }; + const thirdDashboard: Dashboard = { + id: 333333, + uuid: uuid(), + created_at: '', + updated_at: '', + data: { + title: 'third dashboard (with special characters +?\\)', + }, + }; + const dashboards = [firstDashboard, secondDashboard, thirdDashboard]; + + it('should filter dashboards based on title', () => { + const query = { + category: 'title', + id: 'someid', + operator: '=' as TOperator, + value: 'first dashboard', + }; + + expect(executeSearchQueries([query], dashboards)).toEqual([firstDashboard]); + }); + + it('should filter dashboards with special characters', () => { + const query = { + category: 'title', + id: 'someid', + operator: '=' as TOperator, + value: 'third dashboard (with special characters 
+?\\)', + }; + + expect(executeSearchQueries([query], dashboards)).toEqual([thirdDashboard]); + }); +}); diff --git a/frontend/src/container/ListOfDashboard/SearchFilter/utils.ts b/frontend/src/container/ListOfDashboard/SearchFilter/utils.ts index 5f9b37cc3e..6487ccd789 100644 --- a/frontend/src/container/ListOfDashboard/SearchFilter/utils.ts +++ b/frontend/src/container/ListOfDashboard/SearchFilter/utils.ts @@ -42,6 +42,8 @@ export const executeSearchQueries = ( if (!searchData.length || !queries.length) { return searchData; } + const escapeRegExp = (regExp: string): string => + regExp.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); queries.forEach((query: IQueryStructure) => { const { operator } = query; @@ -61,7 +63,7 @@ export const executeSearchQueries = ( for (const searchSpaceItem of searchSpace) { if (searchSpaceItem) for (const queryValue of value) { - if (searchSpaceItem.match(queryValue)) { + if (searchSpaceItem.match(escapeRegExp(queryValue))) { return resolveOperator(true, operator); } } From 63e663a92d88859f0ec5f5438ef9aba8641666ad Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 10:54:15 +0530 Subject: [PATCH 22/42] feat: removed auto save layout from dashboard --- frontend/src/container/GridGraphLayout/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/container/GridGraphLayout/index.tsx b/frontend/src/container/GridGraphLayout/index.tsx index 88250f3b7c..8f7ea56c15 100644 --- a/frontend/src/container/GridGraphLayout/index.tsx +++ b/frontend/src/container/GridGraphLayout/index.tsx @@ -218,7 +218,7 @@ function GridGraph(props: Props): JSX.Element { const onLayoutChangeHandler = async (layout: Layout[]): Promise => { setLayoutFunction(layout); - await onLayoutSaveHandler(layout); + // await onLayoutSaveHandler(layout); }; const onAddPanelHandler = useCallback(() => { From 1e980c3886701dc12c3b59e3ff29ceb44944da60 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 12:37:42 +0530 Subject: [PATCH 23/42] feat: condition is updated --- .../Filters/Panel/PanelBody/CommonCheckBox/index.tsx | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx index 29255ae549..440c9652a7 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx @@ -39,6 +39,10 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { }); }; + const isMoreButtonAvilable = Boolean( + numberOfFilters && statusObj.length > numberOfFilters, + ); + return ( <> {statusObj.length > 0 && ( @@ -62,13 +66,15 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { } return countA - countB; }) - .filter((_, index) => index < numberOfFilters) .filter((filter) => { if (searchFilter.length === 0) { return true; } - return filter.includes(searchFilter); + return filter + .toLocaleLowerCase() + .includes(searchFilter.toLocaleLowerCase()); }) + .filter((_, index) => index < numberOfFilters) .map((e) => ( ))} - {numberOfFilters && statusObj.length > numberOfFilters && ( + {isMoreButtonAvilable && ( From b8c3fd1cbf61c76da5a041222121ddb0c94278fd Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 15:26:44 +0530 Subject: [PATCH 24/42] test: test pipeline for unit test is configured (#1277) * test: test pipeline is configured Co-authored-by: Palash gupta --- 
.github/workflows/build.yaml | 2 + frontend/jest.setup.ts | 1 + frontend/package.json | 1 + .../__snapshots__/NotFound.test.tsx.snap | 106 +++++++++++++++++- .../TraceFlameGraph.test.tsx.snap | 24 +++- frontend/tsconfig.json | 3 +- frontend/yarn.lock | 7 ++ 7 files changed, 134 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index f9096698cc..8f346bf882 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -17,6 +17,8 @@ jobs: run: cd frontend && yarn install - name: Run ESLint run: cd frontend && npm run lint + - name: Run Jest + run: cd frontend && npm run jest - name: TSC run: yarn tsc working-directory: ./frontend diff --git a/frontend/jest.setup.ts b/frontend/jest.setup.ts index 6557f780cf..b3b8061422 100644 --- a/frontend/jest.setup.ts +++ b/frontend/jest.setup.ts @@ -2,3 +2,4 @@ * Adds custom matchers from the react testing library to all tests */ import '@testing-library/jest-dom'; +import 'jest-styled-components'; diff --git a/frontend/package.json b/frontend/package.json index ebaffb5fae..f93bc9684c 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -159,6 +159,7 @@ "husky": "^7.0.4", "is-ci": "^3.0.1", "jest-playwright-preset": "^1.7.0", + "jest-styled-components": "^7.0.8", "less-plugin-npm-import": "^2.1.0", "lint-staged": "^12.3.7", "portfinder-sync": "^0.0.2", diff --git a/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap b/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap index 0e9ce92e30..9da91a31bd 100644 --- a/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap +++ b/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap @@ -2,8 +2,102 @@ exports[`Not Found page test should render Not Found page without errors 1`] = ` -

(new NotFound snapshot markup; rendered text: "Ah, seems like we reached a dead end!" and "Page Not Found")
diff --git a/frontend/src/container/TraceFlameGraph/__tests__/__snapshots__/TraceFlameGraph.test.tsx.snap b/frontend/src/container/TraceFlameGraph/__tests__/__snapshots__/TraceFlameGraph.test.tsx.snap index 39a3638956..c8c24ebfd4 100644 --- a/frontend/src/container/TraceFlameGraph/__tests__/__snapshots__/TraceFlameGraph.test.tsx.snap +++ b/frontend/src/container/TraceFlameGraph/__tests__/__snapshots__/TraceFlameGraph.test.tsx.snap @@ -2,12 +2,30 @@ exports[`loads and displays greeting 1`] = ` -
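Note on the snapshot churn above: registering jest-styled-components in jest.setup.ts installs a snapshot serializer, so stored snapshots now carry the generated CSS rules (with hashed class names rewritten to .c0, .c1, ...) next to the markup, which is why these .snap diffs grow. A minimal sketch of that behaviour, assuming the @testing-library/react setup already used by these tests (the Badge component and its style are illustrative only):

    import { render } from '@testing-library/react';
    import React from 'react';
    import styled from 'styled-components';

    // 'jest-styled-components' is already loaded globally via jest.setup.ts,
    // so the test file itself needs no extra import.
    const Badge = styled.span`
      color: #2d9cdb;
    `;

    it('stores the generated CSS alongside the markup', () => {
      const { asFragment } = render(<Badge>ok</Badge>);
      // the serializer prints rules such as ".c0 { color: #2d9cdb; }" into the snapshot
      expect(asFragment()).toMatchSnapshot();
    });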
diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index b3aa27fe50..a10bce6e81 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -35,6 +35,7 @@ "playwright.config.ts", "./commitlint.config.js", "./webpack.config.js", - "./webpack.config.prod.js" + "./webpack.config.prod.js", + "./jest.setup.ts" ] } diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 2227658f6b..3a254370a1 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -8128,6 +8128,13 @@ jest-snapshot@^27.5.1: pretty-format "^27.5.1" semver "^7.3.2" +jest-styled-components@^7.0.8: + version "7.0.8" + resolved "https://registry.yarnpkg.com/jest-styled-components/-/jest-styled-components-7.0.8.tgz#9ea3b43f002de060b4638fde3b422d14b3e3ec9f" + integrity sha512-0KE54d0yIzKcvtOv8eikyjG3rFRtKYUyQovaoha3nondtZzXYGB3bhsvYgEegU08Iry0ndWx2+g9f5ZzD4I+0Q== + dependencies: + css "^3.0.0" + jest-util@^26.6.2: version "26.6.2" resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" From afbcde5edc9f8cac44122593ae1963c3f1c5c3e2 Mon Sep 17 00:00:00 2001 From: Ankit Nayan Date: Thu, 23 Jun 2022 15:29:15 +0530 Subject: [PATCH 25/42] fix: added 404 for error in getRule api (#1309) * fix: added multiple error checks in getRule api --- .../app/clickhouseReader/reader.go | 36 +++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 1258060c41..f405c69f6d 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -592,21 +592,45 @@ func (r *ClickHouseReader) GetRulesFromDB() (*[]model.RuleResponseItem, *model.A func (r *ClickHouseReader) GetRule(id string) (*model.RuleResponseItem, *model.ApiError) { - idInt, _ := strconv.Atoi(id) + idInt, err := strconv.Atoi(id) + if err != nil { + zap.S().Debug("Error in parsing param: ", err) + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} + } rule := &model.RuleResponseItem{} - query := fmt.Sprintf("SELECT id, updated_at, data FROM rules WHERE id=%d", idInt) - - err := r.localDB.Get(rule, query) - - zap.S().Info(query) + query := "SELECT id, updated_at, data FROM rules WHERE id=?" 
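	// The rule id is bound as a query parameter rather than formatted into the SQL string,
	// and the matching rows are counted explicitly below so the caller can distinguish a
	// missing rule (ErrorNotFound) from a duplicate id (ErrorConflict).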
+ rows, err := r.localDB.Query(query, idInt) if err != nil { zap.S().Debug("Error in processing sql query: ", err) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } + count := 0 + // iterate over each row + for rows.Next() { + err = rows.Scan(&rule.Id, &rule.UpdatedAt, &rule.Data) + if err != nil { + zap.S().Debug(err) + return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} + } + count += 1 + + } + + if count == 0 { + err = fmt.Errorf("no rule with id %d found", idInt) + zap.S().Debug(err) + return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: err} + } + if count > 1 { + err = fmt.Errorf("multiple rules with id %d found", idInt) + zap.S().Debug(err) + return nil, &model.ApiError{Typ: model.ErrorConflict, Err: err} + } + return rule, nil } From 64927acd976259c85dad26b6b5ea935b24f81501 Mon Sep 17 00:00:00 2001 From: Ankit Nayan Date: Thu, 23 Jun 2022 15:33:31 +0530 Subject: [PATCH 26/42] updated codeowners for query-service --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b717ca9eee..2781d2a0c6 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,3 +4,4 @@ * @ankitnayan /frontend/ @palashgdev @pranshuchittora /deploy/ @prashant-shahi +/pkg/query-service/ @srikanthccv @makeavish @nityanandagohain From e4f2219f8c8f12030fd9544f2cf02aaf39baf5bb Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 16:29:38 +0530 Subject: [PATCH 27/42] feat: dashboard breadcrumb is added --- frontend/src/container/TopNav/Breadcrumbs/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx index 33d42ac5b0..c8d8bc9628 100644 --- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx +++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx @@ -17,6 +17,7 @@ const breadcrumbNameMap = { [ROUTES.MY_SETTINGS]: 'My Settings', [ROUTES.ERROR_DETAIL]: 'Errors', [ROUTES.LIST_ALL_ALERT]: 'Alerts', + [ROUTES.ALL_DASHBOARD]: 'Dashboard', }; function ShowBreadcrumbs(props: RouteComponentProps): JSX.Element { From 186f4dca71c9c40d0f211267e5cbc8ccac59c804 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 18:07:03 +0530 Subject: [PATCH 28/42] feat: light mode tooltip is updated --- .../Trace/Filters/Panel/PanelBody/Common/Checkbox.tsx | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/Common/Checkbox.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/Common/Checkbox.tsx index a198dd960b..a30118fe36 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/Common/Checkbox.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/Common/Checkbox.tsx @@ -155,10 +155,9 @@ function CheckBoxComponent(props: CheckBoxProps): JSX.Element { const isCheckBoxSelected = isUserSelected; - const TooTipOverLay = useMemo( - (): JSX.Element => {keyValue}, - [keyValue], - ); + const TooTipOverLay = useMemo((): JSX.Element =>
{keyValue}
, [ + keyValue, + ]); return ( From 1ebf3dbf657f00e3dee4ef221e1087ef8b9a2233 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 19:11:19 +0530 Subject: [PATCH 29/42] feat: select tags key and value are updated to autocomplete filtering (#1267) * feat: select tags key and value are updated to autocomplete filtering Co-authored-by: Palash gupta --- .../Trace/Search/AllTags/Tag/TagKey.tsx | 23 ++++++++++---- .../Trace/Search/AllTags/Tag/TagValue.tsx | 31 ++++++++++++++----- .../Trace/Search/AllTags/Tag/styles.ts | 21 ++++++++++--- 3 files changed, 57 insertions(+), 18 deletions(-) diff --git a/frontend/src/container/Trace/Search/AllTags/Tag/TagKey.tsx b/frontend/src/container/Trace/Search/AllTags/Tag/TagKey.tsx index b57b4fc361..bb9794d8e7 100644 --- a/frontend/src/container/Trace/Search/AllTags/Tag/TagKey.tsx +++ b/frontend/src/container/Trace/Search/AllTags/Tag/TagKey.tsx @@ -73,11 +73,24 @@ function TagsKey(props: TagsKeysProps): JSX.Element { { - if (options && options.find((option) => option.value === value)) { + allowClear + showSearch + options={options?.map((e) => ({ + label: e.label?.toString(), + value: e.value, + }))} + filterOption={(inputValue, option): boolean => + option?.label?.toUpperCase().indexOf(inputValue.toUpperCase()) !== -1 + } + onChange={(e): void => setSelectedKey(e)} + onSelect={(value: unknown): void => { + if ( + typeof value === 'string' && + options && + options.find((option) => option.value === value) + ) { setSelectedKey(value); setLocalSelectedTags((tags) => [ @@ -89,8 +102,6 @@ function TagsKey(props: TagsKeysProps): JSX.Element { }, ...tags.slice(index + 1, tags.length), ]); - } else { - setSelectedKey(''); } }} > diff --git a/frontend/src/container/Trace/Search/AllTags/Tag/TagValue.tsx b/frontend/src/container/Trace/Search/AllTags/Tag/TagValue.tsx index 756bb54225..60b2d4118b 100644 --- a/frontend/src/container/Trace/Search/AllTags/Tag/TagValue.tsx +++ b/frontend/src/container/Trace/Search/AllTags/Tag/TagValue.tsx @@ -1,13 +1,13 @@ import { Select } from 'antd'; import getTagValue from 'api/trace/getTagValue'; -import React from 'react'; +import React, { useState } from 'react'; import { useQuery } from 'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; import { GlobalReducer } from 'types/reducer/globalTime'; import { TraceReducer } from 'types/reducer/trace'; -import { SelectComponent } from './styles'; +import { AutoCompleteComponent } from './styles'; function TagValue(props: TagValueProps): JSX.Element { const { tag, setLocalSelectedTags, index, tagKey } = props; @@ -16,6 +16,7 @@ function TagValue(props: TagValueProps): JSX.Element { Operator: selectedOperator, Values: selectedValues, } = tag; + const [localValue, setLocalValue] = useState(selectedValues[0]); const globalReducer = useSelector( (state) => state.globalTime, @@ -34,22 +35,38 @@ function TagValue(props: TagValueProps): JSX.Element { ); return ( - ({ + label: e.tagValues, + value: e.tagValues, + }))} + allowClear + defaultOpen + showSearch + filterOption={(inputValue, option): boolean => + option?.label.toUpperCase().indexOf(inputValue.toUpperCase()) !== -1 + } + disabled={isLoading} + value={localValue} + onChange={(values): void => { + if (typeof values === 'string') { + setLocalValue(values); + } + }} onSelect={(value: unknown): void => { if (typeof value === 'string') { + setLocalValue(value); setLocalSelectedTags((tags) => [ ...tags.slice(0, index), { Key: selectedKey, Operator: selectedOperator, - Values: 
[...selectedValues, value], + Values: [value], }, ...tags.slice(index + 1, tags.length), ]); } }} - loading={isLoading || false} > {data && data.payload && @@ -58,7 +75,7 @@ function TagValue(props: TagValueProps): JSX.Element { {suggestion.tagValues} ))} - + ); } diff --git a/frontend/src/container/Trace/Search/AllTags/Tag/styles.ts b/frontend/src/container/Trace/Search/AllTags/Tag/styles.ts index 347bc287f2..e604a444d7 100644 --- a/frontend/src/container/Trace/Search/AllTags/Tag/styles.ts +++ b/frontend/src/container/Trace/Search/AllTags/Tag/styles.ts @@ -1,4 +1,4 @@ -import { Select, Space } from 'antd'; +import { AutoComplete, Select, Space } from 'antd'; import styled from 'styled-components'; export const SpaceComponent = styled(Space)` @@ -9,18 +9,23 @@ export const SpaceComponent = styled(Space)` export const SelectComponent = styled(Select)` &&& { - min-width: 170px; - margin-right: 21.91px; - margin-left: 21.92px; + width: 100%; } `; -export const Container = styled.div` +export const Container = styled(Space)` &&& { display: flex; margin-top: 1rem; margin-bottom: 1rem; } + + .ant-space-item:not(:last-child, :nth-child(2)) { + width: 100%; + } + .ant-space-item:nth-child(2) { + width: 50%; + } `; export const IconContainer = styled.div` @@ -31,3 +36,9 @@ export const IconContainer = styled.div` margin-left: 1.125rem; `; + +export const AutoCompleteComponent = styled(AutoComplete)` + &&& { + width: 100%; + } +`; From bef83d30cce8e3d86dc39c8e312c4759b44ef5f5 Mon Sep 17 00:00:00 2001 From: Palash Date: Thu, 23 Jun 2022 19:12:43 +0530 Subject: [PATCH 30/42] feat: duration filter is updated (#1272) * feat: duration filter is updated * feat: search filter is added in the trace filter Co-authored-by: Palash gupta --- .../Panel/PanelBody/CommonCheckBox/index.tsx | 51 ++++++++++- .../Panel/PanelBody/Duration/index.tsx | 89 +++++++++---------- .../Panel/PanelBody/Duration/styles.ts | 15 +++- .../Filters/Panel/PanelBody/Duration/util.ts | 13 +++ frontend/src/store/reducers/trace.ts | 21 +++++ frontend/src/types/actions/trace.ts | 10 ++- frontend/src/types/reducer/trace.ts | 1 + 7 files changed, 149 insertions(+), 51 deletions(-) create mode 100644 frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/util.ts diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx index 0681e7e5d5..72d7439ade 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/CommonCheckBox/index.tsx @@ -1,12 +1,19 @@ -import React from 'react'; -import { useSelector } from 'react-redux'; +import { Button, Input } from 'antd'; +import React, { useState } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { Dispatch } from 'redux'; import { AppState } from 'store/reducers'; +import { INITIAL_FILTER_VALUE } from 'store/reducers/trace'; +import AppActions from 'types/actions'; +import { UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE } from 'types/actions/trace'; import { TraceFilterEnum, TraceReducer } from 'types/reducer/trace'; import CheckBoxComponent from '../Common/Checkbox'; +const { Search } = Input; + function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { - const { filter } = useSelector( + const { filter, filterDisplayValue } = useSelector( (state) => state.traces, ); @@ -15,9 +22,34 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { const status = 
filter.get(name) || {}; const statusObj = Object.keys(status); + const numberOfFilters = filterDisplayValue.get(name) || 0; + const dispatch = useDispatch>(); + const [searchFilter, setSearchFilter] = useState(''); + + const onClickMoreHandler = (): void => { + const newFilterDisplayValue = new Map(filterDisplayValue); + const preValue = + (newFilterDisplayValue.get(name) || 0) + INITIAL_FILTER_VALUE; + + newFilterDisplayValue.set(name, preValue); + + dispatch({ + type: UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE, + payload: newFilterDisplayValue, + }); + }; return ( <> + setSearchFilter(e.target.value)} + style={{ + padding: '0 3%', + }} + placeholder="Filter Values" + /> + {statusObj .sort((a, b) => { const countA = +status[a]; @@ -28,6 +60,13 @@ function CommonCheckBox(props: CommonCheckBoxProps): JSX.Element { } return countA - countB; }) + .filter((_, index) => index < numberOfFilters) + .filter((filter) => { + if (searchFilter.length === 0) { + return true; + } + return filter.includes(searchFilter); + }) .map((e) => ( ))} + + {numberOfFilters && statusObj.length > numberOfFilters && ( + + )} ); } diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/index.tsx b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/index.tsx index a2a5d163a1..81bd6e7faa 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/index.tsx +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/index.tsx @@ -1,11 +1,14 @@ -/* eslint-disable react/no-unstable-nested-components */ -import { Input, Slider } from 'antd'; +import { Slider } from 'antd'; import { SliderRangeProps } from 'antd/lib/slider'; import getFilters from 'api/trace/getFilters'; -import dayjs from 'dayjs'; -import durationPlugin from 'dayjs/plugin/duration'; import useDebouncedFn from 'hooks/useDebouncedFunction'; -import React, { useEffect, useMemo, useRef, useState } from 'react'; +import React, { + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { Dispatch } from 'redux'; import { getFilter, updateURL } from 'store/actions/trace/util'; @@ -15,19 +18,8 @@ import { UPDATE_ALL_FILTERS } from 'types/actions/trace'; import { GlobalReducer } from 'types/reducer/globalTime'; import { TraceReducer } from 'types/reducer/trace'; -import { Container, InputContainer, Text } from './styles'; - -dayjs.extend(durationPlugin); - -const getMs = (value: string): string => { - return parseFloat( - dayjs - .duration({ - milliseconds: parseInt(value, 10) / 1000000, - }) - .format('SSS'), - ).toFixed(2); -}; +import { Container, InputComponent, InputContainer, Text } from './styles'; +import { getMs } from './util'; function Duration(): JSX.Element { const { @@ -77,17 +69,18 @@ function Duration(): JSX.Element { preLocalMinDuration.current = parseFloat(minDuration); } - setPreMax(maxDuration); - setPreMin(minDuration); + setPreMax(getMs(maxDuration)); + setPreMin(getMs(minDuration)); }, [getDuration]); - const defaultValue = [parseFloat(preMin), parseFloat(preMax)]; - const updatedUrl = async (min: number, max: number): Promise => { const preSelectedFilter = new Map(selectedFilter); const preUserSelected = new Map(userSelectedFilter); - preSelectedFilter.set('duration', [String(max), String(min)]); + preSelectedFilter.set('duration', [ + String(max * 1000000), + String(min * 1000000), + ]); const response = await getFilters({ end: String(globalTime.maxTime), @@ -137,18 +130,18 @@ function Duration(): JSX.Element 
{ } }; - const onRangeSliderHandler = (number: [number, number]): void => { + const onRangeSliderHandler = (number: [string, string]): void => { const [min, max] = number; - setPreMin(min.toString()); - setPreMax(max.toString()); + setPreMin(min); + setPreMax(max); }; const debouncedFunction = useDebouncedFn( (min, max) => { updatedUrl(min as number, max as number); }, - 500, + 1500, undefined, ); @@ -156,8 +149,8 @@ function Duration(): JSX.Element { event, ) => { const { value } = event.target; - const min = parseFloat(preMin); - const max = parseFloat(value) * 1000000; + const min = preMin; + const max = value; onRangeSliderHandler([min, max]); debouncedFunction(min, max); @@ -167,8 +160,9 @@ function Duration(): JSX.Element { event, ) => { const { value } = event.target; - const min = parseFloat(value) * 1000000; - const max = parseFloat(preMax); + const min = value; + const max = preMax; + onRangeSliderHandler([min, max]); debouncedFunction(min, max); }; @@ -177,45 +171,48 @@ function Duration(): JSX.Element { updatedUrl(min, max); }; + const TipComponent = useCallback((value) => { + if (value === undefined) { + return
; + } + return
{`${getMs(value?.toString())}ms`}
; + }, []); + return (
Min - Max - { - if (value === undefined) { - return
; - } - return
{`${getMs(value?.toString())}ms`}
; - }} + tipFormatter={TipComponent} onChange={([min, max]): void => { - onRangeSliderHandler([min, max]); + onRangeSliderHandler([String(min), String(max)]); }} onAfterChange={onRangeHandler} - value={[parseFloat(preMin), parseFloat(preMax)]} + value={[Number(preMin), Number(preMax)]} />
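The duration filter stores its bounds in nanoseconds (updatedUrl multiplies by 1000000) while the inputs, slider and tooltip work in milliseconds via getMs, so the two conversions are inverses of each other. A small sketch of that round trip, assuming it sits next to util.ts (the toNs helper is illustrative and not part of this patch):

    import { getMs } from './util';

    // illustrative helper: milliseconds from the UI -> nanoseconds for the filter payload
    const toNs = (ms: string): string => String(Number(ms) * 1000000);

    // 5,000,000 ns is shown as "5.00" ms in the inputs and the slider tooltip
    console.log(getMs('5000000')); // "5.00"
    console.log(toNs('5.00')); // "5000000"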
diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/styles.ts b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/styles.ts index d80c0e503d..1cab3f8954 100644 --- a/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/styles.ts +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/styles.ts @@ -1,4 +1,4 @@ -import { Typography } from 'antd'; +import { Input, Typography } from 'antd'; import styled from 'styled-components'; export const DurationText = styled.div` @@ -9,6 +9,19 @@ export const DurationText = styled.div` flex-direction: column; `; +export const InputComponent = styled(Input)` + input::-webkit-outer-spin-button, + input::-webkit-inner-spin-button { + -webkit-appearance: none; + margin: 0; + } + + /* Firefox */ + input[type='number'] { + -moz-appearance: textfield; + } +`; + export const InputContainer = styled.div` width: 100%; margin-top: 0.5rem; diff --git a/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/util.ts b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/util.ts new file mode 100644 index 0000000000..2a6bcf9586 --- /dev/null +++ b/frontend/src/container/Trace/Filters/Panel/PanelBody/Duration/util.ts @@ -0,0 +1,13 @@ +import dayjs from 'dayjs'; +import durationPlugin from 'dayjs/plugin/duration'; + +dayjs.extend(durationPlugin); + +export const getMs = (value: string): string => + parseFloat( + dayjs + .duration({ + milliseconds: parseInt(value, 10) / 1000000, + }) + .format('SSS'), + ).toFixed(2); diff --git a/frontend/src/store/reducers/trace.ts b/frontend/src/store/reducers/trace.ts index b99ee7dd3a..4023d375ca 100644 --- a/frontend/src/store/reducers/trace.ts +++ b/frontend/src/store/reducers/trace.ts @@ -11,6 +11,7 @@ import { UPDATE_SELECTED_TAGS, UPDATE_SPAN_ORDER, UPDATE_SPAN_ORDER_PARAMS, + UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE, UPDATE_SPANS_AGGREGATE, UPDATE_SPANS_AGGREGATE_PAGE_NUMBER, UPDATE_SPANS_AGGREGATE_PAGE_SIZE, @@ -23,6 +24,8 @@ import { } from 'types/actions/trace'; import { TraceFilterEnum, TraceReducer } from 'types/reducer/trace'; +export const INITIAL_FILTER_VALUE = 4; + const initialValue: TraceReducer = { filter: new Map(), filterToFetchData: ['duration', 'status', 'serviceName'], @@ -53,6 +56,17 @@ const initialValue: TraceReducer = { loading: true, payload: { items: {} }, }, + filterDisplayValue: new Map([ + ['component', INITIAL_FILTER_VALUE], + ['duration', INITIAL_FILTER_VALUE], + ['httpCode', INITIAL_FILTER_VALUE], + ['httpHost', INITIAL_FILTER_VALUE], + ['httpMethod', INITIAL_FILTER_VALUE], + ['httpUrl', INITIAL_FILTER_VALUE], + ['operation', INITIAL_FILTER_VALUE], + ['serviceName', INITIAL_FILTER_VALUE], + ['status', INITIAL_FILTER_VALUE], + ]), }; const traceReducer = ( @@ -251,6 +265,13 @@ const traceReducer = ( }; } + case UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE: { + return { + ...state, + filterDisplayValue: action.payload, + }; + } + default: return state; } diff --git a/frontend/src/types/actions/trace.ts b/frontend/src/types/actions/trace.ts index da97d05129..f043926142 100644 --- a/frontend/src/types/actions/trace.ts +++ b/frontend/src/types/actions/trace.ts @@ -31,6 +31,8 @@ export const UPDATE_SPANS_AGGREGATE_PAGE_NUMBER = export const UPDATE_SPANS_AGGREGATE_PAGE_SIZE = 'UPDATE_SPANS_AGGREGATE_PAGE_SIZE'; export const UPDATE_SPAN_ORDER_PARAMS = 'UPDATE_SPAN_ORDER_PARAMS'; +export const UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE = + 'UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE'; export interface UpdateFilter { type: typeof UPDATE_TRACE_FILTER; 
@@ -187,6 +189,11 @@ export interface UpdateSpanParams { }; } +export interface UpdateTraceFilterDisplayValue { + type: typeof UPDATE_SPAN_UPDATE_FILTER_DISPLAY_VALUE; + payload: TraceReducer['filterDisplayValue']; +} + export type TraceActions = | UpdateFilter | GetTraceFilter @@ -208,4 +215,5 @@ export type TraceActions = | UpdateSpanOrder | UpdateSpansAggregatePageNumber | UpdateSpanSize - | UpdateSpanParams; + | UpdateSpanParams + | UpdateTraceFilterDisplayValue; diff --git a/frontend/src/types/reducer/trace.ts b/frontend/src/types/reducer/trace.ts index babeb344c6..fc1c08f4fc 100644 --- a/frontend/src/types/reducer/trace.ts +++ b/frontend/src/types/reducer/trace.ts @@ -32,6 +32,7 @@ export interface TraceReducer { payload: PayloadProps; }; yAxisUnit: string | undefined; + filterDisplayValue: Map; } interface SpansAggregateData { From a733adad2c0ec9cc9b1c2f3b62e91a11b42c30e8 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Fri, 24 Jun 2022 14:52:11 +0530 Subject: [PATCH 31/42] Add v2 query range metrics API (#1020) * Queryrange params tests * review suggestions, quantile, simple metric filter and some refactoring * Add value type support * Add supprot for re2 regex, refactor, update tests and other changes * chore: update govaluate dep to signoz/govaluate * chore: add name to grouping * chore: add support for NOOP * fix: make result format compatible with prom HTTP API * chore: update clickhouse server and update query builder to use new schema * chore: use metric_name in auto suggest APIs * chore: add reduce operator and new aggregate functions * chore: add support for not like op * chore: fix the dip at the end for incomplete time range * chore: rounddown the end to exclude the incomplete collection --- .../app/clickhouseReader/reader.go | 116 ++++- pkg/query-service/app/http_handler.go | 178 +++++++- pkg/query-service/app/interface.go | 3 +- .../app/metrics/query_builder.go | 421 ++++++++++++++++++ .../app/metrics/query_builder_test.go | 130 ++++++ pkg/query-service/app/parser.go | 9 + pkg/query-service/app/parser/metrics.go | 26 +- pkg/query-service/app/parser_test.go | 60 +++ pkg/query-service/constants/constants.go | 5 + pkg/query-service/go.mod | 5 +- pkg/query-service/go.sum | 6 +- pkg/query-service/model/queryParams.go | 137 +++++- pkg/query-service/model/response.go | 17 + 13 files changed, 1062 insertions(+), 51 deletions(-) create mode 100644 pkg/query-service/app/metrics/query_builder.go create mode 100644 pkg/query-service/app/metrics/query_builder_test.go create mode 100644 pkg/query-service/app/parser_test.go diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index f405c69f6d..17f3e5b047 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -14,6 +14,7 @@ import ( "net/http" "net/url" "os" + "reflect" "regexp" "sort" "strconv" @@ -61,10 +62,8 @@ const ( signozErrorIndexTable = "signoz_error_index" signozTraceTableName = "signoz_index_v2" signozMetricDBName = "signoz_metrics" - signozSampleName = "samples" - signozTSName = "time_series" - signozSampleTableName = "samples" - signozTSTableName = "time_series" + signozSampleTableName = "samples_v2" + signozTSTableName = "time_series_v2" minTimespanForProgressiveSearch = time.Hour minTimespanForProgressiveSearchMargin = time.Minute @@ -2368,7 +2367,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, } case constants.MetricsTTL: - tableName = signozMetricDBName + "." 
+ signozSampleName + tableName = signozMetricDBName + "." + signozSampleTableName statusItem, err := r.checkTTLStatusItem(ctx, tableName) if err != nil { return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing ttl_status check sql query")} @@ -2607,7 +2606,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa return &model.GetTTLResponseItem{TracesTime: delTTL, TracesMoveTime: moveTTL, ExpectedTracesTime: ttlQuery.TTL, ExpectedTracesMoveTime: ttlQuery.ColdStorageTtl, Status: status}, nil case constants.MetricsTTL: - tableNameArray := []string{signozMetricDBName + "." + signozSampleName} + tableNameArray := []string{signozMetricDBName + "." + signozSampleTableName} status, err := r.setTTLQueryStatus(ctx, tableNameArray) if err != nil { return nil, err @@ -2726,16 +2725,16 @@ func (r *ClickHouseReader) GetMetricAutocompleteTagKey(ctx context.Context, para tagsWhereClause := "" for key, val := range params.MetricTags { - tagsWhereClause += fmt.Sprintf("AND JSONExtractString(labels,'%s') = '%s'", key, val) + tagsWhereClause += fmt.Sprintf(" AND labels_object.%s = '%s' ", key, val) } // "select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from signoz_metrics.time_series WHERE JSONExtractString(labels,'__name__')='node_udp_queues')) WHERE distinctTagKeys ILIKE '%host%';" if len(params.Match) != 0 { - query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE JSONExtractString(labels,'__name__')=$1 %s)) WHERE distinctTagKeys ILIKE $2;", signozMetricDBName, signozTSTableName, tagsWhereClause) + query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE metric_name=$1 %s)) WHERE distinctTagKeys ILIKE $2;", signozMetricDBName, signozTSTableName, tagsWhereClause) rows, err = r.db.Query(ctx, query, params.MetricName, fmt.Sprintf("%%%s%%", params.Match)) } else { - query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE JSONExtractString(labels,'__name__')=$1 %s ));", signozMetricDBName, signozTSTableName, tagsWhereClause) + query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE metric_name=$1 %s ));", signozMetricDBName, signozTSTableName, tagsWhereClause) rows, err = r.db.Query(ctx, query, params.MetricName) } @@ -2765,16 +2764,16 @@ func (r *ClickHouseReader) GetMetricAutocompleteTagValue(ctx context.Context, pa tagsWhereClause := "" for key, val := range params.MetricTags { - tagsWhereClause += fmt.Sprintf("AND JSONExtractString(labels,'%s') = '%s'", key, val) + tagsWhereClause += fmt.Sprintf(" AND labels_object.%s = '%s' ", key, val) } if len(params.Match) != 0 { - query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, $1)) from %s.%s WHERE JSONExtractString(labels,'__name__')=$2 %s AND JSONExtractString(labels, $1) ILIKE $3;", signozMetricDBName, signozTSTableName, tagsWhereClause) + query = fmt.Sprintf("SELECT DISTINCT(labels_object.%s) from %s.%s WHERE metric_name=$1 %s AND labels_object.%s ILIKE $2;", params.TagKey, signozMetricDBName, signozTSTableName, tagsWhereClause, params.TagKey) 
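	// Wrapping the match text in % below makes ILIKE a case-insensitive substring search over the tag values.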
rows, err = r.db.Query(ctx, query, params.TagKey, params.MetricName, fmt.Sprintf("%%%s%%", params.Match)) } else { - query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, $1)) FROM %s.%s WHERE JSONExtractString(labels,'__name__')=$2 %s;", signozMetricDBName, signozTSTableName, tagsWhereClause) + query = fmt.Sprintf("SELECT DISTINCT(labels_object.%s) FROM %s.%s WHERE metric_name=$2 %s;", params.TagKey, signozMetricDBName, signozTSTableName, tagsWhereClause) rows, err = r.db.Query(ctx, query, params.TagKey, params.MetricName) } @@ -2796,20 +2795,18 @@ func (r *ClickHouseReader) GetMetricAutocompleteTagValue(ctx context.Context, pa return &tagValueList, nil } -func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, matchText string) (*[]string, *model.ApiError) { +func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, matchText string, limit int) (*[]string, *model.ApiError) { var query string var err error var metricNameList []string var rows driver.Rows - if len(matchText) != 0 { - query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels,'__name__')) from %s.%s WHERE JSONExtractString(labels,'__name__') ILIKE $1;", signozMetricDBName, signozTSTableName) - rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", matchText)) - } else { - query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels,'__name__')) from %s.%s;", signozMetricDBName, signozTSTableName) - rows, err = r.db.Query(ctx, query) + query = fmt.Sprintf("SELECT DISTINCT(metric_name) from %s.%s WHERE metric_name ILIKE $1", signozMetricDBName, signozTSTableName) + if limit != 0 { + query = query + fmt.Sprintf(" LIMIT %d;", limit) } + rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", matchText)) if err != nil { zap.S().Error(err) @@ -2828,3 +2825,84 @@ func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, return &metricNameList, nil } + +// GetMetricResult runs the query and returns list of time series +func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([]*model.Series, error) { + + rows, err := r.db.Query(ctx, query) + + if err != nil { + zap.S().Debug("Error in processing query: ", err) + return nil, fmt.Errorf("error in processing query") + } + + var ( + columnTypes = rows.ColumnTypes() + columnNames = rows.Columns() + vars = make([]interface{}, len(columnTypes)) + ) + for i := range columnTypes { + vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() + } + // when group by is applied, each combination of cartesian product + // of attributes is separate series. each item in metricPointsMap + // represent a unique series. + metricPointsMap := make(map[string][]model.MetricPoint) + // attribute key-value pairs for each group selection + attributesMap := make(map[string]map[string]string) + + defer rows.Close() + for rows.Next() { + if err := rows.Scan(vars...); err != nil { + return nil, err + } + var groupBy []string + var metricPoint model.MetricPoint + groupAttributes := make(map[string]string) + // Assuming that the end result row contains a timestamp, value and option labels + // Label key and value are both strings. 
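	// Each scanned column is routed by its Go type below: the special "fullLabels" string column
	// carries the full JSON label set, any other string becomes a group-by attribute,
	// *time.Time is the interval timestamp and *float64 the aggregated value.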
+ for idx, v := range vars { + colName := columnNames[idx] + switch v := v.(type) { + case *string: + // special case for returning all labels + if colName == "fullLabels" { + var metric map[string]string + err := json.Unmarshal([]byte(*v), &metric) + if err != nil { + return nil, err + } + for key, val := range metric { + groupBy = append(groupBy, val) + groupAttributes[key] = val + } + } else { + groupBy = append(groupBy, *v) + groupAttributes[colName] = *v + } + case *time.Time: + metricPoint.Timestamp = v.UnixMilli() + case *float64: + metricPoint.Value = *v + } + } + sort.Strings(groupBy) + key := strings.Join(groupBy, "") + attributesMap[key] = groupAttributes + metricPointsMap[key] = append(metricPointsMap[key], metricPoint) + } + + var seriesList []*model.Series + for key := range metricPointsMap { + points := metricPointsMap[key] + // first point in each series could be invalid since the + // aggregations are applied with point from prev series + if len(points) != 0 && len(points) > 1 { + points = points[1:] + } + attributes := attributesMap[key] + series := model.Series{Labels: attributes, Points: points} + seriesList = append(seriesList, &series) + } + return seriesList, nil +} diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index ee4633ff1a..400c9f2de4 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -7,12 +7,16 @@ import ( "fmt" "io/ioutil" "net/http" + "strconv" + "sync" + "time" "github.com/gorilla/mux" jsoniter "github.com/json-iterator/go" _ "github.com/mattn/go-sqlite3" "github.com/prometheus/prometheus/promql" "go.signoz.io/query-service/app/dashboards" + "go.signoz.io/query-service/app/metrics" "go.signoz.io/query-service/app/parser" "go.signoz.io/query-service/auth" "go.signoz.io/query-service/constants" @@ -384,7 +388,12 @@ func (aH *APIHandler) getRule(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) metricAutocompleteMetricName(w http.ResponseWriter, r *http.Request) { matchText := r.URL.Query().Get("match") - metricNameList, apiErrObj := (*aH.reader).GetMetricAutocompleteMetricNames(r.Context(), matchText) + limit, err := strconv.Atoi(r.URL.Query().Get("limit")) + if err != nil { + limit = 0 // no limit + } + + metricNameList, apiErrObj := (*aH.reader).GetMetricAutocompleteMetricNames(r.Context(), matchText, limit) if apiErrObj != nil { respondError(w, apiErrObj, nil) @@ -436,18 +445,173 @@ func (aH *APIHandler) metricAutocompleteTagValue(w http.ResponseWriter, r *http. 
func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) { metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r) - fmt.Println(metricsQueryRangeParams) - if apiErrorObj != nil { zap.S().Errorf(apiErrorObj.Err.Error()) respondError(w, apiErrorObj, nil) return } - response_data := &model.QueryDataV2{ - ResultType: "matrix", - Result: nil, + + // prometheus instant query needs same timestamp + if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE && + metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.PROM { + metricsQueryRangeParams.Start = metricsQueryRangeParams.End } - aH.respond(w, response_data) + + // round up the end to neaerest multiple + if metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER { + end := (metricsQueryRangeParams.End) / 1000 + step := metricsQueryRangeParams.Step + metricsQueryRangeParams.End = (end / step * step) * 1000 + } + + type channelResult struct { + Series []*model.Series + Err error + } + + execClickHouseQueries := func(queries map[string]string) ([]*model.Series, error) { + var seriesList []*model.Series + ch := make(chan channelResult, len(queries)) + var wg sync.WaitGroup + + for name, query := range queries { + wg.Add(1) + go func(name, query string) { + defer wg.Done() + seriesList, err := (*aH.reader).GetMetricResult(r.Context(), query) + for _, series := range seriesList { + series.QueryName = name + } + + if err != nil { + ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err)} + return + } + ch <- channelResult{Series: seriesList} + }(name, query) + } + + wg.Wait() + close(ch) + + var errs []error + // read values from the channel + for r := range ch { + if r.Err != nil { + errs = append(errs, r.Err) + continue + } + seriesList = append(seriesList, r.Series...) + } + if len(errs) != 0 { + return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")) + } + return seriesList, nil + } + + execPromQueries := func(metricsQueryRangeParams *model.QueryRangeParamsV2) ([]*model.Series, error) { + var seriesList []*model.Series + ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries)) + var wg sync.WaitGroup + + for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries { + if query.Disabled { + continue + } + wg.Add(1) + go func(name string, query *model.PromQuery) { + var seriesList []*model.Series + defer wg.Done() + queryModel := model.QueryRangeParams{ + Start: time.UnixMilli(metricsQueryRangeParams.Start), + End: time.UnixMilli(metricsQueryRangeParams.End), + Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)), + Query: query.Query, + } + promResult, _, err := (*aH.reader).GetQueryRangeResult(r.Context(), &queryModel) + if err != nil { + ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err)} + return + } + matrix, _ := promResult.Matrix() + for _, v := range matrix { + var s model.Series + s.QueryName = name + s.Labels = v.Metric.Copy().Map() + for _, p := range v.Points { + s.Points = append(s.Points, model.MetricPoint{Timestamp: p.T, Value: p.V}) + } + seriesList = append(seriesList, &s) + } + ch <- channelResult{Series: seriesList} + }(name, query) + } + + wg.Wait() + close(ch) + + var errs []error + // read values from the channel + for r := range ch { + if r.Err != nil { + errs = append(errs, r.Err) + continue + } + seriesList = append(seriesList, r.Series...) 
+ } + if len(errs) != 0 { + return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")) + } + return seriesList, nil + } + + var seriesList []*model.Series + var err error + switch metricsQueryRangeParams.CompositeMetricQuery.QueryType { + case model.QUERY_BUILDER: + runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME) + if runQueries.Err != nil { + respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: runQueries.Err}, nil) + return + } + seriesList, err = execClickHouseQueries(runQueries.Queries) + + case model.CLICKHOUSE: + queries := make(map[string]string) + for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries { + if chQuery.Disabled { + continue + } + queries[name] = chQuery.Query + } + seriesList, err = execClickHouseQueries(queries) + case model.PROM: + seriesList, err = execPromQueries(metricsQueryRangeParams) + default: + err = fmt.Errorf("invalid query type") + respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) + return + } + + if err != nil { + apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err} + respondError(w, apiErrObj, nil) + return + } + if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE && + len(seriesList) > 1 && + (metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER || + metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.CLICKHOUSE) { + respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil) + return + } + + type ResponseFormat struct { + ResultType string `json:"resultType"` + Result []*model.Series `json:"result"` + } + resp := ResponseFormat{ResultType: "matrix", Result: seriesList} + aH.respond(w, resp) } func (aH *APIHandler) listRulesFromProm(w http.ResponseWriter, r *http.Request) { diff --git a/pkg/query-service/app/interface.go b/pkg/query-service/app/interface.go index cbbb78249b..fadbcd1e79 100644 --- a/pkg/query-service/app/interface.go +++ b/pkg/query-service/app/interface.go @@ -50,7 +50,8 @@ type Reader interface { // Setter Interfaces SetTTL(ctx context.Context, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) - GetMetricAutocompleteMetricNames(ctx context.Context, matchText string) (*[]string, *model.ApiError) + GetMetricAutocompleteMetricNames(ctx context.Context, matchText string, limit int) (*[]string, *model.ApiError) GetMetricAutocompleteTagKey(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError) GetMetricAutocompleteTagValue(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError) + GetMetricResult(ctx context.Context, query string) ([]*model.Series, error) } diff --git a/pkg/query-service/app/metrics/query_builder.go b/pkg/query-service/app/metrics/query_builder.go new file mode 100644 index 0000000000..bf1896af2e --- /dev/null +++ b/pkg/query-service/app/metrics/query_builder.go @@ -0,0 +1,421 @@ +package metrics + +import ( + "fmt" + "reflect" + "strings" + + "github.com/SigNoz/govaluate" + "go.signoz.io/query-service/constants" + "go.signoz.io/query-service/model" +) + +type RunQueries struct { + Queries map[string]string + Err error +} + +var AggregateOperatorToPercentile = map[model.AggregateOperator]float64{ + model.P05: 0.5, + model.P10: 0.10, + model.P20: 0.20, + model.P25: 0.25, + model.P50: 0.50, + model.P75: 0.75, + 
model.P90: 0.90, + model.P95: 0.95, + model.P99: 0.99, +} + +var AggregateOperatorToSQLFunc = map[model.AggregateOperator]string{ + model.AVG: "avg", + model.MAX: "max", + model.MIN: "min", + model.SUM: "sum", + model.RATE_SUM: "sum", + model.RATE_AVG: "avg", + model.RATE_MAX: "max", + model.RATE_MIN: "min", +} + +var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"} + +func GoValuateFuncs() map[string]govaluate.ExpressionFunction { + var GoValuateFuncs = map[string]govaluate.ExpressionFunction{} + for _, fn := range SupportedFunctions { + GoValuateFuncs[fn] = func(args ...interface{}) (interface{}, error) { + return nil, nil + } + } + return GoValuateFuncs +} + +// formattedValue formats the value to be used in clickhouse query +func formattedValue(v interface{}) string { + switch x := v.(type) { + case int: + return fmt.Sprintf("%d", x) + case float32, float64: + return fmt.Sprintf("%f", x) + case string: + return fmt.Sprintf("'%s'", x) + case bool: + return fmt.Sprintf("%v", x) + case []interface{}: + switch x[0].(type) { + case string: + str := "[" + for idx, sVal := range x { + str += fmt.Sprintf("'%s'", sVal) + if idx != len(x)-1 { + str += "," + } + } + str += "]" + return str + case int, float32, float64, bool: + return strings.Join(strings.Fields(fmt.Sprint(x)), ",") + } + return "" + default: + // may be log the warning here? + return "" + } +} + +// BuildMetricsTimeSeriesFilterQuery builds the sub-query to be used for filtering +// timeseries based on search criteria +func BuildMetricsTimeSeriesFilterQuery(fs *model.FilterSet, groupTags []string, metricName string, aggregateOperator model.AggregateOperator) (string, error) { + var conditions []string + conditions = append(conditions, fmt.Sprintf("metric_name = %s", formattedValue(metricName))) + if fs != nil && len(fs.Items) != 0 { + for _, item := range fs.Items { + toFormat := item.Value + // if the received value is an array for like/match op, just take the first value + if strings.ToLower(item.Operation) == "like" || + strings.ToLower(item.Operation) == "match" || + strings.ToLower(item.Operation) == "nlike" { + x, ok := item.Value.([]interface{}) + if ok { + if len(x) == 0 { + continue + } + toFormat = x[0] + } + } + fmtVal := formattedValue(toFormat) + switch op := strings.ToLower(item.Operation); op { + case "eq": + conditions = append(conditions, fmt.Sprintf("labels_object.%s = %s", item.Key, fmtVal)) + case "neq": + conditions = append(conditions, fmt.Sprintf("labels_object.%s != %s", item.Key, fmtVal)) + case "in": + conditions = append(conditions, fmt.Sprintf("labels_object.%s IN %s", item.Key, fmtVal)) + case "nin": + conditions = append(conditions, fmt.Sprintf("labels_object.%s NOT IN %s", item.Key, fmtVal)) + case "like": + conditions = append(conditions, fmt.Sprintf("like(labels_object.%s, %s)", item.Key, fmtVal)) + case "nlike": + conditions = append(conditions, fmt.Sprintf("notLike(labels_object.%s, %s)", item.Key, fmtVal)) + case "match": + conditions = append(conditions, fmt.Sprintf("match(labels_object.%s, %s)", item.Key, fmtVal)) + default: + return "", fmt.Errorf("unsupported operation") + } + } + } + queryString := strings.Join(conditions, " AND ") + + var selectLabels string + if aggregateOperator == model.NOOP || aggregateOperator == model.RATE { + selectLabels = "labels," + } else { + for _, tag := range groupTags { + selectLabels += fmt.Sprintf(" 
labels_object.%s as %s,", tag, tag) + } + } + + filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_TABLENAME, queryString) + + return filterSubQuery, nil +} + +func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, tableName string) (string, error) { + + if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE && len(mq.GroupingTags) != 0 { + return "", fmt.Errorf("reduce operator cannot be applied for the query") + } + + filterSubQuery, err := BuildMetricsTimeSeriesFilterQuery(mq.TagFilters, mq.GroupingTags, mq.MetricName, mq.AggregateOperator) + if err != nil { + return "", err + } + + samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", formattedValue(mq.MetricName), qp.Start, qp.End) + + // Select the aggregate value for interval + queryTmpl := + "SELECT %s" + + " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " %s as value" + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " INNER JOIN" + + " (%s) as filtered_time_series" + + " USING fingerprint" + + " WHERE " + samplesTableTimeFilter + + " GROUP BY %s" + + " ORDER BY %s ts" + + groupBy := groupBy(mq.GroupingTags...) + groupTags := groupSelect(mq.GroupingTags...) + + switch mq.AggregateOperator { + case model.RATE: + // Calculate rate of change of metric for each unique time series + groupBy = "fingerprint, ts" + groupTags = "fingerprint," + op := "max(value)" // max value should be the closest value for point in time + subQuery := fmt.Sprintf( + queryTmpl, "any(labels) as labels, "+groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags, + ) // labels will be same so any should be fine + query := `SELECT %s ts, runningDifference(value)/runningDifference(ts) as value FROM(%s)` + + query = fmt.Sprintf(query, "labels as fullLabels,", subQuery) + return query, nil + case model.SUM_RATE: + rateGroupBy := "fingerprint, " + groupBy + rateGroupTags := "fingerprint, " + groupTags + op := "max(value)" + subQuery := fmt.Sprintf( + queryTmpl, rateGroupTags, qp.Step, op, filterSubQuery, rateGroupBy, rateGroupTags, + ) // labels will be same so any should be fine + query := `SELECT %s ts, runningDifference(value)/runningDifference(ts) as value FROM(%s) OFFSET 1` + query = fmt.Sprintf(query, groupTags, subQuery) + query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, query, groupBy, groupTags) + return query, nil + case model.RATE_SUM, model.RATE_MAX, model.RATE_AVG, model.RATE_MIN: + op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator]) + subQuery := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags) + query := `SELECT %s ts, runningDifference(value)/runningDifference(ts) as value FROM(%s) OFFSET 1` + query = fmt.Sprintf(query, groupTags, subQuery) + return query, nil + case model.P05, model.P10, model.P20, model.P25, model.P50, model.P75, model.P90, model.P95, model.P99: + op := fmt.Sprintf("quantile(%v)(value)", AggregateOperatorToPercentile[mq.AggregateOperator]) + query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags) + return query, nil + case model.AVG, model.SUM, model.MIN, model.MAX: + op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator]) + query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, 
groupBy, groupTags) + return query, nil + case model.COUNT: + op := "toFloat64(count(*))" + query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags) + return query, nil + case model.COUNT_DISTINCT: + op := "toFloat64(count(distinct(value)))" + query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags) + return query, nil + case model.NOOP: + queryTmpl := + "SELECT fingerprint, labels as fullLabels," + + " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " any(value) as value" + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " INNER JOIN" + + " (%s) as filtered_time_series" + + " USING fingerprint" + + " WHERE " + samplesTableTimeFilter + + " GROUP BY fingerprint, labels, ts" + + " ORDER BY fingerprint, labels, ts" + query := fmt.Sprintf(queryTmpl, qp.Step, filterSubQuery) + return query, nil + default: + return "", fmt.Errorf("unsupported aggregate operator") + } +} + +func groupBy(tags ...string) string { + tags = append(tags, "ts") + return strings.Join(tags, ",") +} + +func groupSelect(tags ...string) string { + groupTags := strings.Join(tags, ",") + if len(tags) != 0 { + groupTags += ", " + } + return groupTags +} + +// validateExpressions validates the math expressions using the list of +// allowed functions. +func validateExpressions(expressions []string, funcs map[string]govaluate.ExpressionFunction) []error { + var errs []error + for _, exp := range expressions { + _, err := govaluate.NewEvaluableExpressionWithFunctions(exp, funcs) + if err != nil { + errs = append(errs, err) + } + } + return errs +} + +// FormatErrs returns formatted error string +func FormatErrs(errs []error, separator string) string { + var errStrs []string + for _, err := range errs { + errStrs = append(errStrs, err.Error()) + } + return strings.Join(errStrs, separator) +} + +func reduceQuery(query string, reduceTo model.ReduceToOperator, aggregateOperator model.AggregateOperator) (string, error) { + var selectLabels string + var groupBy string + // NOOP and RATE can possibly return multiple time series and reduce should be applied + // for each uniques series. When the final result contains more than one series we throw + // an error post DB fetching. Otherwise just return the single data. This is not known until queried so the + // the query is prepared accordingly. + if aggregateOperator == model.NOOP || aggregateOperator == model.RATE { + selectLabels = ", any(fullLabels) as fullLabels" + groupBy = "GROUP BY fingerprint" + } + // the timestamp picked is not relevant here since the final value used is show the single + // chart with just the query value. 
For the quer + switch reduceTo { + case model.RLAST: + query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy) + case model.RSUM: + query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy) + case model.RAVG: + query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy) + case model.RMAX: + query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy) + case model.RMIN: + query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy) + default: + return "", fmt.Errorf("unsupported reduce operator") + } + return query, nil +} + +// varToQuery constructs the query for each named builder block +func varToQuery(qp *model.QueryRangeParamsV2, tableName string) (map[string]string, error) { + evalFuncs := GoValuateFuncs() + varToQuery := make(map[string]string) + for _, builderQuery := range qp.CompositeMetricQuery.BuilderQueries { + expression, _ := govaluate.NewEvaluableExpressionWithFunctions(builderQuery.Expression, evalFuncs) + + // Use the parsed expression and build the query for each variable + // if not already exists + var errs []error + for _, _var := range expression.Vars() { + if _, ok := varToQuery[_var]; !ok { + mq := qp.CompositeMetricQuery.BuilderQueries[_var] + query, err := BuildMetricQuery(qp, mq, tableName) + if err != nil { + errs = append(errs, err) + } else { + if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE { + query, err = reduceQuery(query, mq.ReduceTo, mq.AggregateOperator) + if err != nil { + errs = append(errs, err) + } + } + } + varToQuery[_var] = query + } + } + if len(errs) != 0 { + return nil, fmt.Errorf("error while creating query: %s", FormatErrs(errs, "\n")) + } + } + return varToQuery, nil +} + +// expressionToQuery constructs the query for the expression +func expressionToQuery(qp *model.QueryRangeParamsV2, varToQuery map[string]string, expression *govaluate.EvaluableExpression) (string, error) { + var formulaQuery string + vars := expression.Vars() + for idx, var_ := range vars[1:] { + x, y := vars[idx], var_ + if !reflect.DeepEqual(qp.CompositeMetricQuery.BuilderQueries[x].GroupingTags, qp.CompositeMetricQuery.BuilderQueries[y].GroupingTags) { + return "", fmt.Errorf("group by must be same") + } + } + var modified []govaluate.ExpressionToken + tokens := expression.Tokens() + for idx := range tokens { + token := tokens[idx] + if token.Kind == govaluate.VARIABLE { + token.Value = fmt.Sprintf("%v.value", token.Value) + token.Meta = fmt.Sprintf("%v.value", token.Meta) + } + modified = append(modified, token) + } + formula, _ := govaluate.NewEvaluableExpressionFromTokens(modified) + + var formulaSubQuery string + var joinUsing string + for idx, var_ := range vars { + query := varToQuery[var_] + groupTags := qp.CompositeMetricQuery.BuilderQueries[var_].GroupingTags + groupTags = append(groupTags, "ts") + joinUsing = strings.Join(groupTags, ",") + formulaSubQuery += fmt.Sprintf("(%s) as %s ", query, var_) + if idx < len(vars)-1 { + formulaSubQuery += "INNER JOIN" + } else if len(vars) > 1 { + formulaSubQuery += fmt.Sprintf("USING (%s)", joinUsing) + } + } + formulaQuery = fmt.Sprintf("SELECT %s, %s as value FROM ", joinUsing, formula.ExpressionString()) + formulaSubQuery + return formulaQuery, nil +} + +// PrepareBuilderMetricQueries constructs the queries to be run for query range timeseries +func 
PrepareBuilderMetricQueries(qp *model.QueryRangeParamsV2, tableName string) *RunQueries { + evalFuncs := GoValuateFuncs() + + // validate the expressions + var expressions []string + for _, bq := range qp.CompositeMetricQuery.BuilderQueries { + expressions = append(expressions, bq.Expression) + } + if errs := validateExpressions(expressions, evalFuncs); len(errs) != 0 { + return &RunQueries{Err: fmt.Errorf("invalid expressions: %s", FormatErrs(errs, "\n"))} + } + + varToQuery, err := varToQuery(qp, tableName) + if err != nil { + return &RunQueries{Err: err} + } + + namedQueries := make(map[string]string) + + var errs []error + for _, builderQuery := range qp.CompositeMetricQuery.BuilderQueries { + if builderQuery.Disabled { + continue + } + expression, _ := govaluate.NewEvaluableExpressionWithFunctions(builderQuery.Expression, evalFuncs) + tokens := expression.Tokens() + // expression with one token is used to represent + // that there are no functions applied on query + if len(tokens) == 1 { + _var := tokens[0].Value.(string) + namedQueries[builderQuery.QueryName] = varToQuery[_var] + } else { + query, err := expressionToQuery(qp, varToQuery, expression) + if err != nil { + errs = append(errs, err) + } + namedQueries[builderQuery.QueryName] = query + } + } + if len(errs) != 0 { + return &RunQueries{Err: fmt.Errorf("errors with formulas: %s", FormatErrs(errs, "\n"))} + } + fmt.Println(namedQueries) + return &RunQueries{Queries: namedQueries} +} diff --git a/pkg/query-service/app/metrics/query_builder_test.go b/pkg/query-service/app/metrics/query_builder_test.go new file mode 100644 index 0000000000..4530a01a79 --- /dev/null +++ b/pkg/query-service/app/metrics/query_builder_test.go @@ -0,0 +1,130 @@ +package metrics + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" + "go.signoz.io/query-service/model" +) + +func TestBuildQuery(t *testing.T) { + Convey("TestSimpleQueryWithName", t, func() { + q := &model.QueryRangeParamsV2{ + Start: 1650991982000, + End: 1651078382000, + Step: 60, + CompositeMetricQuery: &model.CompositeMetricQuery{ + BuilderQueries: map[string]*model.MetricQuery{ + "a": { + QueryName: "a", + MetricName: "name", + AggregateOperator: model.RATE_MAX, + Expression: "a", + }, + }, + }, + } + queries := PrepareBuilderMetricQueries(q, "table").Queries + So(len(queries), ShouldEqual, 1) + So(queries["a"], ShouldContainSubstring, "WHERE metric_name = 'name'") + So(queries["a"], ShouldContainSubstring, "runningDifference(value)/runningDifference(ts)") + }) +} + +func TestBuildQueryWithFilters(t *testing.T) { + Convey("TestBuildQueryWithFilters", t, func() { + q := &model.QueryRangeParamsV2{ + Start: 1650991982000, + End: 1651078382000, + Step: 60, + CompositeMetricQuery: &model.CompositeMetricQuery{ + BuilderQueries: map[string]*model.MetricQuery{ + "a": { + QueryName: "a", + MetricName: "name", + TagFilters: &model.FilterSet{Operation: "AND", Items: []model.FilterItem{ + {Key: "a", Value: "b", Operation: "neq"}, + }}, + AggregateOperator: model.RATE_MAX, + Expression: "a", + }, + }, + }, + } + queries := PrepareBuilderMetricQueries(q, "table").Queries + So(len(queries), ShouldEqual, 1) + + So(queries["a"], ShouldContainSubstring, "WHERE metric_name = 'name' AND labels_object.a != 'b'") + So(queries["a"], ShouldContainSubstring, "runningDifference(value)/runningDifference(ts)") + }) +} + +func TestBuildQueryWithMultipleQueries(t *testing.T) { + Convey("TestBuildQueryWithFilters", t, func() { + q := &model.QueryRangeParamsV2{ + Start: 1650991982000, + End: 
1651078382000, + Step: 60, + CompositeMetricQuery: &model.CompositeMetricQuery{ + BuilderQueries: map[string]*model.MetricQuery{ + "a": { + QueryName: "a", + MetricName: "name", + TagFilters: &model.FilterSet{Operation: "AND", Items: []model.FilterItem{ + {Key: "in", Value: []interface{}{"a", "b", "c"}, Operation: "in"}, + }}, + AggregateOperator: model.RATE_AVG, + Expression: "a", + }, + "b": { + QueryName: "b", + MetricName: "name2", + AggregateOperator: model.RATE_MAX, + Expression: "b", + }, + }, + }, + } + queries := PrepareBuilderMetricQueries(q, "table").Queries + So(len(queries), ShouldEqual, 2) + So(queries["a"], ShouldContainSubstring, "WHERE metric_name = 'name' AND labels_object.in IN ['a','b','c']") + So(queries["a"], ShouldContainSubstring, "runningDifference(value)/runningDifference(ts)") + }) +} + +func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) { + Convey("TestBuildQueryWithFilters", t, func() { + q := &model.QueryRangeParamsV2{ + Start: 1650991982000, + End: 1651078382000, + Step: 60, + CompositeMetricQuery: &model.CompositeMetricQuery{ + BuilderQueries: map[string]*model.MetricQuery{ + "a": { + QueryName: "a", + MetricName: "name", + TagFilters: &model.FilterSet{Operation: "AND", Items: []model.FilterItem{ + {Key: "in", Value: []interface{}{"a", "b", "c"}, Operation: "in"}, + }}, + AggregateOperator: model.RATE_MAX, + Expression: "a", + }, + "b": { + MetricName: "name2", + AggregateOperator: model.RATE_AVG, + Expression: "b", + }, + "c": { + QueryName: "c", + Expression: "a/b", + }, + }, + }, + } + queries := PrepareBuilderMetricQueries(q, "table").Queries + So(len(queries), ShouldEqual, 3) + So(queries["c"], ShouldContainSubstring, "SELECT ts, a.value / b.value") + So(queries["c"], ShouldContainSubstring, "WHERE metric_name = 'name' AND labels_object.in IN ['a','b','c']") + So(queries["c"], ShouldContainSubstring, "runningDifference(value)/runningDifference(ts)") + }) +} diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go index 2dd6e44c52..9d3705da9f 100644 --- a/pkg/query-service/app/parser.go +++ b/pkg/query-service/app/parser.go @@ -667,3 +667,12 @@ func parseChangePasswordRequest(r *http.Request) (*model.ChangePasswordRequest, return &req, nil } + +func parseFilterSet(r *http.Request) (*model.FilterSet, error) { + var filterSet model.FilterSet + err := json.NewDecoder(r.Body).Decode(&filterSet) + if err != nil { + return nil, err + } + return &filterSet, nil +} diff --git a/pkg/query-service/app/parser/metrics.go b/pkg/query-service/app/parser/metrics.go index 279331ba3c..ce4d079fa5 100644 --- a/pkg/query-service/app/parser/metrics.go +++ b/pkg/query-service/app/parser/metrics.go @@ -5,19 +5,39 @@ import ( "fmt" "net/http" + "go.signoz.io/query-service/app/metrics" "go.signoz.io/query-service/model" ) +func validateQueryRangeParamsV2(qp *model.QueryRangeParamsV2) error { + var errs []error + if !(qp.DataSource >= model.METRICS && qp.DataSource <= model.LOGS) { + errs = append(errs, fmt.Errorf("unsupported data source")) + } + if !(qp.CompositeMetricQuery.QueryType >= model.QUERY_BUILDER && qp.CompositeMetricQuery.QueryType <= model.PROM) { + errs = append(errs, fmt.Errorf("unsupported query type")) + } + if !(qp.CompositeMetricQuery.PanelType >= model.TIME_SERIES && qp.CompositeMetricQuery.PanelType <= model.QUERY_VALUE) { + errs = append(errs, fmt.Errorf("unsupported panel type")) + } + if len(errs) != 0 { + return fmt.Errorf("one or more errors found : %s", metrics.FormatErrs(errs, ",")) + } + return nil +} + func 
ParseMetricQueryRangeParams(r *http.Request) (*model.QueryRangeParamsV2, *model.ApiError) { var postData *model.QueryRangeParamsV2 - err := json.NewDecoder(r.Body).Decode(&postData) - if err != nil { + if err := json.NewDecoder(r.Body).Decode(&postData); err != nil { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} + } + if err := validateQueryRangeParamsV2(postData); err != nil { return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} } - return nil, nil + return postData, nil } func ParseMetricAutocompleteTagParams(r *http.Request) (*model.MetricAutocompleteTagParams, *model.ApiError) { diff --git a/pkg/query-service/app/parser_test.go b/pkg/query-service/app/parser_test.go new file mode 100644 index 0000000000..6fa049a05e --- /dev/null +++ b/pkg/query-service/app/parser_test.go @@ -0,0 +1,60 @@ +package app + +import ( + "bytes" + "net/http" + "testing" + + "github.com/smartystreets/assertions/should" + + . "github.com/smartystreets/goconvey/convey" + "go.signoz.io/query-service/app/metrics" + "go.signoz.io/query-service/model" +) + +func TestParseFilterSingleFilter(t *testing.T) { + Convey("TestParseFilterSingleFilter", t, func() { + postBody := []byte(`{ + "op": "AND", + "items": [ + {"key": "namespace", "value": "a", "op": "EQ"} + ] + }`) + req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody)) + res, _ := parseFilterSet(req) + query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP) + So(query, ShouldContainSubstring, "signoz_metrics.time_series_v2 WHERE metric_name = 'table' AND labels_object.namespace = 'a'") + }) +} + +func TestParseFilterMultipleFilter(t *testing.T) { + Convey("TestParseFilterMultipleFilter", t, func() { + postBody := []byte(`{ + "op": "AND", + "items": [ + {"key": "namespace", "value": "a", "op": "EQ"}, + {"key": "host", "value": ["host-1", "host-2"], "op": "IN"} + ] + }`) + req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody)) + res, _ := parseFilterSet(req) + query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP) + So(query, should.ContainSubstring, "labels_object.host IN ['host-1','host-2']") + So(query, should.ContainSubstring, "labels_object.namespace = 'a'") + }) +} + +func TestParseFilterNotSupportedOp(t *testing.T) { + Convey("TestParseFilterNotSupportedOp", t, func() { + postBody := []byte(`{ + "op": "AND", + "items": [ + {"key": "namespace", "value": "a", "op": "PO"} + ] + }`) + req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody)) + res, _ := parseFilterSet(req) + _, err := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP) + So(err, should.BeError, "unsupported operation") + }) +} diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 2825ce31ea..b4bc4b08ef 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -62,6 +62,11 @@ const ( StatusFailed = "failed" StatusSuccess = "success" ) +const ( + SIGNOZ_METRIC_DBNAME = "signoz_metrics" + SIGNOZ_SAMPLES_TABLENAME = "samples_v2" + SIGNOZ_TIMESERIES_TABLENAME = "time_series_v2" +) func GetOrDefaultEnv(key string, fallback string) string { v := os.Getenv(key) diff --git a/pkg/query-service/go.mod b/pkg/query-service/go.mod index b101f18da5..6bd0e68ead 100644 --- a/pkg/query-service/go.mod +++ b/pkg/query-service/go.mod @@ -4,6 +4,7 @@ go 1.17 require ( github.com/ClickHouse/clickhouse-go/v2 v2.0.12 + github.com/SigNoz/govaluate 
v0.0.0-20220522085550-d19c08c206cb github.com/go-kit/log v0.1.0 github.com/google/uuid v1.3.0 github.com/gorilla/handlers v1.5.1 @@ -107,7 +108,7 @@ require ( github.com/segmentio/backo-go v1.0.0 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/sirupsen/logrus v1.8.1 // indirect - github.com/smartystreets/assertions v1.1.0 // indirect + github.com/smartystreets/assertions v1.1.0 github.com/spaolacci/murmur3 v1.1.0 // indirect github.com/spf13/pflag v1.0.3 // indirect github.com/stretchr/testify v1.7.1 @@ -139,4 +140,4 @@ require ( k8s.io/client-go v8.0.0+incompatible // indirect ) -replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.70 +replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.71 diff --git a/pkg/query-service/go.sum b/pkg/query-service/go.sum index 77fd087ceb..72675da497 100644 --- a/pkg/query-service/go.sum +++ b/pkg/query-service/go.sum @@ -55,8 +55,10 @@ github.com/ClickHouse/clickhouse-go/v2 v2.0.12/go.mod h1:u4RoNQLLM2W6hNSPYrIESLJ github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8= github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= -github.com/SigNoz/prometheus v1.9.70 h1:0214i78cje5MkX0tXYwX2cK4cHXrFw18WcSLhv4YDpk= -github.com/SigNoz/prometheus v1.9.70/go.mod h1:Y4J9tGDmacMC+EcOTp+EIAn2C1sN+9kE+idyVKadiVM= +github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFmafKx32bynV6QrzViL/s+ZDvQxH1E4= +github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA= +github.com/SigNoz/prometheus v1.9.71 h1:X+6f4k5bqX+lpPFHCi+f6XiSehTj3Yzh1B/FDJi//Sk= +github.com/SigNoz/prometheus v1.9.71/go.mod h1:Y4J9tGDmacMC+EcOTp+EIAn2C1sN+9kE+idyVKadiVM= github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= diff --git a/pkg/query-service/model/queryParams.go b/pkg/query-service/model/queryParams.go index 9cc1ce235c..d1763a0440 100644 --- a/pkg/query-service/model/queryParams.go +++ b/pkg/query-service/model/queryParams.go @@ -18,27 +18,114 @@ type QueryRangeParams struct { Stats string } -type Query struct { - Datasource string `json:"datasource"` - Format string `json:"format"` - Expr string `json:"expr"` +type MetricQuery struct { + QueryName string `json:"queryName"` + MetricName string `json:"metricName"` + TagFilters *FilterSet `json:"tagFilters,omitempty"` + GroupingTags []string `json:"groupBy,omitempty"` + AggregateOperator AggregateOperator `json:"aggregateOperator"` + Expression string `json:"expression"` + Disabled bool `json:"disabled"` + ReduceTo ReduceToOperator `json:"reduceTo,omitempty"` } +type ReduceToOperator int + +const ( + _ ReduceToOperator = iota + RLAST + RSUM + RAVG + RMAX + RMIN +) + +type QueryType int + +const ( + _ QueryType = iota + QUERY_BUILDER + CLICKHOUSE + PROM +) + +type PromQuery struct { + Query string `json:"query"` + Stats string `json:"stats,omitempty"` + Disabled bool `json:"disabled"` +} + +type ClickHouseQuery struct { + Query string `json:"query"` + Disabled bool `json:"disabled"` +} + +type PanelType int + +const ( + _ PanelType = iota + 
TIME_SERIES + QUERY_VALUE +) + +type CompositeMetricQuery struct { + BuilderQueries map[string]*MetricQuery `json:"builderQueries,omitempty"` + ClickHouseQueries map[string]*ClickHouseQuery `json:"chQueries,omitempty"` + PromQueries map[string]*PromQuery `json:"promQueries,omitempty"` + PanelType PanelType `json:"panelType"` + QueryType QueryType `json:"queryType"` +} + +type AggregateOperator int + +const ( + _ AggregateOperator = iota + NOOP + COUNT + COUNT_DISTINCT + SUM + AVG + MAX + MIN + P05 + P10 + P20 + P25 + P50 + P75 + P90 + P95 + P99 + RATE + SUM_RATE + // leave blank space for possily {AVG, X}_RATE + _ + _ + _ + RATE_SUM + RATE_AVG + RATE_MAX + RATE_MIN +) + +type DataSource int + +const ( + _ DataSource = iota + METRICS + TRACES + LOGS +) + type QueryRangeParamsV2 struct { - Start time.Time - End time.Time - Step time.Duration - StartStr string `json:"start"` - EndStr string `json:"end"` - StepStr string `json:"step"` - Queries []Query `json:"queries"` -} - -func (params QueryRangeParamsV2) sanitizeAndValidate() (*QueryRangeParamsV2, error) { - - return nil, nil + DataSource DataSource `json:"dataSource"` + Start int64 `json:"start"` + End int64 `json:"end"` + Step int64 `json:"step"` + CompositeMetricQuery *CompositeMetricQuery `json:"compositeMetricQuery"` } +// Metric auto complete types type metricTags map[string]string type MetricAutocompleteTagParams struct { @@ -192,7 +279,7 @@ type TTLParams struct { } type GetTTLParams struct { - Type string + Type string } type GetErrorsParams struct { @@ -205,3 +292,19 @@ type GetErrorParams struct { ErrorID string ServiceName string } + +type FilterItem struct { + Key string `json:"key"` + Value interface{} `json:"value"` + Operation string `json:"op"` +} + +type FilterSet struct { + Operation string `json:"op,omitempty"` + Items []FilterItem `json:"items"` +} + +type RemoveTTLParams struct { + Type string + RemoveAllTTL bool +} diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go index 358fe6e979..523ad7e96e 100644 --- a/pkg/query-service/model/response.go +++ b/pkg/query-service/model/response.go @@ -356,3 +356,20 @@ type ErrorWithSpan struct { NewerErrorID string `json:"newerErrorId" ch:"newerErrorId"` OlderErrorID string `json:"olderErrorId" ch:"olderErrorId"` } + +type Series struct { + QueryName string `json:"queryName"` + Labels map[string]string `json:"metric"` + Points []MetricPoint `json:"values"` +} + +type MetricPoint struct { + Timestamp int64 + Value float64 +} + +// MarshalJSON implements json.Marshaler. 
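+// A point is encoded as a [timestamp, "value"] pair: the millisecond timestamp
+// is converted to seconds and the value is serialized as a string, e.g. a point
+// at 1651078380000 ms with value 10.5 marshals to [1651078380, "10.5"].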
+func (p *MetricPoint) MarshalJSON() ([]byte, error) { + v := strconv.FormatFloat(p.Value, 'f', -1, 64) + return json.Marshal([...]interface{}{float64(p.Timestamp) / 1000, v}) +} From b2afb9aabc9cfc8a6867fa8328c14ecd94fa543e Mon Sep 17 00:00:00 2001 From: Ankit Nayan Date: Fri, 24 Jun 2022 14:55:02 +0530 Subject: [PATCH 32/42] chore: changed scrape interval to 60s and batch size to 10000 (#1316) * chore: changed scrape interval to 60s and batch size to 10000 * chore: added send_batch_max_size to the batch processor --- .../docker-swarm/clickhouse-setup/otel-collector-config.yaml | 5 +++-- .../clickhouse-setup/otel-collector-metrics-config.yaml | 5 +++-- deploy/docker/clickhouse-setup/otel-collector-config.yaml | 5 +++-- .../clickhouse-setup/otel-collector-metrics-config.yaml | 5 +++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml index 5f65e6eb5c..a998d93ab9 100644 --- a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml @@ -12,7 +12,7 @@ receivers: grpc: thrift_http: hostmetrics: - collection_interval: 30s + collection_interval: 60s scrapers: cpu: load: @@ -22,7 +22,8 @@ receivers: network: processors: batch: - send_batch_size: 1000 + send_batch_size: 10000 + send_batch_max_size: 11000 timeout: 10s signozspanmetrics/prometheus: metrics_exporter: prometheus diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml index 0563a397da..3aa39b5f7e 100644 --- a/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml +++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml @@ -9,12 +9,13 @@ receivers: config: scrape_configs: - job_name: "otel-collector" - scrape_interval: 30s + scrape_interval: 60s static_configs: - targets: ["otel-collector:8889"] processors: batch: - send_batch_size: 1000 + send_batch_size: 10000 + send_batch_max_size: 11000 timeout: 10s # memory_limiter: # # 80% of maximum memory up to 2G diff --git a/deploy/docker/clickhouse-setup/otel-collector-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-config.yaml index bcf7ce58ce..e363f015df 100644 --- a/deploy/docker/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker/clickhouse-setup/otel-collector-config.yaml @@ -12,7 +12,7 @@ receivers: grpc: thrift_http: hostmetrics: - collection_interval: 30s + collection_interval: 60s scrapers: cpu: load: @@ -22,7 +22,8 @@ receivers: network: processors: batch: - send_batch_size: 1000 + send_batch_size: 10000 + send_batch_max_size: 11000 timeout: 10s signozspanmetrics/prometheus: metrics_exporter: prometheus diff --git a/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml index cd5ede2358..26c629ba60 100644 --- a/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml +++ b/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml @@ -9,12 +9,13 @@ receivers: config: scrape_configs: - job_name: "otel-collector" - scrape_interval: 30s + scrape_interval: 60s static_configs: - targets: ["otel-collector:8889"] processors: batch: - send_batch_size: 1000 + send_batch_size: 10000 + send_batch_max_size: 11000 timeout: 10s # memory_limiter: # # 80% of maximum memory up to 2G From 9d3fc493a38115e2c402fc6c81c40aca59584c1e 
Mon Sep 17 00:00:00 2001 From: Pranshu Chittora Date: Fri, 24 Jun 2022 15:00:21 +0530 Subject: [PATCH 33/42] feat: Metrics Query Builder (#1166) * feat: metrics builder metrics name suggestion UX changes * feat: metrics builder metric name and single value selection * feat: code cleanup * feat: improved ts typings Co-authored-by: Srikanth Chekuri --- frontend/src/api/metrics/getMetricName.ts | 27 ++ frontend/src/api/metrics/getQueryRange.ts | 25 ++ .../src/api/metrics/getResourceAttributes.ts | 13 +- frontend/src/components/Editor/index.tsx | 32 +- .../src/components/Graph/Plugin/Legend.ts | 1 - frontend/src/components/Graph/index.tsx | 9 +- frontend/src/components/Graph/xAxisConfig.ts | 8 +- frontend/src/components/Graph/yAxisConfig.ts | 10 +- frontend/src/components/TextToolTip/index.tsx | 8 +- frontend/src/constants/dashboard.ts | 33 ++ .../Graph/FullView/index.metricsBuilder.tsx | 132 ++++++++ .../GridGraphLayout/Graph/FullView/index.tsx | 4 +- .../container/GridGraphLayout/Graph/index.tsx | 191 ++++++++---- .../src/container/GridGraphLayout/utils.ts | 36 ++- .../ResourceAttributesFilter/utils.ts | 12 +- .../MetricsApplication/Tabs/DBCall.tsx | 4 +- .../MetricsApplication/Tabs/External.tsx | 4 +- .../MetricsApplication/Tabs/Overview.tsx | 10 +- .../container/MetricsApplication/index.tsx | 4 +- .../LeftContainer/QuerySection/Query.tsx | 151 --------- .../QuerySection/QueryBuilder/Options.ts | 20 ++ .../QuerySection/QueryBuilder/QueryHeader.tsx | 46 +++ .../QueryBuilder/clickHouse/index.tsx | 69 ++++ .../QueryBuilder/clickHouse/query.tsx | 53 ++++ .../QuerySection/QueryBuilder/index.tsx | 95 ++++++ .../QueryBuilder/promQL/index.tsx | 67 ++++ .../QueryBuilder/promQL/query.tsx | 45 +++ .../MetricTagKey.machine.ts | 61 ++++ .../MetricTagKey.machine.typegen.ts | 32 ++ .../MetricTagKeyFilter/QueryChip.tsx | 31 ++ .../queryBuilder/MetricTagKeyFilter/index.tsx | 211 +++++++++++++ .../queryBuilder/MetricTagKeyFilter/styles.ts | 30 ++ .../queryBuilder/MetricTagKeyFilter/types.ts | 11 + .../queryBuilder/MetricTagKeyFilter/utils.ts | 55 ++++ .../QueryBuilder/queryBuilder/formula.tsx | 37 +++ .../QueryBuilder/queryBuilder/index.tsx | 168 ++++++++++ .../QueryBuilder/queryBuilder/query.tsx | 200 ++++++++++++ .../QueryBuilder/queryBuilder/utils.ts | 16 + .../LeftContainer/QuerySection/TabHeader.tsx | 31 ++ .../LeftContainer/QuerySection/constants.ts | 17 + .../LeftContainer/QuerySection/index.tsx | 295 ++++++++++++++++-- .../LeftContainer/QuerySection/styles.ts | 13 +- .../LeftContainer/QuerySection/types.ts | 13 + .../QuerySection/utils/getQueryKey.ts | 7 + .../QuerySection/utils/userSettings.ts | 23 ++ .../NewWidget/LeftContainer/QueryTypeTag.tsx | 35 +++ .../LeftContainer/WidgetGraph/PlotTag.tsx | 17 + .../LeftContainer/WidgetGraph/WidgetGraph.tsx | 10 +- .../LeftContainer/WidgetGraph/index.tsx | 3 +- .../LeftContainer/WidgetGraph/styles.ts | 4 +- .../NewWidget/LeftContainer/index.tsx | 9 +- frontend/src/container/NewWidget/index.tsx | 106 ++++--- frontend/src/container/NewWidget/styles.ts | 6 +- frontend/src/lib/getChartData.ts | 11 +- frontend/src/lib/getLabelName.ts | 4 +- frontend/src/lib/getMaxMinTime.ts | 2 +- frontend/src/lib/query/GetFormulaName.ts | 27 ++ frontend/src/lib/query/GetQueryName.ts | 29 ++ .../store/actions/dashboard/getDashboard.ts | 42 ++- .../actions/dashboard/getQueryResults.ts | 223 +++++++++---- .../store/actions/dashboard/saveDashboard.ts | 1 - .../store/actions/dashboard/updateQuery.ts | 29 +- .../actions/dashboard/updateQueryType.ts | 21 ++ 
frontend/src/store/reducers/dashboard.ts | 122 ++++---- frontend/src/types/actions/dashboard.ts | 37 ++- frontend/src/types/api/dashboard/getAll.ts | 67 +++- frontend/src/types/api/dashboard/shared.ts | 5 + .../src/types/api/metrics/getMetricName.ts | 4 + .../src/types/api/metrics/getQueryRange.ts | 9 + .../api/metrics/getResourceAttributes.ts | 9 +- frontend/src/types/api/widgets/getQuery.ts | 1 + frontend/src/types/common/dashboard.ts | 50 +++ 72 files changed, 2700 insertions(+), 543 deletions(-) create mode 100644 frontend/src/api/metrics/getMetricName.ts create mode 100644 frontend/src/api/metrics/getQueryRange.ts create mode 100644 frontend/src/constants/dashboard.ts create mode 100644 frontend/src/container/GridGraphLayout/Graph/FullView/index.metricsBuilder.tsx delete mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/Query.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/Options.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/index.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/index.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/index.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/query.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/MetricTagKey.machine.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/MetricTagKey.machine.typegen.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/QueryChip.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/index.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/styles.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/types.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/MetricTagKeyFilter/utils.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/formula.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/index.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/query.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/utils.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/TabHeader.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/constants.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/types.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/utils/getQueryKey.ts create mode 100644 frontend/src/container/NewWidget/LeftContainer/QuerySection/utils/userSettings.ts create mode 
100644 frontend/src/container/NewWidget/LeftContainer/QueryTypeTag.tsx create mode 100644 frontend/src/container/NewWidget/LeftContainer/WidgetGraph/PlotTag.tsx create mode 100644 frontend/src/lib/query/GetFormulaName.ts create mode 100644 frontend/src/lib/query/GetQueryName.ts create mode 100644 frontend/src/store/actions/dashboard/updateQueryType.ts create mode 100644 frontend/src/types/api/dashboard/shared.ts create mode 100644 frontend/src/types/api/metrics/getMetricName.ts create mode 100644 frontend/src/types/api/metrics/getQueryRange.ts create mode 100644 frontend/src/types/common/dashboard.ts diff --git a/frontend/src/api/metrics/getMetricName.ts b/frontend/src/api/metrics/getMetricName.ts new file mode 100644 index 0000000000..f3bff5a921 --- /dev/null +++ b/frontend/src/api/metrics/getMetricName.ts @@ -0,0 +1,27 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + MetricNameProps, + MetricNamesPayloadProps, +} from 'types/api/metrics/getMetricName'; + +export const getMetricName = async ( + props: MetricNameProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/metrics/autocomplete/list?match=${props || ''}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; diff --git a/frontend/src/api/metrics/getQueryRange.ts b/frontend/src/api/metrics/getQueryRange.ts new file mode 100644 index 0000000000..b6715f85e6 --- /dev/null +++ b/frontend/src/api/metrics/getQueryRange.ts @@ -0,0 +1,25 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + MetricRangePayloadProps, + MetricsRangeProps, +} from 'types/api/metrics/getQueryRange'; + +export const getMetricsQueryRange = async ( + props: MetricsRangeProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/metrics/query_range`, props); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; diff --git a/frontend/src/api/metrics/getResourceAttributes.ts b/frontend/src/api/metrics/getResourceAttributes.ts index 5be45af6f1..66524bf8f7 100644 --- a/frontend/src/api/metrics/getResourceAttributes.ts +++ b/frontend/src/api/metrics/getResourceAttributes.ts @@ -3,17 +3,20 @@ import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { AxiosError } from 'axios'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { + TagKeyProps, TagKeysPayloadProps, TagValueProps, TagValuesPayloadProps, } from 'types/api/metrics/getResourceAttributes'; -export const getResourceAttributesTagKeys = async (): Promise< - SuccessResponse | ErrorResponse -> => { +export const getResourceAttributesTagKeys = async ( + props: TagKeyProps, +): Promise | ErrorResponse> => { try { const response = await axios.get( - '/metrics/autocomplete/tagKey?metricName=signoz_calls_total&match=resource_', + `/metrics/autocomplete/tagKey?metricName=${props.metricName}${ + props.match ? 
`&match=${props.match}` : '' + }`, ); return { @@ -32,7 +35,7 @@ export const getResourceAttributesTagValues = async ( ): Promise | ErrorResponse> => { try { const response = await axios.get( - `/metrics/autocomplete/tagValue?metricName=signoz_calls_total&tagKey=${props}`, + `/metrics/autocomplete/tagValue?metricName=${props.metricName}&tagKey=${props.tagKey}`, ); return { diff --git a/frontend/src/components/Editor/index.tsx b/frontend/src/components/Editor/index.tsx index 0ed486e248..92033b0830 100644 --- a/frontend/src/components/Editor/index.tsx +++ b/frontend/src/components/Editor/index.tsx @@ -1,38 +1,46 @@ -import MEditor from '@monaco-editor/react'; +import MEditor, { EditorProps } from '@monaco-editor/react'; import React from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import AppReducer from 'types/reducer/app'; function Editor({ value, - language = 'yaml', + language, onChange, - readOnly = false, -}: EditorProps): JSX.Element { + readOnly, + height, + options, +}: MEditorProps): JSX.Element { + const { isDarkMode } = useSelector((state) => state.app); return ( { - if (newValue) { - onChange(newValue); - } + if (typeof newValue === 'string') onChange(newValue); }} /> ); } -interface EditorProps { +interface MEditorProps { value: string; language?: string; onChange: (value: string) => void; readOnly?: boolean; + height?: string; + options?: EditorProps['options']; } Editor.defaultProps = { - language: undefined, + language: 'yaml', readOnly: false, + height: '40vh', + options: {}, }; export default Editor; diff --git a/frontend/src/components/Graph/Plugin/Legend.ts b/frontend/src/components/Graph/Plugin/Legend.ts index b8ec2facf9..51ec58df2e 100644 --- a/frontend/src/components/Graph/Plugin/Legend.ts +++ b/frontend/src/components/Graph/Plugin/Legend.ts @@ -22,7 +22,6 @@ const getOrCreateLegendList = ( listContainer.style.height = '100%'; listContainer.style.flexWrap = 'wrap'; listContainer.style.justifyContent = 'center'; - legendContainer?.appendChild(listContainer); } diff --git a/frontend/src/components/Graph/index.tsx b/frontend/src/components/Graph/index.tsx index 2194387dd4..4bb76276c0 100644 --- a/frontend/src/components/Graph/index.tsx +++ b/frontend/src/components/Graph/index.tsx @@ -182,11 +182,10 @@ function Graph({ }; const chartHasData = hasData(data); const chartPlugins = []; - if (chartHasData) { - chartPlugins.push(legend(name, data.datasets.length > 3)); - } else { - chartPlugins.push(emptyGraph); - } + + if (!chartHasData) chartPlugins.push(emptyGraph); + chartPlugins.push(legend(name, data.datasets.length > 3)); + lineChartRef.current = new Chart(chartRef.current, { type, data, diff --git a/frontend/src/components/Graph/xAxisConfig.ts b/frontend/src/components/Graph/xAxisConfig.ts index 565b60f7e0..d14d9bba09 100644 --- a/frontend/src/components/Graph/xAxisConfig.ts +++ b/frontend/src/components/Graph/xAxisConfig.ts @@ -109,14 +109,14 @@ export const useXAxisTimeUnit = (data: Chart['data']): IAxisTimeConfig => { let minTime = Number.POSITIVE_INFINITY; let maxTime = Number.NEGATIVE_INFINITY; data?.labels?.forEach((timeStamp: unknown): void => { - const getTimeStamp = (time: string | number): Date | number | string => { - if (typeof timeStamp === 'string') { - return Date.parse(timeStamp); + const getTimeStamp = (time: Date | number): Date | number | string => { + if (time instanceof Date) { + return Date.parse(time.toString()); } return time; }; - const time = getTimeStamp(timeStamp as string | number); + 
const time = getTimeStamp(timeStamp as Date | number); minTime = Math.min(parseInt(time.toString(), 10), minTime); maxTime = Math.max(parseInt(time.toString(), 10), maxTime); diff --git a/frontend/src/components/Graph/yAxisConfig.ts b/frontend/src/components/Graph/yAxisConfig.ts index 5d1eeb5da7..44377b6fc5 100644 --- a/frontend/src/components/Graph/yAxisConfig.ts +++ b/frontend/src/components/Graph/yAxisConfig.ts @@ -7,13 +7,17 @@ export const getYAxisFormattedValue = ( let decimalPrecision: number | undefined; const parsedValue = getValueFormat(format)( parseFloat(value), - undefined, - undefined, + 12, + 12, undefined, ); + try { const decimalSplitted = parsedValue.text.split('.'); - if (decimalSplitted.length === 1) { + if ( + decimalSplitted.length === 1 || + parseFloat(parsedValue.text) === parseInt(parsedValue.text, 10) + ) { decimalPrecision = 0; } else { const decimalDigits = decimalSplitted[1].split(''); diff --git a/frontend/src/components/TextToolTip/index.tsx b/frontend/src/components/TextToolTip/index.tsx index 051d48c30d..06802b475f 100644 --- a/frontend/src/components/TextToolTip/index.tsx +++ b/frontend/src/components/TextToolTip/index.tsx @@ -10,9 +10,11 @@ function TextToolTip({ text, url }: TextToolTipProps): JSX.Element { return (
); }} diff --git a/frontend/src/constants/dashboard.ts b/frontend/src/constants/dashboard.ts new file mode 100644 index 0000000000..3aa495d6b8 --- /dev/null +++ b/frontend/src/constants/dashboard.ts @@ -0,0 +1,33 @@ +import { EAggregateOperator, EReduceOperator } from 'types/common/dashboard'; + +export const PromQLQueryTemplate = { + query: '', + legend: '', + disabled: false, +}; + +export const ClickHouseQueryTemplate = { + rawQuery: '', + legend: '', + disabled: false, +}; + +export const QueryBuilderQueryTemplate = { + metricName: null, + aggregateOperator: EAggregateOperator.NOOP, + tagFilters: { + op: 'AND', + items: [], + }, + legend: '', + disabled: false, + // Specific to TIME_SERIES type graph + groupBy: [], + // Specific to VALUE type graph + reduceTo: EReduceOperator['Latest of values in timeframe'], +}; + +export const QueryBuilderFormulaTemplate = { + expression: '', + disabled: false, +}; diff --git a/frontend/src/container/GridGraphLayout/Graph/FullView/index.metricsBuilder.tsx b/frontend/src/container/GridGraphLayout/Graph/FullView/index.metricsBuilder.tsx new file mode 100644 index 0000000000..8fe3b259c1 --- /dev/null +++ b/frontend/src/container/GridGraphLayout/Graph/FullView/index.metricsBuilder.tsx @@ -0,0 +1,132 @@ +import { Button, Typography } from 'antd'; +import { GraphOnClickHandler } from 'components/Graph'; +import Spinner from 'components/Spinner'; +import TimePreference from 'components/TimePreferenceDropDown'; +import GridGraphComponent from 'container/GridGraphComponent'; +import { + timeItems, + timePreferance, +} from 'container/NewWidget/RightContainer/timeItems'; +import getChartData from 'lib/getChartData'; +import React, { useCallback, useState } from 'react'; +import { useQuery } from 'react-query'; +import { useSelector } from 'react-redux'; +import { GetMetricQueryRange } from 'store/actions/dashboard/getQueryResults'; +import { AppState } from 'store/reducers'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { Widgets } from 'types/api/dashboard/getAll'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import { NotFoundContainer, TimeContainer } from './styles'; + +function FullView({ + widget, + fullViewOptions = true, + onClickHandler, + name, + yAxisUnit, +}: FullViewProps): JSX.Element { + const { selectedTime: globalSelectedTime } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const getSelectedTime = useCallback( + () => + timeItems.find((e) => e.enum === (widget?.timePreferance || 'GLOBAL_TIME')), + [widget], + ); + + const [selectedTime, setSelectedTime] = useState({ + name: getSelectedTime()?.name || '', + enum: widget?.timePreferance || 'GLOBAL_TIME', + }); + const response = useQuery< + SuccessResponse | ErrorResponse + >( + `FullViewGetMetricsQueryRange-${selectedTime.enum}-${globalSelectedTime}`, + () => + GetMetricQueryRange({ + selectedTime: selectedTime.enum, + graphType: widget.panelTypes, + query: widget.query, + globalSelectedInterval: globalSelectedTime, + }), + ); + + const isError = response?.error; + const isLoading = response.isLoading === true; + const errorMessage = isError instanceof Error ? 
isError?.message : ''; + + if (isLoading) { + return ; + } + if (isError || !response?.data?.payload?.data?.result) { + return ( + + {errorMessage} + + ); + } + + return ( + <> + {fullViewOptions && ( + + + + + )} + + + + ); +} + +interface FullViewProps { + widget: Widgets; + fullViewOptions?: boolean; + onClickHandler?: GraphOnClickHandler; + name: string; + yAxisUnit?: string; +} + +FullView.defaultProps = { + fullViewOptions: undefined, + onClickHandler: undefined, + yAxisUnit: undefined, +}; + +export default FullView; diff --git a/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx b/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx index 3111e33e87..ad228f642e 100644 --- a/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx +++ b/frontend/src/container/GridGraphLayout/Graph/FullView/index.tsx @@ -19,7 +19,7 @@ import React, { useCallback, useState } from 'react'; import { useQueries } from 'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; -import { Widgets } from 'types/api/dashboard/getAll'; +import { PromQLWidgets } from 'types/api/dashboard/getAll'; import { GlobalReducer } from 'types/reducer/globalTime'; import { NotFoundContainer, TimeContainer } from './styles'; @@ -173,7 +173,7 @@ function FullView({ } interface FullViewProps { - widget: Widgets; + widget: PromQLWidgets; fullViewOptions?: boolean; onClickHandler?: GraphOnClickHandler; name: string; diff --git a/frontend/src/container/GridGraphLayout/Graph/index.tsx b/frontend/src/container/GridGraphLayout/Graph/index.tsx index a57a396fd0..f39d64cb76 100644 --- a/frontend/src/container/GridGraphLayout/Graph/index.tsx +++ b/frontend/src/container/GridGraphLayout/Graph/index.tsx @@ -1,14 +1,12 @@ import { Typography } from 'antd'; -import getQueryResult from 'api/widgets/getQuery'; +import { AxiosError } from 'axios'; +import { ChartData } from 'chart.js'; import Spinner from 'components/Spinner'; import GridGraphComponent from 'container/GridGraphComponent'; import getChartData from 'lib/getChartData'; -import GetMaxMinTime from 'lib/getMaxMinTime'; -import GetStartAndEndTime from 'lib/getStartAndEndTime'; import isEmpty from 'lodash-es/isEmpty'; -import React, { memo, useCallback, useState } from 'react'; +import React, { memo, useCallback, useEffect, useState } from 'react'; import { Layout } from 'react-grid-layout'; -import { useQueries } from 'react-query'; import { connect, useSelector } from 'react-redux'; import { bindActionCreators, Dispatch } from 'redux'; import { ThunkDispatch } from 'redux-thunk'; @@ -16,15 +14,17 @@ import { DeleteWidget, DeleteWidgetProps, } from 'store/actions/dashboard/deleteWidget'; +import { GetMetricQueryRange } from 'store/actions/dashboard/getQueryResults'; import { AppState } from 'store/reducers'; import AppActions from 'types/actions'; import { GlobalTime } from 'types/actions/globalTime'; import { Widgets } from 'types/api/dashboard/getAll'; +import { GlobalReducer } from 'types/reducer/globalTime'; import { LayoutProps } from '..'; import EmptyWidget from '../EmptyWidget'; import WidgetHeader from '../WidgetHeader'; -import FullView from './FullView'; +import FullView from './FullView/index.metricsBuilder'; import { ErrorContainer, FullViewContainer, Modal } from './styles'; function GridCardGraph({ @@ -35,60 +35,118 @@ function GridCardGraph({ layout = [], setLayout, }: GridCardGraphProps): JSX.Element { + const [state, setState] = useState({ + loading: true, + errorMessage: '', + error: false, 
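+		// chart.js data built from the query range response; stays undefined until the request resolves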
+ payload: undefined, + }); const [hovered, setHovered] = useState(false); const [modal, setModal] = useState(false); + const [deleteModal, setDeleteModal] = useState(false); + const { minTime, maxTime } = useSelector( (state) => state.globalTime, ); - const [deleteModal, setDeleteModal] = useState(false); + const { selectedTime: globalSelectedInterval } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); - const getMaxMinTime = GetMaxMinTime({ - graphType: widget?.panelTypes, - maxTime, - minTime, - }); + // const getMaxMinTime = GetMaxMinTime({ + // graphType: widget?.panelTypes, + // maxTime, + // minTime, + // }); - const { start, end } = GetStartAndEndTime({ - type: widget?.timePreferance, - maxTime: getMaxMinTime.maxTime, - minTime: getMaxMinTime.minTime, - }); + // const { start, end } = GetStartAndEndTime({ + // type: widget?.timePreferance, + // maxTime: getMaxMinTime.maxTime, + // minTime: getMaxMinTime.minTime, + // }); - const queryLength = widget?.query?.filter((e) => e.query.length !== 0) || []; + // const queryLength = widget?.query?.filter((e) => e.query.length !== 0) || []; - const response = useQueries( - queryLength?.map((query) => { - return { - // eslint-disable-next-line @typescript-eslint/explicit-function-return-type - queryFn: () => { - return getQueryResult({ - end, - query: query?.query, - start, - step: '60', + // const response = useQueries( + // queryLength?.map((query) => { + // return { + // // eslint-disable-next-line @typescript-eslint/explicit-function-return-type + // queryFn: () => { + // return getQueryResult({ + // end, + // query: query?.query, + // start, + // step: '60', + // }); + // }, + // queryHash: `${query?.query}-${query?.legend}-${start}-${end}`, + // retryOnMount: false, + // }; + // }), + // ); + + // const isError = + // response.find((e) => e?.data?.statusCode !== 200) !== undefined || + // response.some((e) => e.isError === true); + + // const isLoading = response.some((e) => e.isLoading === true); + + // const errorMessage = response.find((e) => e.data?.error !== null)?.data?.error; + + // const data = response.map((responseOfQuery) => + // responseOfQuery?.data?.payload?.result.map((e, index) => ({ + // query: queryLength[index]?.query, + // queryData: e, + // legend: queryLength[index]?.legend, + // })), + // ); + + useEffect(() => { + (async (): Promise => { + try { + const response = await GetMetricQueryRange({ + selectedTime: widget.timePreferance, + graphType: widget.panelTypes, + query: widget.query, + globalSelectedInterval, + }); + + const isError = response.error; + + if (isError != null) { + setState((state) => ({ + ...state, + error: true, + errorMessage: isError || 'Something went wrong', + loading: false, + })); + } else { + const chartDataSet = getChartData({ + queryData: [ + { + queryData: response.payload?.data?.result + ? 
response.payload?.data?.result + : [], + }, + ], }); - }, - queryHash: `${query?.query}-${query?.legend}-${start}-${end}`, - retryOnMount: false, - }; - }), - ); - const isError = - response.find((e) => e?.data?.statusCode !== 200) !== undefined || - response.some((e) => e.isError === true); - - const isLoading = response.some((e) => e.isLoading === true); - - const errorMessage = response.find((e) => e.data?.error !== null)?.data?.error; - - const data = response.map((responseOfQuery) => - responseOfQuery?.data?.payload?.result.map((e, index) => ({ - query: queryLength[index]?.query, - queryData: e, - legend: queryLength[index]?.legend, - })), - ); + setState((state) => ({ + ...state, + loading: false, + payload: chartDataSet, + })); + } + } catch (error) { + setState((state) => ({ + ...state, + error: true, + errorMessage: (error as AxiosError).toString(), + loading: false, + })); + } + })(); + }, [widget, maxTime, minTime, globalSelectedInterval]); const onToggleModal = useCallback( (func: React.Dispatch>) => { @@ -144,14 +202,7 @@ function GridCardGraph({ const isEmptyLayout = widget?.id === 'empty' || isEmpty(widget); - if (isLoading) { - return ; - } - - if ( - (isError || data === undefined || data[0] === undefined) && - !isEmptyLayout - ) { + if (state.error && !isEmptyLayout) { return ( <> {getModals()} @@ -163,18 +214,17 @@ function GridCardGraph({ onDelete={(): void => onToggleModal(setDeleteModal)} /> - {errorMessage} + {state.errorMessage} ); } - const chartData = getChartData({ - queryData: data.map((e) => ({ - query: e?.map((e) => e.query).join(' ') || '', - queryData: e?.map((e) => e.queryData) || [], - legend: e?.map((e) => e.legend).join('') || '', - })), - }); + if ( + (state.loading === true || state.payload === undefined) && + !isEmptyLayout + ) { + return ; + } return ( => { // resolve(TagKeysCache); // }); // } - const { payload } = await getResourceAttributesTagKeys(); + const { payload } = await getResourceAttributesTagKeys({ + metricName: 'signoz_calls_total', + match: 'resource_', + }); if (!payload || !payload?.data) { return []; } @@ -32,12 +35,15 @@ export const GetTagKeys = async (): Promise => { }; export const GetTagValues = async (tagKey: string): Promise => { - const { payload } = await getResourceAttributesTagValues(tagKey); + const { payload } = await getResourceAttributesTagValues({ + tagKey, + metricName: 'signoz_calls_total', + }); if (!payload || !payload?.data) { return []; } - return payload.data.filter(Boolean).map((tagValue: string) => ({ + return payload.data.map((tagValue: string) => ({ label: tagValue, value: tagValue, })); diff --git a/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx b/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx index ba0af031cd..b5466f2b02 100644 --- a/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx @@ -4,7 +4,7 @@ import React from 'react'; import { useSelector } from 'react-redux'; import { useParams } from 'react-router-dom'; import { AppState } from 'store/reducers'; -import { Widgets } from 'types/api/dashboard/getAll'; +import { PromQLWidgets, Widgets } from 'types/api/dashboard/getAll'; import MetricReducer from 'types/reducer/metrics'; import { Card, GraphContainer, GraphTitle, Row } from '../styles'; @@ -58,7 +58,7 @@ function DBCall({ getWidget }: DBCallProps): JSX.Element { } interface DBCallProps { - getWidget: (query: Widgets['query']) => Widgets; + getWidget: (query: PromQLWidgets['query']) => PromQLWidgets; } 
export default DBCall; diff --git a/frontend/src/container/MetricsApplication/Tabs/External.tsx b/frontend/src/container/MetricsApplication/Tabs/External.tsx index 73eca09600..2974ba3f38 100644 --- a/frontend/src/container/MetricsApplication/Tabs/External.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/External.tsx @@ -4,7 +4,7 @@ import React from 'react'; import { useSelector } from 'react-redux'; import { useParams } from 'react-router-dom'; import { AppState } from 'store/reducers'; -import { Widgets } from 'types/api/dashboard/getAll'; +import { PromQLWidgets } from 'types/api/dashboard/getAll'; import MetricReducer from 'types/reducer/metrics'; import { Card, GraphContainer, GraphTitle, Row } from '../styles'; @@ -102,7 +102,7 @@ function External({ getWidget }: ExternalProps): JSX.Element { } interface ExternalProps { - getWidget: (query: Widgets['query']) => Widgets; + getWidget: (query: PromQLWidgets['query']) => PromQLWidgets; } export default External; diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx index fbc1a855ac..a24e67aeb7 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx @@ -11,7 +11,7 @@ import React, { useRef } from 'react'; import { useSelector } from 'react-redux'; import { useParams } from 'react-router-dom'; import { AppState } from 'store/reducers'; -import { Widgets } from 'types/api/dashboard/getAll'; +import { PromQLWidgets } from 'types/api/dashboard/getAll'; import MetricReducer from 'types/reducer/metrics'; import { Card, Col, GraphContainer, GraphTitle, Row } from '../styles'; @@ -42,8 +42,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element { urlParams.set(METRICS_PAGE_QUERY_PARAM.endTime, tPlusOne.toString()); history.replace( - `${ - ROUTES.TRACE + `${ROUTES.TRACE }?${urlParams.toString()}&selected={"serviceName":["${servicename}"]}&filterToFetchData=["duration","status","serviceName"]&spanAggregateCurrentPage=1&selectedTags=${selectedTraceTags}&&isFilterExclude={"serviceName":false}&userSelectedFilter={"status":["error","ok"],"serviceName":["${servicename}"]}&spanAggregateCurrentPage=1&spanAggregateOrder=ascend`, ); }; @@ -94,8 +93,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element { urlParams.set(METRICS_PAGE_QUERY_PARAM.endTime, tPlusOne.toString()); history.replace( - `${ - ROUTES.TRACE + `${ROUTES.TRACE }?${urlParams.toString()}?selected={"serviceName":["${servicename}"],"status":["error"]}&filterToFetchData=["duration","status","serviceName"]&spanAggregateCurrentPage=1&selectedTags=${selectedTraceTags}&isFilterExclude={"serviceName":false,"status":false}&userSelectedFilter={"serviceName":["${servicename}"],"status":["error"]}&spanAggregateCurrentPage=1&spanAggregateOrder=ascend`, ); }; @@ -248,7 +246,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element { } interface DashboardProps { - getWidget: (query: Widgets['query']) => Widgets; + getWidget: (query: PromQLWidgets['query']) => PromQLWidgets; } export default Application; diff --git a/frontend/src/container/MetricsApplication/index.tsx b/frontend/src/container/MetricsApplication/index.tsx index 9922fbb8c2..aeaf4a5664 100644 --- a/frontend/src/container/MetricsApplication/index.tsx +++ b/frontend/src/container/MetricsApplication/index.tsx @@ -3,14 +3,14 @@ import ROUTES from 'constants/routes'; import React from 'react'; import { generatePath, useParams } from 
'react-router-dom'; import { useLocation } from 'react-use'; -import { Widgets } from 'types/api/dashboard/getAll'; +import { PromQLWidgets } from 'types/api/dashboard/getAll'; import ResourceAttributesFilter from './ResourceAttributesFilter'; import DBCall from './Tabs/DBCall'; import External from './Tabs/External'; import Overview from './Tabs/Overview'; -const getWidget = (query: Widgets['query']): Widgets => { +const getWidget = (query: PromQLWidgets['query']): PromQLWidgets => { return { description: '', id: '', diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/Query.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/Query.tsx deleted file mode 100644 index 916771eece..0000000000 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/Query.tsx +++ /dev/null @@ -1,151 +0,0 @@ -import { Button, Divider } from 'antd'; -import Input from 'components/Input'; -import TextToolTip from 'components/TextToolTip'; -import { timePreferance } from 'container/NewWidget/RightContainer/timeItems'; -import React, { useCallback, useMemo, useState } from 'react'; -import { connect, useSelector } from 'react-redux'; -import { useLocation } from 'react-router-dom'; -import { bindActionCreators, Dispatch } from 'redux'; -import { ThunkDispatch } from 'redux-thunk'; -import { DeleteQuery } from 'store/actions'; -import { - UpdateQuery, - UpdateQueryProps, -} from 'store/actions/dashboard/updateQuery'; -import { AppState } from 'store/reducers'; -import AppActions from 'types/actions'; -import { DeleteQueryProps } from 'types/actions/dashboard'; -import { Widgets } from 'types/api/dashboard/getAll'; -import DashboardReducer from 'types/reducer/dashboards'; - -import { - ButtonContainer, - Container, - InputContainer, - QueryWrapper, -} from './styles'; - -function Query({ - currentIndex, - preLegend, - preQuery, - updateQuery, - deleteQuery, -}: QueryProps): JSX.Element { - const [promqlQuery, setPromqlQuery] = useState(preQuery); - const [legendFormat, setLegendFormat] = useState(preLegend); - const { search } = useLocation(); - const { dashboards } = useSelector( - (state) => state.dashboards, - ); - - const [selectedDashboards] = dashboards; - const { widgets } = selectedDashboards.data; - - const query = new URLSearchParams(search); - const widgetId = query.get('widgetId') || ''; - - const urlQuery = useMemo(() => { - return new URLSearchParams(search); - }, [search]); - - const getWidget = useCallback(() => { - const widgetId = urlQuery.get('widgetId'); - return widgets?.find((e) => e.id === widgetId); - }, [widgets, urlQuery]); - - const selectedWidget = getWidget() as Widgets; - - const onChangeHandler = useCallback( - (setFunc: React.Dispatch>, value: string) => { - setFunc(value); - }, - [], - ); - - const onBlurHandler = (): void => { - updateQuery({ - currentIndex, - legend: legendFormat, - query: promqlQuery, - widgetId, - yAxisUnit: selectedWidget.yAxisUnit, - }); - }; - - const onDeleteQueryHandler = (): void => { - deleteQuery({ - widgetId, - currentIndex, - }); - }; - - return ( - <> - - - - - onChangeHandler(setPromqlQuery, event.target.value) - } - size="middle" - value={promqlQuery} - addonBefore="PromQL Query" - onBlur={(): void => onBlurHandler()} - /> - - - - - onChangeHandler(setLegendFormat, event.target.value) - } - size="middle" - value={legendFormat} - addonBefore="Legend Format" - onBlur={(): void => onBlurHandler()} - /> - - - - - - - - - - - - ); -} - -interface DispatchProps { - updateQuery: ( - props: UpdateQueryProps, - ) => 
(dispatch: Dispatch) => void; - deleteQuery: ( - props: DeleteQueryProps, - ) => (dispatch: Dispatch) => void; -} - -const mapDispatchToProps = ( - dispatch: ThunkDispatch, -): DispatchProps => ({ - updateQuery: bindActionCreators(UpdateQuery, dispatch), - deleteQuery: bindActionCreators(DeleteQuery, dispatch), -}); - -interface QueryProps extends DispatchProps { - selectedTime: timePreferance; - currentIndex: number; - preQuery: string; - preLegend: string; -} - -export default connect(null, mapDispatchToProps)(Query); diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/Options.ts b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/Options.ts new file mode 100644 index 0000000000..80a14f173b --- /dev/null +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/Options.ts @@ -0,0 +1,20 @@ +import { EAggregateOperator } from 'types/common/dashboard'; + +export const AggregateFunctions = Object.keys(EAggregateOperator) + .filter((key) => Number.isNaN(parseInt(key, 10))) + .map((key) => { + return { + label: key, + value: EAggregateOperator[key], + }; + }); + +export const TagKeyOperator = [ + { label: 'In', value: 'IN' }, + { label: 'Not In', value: 'NIN' }, + { label: 'Like', value: 'LIKE' }, + { label: 'Not Like', value: 'NLIKE' }, + // { label: 'Equal', value: 'EQ' }, + // { label: 'Not Equal', value: 'NEQ' }, + // { label: 'REGEX', value: 'REGEX' }, +]; diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx new file mode 100644 index 0000000000..9164c532af --- /dev/null +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx @@ -0,0 +1,46 @@ +import { + DeleteOutlined, + DownOutlined, + EyeFilled, + EyeInvisibleFilled, + RightOutlined, +} from '@ant-design/icons'; +import { Button, Row } from 'antd'; +import React, { useState } from 'react'; + +import { QueryWrapper } from '../styles'; + +function QueryHeader({ + disabled, + onDisable, + name, + onDelete, + children, +}): JSX.Element { + const [collapse, setCollapse] = useState(false); + return ( + + + + + +
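The new Options.ts earlier in this patch derives the aggregate-function dropdown from a numeric enum: TypeScript numeric enums also emit reverse (value-to-name) entries, so the numeric keys are filtered out before the remaining names are mapped to { label, value } pairs. A standalone sketch of that pattern, with a hypothetical ExampleOperator enum standing in for EAggregateOperator:

enum ExampleOperator {
	NOOP = 1,
	COUNT,
	SUM,
	AVG,
}

interface SelectOption {
	label: string;
	value: number;
}

// Object.keys on a numeric enum yields both '1', '2', ... and the member
// names, so drop every key that parses as a number before building options.
const aggregateOptions: SelectOption[] = Object.keys(ExampleOperator)
	.filter((key) => Number.isNaN(parseInt(key, 10)))
	.map((key) => ({
		label: key,
		value: ExampleOperator[key as keyof typeof ExampleOperator],
	}));

// e.g. [{ label: 'NOOP', value: 1 }, { label: 'COUNT', value: 2 }, ...]
console.log(aggregateOptions);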