mirror of
https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz
synced 2025-09-20 00:33:13 +08:00
commit
858a0cb0de
@ -347,7 +347,7 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
|
|||||||
```bash
|
```bash
|
||||||
kubectl -n sample-application run strzal --image=djbingham/curl \
|
kubectl -n sample-application run strzal --image=djbingham/curl \
|
||||||
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
|
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
|
||||||
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
|
'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
|
||||||
```
|
```
|
||||||
|
|
||||||
**5.1.3 To stop the load generation:**
|
**5.1.3 To stop the load generation:**
|
||||||
|
1
Makefile
1
Makefile
@ -188,3 +188,4 @@ test:
|
|||||||
go test ./pkg/query-service/tests/integration/...
|
go test ./pkg/query-service/tests/integration/...
|
||||||
go test ./pkg/query-service/rules/...
|
go test ./pkg/query-service/rules/...
|
||||||
go test ./pkg/query-service/collectorsimulator/...
|
go test ./pkg/query-service/collectorsimulator/...
|
||||||
|
go test ./pkg/query-service/postprocess/...
|
||||||
|
@ -146,7 +146,7 @@ services:
|
|||||||
condition: on-failure
|
condition: on-failure
|
||||||
|
|
||||||
query-service:
|
query-service:
|
||||||
image: signoz/query-service:0.48.1
|
image: signoz/query-service:0.49.0
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
"-config=/root/config/prometheus.yml",
|
"-config=/root/config/prometheus.yml",
|
||||||
@ -199,7 +199,7 @@ services:
|
|||||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
otel-collector:
|
otel-collector:
|
||||||
image: signoz/signoz-otel-collector:0.102.0
|
image: signoz/signoz-otel-collector:0.102.1
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
"--config=/etc/otel-collector-config.yaml",
|
"--config=/etc/otel-collector-config.yaml",
|
||||||
@ -237,7 +237,7 @@ services:
|
|||||||
- query-service
|
- query-service
|
||||||
|
|
||||||
otel-collector-migrator:
|
otel-collector-migrator:
|
||||||
image: signoz/signoz-schema-migrator:0.102.0
|
image: signoz/signoz-schema-migrator:0.102.1
|
||||||
deploy:
|
deploy:
|
||||||
restart_policy:
|
restart_policy:
|
||||||
condition: on-failure
|
condition: on-failure
|
||||||
|
@ -66,7 +66,7 @@ services:
|
|||||||
- --storage.path=/data
|
- --storage.path=/data
|
||||||
|
|
||||||
otel-collector-migrator:
|
otel-collector-migrator:
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
|
||||||
container_name: otel-migrator
|
container_name: otel-migrator
|
||||||
command:
|
command:
|
||||||
- "--dsn=tcp://clickhouse:9000"
|
- "--dsn=tcp://clickhouse:9000"
|
||||||
@ -81,7 +81,7 @@ services:
|
|||||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||||
otel-collector:
|
otel-collector:
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
image: signoz/signoz-otel-collector:0.102.0
|
image: signoz/signoz-otel-collector:0.102.1
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
"--config=/etc/otel-collector-config.yaml",
|
"--config=/etc/otel-collector-config.yaml",
|
||||||
|
@ -164,7 +164,7 @@ services:
|
|||||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||||
|
|
||||||
query-service:
|
query-service:
|
||||||
image: signoz/query-service:${DOCKER_TAG:-0.48.1}
|
image: signoz/query-service:${DOCKER_TAG:-0.49.0}
|
||||||
container_name: signoz-query-service
|
container_name: signoz-query-service
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
@ -204,7 +204,7 @@ services:
|
|||||||
<<: *db-depend
|
<<: *db-depend
|
||||||
|
|
||||||
frontend:
|
frontend:
|
||||||
image: signoz/frontend:${DOCKER_TAG:-0.48.1}
|
image: signoz/frontend:${DOCKER_TAG:-0.49.0}
|
||||||
container_name: signoz-frontend
|
container_name: signoz-frontend
|
||||||
restart: on-failure
|
restart: on-failure
|
||||||
depends_on:
|
depends_on:
|
||||||
@ -216,7 +216,7 @@ services:
|
|||||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
otel-collector-migrator:
|
otel-collector-migrator:
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
|
||||||
container_name: otel-migrator
|
container_name: otel-migrator
|
||||||
command:
|
command:
|
||||||
- "--dsn=tcp://clickhouse:9000"
|
- "--dsn=tcp://clickhouse:9000"
|
||||||
@ -230,7 +230,7 @@ services:
|
|||||||
|
|
||||||
|
|
||||||
otel-collector:
|
otel-collector:
|
||||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
|
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.1}
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
|
@ -164,7 +164,7 @@ services:
|
|||||||
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
|
||||||
|
|
||||||
query-service:
|
query-service:
|
||||||
image: signoz/query-service:${DOCKER_TAG:-0.48.1}
|
image: signoz/query-service:${DOCKER_TAG:-0.49.0}
|
||||||
container_name: signoz-query-service
|
container_name: signoz-query-service
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
@ -203,7 +203,7 @@ services:
|
|||||||
<<: *db-depend
|
<<: *db-depend
|
||||||
|
|
||||||
frontend:
|
frontend:
|
||||||
image: signoz/frontend:${DOCKER_TAG:-0.48.1}
|
image: signoz/frontend:${DOCKER_TAG:-0.49.0}
|
||||||
container_name: signoz-frontend
|
container_name: signoz-frontend
|
||||||
restart: on-failure
|
restart: on-failure
|
||||||
depends_on:
|
depends_on:
|
||||||
@ -215,7 +215,7 @@ services:
|
|||||||
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
otel-collector-migrator:
|
otel-collector-migrator:
|
||||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
|
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
|
||||||
container_name: otel-migrator
|
container_name: otel-migrator
|
||||||
command:
|
command:
|
||||||
- "--dsn=tcp://clickhouse:9000"
|
- "--dsn=tcp://clickhouse:9000"
|
||||||
@ -229,7 +229,7 @@ services:
|
|||||||
|
|
||||||
|
|
||||||
otel-collector:
|
otel-collector:
|
||||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
|
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.1}
|
||||||
container_name: signoz-otel-collector
|
container_name: signoz-otel-collector
|
||||||
command:
|
command:
|
||||||
[
|
[
|
||||||
|
@ -88,7 +88,7 @@
|
|||||||
"lucide-react": "0.379.0",
|
"lucide-react": "0.379.0",
|
||||||
"mini-css-extract-plugin": "2.4.5",
|
"mini-css-extract-plugin": "2.4.5",
|
||||||
"papaparse": "5.4.1",
|
"papaparse": "5.4.1",
|
||||||
"posthog-js": "1.140.1",
|
"posthog-js": "1.142.1",
|
||||||
"rc-tween-one": "3.0.6",
|
"rc-tween-one": "3.0.6",
|
||||||
"react": "18.2.0",
|
"react": "18.2.0",
|
||||||
"react-addons-update": "15.6.3",
|
"react-addons-update": "15.6.3",
|
||||||
|
@ -5,7 +5,13 @@ import { Button, Dropdown, MenuProps } from 'antd';
|
|||||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
|
|
||||||
function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
|
function DropDown({
|
||||||
|
element,
|
||||||
|
onDropDownItemClick,
|
||||||
|
}: {
|
||||||
|
element: JSX.Element[];
|
||||||
|
onDropDownItemClick?: MenuProps['onClick'];
|
||||||
|
}): JSX.Element {
|
||||||
const isDarkMode = useIsDarkMode();
|
const isDarkMode = useIsDarkMode();
|
||||||
|
|
||||||
const items: MenuProps['items'] = element.map(
|
const items: MenuProps['items'] = element.map(
|
||||||
@ -23,6 +29,7 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
|
|||||||
items,
|
items,
|
||||||
onMouseEnter: (): void => setDdOpen(true),
|
onMouseEnter: (): void => setDdOpen(true),
|
||||||
onMouseLeave: (): void => setDdOpen(false),
|
onMouseLeave: (): void => setDdOpen(false),
|
||||||
|
onClick: (item): void => onDropDownItemClick?.(item),
|
||||||
}}
|
}}
|
||||||
open={isDdOpen}
|
open={isDdOpen}
|
||||||
>
|
>
|
||||||
@ -40,4 +47,8 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
DropDown.defaultProps = {
|
||||||
|
onDropDownItemClick: (): void => {},
|
||||||
|
};
|
||||||
|
|
||||||
export default DropDown;
|
export default DropDown;
|
||||||
|
@ -62,8 +62,6 @@ function RawLogView({
|
|||||||
const isDarkMode = useIsDarkMode();
|
const isDarkMode = useIsDarkMode();
|
||||||
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
|
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
|
||||||
|
|
||||||
const severityText = data.severity_text ? `${data.severity_text} |` : '';
|
|
||||||
|
|
||||||
const logType = getLogIndicatorType(data);
|
const logType = getLogIndicatorType(data);
|
||||||
|
|
||||||
const updatedSelecedFields = useMemo(
|
const updatedSelecedFields = useMemo(
|
||||||
@ -88,17 +86,16 @@ function RawLogView({
|
|||||||
attributesText += ' | ';
|
attributesText += ' | ';
|
||||||
}
|
}
|
||||||
|
|
||||||
const text = useMemo(
|
const text = useMemo(() => {
|
||||||
() =>
|
const date =
|
||||||
typeof data.timestamp === 'string'
|
typeof data.timestamp === 'string'
|
||||||
? `${dayjs(data.timestamp).format(
|
? dayjs(data.timestamp)
|
||||||
'YYYY-MM-DD HH:mm:ss.SSS',
|
: dayjs(data.timestamp / 1e6);
|
||||||
)} | ${attributesText} ${severityText} ${data.body}`
|
|
||||||
: `${dayjs(data.timestamp / 1e6).format(
|
return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
|
||||||
'YYYY-MM-DD HH:mm:ss.SSS',
|
data.body
|
||||||
)} | ${attributesText} ${severityText} ${data.body}`,
|
}`;
|
||||||
[data.timestamp, data.body, severityText, attributesText],
|
}, [data.timestamp, data.body, attributesText]);
|
||||||
);
|
|
||||||
|
|
||||||
const handleClickExpand = useCallback(() => {
|
const handleClickExpand = useCallback(() => {
|
||||||
if (activeContextLog || isReadOnly) return;
|
if (activeContextLog || isReadOnly) return;
|
||||||
|
@ -2,7 +2,9 @@
|
|||||||
import './DynamicColumnTable.syles.scss';
|
import './DynamicColumnTable.syles.scss';
|
||||||
|
|
||||||
import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd';
|
import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd';
|
||||||
|
import { ColumnGroupType, ColumnType } from 'antd/es/table';
|
||||||
import { ColumnsType } from 'antd/lib/table';
|
import { ColumnsType } from 'antd/lib/table';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
||||||
import { SlidersHorizontal } from 'lucide-react';
|
import { SlidersHorizontal } from 'lucide-react';
|
||||||
import { memo, useEffect, useState } from 'react';
|
import { memo, useEffect, useState } from 'react';
|
||||||
@ -22,6 +24,7 @@ function DynamicColumnTable({
|
|||||||
dynamicColumns,
|
dynamicColumns,
|
||||||
onDragColumn,
|
onDragColumn,
|
||||||
facingIssueBtn,
|
facingIssueBtn,
|
||||||
|
shouldSendAlertsLogEvent,
|
||||||
...restProps
|
...restProps
|
||||||
}: DynamicColumnTableProps): JSX.Element {
|
}: DynamicColumnTableProps): JSX.Element {
|
||||||
const [columnsData, setColumnsData] = useState<ColumnsType | undefined>(
|
const [columnsData, setColumnsData] = useState<ColumnsType | undefined>(
|
||||||
@ -47,11 +50,18 @@ function DynamicColumnTable({
|
|||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [columns, dynamicColumns]);
|
}, [columns, dynamicColumns]);
|
||||||
|
|
||||||
const onToggleHandler = (index: number) => (
|
const onToggleHandler = (
|
||||||
checked: boolean,
|
index: number,
|
||||||
event: React.MouseEvent<HTMLButtonElement>,
|
column: ColumnGroupType<any> | ColumnType<any>,
|
||||||
): void => {
|
) => (checked: boolean, event: React.MouseEvent<HTMLButtonElement>): void => {
|
||||||
event.stopPropagation();
|
event.stopPropagation();
|
||||||
|
|
||||||
|
if (shouldSendAlertsLogEvent) {
|
||||||
|
logEvent('Alert: Column toggled', {
|
||||||
|
column: column?.title,
|
||||||
|
action: checked ? 'Enable' : 'Disable',
|
||||||
|
});
|
||||||
|
}
|
||||||
setVisibleColumns({
|
setVisibleColumns({
|
||||||
tablesource,
|
tablesource,
|
||||||
dynamicColumns,
|
dynamicColumns,
|
||||||
@ -75,7 +85,7 @@ function DynamicColumnTable({
|
|||||||
<div>{column.title?.toString()}</div>
|
<div>{column.title?.toString()}</div>
|
||||||
<Switch
|
<Switch
|
||||||
checked={columnsData?.findIndex((c) => c.key === column.key) !== -1}
|
checked={columnsData?.findIndex((c) => c.key === column.key) !== -1}
|
||||||
onChange={onToggleHandler(index)}
|
onChange={onToggleHandler(index, column)}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
|
@ -3,6 +3,7 @@
|
|||||||
import { Table } from 'antd';
|
import { Table } from 'antd';
|
||||||
import { ColumnsType } from 'antd/lib/table';
|
import { ColumnsType } from 'antd/lib/table';
|
||||||
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
||||||
|
import { set } from 'lodash-es';
|
||||||
import {
|
import {
|
||||||
SyntheticEvent,
|
SyntheticEvent,
|
||||||
useCallback,
|
useCallback,
|
||||||
@ -20,6 +21,7 @@ import { ResizeTableProps } from './types';
|
|||||||
function ResizeTable({
|
function ResizeTable({
|
||||||
columns,
|
columns,
|
||||||
onDragColumn,
|
onDragColumn,
|
||||||
|
pagination,
|
||||||
...restProps
|
...restProps
|
||||||
}: ResizeTableProps): JSX.Element {
|
}: ResizeTableProps): JSX.Element {
|
||||||
const [columnsData, setColumns] = useState<ColumnsType>([]);
|
const [columnsData, setColumns] = useState<ColumnsType>([]);
|
||||||
@ -58,15 +60,22 @@ function ResizeTable({
|
|||||||
[columnsData, onDragColumn, handleResize],
|
[columnsData, onDragColumn, handleResize],
|
||||||
);
|
);
|
||||||
|
|
||||||
const tableParams = useMemo(
|
const tableParams = useMemo(() => {
|
||||||
() => ({
|
const props = {
|
||||||
...restProps,
|
...restProps,
|
||||||
components: { header: { cell: ResizableHeader } },
|
components: { header: { cell: ResizableHeader } },
|
||||||
columns: mergedColumns,
|
columns: mergedColumns,
|
||||||
}),
|
};
|
||||||
[mergedColumns, restProps],
|
|
||||||
|
set(
|
||||||
|
props,
|
||||||
|
'pagination',
|
||||||
|
pagination ? { ...pagination, hideOnSinglePage: true } : false,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
return props;
|
||||||
|
}, [mergedColumns, pagination, restProps]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (columns) {
|
if (columns) {
|
||||||
setColumns(columns);
|
setColumns(columns);
|
||||||
|
@ -14,6 +14,7 @@ export interface DynamicColumnTableProps extends TableProps<any> {
|
|||||||
dynamicColumns: TableProps<any>['columns'];
|
dynamicColumns: TableProps<any>['columns'];
|
||||||
onDragColumn?: (fromIndex: number, toIndex: number) => void;
|
onDragColumn?: (fromIndex: number, toIndex: number) => void;
|
||||||
facingIssueBtn?: FacingIssueBtnProps;
|
facingIssueBtn?: FacingIssueBtnProps;
|
||||||
|
shouldSendAlertsLogEvent?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export type GetVisibleColumnsFunction = (
|
export type GetVisibleColumnsFunction = (
|
||||||
|
@ -1,13 +1,15 @@
|
|||||||
import { PlusOutlined } from '@ant-design/icons';
|
import { PlusOutlined } from '@ant-design/icons';
|
||||||
import { Tooltip, Typography } from 'antd';
|
import { Tooltip, Typography } from 'antd';
|
||||||
import getAll from 'api/channels/getAll';
|
import getAll from 'api/channels/getAll';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import Spinner from 'components/Spinner';
|
import Spinner from 'components/Spinner';
|
||||||
import TextToolTip from 'components/TextToolTip';
|
import TextToolTip from 'components/TextToolTip';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import useComponentPermission from 'hooks/useComponentPermission';
|
import useComponentPermission from 'hooks/useComponentPermission';
|
||||||
import useFetch from 'hooks/useFetch';
|
import useFetch from 'hooks/useFetch';
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { useCallback } from 'react';
|
import { isUndefined } from 'lodash-es';
|
||||||
|
import { useCallback, useEffect } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { useSelector } from 'react-redux';
|
import { useSelector } from 'react-redux';
|
||||||
import { AppState } from 'store/reducers';
|
import { AppState } from 'store/reducers';
|
||||||
@ -31,6 +33,14 @@ function AlertChannels(): JSX.Element {
|
|||||||
|
|
||||||
const { loading, payload, error, errorMessage } = useFetch(getAll);
|
const { loading, payload, error, errorMessage } = useFetch(getAll);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!isUndefined(payload)) {
|
||||||
|
logEvent('Alert Channel: Channel list page visited', {
|
||||||
|
number: payload?.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, [payload]);
|
||||||
|
|
||||||
if (error) {
|
if (error) {
|
||||||
return <Typography>{errorMessage}</Typography>;
|
return <Typography>{errorMessage}</Typography>;
|
||||||
}
|
}
|
||||||
|
@ -11,11 +11,12 @@ import testOpsGenie from 'api/channels/testOpsgenie';
|
|||||||
import testPagerApi from 'api/channels/testPager';
|
import testPagerApi from 'api/channels/testPager';
|
||||||
import testSlackApi from 'api/channels/testSlack';
|
import testSlackApi from 'api/channels/testSlack';
|
||||||
import testWebhookApi from 'api/channels/testWebhook';
|
import testWebhookApi from 'api/channels/testWebhook';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import FormAlertChannels from 'container/FormAlertChannels';
|
import FormAlertChannels from 'container/FormAlertChannels';
|
||||||
import { useNotifications } from 'hooks/useNotifications';
|
import { useNotifications } from 'hooks/useNotifications';
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { useCallback, useState } from 'react';
|
import { useCallback, useEffect, useState } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
import {
|
import {
|
||||||
@ -43,6 +44,10 @@ function CreateAlertChannels({
|
|||||||
|
|
||||||
const [formInstance] = Form.useForm();
|
const [formInstance] = Form.useForm();
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
logEvent('Alert Channel: Create channel page visited', {});
|
||||||
|
}, []);
|
||||||
|
|
||||||
const [selectedConfig, setSelectedConfig] = useState<
|
const [selectedConfig, setSelectedConfig] = useState<
|
||||||
Partial<
|
Partial<
|
||||||
SlackChannel &
|
SlackChannel &
|
||||||
@ -139,19 +144,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [prepareSlackRequest, t, notifications]);
|
}, [prepareSlackRequest, t, notifications]);
|
||||||
|
|
||||||
const prepareWebhookRequest = useCallback(() => {
|
const prepareWebhookRequest = useCallback(() => {
|
||||||
@ -200,19 +211,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [prepareWebhookRequest, t, notifications]);
|
}, [prepareWebhookRequest, t, notifications]);
|
||||||
|
|
||||||
const preparePagerRequest = useCallback(() => {
|
const preparePagerRequest = useCallback(() => {
|
||||||
@ -245,8 +262,8 @@ function CreateAlertChannels({
|
|||||||
setSavingState(true);
|
setSavingState(true);
|
||||||
const request = preparePagerRequest();
|
const request = preparePagerRequest();
|
||||||
|
|
||||||
if (request) {
|
|
||||||
try {
|
try {
|
||||||
|
if (request) {
|
||||||
const response = await createPagerApi(request);
|
const response = await createPagerApi(request);
|
||||||
|
|
||||||
if (response.statusCode === 200) {
|
if (response.statusCode === 200) {
|
||||||
@ -255,20 +272,31 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
} catch (e) {
|
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
}
|
} catch (error) {
|
||||||
|
notifications.error({
|
||||||
|
message: 'Error',
|
||||||
|
description: t('channel_creation_failed'),
|
||||||
|
});
|
||||||
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [t, notifications, preparePagerRequest]);
|
}, [t, notifications, preparePagerRequest]);
|
||||||
|
|
||||||
const prepareOpsgenieRequest = useCallback(
|
const prepareOpsgenieRequest = useCallback(
|
||||||
@ -295,19 +323,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [prepareOpsgenieRequest, t, notifications]);
|
}, [prepareOpsgenieRequest, t, notifications]);
|
||||||
|
|
||||||
const prepareEmailRequest = useCallback(
|
const prepareEmailRequest = useCallback(
|
||||||
@ -332,19 +366,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [prepareEmailRequest, t, notifications]);
|
}, [prepareEmailRequest, t, notifications]);
|
||||||
|
|
||||||
const prepareMsTeamsRequest = useCallback(
|
const prepareMsTeamsRequest = useCallback(
|
||||||
@ -370,19 +410,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_creation_done'),
|
description: t('channel_creation_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_creation_failed'),
|
description: response.error || t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_creation_failed'),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_creation_failed'),
|
description: t('channel_creation_failed'),
|
||||||
});
|
});
|
||||||
}
|
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||||
|
} finally {
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
}
|
||||||
}, [prepareMsTeamsRequest, t, notifications]);
|
}, [prepareMsTeamsRequest, t, notifications]);
|
||||||
|
|
||||||
const onSaveHandler = useCallback(
|
const onSaveHandler = useCallback(
|
||||||
@ -400,7 +446,15 @@ function CreateAlertChannels({
|
|||||||
const functionToCall = functionMapper[value as keyof typeof functionMapper];
|
const functionToCall = functionMapper[value as keyof typeof functionMapper];
|
||||||
|
|
||||||
if (functionToCall) {
|
if (functionToCall) {
|
||||||
functionToCall();
|
const result = await functionToCall();
|
||||||
|
logEvent('Alert Channel: Save channel', {
|
||||||
|
type: value,
|
||||||
|
sendResolvedAlert: selectedConfig.send_resolved,
|
||||||
|
name: selectedConfig.name,
|
||||||
|
new: 'true',
|
||||||
|
status: result?.status,
|
||||||
|
statusMessage: result?.statusMessage,
|
||||||
|
});
|
||||||
} else {
|
} else {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
@ -409,6 +463,7 @@ function CreateAlertChannels({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
[
|
[
|
||||||
onSlackHandler,
|
onSlackHandler,
|
||||||
onWebhookHandler,
|
onWebhookHandler,
|
||||||
@ -472,14 +527,25 @@ function CreateAlertChannels({
|
|||||||
description: t('channel_test_failed'),
|
description: t('channel_test_failed'),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
logEvent('Alert Channel: Test notification', {
|
||||||
|
type: channelType,
|
||||||
|
sendResolvedAlert: selectedConfig.send_resolved,
|
||||||
|
name: selectedConfig.name,
|
||||||
|
new: 'true',
|
||||||
|
status:
|
||||||
|
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
|
||||||
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('channel_test_unexpected'),
|
description: t('channel_test_unexpected'),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
setTestingState(false);
|
setTestingState(false);
|
||||||
},
|
},
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
[
|
[
|
||||||
prepareWebhookRequest,
|
prepareWebhookRequest,
|
||||||
t,
|
t,
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
import { Row, Typography } from 'antd';
|
import { Row, Typography } from 'antd';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
|
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||||
@ -34,6 +36,13 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
|
|||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
logEvent('Alert: Sample alert link clicked', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[option],
|
||||||
|
link: url,
|
||||||
|
page: 'New alert data source selection page',
|
||||||
|
});
|
||||||
|
|
||||||
window.open(url, '_blank');
|
window.open(url, '_blank');
|
||||||
}
|
}
|
||||||
const renderOptions = useMemo(
|
const renderOptions = useMemo(
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
import { Form, Row } from 'antd';
|
import { Form, Row } from 'antd';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||||
import { QueryParams } from 'constants/query';
|
import { QueryParams } from 'constants/query';
|
||||||
import FormAlertRules from 'container/FormAlertRules';
|
import FormAlertRules from 'container/FormAlertRules';
|
||||||
@ -68,6 +69,8 @@ function CreateRules(): JSX.Element {
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (alertType) {
|
if (alertType) {
|
||||||
onSelectType(alertType);
|
onSelectType(alertType);
|
||||||
|
} else {
|
||||||
|
logEvent('Alert: New alert data source selection page visited', {});
|
||||||
}
|
}
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [alertType]);
|
}, [alertType]);
|
||||||
|
@ -11,6 +11,7 @@ import testOpsgenie from 'api/channels/testOpsgenie';
|
|||||||
import testPagerApi from 'api/channels/testPager';
|
import testPagerApi from 'api/channels/testPager';
|
||||||
import testSlackApi from 'api/channels/testSlack';
|
import testSlackApi from 'api/channels/testSlack';
|
||||||
import testWebhookApi from 'api/channels/testWebhook';
|
import testWebhookApi from 'api/channels/testWebhook';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import {
|
import {
|
||||||
ChannelType,
|
ChannelType,
|
||||||
@ -89,7 +90,7 @@ function EditAlertChannels({
|
|||||||
description: t('webhook_url_required'),
|
description: t('webhook_url_required'),
|
||||||
});
|
});
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await editSlackApi(prepareSlackRequest());
|
const response = await editSlackApi(prepareSlackRequest());
|
||||||
@ -101,13 +102,17 @@ function EditAlertChannels({
|
|||||||
});
|
});
|
||||||
|
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_edit_failed'),
|
description: response.error || t('channel_edit_failed'),
|
||||||
});
|
});
|
||||||
}
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [prepareSlackRequest, t, notifications, selectedConfig]);
|
}, [prepareSlackRequest, t, notifications, selectedConfig]);
|
||||||
|
|
||||||
const prepareWebhookRequest = useCallback(() => {
|
const prepareWebhookRequest = useCallback(() => {
|
||||||
@ -136,13 +141,13 @@ function EditAlertChannels({
|
|||||||
if (selectedConfig?.api_url === '') {
|
if (selectedConfig?.api_url === '') {
|
||||||
showError(t('webhook_url_required'));
|
showError(t('webhook_url_required'));
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||||
}
|
}
|
||||||
|
|
||||||
if (username && (!password || password === '')) {
|
if (username && (!password || password === '')) {
|
||||||
showError(t('username_no_password'));
|
showError(t('username_no_password'));
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: t('username_no_password') };
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await editWebhookApi(prepareWebhookRequest());
|
const response = await editWebhookApi(prepareWebhookRequest());
|
||||||
@ -154,10 +159,15 @@ function EditAlertChannels({
|
|||||||
});
|
});
|
||||||
|
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
showError(response.error || t('channel_edit_failed'));
|
|
||||||
}
|
}
|
||||||
|
showError(response.error || t('channel_edit_failed'));
|
||||||
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [prepareWebhookRequest, t, notifications, selectedConfig]);
|
}, [prepareWebhookRequest, t, notifications, selectedConfig]);
|
||||||
|
|
||||||
const prepareEmailRequest = useCallback(
|
const prepareEmailRequest = useCallback(
|
||||||
@ -181,13 +191,18 @@ function EditAlertChannels({
|
|||||||
description: t('channel_edit_done'),
|
description: t('channel_edit_done'),
|
||||||
});
|
});
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_edit_failed'),
|
description: response.error || t('channel_edit_failed'),
|
||||||
});
|
});
|
||||||
}
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [prepareEmailRequest, t, notifications]);
|
}, [prepareEmailRequest, t, notifications]);
|
||||||
|
|
||||||
const preparePagerRequest = useCallback(
|
const preparePagerRequest = useCallback(
|
||||||
@ -218,7 +233,7 @@ function EditAlertChannels({
|
|||||||
description: validationError,
|
description: validationError,
|
||||||
});
|
});
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: validationError };
|
||||||
}
|
}
|
||||||
const response = await editPagerApi(preparePagerRequest());
|
const response = await editPagerApi(preparePagerRequest());
|
||||||
|
|
||||||
@ -229,13 +244,18 @@ function EditAlertChannels({
|
|||||||
});
|
});
|
||||||
|
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_edit_failed'),
|
description: response.error || t('channel_edit_failed'),
|
||||||
});
|
});
|
||||||
}
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [preparePagerRequest, notifications, selectedConfig, t]);
|
}, [preparePagerRequest, notifications, selectedConfig, t]);
|
||||||
|
|
||||||
const prepareOpsgenieRequest = useCallback(
|
const prepareOpsgenieRequest = useCallback(
|
||||||
@ -259,7 +279,7 @@ function EditAlertChannels({
|
|||||||
description: t('api_key_required'),
|
description: t('api_key_required'),
|
||||||
});
|
});
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: t('api_key_required') };
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await editOpsgenie(prepareOpsgenieRequest());
|
const response = await editOpsgenie(prepareOpsgenieRequest());
|
||||||
@ -271,13 +291,18 @@ function EditAlertChannels({
|
|||||||
});
|
});
|
||||||
|
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_edit_failed'),
|
description: response.error || t('channel_edit_failed'),
|
||||||
});
|
});
|
||||||
}
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [prepareOpsgenieRequest, t, notifications, selectedConfig]);
|
}, [prepareOpsgenieRequest, t, notifications, selectedConfig]);
|
||||||
|
|
||||||
const prepareMsTeamsRequest = useCallback(
|
const prepareMsTeamsRequest = useCallback(
|
||||||
@ -301,7 +326,7 @@ function EditAlertChannels({
|
|||||||
description: t('webhook_url_required'),
|
description: t('webhook_url_required'),
|
||||||
});
|
});
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
return;
|
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await editMsTeamsApi(prepareMsTeamsRequest());
|
const response = await editMsTeamsApi(prepareMsTeamsRequest());
|
||||||
@ -313,31 +338,46 @@ function EditAlertChannels({
|
|||||||
});
|
});
|
||||||
|
|
||||||
history.replace(ROUTES.ALL_CHANNELS);
|
history.replace(ROUTES.ALL_CHANNELS);
|
||||||
} else {
|
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||||
|
}
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('channel_edit_failed'),
|
description: response.error || t('channel_edit_failed'),
|
||||||
});
|
});
|
||||||
}
|
|
||||||
setSavingState(false);
|
setSavingState(false);
|
||||||
|
return {
|
||||||
|
status: 'failed',
|
||||||
|
statusMessage: response.error || t('channel_edit_failed'),
|
||||||
|
};
|
||||||
}, [prepareMsTeamsRequest, t, notifications, selectedConfig]);
|
}, [prepareMsTeamsRequest, t, notifications, selectedConfig]);
|
||||||
|
|
||||||
const onSaveHandler = useCallback(
|
const onSaveHandler = useCallback(
|
||||||
(value: ChannelType) => {
|
async (value: ChannelType) => {
|
||||||
|
let result;
|
||||||
if (value === ChannelType.Slack) {
|
if (value === ChannelType.Slack) {
|
||||||
onSlackEditHandler();
|
result = await onSlackEditHandler();
|
||||||
} else if (value === ChannelType.Webhook) {
|
} else if (value === ChannelType.Webhook) {
|
||||||
onWebhookEditHandler();
|
result = await onWebhookEditHandler();
|
||||||
} else if (value === ChannelType.Pagerduty) {
|
} else if (value === ChannelType.Pagerduty) {
|
||||||
onPagerEditHandler();
|
result = await onPagerEditHandler();
|
||||||
} else if (value === ChannelType.MsTeams) {
|
} else if (value === ChannelType.MsTeams) {
|
||||||
onMsTeamsEditHandler();
|
result = await onMsTeamsEditHandler();
|
||||||
} else if (value === ChannelType.Opsgenie) {
|
} else if (value === ChannelType.Opsgenie) {
|
||||||
onOpsgenieEditHandler();
|
result = await onOpsgenieEditHandler();
|
||||||
} else if (value === ChannelType.Email) {
|
} else if (value === ChannelType.Email) {
|
||||||
onEmailEditHandler();
|
result = await onEmailEditHandler();
|
||||||
}
|
}
|
||||||
|
logEvent('Alert Channel: Save channel', {
|
||||||
|
type: value,
|
||||||
|
sendResolvedAlert: selectedConfig.send_resolved,
|
||||||
|
name: selectedConfig.name,
|
||||||
|
new: 'false',
|
||||||
|
status: result?.status,
|
||||||
|
statusMessage: result?.statusMessage,
|
||||||
|
});
|
||||||
},
|
},
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
[
|
[
|
||||||
onSlackEditHandler,
|
onSlackEditHandler,
|
||||||
onWebhookEditHandler,
|
onWebhookEditHandler,
|
||||||
@ -399,6 +439,14 @@ function EditAlertChannels({
|
|||||||
description: t('channel_test_failed'),
|
description: t('channel_test_failed'),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
logEvent('Alert Channel: Test notification', {
|
||||||
|
type: channelType,
|
||||||
|
sendResolvedAlert: selectedConfig.send_resolved,
|
||||||
|
name: selectedConfig.name,
|
||||||
|
new: 'false',
|
||||||
|
status:
|
||||||
|
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
|
||||||
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
@ -407,6 +455,7 @@ function EditAlertChannels({
|
|||||||
}
|
}
|
||||||
setTestingState(false);
|
setTestingState(false);
|
||||||
},
|
},
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
[
|
[
|
||||||
t,
|
t,
|
||||||
prepareWebhookRequest,
|
prepareWebhookRequest,
|
||||||
|
@ -3,6 +3,8 @@ import './FormAlertRules.styles.scss';
|
|||||||
import { PlusOutlined } from '@ant-design/icons';
|
import { PlusOutlined } from '@ant-design/icons';
|
||||||
import { Button, Form, Select, Switch, Tooltip } from 'antd';
|
import { Button, Form, Select, Switch, Tooltip } from 'antd';
|
||||||
import getChannels from 'api/channels/getAll';
|
import getChannels from 'api/channels/getAll';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
|
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import useComponentPermission from 'hooks/useComponentPermission';
|
import useComponentPermission from 'hooks/useComponentPermission';
|
||||||
import useFetch from 'hooks/useFetch';
|
import useFetch from 'hooks/useFetch';
|
||||||
@ -10,6 +12,7 @@ import { useCallback, useEffect, useState } from 'react';
|
|||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { useSelector } from 'react-redux';
|
import { useSelector } from 'react-redux';
|
||||||
import { AppState } from 'store/reducers';
|
import { AppState } from 'store/reducers';
|
||||||
|
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||||
import { AlertDef, Labels } from 'types/api/alerts/def';
|
import { AlertDef, Labels } from 'types/api/alerts/def';
|
||||||
import AppReducer from 'types/reducer/app';
|
import AppReducer from 'types/reducer/app';
|
||||||
import { requireErrorMessage } from 'utils/form/requireErrorMessage';
|
import { requireErrorMessage } from 'utils/form/requireErrorMessage';
|
||||||
@ -73,9 +76,24 @@ function BasicInfo({
|
|||||||
|
|
||||||
const noChannels = channels.payload?.length === 0;
|
const noChannels = channels.payload?.length === 0;
|
||||||
const handleCreateNewChannels = useCallback(() => {
|
const handleCreateNewChannels = useCallback(() => {
|
||||||
|
logEvent('Alert: Create notification channel button clicked', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||||
|
ruleId: isNewRule ? 0 : alertDef?.id,
|
||||||
|
});
|
||||||
window.open(ROUTES.CHANNELS_NEW, '_blank');
|
window.open(ROUTES.CHANNELS_NEW, '_blank');
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!channels.loading && isNewRule) {
|
||||||
|
logEvent('Alert: New alert creation page visited', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||||
|
numberOfChannels: channels.payload?.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
|
}, [channels.payload, channels.loading]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<StepHeading> {t('alert_form_step3')} </StepHeading>
|
<StepHeading> {t('alert_form_step3')} </StepHeading>
|
||||||
|
@ -2,6 +2,7 @@ import './QuerySection.styles.scss';
|
|||||||
|
|
||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import { Button, Tabs, Tooltip } from 'antd';
|
import { Button, Tabs, Tooltip } from 'antd';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import PromQLIcon from 'assets/Dashboard/PromQl';
|
import PromQLIcon from 'assets/Dashboard/PromQl';
|
||||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||||
@ -31,6 +32,7 @@ function QuerySection({
|
|||||||
runQuery,
|
runQuery,
|
||||||
alertDef,
|
alertDef,
|
||||||
panelType,
|
panelType,
|
||||||
|
ruleId,
|
||||||
}: QuerySectionProps): JSX.Element {
|
}: QuerySectionProps): JSX.Element {
|
||||||
// init namespace for translations
|
// init namespace for translations
|
||||||
const { t } = useTranslation('alerts');
|
const { t } = useTranslation('alerts');
|
||||||
@ -158,7 +160,15 @@ function QuerySection({
|
|||||||
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
|
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
|
||||||
<Button
|
<Button
|
||||||
type="primary"
|
type="primary"
|
||||||
onClick={runQuery}
|
onClick={(): void => {
|
||||||
|
runQuery();
|
||||||
|
logEvent('Alert: Stage and run query', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
|
||||||
|
isNewRule: !ruleId || ruleId === 0,
|
||||||
|
ruleId,
|
||||||
|
queryType: queryCategory,
|
||||||
|
});
|
||||||
|
}}
|
||||||
className="stage-run-query"
|
className="stage-run-query"
|
||||||
icon={<Play size={14} />}
|
icon={<Play size={14} />}
|
||||||
>
|
>
|
||||||
@ -228,6 +238,7 @@ interface QuerySectionProps {
|
|||||||
runQuery: VoidFunction;
|
runQuery: VoidFunction;
|
||||||
alertDef: AlertDef;
|
alertDef: AlertDef;
|
||||||
panelType: PANEL_TYPES;
|
panelType: PANEL_TYPES;
|
||||||
|
ruleId: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export default QuerySection;
|
export default QuerySection;
|
||||||
|
@ -12,8 +12,10 @@ import {
|
|||||||
} from 'antd';
|
} from 'antd';
|
||||||
import saveAlertApi from 'api/alerts/save';
|
import saveAlertApi from 'api/alerts/save';
|
||||||
import testAlertApi from 'api/alerts/testAlert';
|
import testAlertApi from 'api/alerts/testAlert';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
||||||
import { alertHelpMessage } from 'components/facingIssueBtn/util';
|
import { alertHelpMessage } from 'components/facingIssueBtn/util';
|
||||||
|
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||||
import { FeatureKeys } from 'constants/features';
|
import { FeatureKeys } from 'constants/features';
|
||||||
import { QueryParams } from 'constants/query';
|
import { QueryParams } from 'constants/query';
|
||||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
@ -338,8 +340,13 @@ function FormAlertRules({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const postableAlert = memoizedPreparePostData();
|
const postableAlert = memoizedPreparePostData();
|
||||||
|
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
|
|
||||||
|
let logData = {
|
||||||
|
status: 'error',
|
||||||
|
statusMessage: t('unexpected_error'),
|
||||||
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const apiReq =
|
const apiReq =
|
||||||
ruleId && ruleId > 0
|
ruleId && ruleId > 0
|
||||||
@ -349,10 +356,15 @@ function FormAlertRules({
|
|||||||
const response = await saveAlertApi(apiReq);
|
const response = await saveAlertApi(apiReq);
|
||||||
|
|
||||||
if (response.statusCode === 200) {
|
if (response.statusCode === 200) {
|
||||||
|
logData = {
|
||||||
|
status: 'success',
|
||||||
|
statusMessage:
|
||||||
|
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
|
||||||
|
};
|
||||||
|
|
||||||
notifications.success({
|
notifications.success({
|
||||||
message: 'Success',
|
message: 'Success',
|
||||||
description:
|
description: logData.statusMessage,
|
||||||
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// invalidate rule in cache
|
// invalidate rule in cache
|
||||||
@ -367,18 +379,42 @@ function FormAlertRules({
|
|||||||
history.replace(`${ROUTES.LIST_ALL_ALERT}?${urlQuery.toString()}`);
|
history.replace(`${ROUTES.LIST_ALL_ALERT}?${urlQuery.toString()}`);
|
||||||
}, 2000);
|
}, 2000);
|
||||||
} else {
|
} else {
|
||||||
|
logData = {
|
||||||
|
status: 'error',
|
||||||
|
statusMessage: response.error || t('unexpected_error'),
|
||||||
|
};
|
||||||
|
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('unexpected_error'),
|
description: logData.statusMessage,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
|
logData = {
|
||||||
|
status: 'error',
|
||||||
|
statusMessage: t('unexpected_error'),
|
||||||
|
};
|
||||||
|
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('unexpected_error'),
|
description: logData.statusMessage,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
|
|
||||||
|
logEvent('Alert: Save alert', {
|
||||||
|
...logData,
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[postableAlert?.alertType as AlertTypes],
|
||||||
|
channelNames: postableAlert?.preferredChannels,
|
||||||
|
broadcastToAll: postableAlert?.broadcastToAll,
|
||||||
|
isNewRule: !ruleId || ruleId === 0,
|
||||||
|
ruleId,
|
||||||
|
queryType: currentQuery.queryType,
|
||||||
|
alertId: postableAlert?.id,
|
||||||
|
alertName: postableAlert?.alert,
|
||||||
|
});
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [
|
}, [
|
||||||
isFormValid,
|
isFormValid,
|
||||||
memoizedPreparePostData,
|
memoizedPreparePostData,
|
||||||
@ -414,6 +450,7 @@ function FormAlertRules({
|
|||||||
}
|
}
|
||||||
const postableAlert = memoizedPreparePostData();
|
const postableAlert = memoizedPreparePostData();
|
||||||
|
|
||||||
|
let statusResponse = { status: 'failed', message: '' };
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
try {
|
try {
|
||||||
const response = await testAlertApi({ data: postableAlert });
|
const response = await testAlertApi({ data: postableAlert });
|
||||||
@ -425,25 +462,43 @@ function FormAlertRules({
|
|||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('no_alerts_found'),
|
description: t('no_alerts_found'),
|
||||||
});
|
});
|
||||||
|
statusResponse = { status: 'failed', message: t('no_alerts_found') };
|
||||||
} else {
|
} else {
|
||||||
notifications.success({
|
notifications.success({
|
||||||
message: 'Success',
|
message: 'Success',
|
||||||
description: t('rule_test_fired'),
|
description: t('rule_test_fired'),
|
||||||
});
|
});
|
||||||
|
statusResponse = { status: 'success', message: t('rule_test_fired') };
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: response.error || t('unexpected_error'),
|
description: response.error || t('unexpected_error'),
|
||||||
});
|
});
|
||||||
|
statusResponse = {
|
||||||
|
status: 'failed',
|
||||||
|
message: response.error || t('unexpected_error'),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
message: 'Error',
|
message: 'Error',
|
||||||
description: t('unexpected_error'),
|
description: t('unexpected_error'),
|
||||||
});
|
});
|
||||||
|
statusResponse = { status: 'failed', message: t('unexpected_error') };
|
||||||
}
|
}
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
|
logEvent('Alert: Test notification', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||||
|
channelNames: postableAlert?.preferredChannels,
|
||||||
|
broadcastToAll: postableAlert?.broadcastToAll,
|
||||||
|
isNewRule: !ruleId || ruleId === 0,
|
||||||
|
ruleId,
|
||||||
|
queryType: currentQuery.queryType,
|
||||||
|
status: statusResponse.status,
|
||||||
|
statusMessage: statusResponse.message,
|
||||||
|
});
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [t, isFormValid, memoizedPreparePostData, notifications]);
|
}, [t, isFormValid, memoizedPreparePostData, notifications]);
|
||||||
|
|
||||||
const renderBasicInfo = (): JSX.Element => (
|
const renderBasicInfo = (): JSX.Element => (
|
||||||
@ -513,6 +568,16 @@ function FormAlertRules({
|
|||||||
|
|
||||||
const isRuleCreated = !ruleId || ruleId === 0;
|
const isRuleCreated = !ruleId || ruleId === 0;
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!isRuleCreated) {
|
||||||
|
logEvent('Alert: Edit page visited', {
|
||||||
|
ruleId,
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertType as AlertTypes],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
|
}, []);
|
||||||
|
|
||||||
function handleRedirection(option: AlertTypes): void {
|
function handleRedirection(option: AlertTypes): void {
|
||||||
let url = '';
|
let url = '';
|
||||||
switch (option) {
|
switch (option) {
|
||||||
@ -535,6 +600,13 @@ function FormAlertRules({
|
|||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
logEvent('Alert: Check example alert clicked', {
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||||
|
isNewRule: !ruleId || ruleId === 0,
|
||||||
|
ruleId,
|
||||||
|
queryType: currentQuery.queryType,
|
||||||
|
link: url,
|
||||||
|
});
|
||||||
window.open(url, '_blank');
|
window.open(url, '_blank');
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -572,6 +644,7 @@ function FormAlertRules({
|
|||||||
alertDef={alertDef}
|
alertDef={alertDef}
|
||||||
panelType={panelType || PANEL_TYPES.TIME_SERIES}
|
panelType={panelType || PANEL_TYPES.TIME_SERIES}
|
||||||
key={currentQuery.queryType}
|
key={currentQuery.queryType}
|
||||||
|
ruleId={ruleId}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<RuleOptions
|
<RuleOptions
|
||||||
|
@ -80,6 +80,8 @@ function FullView({
|
|||||||
query: updatedQuery,
|
query: updatedQuery,
|
||||||
globalSelectedInterval: globalSelectedTime,
|
globalSelectedInterval: globalSelectedTime,
|
||||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||||
|
fillGaps: widget.fillSpans,
|
||||||
|
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||||
|
@ -109,6 +109,7 @@ function GridCardGraph({
|
|||||||
globalSelectedInterval,
|
globalSelectedInterval,
|
||||||
variables: getDashboardVariables(variables),
|
variables: getDashboardVariables(variables),
|
||||||
fillGaps: widget.fillSpans,
|
fillGaps: widget.fillSpans,
|
||||||
|
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||||
|
frontend/src/container/GridTableComponent/__tests__/response.ts (new file, 215 lines)
@ -0,0 +1,215 @@
|
|||||||
|
export const tableDataMultipleQueriesSuccessResponse = {
|
||||||
|
columns: [
|
||||||
|
{
|
||||||
|
name: 'service_name',
|
||||||
|
queryName: '',
|
||||||
|
isValueColumn: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'A',
|
||||||
|
queryName: 'A',
|
||||||
|
isValueColumn: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'B',
|
||||||
|
queryName: 'B',
|
||||||
|
isValueColumn: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
rows: [
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 4196.71,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'demo-app',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 500.83,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'customer',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 499.5,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'mysql',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 293.22,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'frontend',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 230.03,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'driver',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 67.09,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'route',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 30.96,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'redis',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
A: 'n/a',
|
||||||
|
B: 112.27,
|
||||||
|
service_name: 'n/a',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
export const widgetQueryWithLegend = {
|
||||||
|
clickhouse_sql: [
|
||||||
|
{
|
||||||
|
name: 'A',
|
||||||
|
legend: '',
|
||||||
|
disabled: false,
|
||||||
|
query: '',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
promql: [
|
||||||
|
{
|
||||||
|
name: 'A',
|
||||||
|
query: '',
|
||||||
|
legend: '',
|
||||||
|
disabled: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
builder: {
|
||||||
|
queryData: [
|
||||||
|
{
|
||||||
|
dataSource: 'metrics',
|
||||||
|
queryName: 'A',
|
||||||
|
aggregateOperator: 'count',
|
||||||
|
aggregateAttribute: {
|
||||||
|
dataType: 'float64',
|
||||||
|
id: 'signoz_latency--float64--ExponentialHistogram--true',
|
||||||
|
isColumn: true,
|
||||||
|
isJSON: false,
|
||||||
|
key: 'signoz_latency',
|
||||||
|
type: 'ExponentialHistogram',
|
||||||
|
},
|
||||||
|
timeAggregation: '',
|
||||||
|
spaceAggregation: 'p90',
|
||||||
|
functions: [],
|
||||||
|
filters: {
|
||||||
|
items: [],
|
||||||
|
op: 'AND',
|
||||||
|
},
|
||||||
|
expression: 'A',
|
||||||
|
disabled: false,
|
||||||
|
stepInterval: 60,
|
||||||
|
having: [],
|
||||||
|
limit: null,
|
||||||
|
orderBy: [],
|
||||||
|
groupBy: [
|
||||||
|
{
|
||||||
|
dataType: 'string',
|
||||||
|
isColumn: false,
|
||||||
|
isJSON: false,
|
||||||
|
key: 'service_name',
|
||||||
|
type: 'tag',
|
||||||
|
id: 'service_name--string--tag--false',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
legend: 'p99',
|
||||||
|
reduceTo: 'avg',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
dataSource: 'metrics',
|
||||||
|
queryName: 'B',
|
||||||
|
aggregateOperator: 'rate',
|
||||||
|
aggregateAttribute: {
|
||||||
|
dataType: 'float64',
|
||||||
|
id: 'system_disk_operations--float64--Sum--true',
|
||||||
|
isColumn: true,
|
||||||
|
isJSON: false,
|
||||||
|
key: 'system_disk_operations',
|
||||||
|
type: 'Sum',
|
||||||
|
},
|
||||||
|
timeAggregation: 'rate',
|
||||||
|
spaceAggregation: 'sum',
|
||||||
|
functions: [],
|
||||||
|
filters: {
|
||||||
|
items: [],
|
||||||
|
op: 'AND',
|
||||||
|
},
|
||||||
|
expression: 'B',
|
||||||
|
disabled: false,
|
||||||
|
stepInterval: 60,
|
||||||
|
having: [],
|
||||||
|
limit: null,
|
||||||
|
orderBy: [],
|
||||||
|
groupBy: [],
|
||||||
|
legend: '',
|
||||||
|
reduceTo: 'avg',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
queryFormulas: [],
|
||||||
|
},
|
||||||
|
id: '48ad5a67-9a3c-49d4-a886-d7a34f8b875d',
|
||||||
|
queryType: 'builder',
|
||||||
|
};
|
||||||
|
|
||||||
|
export const expectedOutputWithLegends = {
|
||||||
|
dataSource: [
|
||||||
|
{
|
||||||
|
A: 4196.71,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'demo-app',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 500.83,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'customer',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 499.5,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'mysql',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 293.22,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'frontend',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 230.03,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'driver',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 67.09,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'route',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 30.96,
|
||||||
|
B: 'n/a',
|
||||||
|
service_name: 'redis',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
A: 'n/a',
|
||||||
|
B: 112.27,
|
||||||
|
service_name: 'n/a',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
@ -0,0 +1,42 @@
|
|||||||
|
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
|
|
||||||
|
import { createColumnsAndDataSource, getQueryLegend } from '../utils';
|
||||||
|
import {
|
||||||
|
expectedOutputWithLegends,
|
||||||
|
tableDataMultipleQueriesSuccessResponse,
|
||||||
|
widgetQueryWithLegend,
|
||||||
|
} from './response';
|
||||||
|
|
||||||
|
describe('Table Panel utils', () => {
|
||||||
|
it('createColumnsAndDataSource function', () => {
|
||||||
|
const data = tableDataMultipleQueriesSuccessResponse;
|
||||||
|
const query = widgetQueryWithLegend as Query;
|
||||||
|
|
||||||
|
const { columns, dataSource } = createColumnsAndDataSource(data, query);
|
||||||
|
|
||||||
|
expect(dataSource).toStrictEqual(expectedOutputWithLegends.dataSource);
|
||||||
|
|
||||||
|
// this makes sure that the columns are rendered in the same order as response
|
||||||
|
expect(columns[0].title).toBe('service_name');
|
||||||
|
// the next specifically makes sure that the legends are properly applied in multiple queries
|
||||||
|
expect(columns[1].title).toBe('p99');
|
||||||
|
// this makes sure that the query without a legend takes the title from the query response
|
||||||
|
expect(columns[2].title).toBe('B');
|
||||||
|
|
||||||
|
// this is to ensure that the rows properly map to the column data indexes as the dataIndex should be equal to the name of the columns
|
||||||
|
// returned in the response as the rows will be mapped with them
|
||||||
|
expect((columns[0] as any).dataIndex).toBe('service_name');
|
||||||
|
expect((columns[1] as any).dataIndex).toBe('A');
|
||||||
|
expect((columns[2] as any).dataIndex).toBe('B');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getQueryLegend function', () => {
|
||||||
|
const query = widgetQueryWithLegend as Query;
|
||||||
|
|
||||||
|
// query A has a legend of p99
|
||||||
|
expect(getQueryLegend(query, 'A')).toBe('p99');
|
||||||
|
|
||||||
|
// should return undefined when legend not present
|
||||||
|
expect(getQueryLegend(query, 'B')).toBe(undefined);
|
||||||
|
});
|
||||||
|
});
|
@ -3,10 +3,7 @@ import { Space, Tooltip } from 'antd';
|
|||||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||||
import { Events } from 'constants/events';
|
import { Events } from 'constants/events';
|
||||||
import { QueryTable } from 'container/QueryTable';
|
import { QueryTable } from 'container/QueryTable';
|
||||||
import {
|
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||||
createTableColumnsFromQuery,
|
|
||||||
RowData,
|
|
||||||
} from 'lib/query/createTableColumnsFromQuery';
|
|
||||||
import { cloneDeep, get, isEmpty, set } from 'lodash-es';
|
import { cloneDeep, get, isEmpty, set } from 'lodash-es';
|
||||||
import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
|
import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
@ -14,7 +11,11 @@ import { eventEmitter } from 'utils/getEventEmitter';
|
|||||||
|
|
||||||
import { WrapperStyled } from './styles';
|
import { WrapperStyled } from './styles';
|
||||||
import { GridTableComponentProps } from './types';
|
import { GridTableComponentProps } from './types';
|
||||||
import { findMatchingThreshold } from './utils';
|
import {
|
||||||
|
createColumnsAndDataSource,
|
||||||
|
findMatchingThreshold,
|
||||||
|
TableData,
|
||||||
|
} from './utils';
|
||||||
|
|
||||||
function GridTableComponent({
|
function GridTableComponent({
|
||||||
data,
|
data,
|
||||||
@ -25,28 +26,26 @@ function GridTableComponent({
|
|||||||
...props
|
...props
|
||||||
}: GridTableComponentProps): JSX.Element {
|
}: GridTableComponentProps): JSX.Element {
|
||||||
const { t } = useTranslation(['valueGraph']);
|
const { t } = useTranslation(['valueGraph']);
|
||||||
|
|
||||||
|
// create columns and dataSource in the ui friendly structure
|
||||||
|
// use the query from the widget here to extract the legend information
|
||||||
const { columns, dataSource: originalDataSource } = useMemo(
|
const { columns, dataSource: originalDataSource } = useMemo(
|
||||||
() =>
|
() => createColumnsAndDataSource((data as unknown) as TableData, query),
|
||||||
createTableColumnsFromQuery({
|
[query, data],
|
||||||
query,
|
|
||||||
queryTableData: data,
|
|
||||||
}),
|
|
||||||
[data, query],
|
|
||||||
);
|
);
|
||||||
|
|
||||||
const createDataInCorrectFormat = useCallback(
|
const createDataInCorrectFormat = useCallback(
|
||||||
(dataSource: RowData[]): RowData[] =>
|
(dataSource: RowData[]): RowData[] =>
|
||||||
dataSource.map((d) => {
|
dataSource.map((d) => {
|
||||||
const finalObject = {};
|
const finalObject = {};
|
||||||
const keys = Object.keys(d);
|
|
||||||
keys.forEach((k) => {
|
// we use the order of the columns here to have similar download as the user view
|
||||||
const label = get(
|
columns.forEach((k) => {
|
||||||
columns.find((c) => get(c, 'dataIndex', '') === k) || {},
|
set(
|
||||||
'title',
|
finalObject,
|
||||||
'',
|
get(k, 'title', '') as string,
|
||||||
|
get(d, get(k, 'dataIndex', ''), 'n/a'),
|
||||||
);
|
);
|
||||||
if (label) {
|
|
||||||
set(finalObject, label as string, d[k]);
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
return finalObject as RowData;
|
return finalObject as RowData;
|
||||||
}),
|
}),
|
||||||
@ -65,7 +64,11 @@ function GridTableComponent({
|
|||||||
const newValue = { ...val };
|
const newValue = { ...val };
|
||||||
Object.keys(val).forEach((k) => {
|
Object.keys(val).forEach((k) => {
|
||||||
if (columnUnits[k]) {
|
if (columnUnits[k]) {
|
||||||
newValue[k] = getYAxisFormattedValue(String(val[k]), columnUnits[k]);
|
// the check below takes care of not adding units for rows that have n/a values
|
||||||
|
newValue[k] =
|
||||||
|
val[k] !== 'n/a'
|
||||||
|
? getYAxisFormattedValue(String(val[k]), columnUnits[k])
|
||||||
|
: val[k];
|
||||||
newValue[`${k}_without_unit`] = val[k];
|
newValue[`${k}_without_unit`] = val[k];
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -1,4 +1,11 @@
|
|||||||
|
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||||
|
import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config';
|
||||||
|
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||||
|
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||||
|
import { isEmpty, isNaN } from 'lodash-es';
|
||||||
|
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
|
import { EQueryType } from 'types/common/dashboard';
|
||||||
|
|
||||||
// Helper function to evaluate the condition based on the operator
|
// Helper function to evaluate the condition based on the operator
|
||||||
function evaluateCondition(
|
function evaluateCondition(
|
||||||
@ -56,3 +63,85 @@ export function findMatchingThreshold(
|
|||||||
hasMultipleMatches,
|
hasMultipleMatches,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface TableData {
|
||||||
|
columns: { name: string; queryName: string; isValueColumn: boolean }[];
|
||||||
|
rows: { data: any }[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getQueryLegend(
|
||||||
|
currentQuery: Query,
|
||||||
|
queryName: string,
|
||||||
|
): string | undefined {
|
||||||
|
let legend: string | undefined;
|
||||||
|
switch (currentQuery.queryType) {
|
||||||
|
case EQueryType.QUERY_BUILDER:
|
||||||
|
// check if the value is present in the queries
|
||||||
|
legend = currentQuery.builder.queryData.find(
|
||||||
|
(query) => query.queryName === queryName,
|
||||||
|
)?.legend;
|
||||||
|
|
||||||
|
if (!legend) {
|
||||||
|
// check if the value is present in the formula
|
||||||
|
legend = currentQuery.builder.queryFormulas.find(
|
||||||
|
(query) => query.queryName === queryName,
|
||||||
|
)?.legend;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case EQueryType.CLICKHOUSE:
|
||||||
|
legend = currentQuery.clickhouse_sql.find(
|
||||||
|
(query) => query.name === queryName,
|
||||||
|
)?.legend;
|
||||||
|
break;
|
||||||
|
case EQueryType.PROM:
|
||||||
|
legend = currentQuery.promql.find((query) => query.name === queryName)
|
||||||
|
?.legend;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
legend = undefined;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return legend;
|
||||||
|
}
|
||||||
|
|
||||||
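A quick usage sketch, not part of the diff: the lookup is keyed by query name across builder, ClickHouse and PromQL queries. The cast and fixture below are the ones the new test file uses; the relative import paths are assumed.

```typescript
import { Query } from 'types/api/queryBuilder/queryBuilderData';

import { widgetQueryWithLegend } from './__tests__/response'; // path assumed
import { getQueryLegend } from './utils'; // path assumed

// Query A defines a legend ('p99'); query B does not, so callers fall back to
// the raw column name, which is exactly what createColumnsAndDataSource does.
const legendA = getQueryLegend(widgetQueryWithLegend as Query, 'A'); // 'p99'
const legendB = getQueryLegend(widgetQueryWithLegend as Query, 'B'); // undefined
```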
|
export function createColumnsAndDataSource(
|
||||||
|
data: TableData,
|
||||||
|
currentQuery: Query,
|
||||||
|
renderColumnCell?: QueryTableProps['renderColumnCell'],
|
||||||
|
): { columns: ColumnsType<RowData>; dataSource: RowData[] } {
|
||||||
|
const columns: ColumnsType<RowData> =
|
||||||
|
data.columns?.reduce<ColumnsType<RowData>>((acc, item) => {
|
||||||
|
// if the column is the value column then we need to check for the available legend
|
||||||
|
const legend = item.isValueColumn
|
||||||
|
? getQueryLegend(currentQuery, item.queryName)
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
const column: ColumnType<RowData> = {
|
||||||
|
dataIndex: item.name,
|
||||||
|
// if no legend present then rely on the column name value
|
||||||
|
title: !isEmpty(legend) ? legend : item.name,
|
||||||
|
width: QUERY_TABLE_CONFIG.width,
|
||||||
|
render: renderColumnCell && renderColumnCell[item.name],
|
||||||
|
sorter: (a: RowData, b: RowData): number => {
|
||||||
|
const valueA = Number(a[`${item.name}_without_unit`] ?? a[item.name]);
|
||||||
|
const valueB = Number(b[`${item.name}_without_unit`] ?? b[item.name]);
|
||||||
|
|
||||||
|
if (!isNaN(valueA) && !isNaN(valueB)) {
|
||||||
|
return valueA - valueB;
|
||||||
|
}
|
||||||
|
|
||||||
|
return ((a[item.name] as string) || '').localeCompare(
|
||||||
|
(b[item.name] as string) || '',
|
||||||
|
);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return [...acc, column];
|
||||||
|
}, []) || [];
|
||||||
|
|
||||||
|
// the rows returned have data encapsulation hence removing the same here
|
||||||
|
const dataSource = data.rows?.map((d) => d.data) || [];
|
||||||
|
|
||||||
|
return { columns, dataSource };
|
||||||
|
}
|
||||||
|
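For context, a minimal sketch of calling the new helper directly with the fixtures introduced above; the relative import paths and the `TableData` cast are assumptions, while the expected column titles come from the test file.

```typescript
import { Query } from 'types/api/queryBuilder/queryBuilderData';

import {
	tableDataMultipleQueriesSuccessResponse,
	widgetQueryWithLegend,
} from './__tests__/response'; // paths assumed
import { createColumnsAndDataSource, TableData } from './utils';

// Columns keep the response order; the value column of query A is titled by its
// legend ('p99'), query B falls back to its own name, and dataSource is the
// unwrapped rows[].data array ready for the antd table.
const { columns, dataSource } = createColumnsAndDataSource(
	tableDataMultipleQueriesSuccessResponse as TableData,
	widgetQueryWithLegend as Query,
);
```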
@ -7,17 +7,20 @@ interface AlertInfoCardProps {
|
|||||||
header: string;
|
header: string;
|
||||||
subheader: string;
|
subheader: string;
|
||||||
link: string;
|
link: string;
|
||||||
|
onClick: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
function AlertInfoCard({
|
function AlertInfoCard({
|
||||||
header,
|
header,
|
||||||
subheader,
|
subheader,
|
||||||
link,
|
link,
|
||||||
|
onClick,
|
||||||
}: AlertInfoCardProps): JSX.Element {
|
}: AlertInfoCardProps): JSX.Element {
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className="alert-info-card"
|
className="alert-info-card"
|
||||||
onClick={(): void => {
|
onClick={(): void => {
|
||||||
|
onClick();
|
||||||
window.open(link, '_blank');
|
window.open(link, '_blank');
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
|
@ -2,6 +2,7 @@ import './AlertsEmptyState.styles.scss';
|
|||||||
|
|
||||||
import { PlusOutlined } from '@ant-design/icons';
|
import { PlusOutlined } from '@ant-design/icons';
|
||||||
import { Button, Divider, Typography } from 'antd';
|
import { Button, Divider, Typography } from 'antd';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import useComponentPermission from 'hooks/useComponentPermission';
|
import useComponentPermission from 'hooks/useComponentPermission';
|
||||||
import { useNotifications } from 'hooks/useNotifications';
|
import { useNotifications } from 'hooks/useNotifications';
|
||||||
@ -10,12 +11,26 @@ import { useCallback, useState } from 'react';
|
|||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { useSelector } from 'react-redux';
|
import { useSelector } from 'react-redux';
|
||||||
import { AppState } from 'store/reducers';
|
import { AppState } from 'store/reducers';
|
||||||
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
import AppReducer from 'types/reducer/app';
|
import AppReducer from 'types/reducer/app';
|
||||||
|
|
||||||
import AlertInfoCard from './AlertInfoCard';
|
import AlertInfoCard from './AlertInfoCard';
|
||||||
import { ALERT_CARDS, ALERT_INFO_LINKS } from './alertLinks';
|
import { ALERT_CARDS, ALERT_INFO_LINKS } from './alertLinks';
|
||||||
import InfoLinkText from './InfoLinkText';
|
import InfoLinkText from './InfoLinkText';
|
||||||
|
|
||||||
|
const alertLogEvents = (
|
||||||
|
title: string,
|
||||||
|
link: string,
|
||||||
|
dataSource?: DataSource,
|
||||||
|
): void => {
|
||||||
|
const attributes = {
|
||||||
|
link,
|
||||||
|
page: 'Alert empty state page',
|
||||||
|
};
|
||||||
|
|
||||||
|
logEvent(title, dataSource ? { ...attributes, dataSource } : attributes);
|
||||||
|
};
|
||||||
|
|
||||||
export function AlertsEmptyState(): JSX.Element {
|
export function AlertsEmptyState(): JSX.Element {
|
||||||
const { t } = useTranslation('common');
|
const { t } = useTranslation('common');
|
||||||
const { role, featureResponse } = useSelector<AppState, AppReducer>(
|
const { role, featureResponse } = useSelector<AppState, AppReducer>(
|
||||||
@ -91,18 +106,33 @@ export function AlertsEmptyState(): JSX.Element {
|
|||||||
link="https://youtu.be/xjxNIqiv4_M"
|
link="https://youtu.be/xjxNIqiv4_M"
|
||||||
leftIconVisible
|
leftIconVisible
|
||||||
rightIconVisible
|
rightIconVisible
|
||||||
|
onClick={(): void =>
|
||||||
|
alertLogEvents(
|
||||||
|
'Alert: Video tutorial link clicked',
|
||||||
|
'https://youtu.be/xjxNIqiv4_M',
|
||||||
|
)
|
||||||
|
}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{ALERT_INFO_LINKS.map((info) => (
|
{ALERT_INFO_LINKS.map((info) => {
|
||||||
|
const logEventTriggered = (): void =>
|
||||||
|
alertLogEvents(
|
||||||
|
'Alert: Tutorial doc link clicked',
|
||||||
|
info.link,
|
||||||
|
info.dataSource,
|
||||||
|
);
|
||||||
|
return (
|
||||||
<InfoLinkText
|
<InfoLinkText
|
||||||
key={info.link}
|
key={info.link}
|
||||||
infoText={info.infoText}
|
infoText={info.infoText}
|
||||||
link={info.link}
|
link={info.link}
|
||||||
leftIconVisible={info.leftIconVisible}
|
leftIconVisible={info.leftIconVisible}
|
||||||
rightIconVisible={info.rightIconVisible}
|
rightIconVisible={info.rightIconVisible}
|
||||||
|
onClick={logEventTriggered}
|
||||||
/>
|
/>
|
||||||
))}
|
);
|
||||||
|
})}
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
<div className="get-started-text">
|
<div className="get-started-text">
|
||||||
@ -113,14 +143,23 @@ export function AlertsEmptyState(): JSX.Element {
|
|||||||
</Divider>
|
</Divider>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{ALERT_CARDS.map((card) => (
|
{ALERT_CARDS.map((card) => {
|
||||||
|
const logEventTriggered = (): void =>
|
||||||
|
alertLogEvents(
|
||||||
|
'Alert: Sample alert link clicked',
|
||||||
|
card.link,
|
||||||
|
card.dataSource,
|
||||||
|
);
|
||||||
|
return (
|
||||||
<AlertInfoCard
|
<AlertInfoCard
|
||||||
key={card.link}
|
key={card.link}
|
||||||
header={card.header}
|
header={card.header}
|
||||||
subheader={card.subheader}
|
subheader={card.subheader}
|
||||||
link={card.link}
|
link={card.link}
|
||||||
|
onClick={logEventTriggered}
|
||||||
/>
|
/>
|
||||||
))}
|
);
|
||||||
|
})}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
@ -6,6 +6,7 @@ interface InfoLinkTextProps {
|
|||||||
link: string;
|
link: string;
|
||||||
leftIconVisible: boolean;
|
leftIconVisible: boolean;
|
||||||
rightIconVisible: boolean;
|
rightIconVisible: boolean;
|
||||||
|
onClick: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
function InfoLinkText({
|
function InfoLinkText({
|
||||||
@ -13,10 +14,12 @@ function InfoLinkText({
|
|||||||
link,
|
link,
|
||||||
leftIconVisible,
|
leftIconVisible,
|
||||||
rightIconVisible,
|
rightIconVisible,
|
||||||
|
onClick,
|
||||||
}: InfoLinkTextProps): JSX.Element {
|
}: InfoLinkTextProps): JSX.Element {
|
||||||
return (
|
return (
|
||||||
<Flex
|
<Flex
|
||||||
onClick={(): void => {
|
onClick={(): void => {
|
||||||
|
onClick();
|
||||||
window.open(link, '_blank');
|
window.open(link, '_blank');
|
||||||
}}
|
}}
|
||||||
className="info-link-container"
|
className="info-link-container"
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
export const ALERT_INFO_LINKS = [
|
export const ALERT_INFO_LINKS = [
|
||||||
{
|
{
|
||||||
infoText: 'How to create Metrics-based alerts',
|
infoText: 'How to create Metrics-based alerts',
|
||||||
@ -5,6 +7,7 @@ export const ALERT_INFO_LINKS = [
|
|||||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||||
leftIconVisible: false,
|
leftIconVisible: false,
|
||||||
rightIconVisible: true,
|
rightIconVisible: true,
|
||||||
|
dataSource: DataSource.METRICS,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
infoText: 'How to create Log-based alerts',
|
infoText: 'How to create Log-based alerts',
|
||||||
@ -12,6 +15,7 @@ export const ALERT_INFO_LINKS = [
|
|||||||
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||||
leftIconVisible: false,
|
leftIconVisible: false,
|
||||||
rightIconVisible: true,
|
rightIconVisible: true,
|
||||||
|
dataSource: DataSource.LOGS,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
infoText: 'How to create Trace-based alerts',
|
infoText: 'How to create Trace-based alerts',
|
||||||
@ -19,6 +23,7 @@ export const ALERT_INFO_LINKS = [
|
|||||||
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||||
leftIconVisible: false,
|
leftIconVisible: false,
|
||||||
rightIconVisible: true,
|
rightIconVisible: true,
|
||||||
|
dataSource: DataSource.TRACES,
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -26,24 +31,28 @@ export const ALERT_CARDS = [
|
|||||||
{
|
{
|
||||||
header: 'Alert on high memory usage',
|
header: 'Alert on high memory usage',
|
||||||
subheader: "Monitor your host's memory usage",
|
subheader: "Monitor your host's memory usage",
|
||||||
|
dataSource: DataSource.METRICS,
|
||||||
link:
|
link:
|
||||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-memory-usage-for-host-goes-above-400-mb-or-any-fixed-memory',
|
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-memory-usage-for-host-goes-above-400-mb-or-any-fixed-memory',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
header: 'Alert on slow external API calls',
|
header: 'Alert on slow external API calls',
|
||||||
subheader: 'Monitor your external API calls',
|
subheader: 'Monitor your external API calls',
|
||||||
|
dataSource: DataSource.TRACES,
|
||||||
link:
|
link:
|
||||||
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-external-api-latency-p90-is-over-1-second-for-last-5-mins',
|
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-external-api-latency-p90-is-over-1-second-for-last-5-mins',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
header: 'Alert on high percentage of timeout errors in logs',
|
header: 'Alert on high percentage of timeout errors in logs',
|
||||||
subheader: 'Monitor your logs for errors',
|
subheader: 'Monitor your logs for errors',
|
||||||
|
dataSource: DataSource.LOGS,
|
||||||
link:
|
link:
|
||||||
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-percentage-of-redis-timeout-error-logs-greater-than-7-in-last-5-mins',
|
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-percentage-of-redis-timeout-error-logs-greater-than-7-in-last-5-mins',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
header: 'Alert on high error percentage of an endpoint',
|
header: 'Alert on high error percentage of an endpoint',
|
||||||
subheader: 'Monitor your API endpoint',
|
subheader: 'Monitor your API endpoint',
|
||||||
|
dataSource: DataSource.METRICS,
|
||||||
link:
|
link:
|
||||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#3-alert-when-the-error-percentage-for-an-endpoint-exceeds-5',
|
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#3-alert-when-the-error-percentage-for-an-endpoint-exceeds-5',
|
||||||
},
|
},
|
||||||
|
@ -3,6 +3,7 @@ import { PlusOutlined } from '@ant-design/icons';
|
|||||||
import { Input, Typography } from 'antd';
|
import { Input, Typography } from 'antd';
|
||||||
import type { ColumnsType } from 'antd/es/table/interface';
|
import type { ColumnsType } from 'antd/es/table/interface';
|
||||||
import saveAlertApi from 'api/alerts/save';
|
import saveAlertApi from 'api/alerts/save';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import DropDown from 'components/DropDown/DropDown';
|
import DropDown from 'components/DropDown/DropDown';
|
||||||
import { listAlertMessage } from 'components/facingIssueBtn/util';
|
import { listAlertMessage } from 'components/facingIssueBtn/util';
|
||||||
import {
|
import {
|
||||||
@ -41,7 +42,7 @@ import {
|
|||||||
} from './styles';
|
} from './styles';
|
||||||
import Status from './TableComponents/Status';
|
import Status from './TableComponents/Status';
|
||||||
import ToggleAlertState from './ToggleAlertState';
|
import ToggleAlertState from './ToggleAlertState';
|
||||||
import { filterAlerts } from './utils';
|
import { alertActionLogEvent, filterAlerts } from './utils';
|
||||||
|
|
||||||
const { Search } = Input;
|
const { Search } = Input;
|
||||||
|
|
||||||
@ -107,12 +108,16 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
|||||||
}, [notificationsApi, t]);
|
}, [notificationsApi, t]);
|
||||||
|
|
||||||
const onClickNewAlertHandler = useCallback(() => {
|
const onClickNewAlertHandler = useCallback(() => {
|
||||||
|
logEvent('Alert: New alert button clicked', {
|
||||||
|
number: allAlertRules?.length,
|
||||||
|
});
|
||||||
featureResponse
|
featureResponse
|
||||||
.refetch()
|
.refetch()
|
||||||
.then(() => {
|
.then(() => {
|
||||||
history.push(ROUTES.ALERTS_NEW);
|
history.push(ROUTES.ALERTS_NEW);
|
||||||
})
|
})
|
||||||
.catch(handleError);
|
.catch(handleError);
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [featureResponse, handleError]);
|
}, [featureResponse, handleError]);
|
||||||
|
|
||||||
const onEditHandler = (record: GettableAlert) => (): void => {
|
const onEditHandler = (record: GettableAlert) => (): void => {
|
||||||
@ -321,6 +326,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
|||||||
width: 10,
|
width: 10,
|
||||||
render: (id: GettableAlert['id'], record): JSX.Element => (
|
render: (id: GettableAlert['id'], record): JSX.Element => (
|
||||||
<DropDown
|
<DropDown
|
||||||
|
onDropDownItemClick={(item): void => alertActionLogEvent(item.key, record)}
|
||||||
element={[
|
element={[
|
||||||
<ToggleAlertState
|
<ToggleAlertState
|
||||||
key="1"
|
key="1"
|
||||||
@ -356,6 +362,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const paginationConfig = {
|
||||||
|
defaultCurrent: Number(paginationParam) || 1,
|
||||||
|
};
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<SearchContainer>
|
<SearchContainer>
|
||||||
@ -385,11 +394,10 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
|||||||
columns={columns}
|
columns={columns}
|
||||||
rowKey="id"
|
rowKey="id"
|
||||||
dataSource={data}
|
dataSource={data}
|
||||||
|
shouldSendAlertsLogEvent
|
||||||
dynamicColumns={dynamicColumns}
|
dynamicColumns={dynamicColumns}
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
pagination={{
|
pagination={paginationConfig}
|
||||||
defaultCurrent: Number(paginationParam) || 1,
|
|
||||||
}}
|
|
||||||
facingIssueBtn={{
|
facingIssueBtn={{
|
||||||
attributes: {
|
attributes: {
|
||||||
screen: 'Alert list page',
|
screen: 'Alert list page',
|
||||||
|
@ -1,9 +1,11 @@
|
|||||||
import { Space } from 'antd';
|
import { Space } from 'antd';
|
||||||
import getAll from 'api/alerts/getAll';
|
import getAll from 'api/alerts/getAll';
|
||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
import ReleaseNote from 'components/ReleaseNote';
|
import ReleaseNote from 'components/ReleaseNote';
|
||||||
import Spinner from 'components/Spinner';
|
import Spinner from 'components/Spinner';
|
||||||
import { useNotifications } from 'hooks/useNotifications';
|
import { useNotifications } from 'hooks/useNotifications';
|
||||||
import { useEffect } from 'react';
|
import { isUndefined } from 'lodash-es';
|
||||||
|
import { useEffect, useRef } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { useQuery } from 'react-query';
|
import { useQuery } from 'react-query';
|
||||||
import { useLocation } from 'react-router-dom';
|
import { useLocation } from 'react-router-dom';
|
||||||
@ -19,8 +21,19 @@ function ListAlertRules(): JSX.Element {
|
|||||||
cacheTime: 0,
|
cacheTime: 0,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const logEventCalledRef = useRef(false);
|
||||||
|
|
||||||
const { notifications } = useNotifications();
|
const { notifications } = useNotifications();
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!logEventCalledRef.current && !isUndefined(data?.payload)) {
|
||||||
|
logEvent('Alert: List page visited', {
|
||||||
|
number: data?.payload?.length,
|
||||||
|
});
|
||||||
|
logEventCalledRef.current = true;
|
||||||
|
}
|
||||||
|
}, [data?.payload]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (status === 'error' || (status === 'success' && data.statusCode >= 400)) {
|
if (status === 'error' || (status === 'success' && data.statusCode >= 400)) {
|
||||||
notifications.error({
|
notifications.error({
|
||||||
|
@ -1,3 +1,6 @@
|
|||||||
|
import logEvent from 'api/common/logEvent';
|
||||||
|
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||||
|
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||||
import { GettableAlert } from 'types/api/alerts/get';
|
import { GettableAlert } from 'types/api/alerts/get';
|
||||||
|
|
||||||
export const filterAlerts = (
|
export const filterAlerts = (
|
||||||
@ -23,3 +26,32 @@ export const filterAlerts = (
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const alertActionLogEvent = (
|
||||||
|
action: string,
|
||||||
|
record: GettableAlert,
|
||||||
|
): void => {
|
||||||
|
let actionValue = '';
|
||||||
|
switch (action) {
|
||||||
|
case '0':
|
||||||
|
actionValue = 'Enable/Disable';
|
||||||
|
break;
|
||||||
|
case '1':
|
||||||
|
actionValue = 'Edit';
|
||||||
|
break;
|
||||||
|
case '2':
|
||||||
|
actionValue = 'Clone';
|
||||||
|
break;
|
||||||
|
case '3':
|
||||||
|
actionValue = 'Delete';
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
logEvent('Alert: Action', {
|
||||||
|
ruleId: record.id,
|
||||||
|
dataSource: ALERTS_DATA_SOURCE_MAP[record.alertType as AlertTypes],
|
||||||
|
name: record.alert,
|
||||||
|
action: actionValue,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
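A small sketch of how this helper is meant to be driven from the alert list's dropdown, mirroring the `onDropDownItemClick` wiring earlier in this diff; the import path is assumed and the key-to-action mapping is the one defined above.

```typescript
import { GettableAlert } from 'types/api/alerts/get';

import { alertActionLogEvent } from './utils'; // path assumed

// Dropdown item keys '0'..'3' map to Enable/Disable, Edit, Clone and Delete.
const onDropDownItemClick = (item: { key: string }, record: GettableAlert): void =>
	alertActionLogEvent(item.key, record);
```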
@ -609,6 +609,16 @@ function DashboardsList(): JSX.Element {
|
|||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const paginationConfig = data.length > 20 && {
|
||||||
|
pageSize: 20,
|
||||||
|
showTotal: showPaginationItem,
|
||||||
|
showSizeChanger: false,
|
||||||
|
onChange: (page: any): void => handlePageSizeUpdate(page),
|
||||||
|
current: Number(sortOrder.pagination),
|
||||||
|
defaultCurrent: Number(sortOrder.pagination) || 1,
|
||||||
|
hideOnSinglePage: true,
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="dashboards-list-container">
|
<div className="dashboards-list-container">
|
||||||
<div className="dashboards-list-view-content">
|
<div className="dashboards-list-view-content">
|
||||||
@ -822,16 +832,7 @@ function DashboardsList(): JSX.Element {
|
|||||||
showSorterTooltip
|
showSorterTooltip
|
||||||
loading={isDashboardListLoading || isFilteringDashboards}
|
loading={isDashboardListLoading || isFilteringDashboards}
|
||||||
showHeader={false}
|
showHeader={false}
|
||||||
pagination={
|
pagination={paginationConfig}
|
||||||
data.length > 20 && {
|
|
||||||
pageSize: 20,
|
|
||||||
showTotal: showPaginationItem,
|
|
||||||
showSizeChanger: false,
|
|
||||||
onChange: (page): void => handlePageSizeUpdate(page),
|
|
||||||
current: Number(sortOrder.pagination),
|
|
||||||
defaultCurrent: Number(sortOrder.pagination) || 1,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
|
@ -15,6 +15,7 @@ import {
|
|||||||
} from 'hooks/useResourceAttribute/utils';
|
} from 'hooks/useResourceAttribute/utils';
|
||||||
import { useMemo, useState } from 'react';
|
import { useMemo, useState } from 'react';
|
||||||
import { useParams } from 'react-router-dom';
|
import { useParams } from 'react-router-dom';
|
||||||
|
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||||
import { EQueryType } from 'types/common/dashboard';
|
import { EQueryType } from 'types/common/dashboard';
|
||||||
import { v4 as uuid } from 'uuid';
|
import { v4 as uuid } from 'uuid';
|
||||||
|
|
||||||
@ -93,6 +94,26 @@ function External(): JSX.Element {
|
|||||||
[servicename, tagFilterItems],
|
[servicename, tagFilterItems],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const errorApmToTraceQuery = useGetAPMToTracesQueries({
|
||||||
|
servicename,
|
||||||
|
isExternalCall: true,
|
||||||
|
filters: [
|
||||||
|
{
|
||||||
|
id: uuid().slice(0, 8),
|
||||||
|
key: {
|
||||||
|
key: 'hasError',
|
||||||
|
dataType: DataTypes.bool,
|
||||||
|
type: 'tag',
|
||||||
|
isColumn: true,
|
||||||
|
isJSON: false,
|
||||||
|
id: 'hasError--bool--tag--true',
|
||||||
|
},
|
||||||
|
op: 'in',
|
||||||
|
value: ['true'],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
const externalCallRPSWidget = useMemo(
|
const externalCallRPSWidget = useMemo(
|
||||||
() =>
|
() =>
|
||||||
getWidgetQueryBuilder({
|
getWidgetQueryBuilder({
|
||||||
@ -156,7 +177,7 @@ function External(): JSX.Element {
|
|||||||
servicename,
|
servicename,
|
||||||
selectedTraceTags,
|
selectedTraceTags,
|
||||||
timestamp: selectedTimeStamp,
|
timestamp: selectedTimeStamp,
|
||||||
apmToTraceQuery,
|
apmToTraceQuery: errorApmToTraceQuery,
|
||||||
})}
|
})}
|
||||||
>
|
>
|
||||||
View Traces
|
View Traces
|
||||||
|
@ -2,8 +2,6 @@ import { Card, Typography } from 'antd';
|
|||||||
import Spinner from 'components/Spinner';
|
import Spinner from 'components/Spinner';
|
||||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
import { WidgetGraphContainerProps } from 'container/NewWidget/types';
|
import { WidgetGraphContainerProps } from 'container/NewWidget/types';
|
||||||
// import useUrlQuery from 'hooks/useUrlQuery';
|
|
||||||
// import { useDashboard } from 'providers/Dashboard/Dashboard';
|
|
||||||
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
|
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
|
||||||
|
|
||||||
import { NotFoundContainer } from './styles';
|
import { NotFoundContainer } from './styles';
|
||||||
@ -14,6 +12,7 @@ function WidgetGraphContainer({
|
|||||||
queryResponse,
|
queryResponse,
|
||||||
setRequestData,
|
setRequestData,
|
||||||
selectedWidget,
|
selectedWidget,
|
||||||
|
isLoadingPanelData,
|
||||||
}: WidgetGraphContainerProps): JSX.Element {
|
}: WidgetGraphContainerProps): JSX.Element {
|
||||||
if (queryResponse.data && selectedGraph === PANEL_TYPES.BAR) {
|
if (queryResponse.data && selectedGraph === PANEL_TYPES.BAR) {
|
||||||
const sortedSeriesData = getSortedSeriesData(
|
const sortedSeriesData = getSortedSeriesData(
|
||||||
@ -38,6 +37,10 @@ function WidgetGraphContainer({
|
|||||||
return <Spinner size="large" tip="Loading..." />;
|
return <Spinner size="large" tip="Loading..." />;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (isLoadingPanelData) {
|
||||||
|
return <Spinner size="large" tip="Loading..." />;
|
||||||
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
selectedGraph !== PANEL_TYPES.LIST &&
|
selectedGraph !== PANEL_TYPES.LIST &&
|
||||||
queryResponse.data?.payload.data?.result?.length === 0
|
queryResponse.data?.payload.data?.result?.length === 0
|
||||||
@ -59,6 +62,14 @@ function WidgetGraphContainer({
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (queryResponse.isIdle) {
|
||||||
|
return (
|
||||||
|
<NotFoundContainer>
|
||||||
|
<Typography>No Data</Typography>
|
||||||
|
</NotFoundContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<WidgetGraph
|
<WidgetGraph
|
||||||
selectedWidget={selectedWidget}
|
selectedWidget={selectedWidget}
|
||||||
|
@ -17,6 +17,7 @@ function WidgetGraph({
|
|||||||
queryResponse,
|
queryResponse,
|
||||||
setRequestData,
|
setRequestData,
|
||||||
selectedWidget,
|
selectedWidget,
|
||||||
|
isLoadingPanelData,
|
||||||
}: WidgetGraphContainerProps): JSX.Element {
|
}: WidgetGraphContainerProps): JSX.Element {
|
||||||
const { currentQuery } = useQueryBuilder();
|
const { currentQuery } = useQueryBuilder();
|
||||||
|
|
||||||
@ -43,6 +44,7 @@ function WidgetGraph({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<WidgetGraphComponent
|
<WidgetGraphComponent
|
||||||
|
isLoadingPanelData={isLoadingPanelData}
|
||||||
selectedGraph={selectedGraph}
|
selectedGraph={selectedGraph}
|
||||||
queryResponse={queryResponse}
|
queryResponse={queryResponse}
|
||||||
setRequestData={setRequestData}
|
setRequestData={setRequestData}
|
||||||
|
@ -1,18 +1,15 @@
|
|||||||
import './LeftContainer.styles.scss';
|
import './LeftContainer.styles.scss';
|
||||||
|
|
||||||
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
|
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
|
||||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||||
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
||||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
|
||||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
|
||||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||||
import { memo, useEffect, useState } from 'react';
|
import { memo } from 'react';
|
||||||
import { useSelector } from 'react-redux';
|
import { useSelector } from 'react-redux';
|
||||||
import { AppState } from 'store/reducers';
|
import { AppState } from 'store/reducers';
|
||||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||||
import { getGraphType } from 'utils/getGraphType';
|
|
||||||
|
|
||||||
import { WidgetGraphProps } from '../types';
|
import { WidgetGraphProps } from '../types';
|
||||||
import ExplorerColumnsRenderer from './ExplorerColumnsRenderer';
|
import ExplorerColumnsRenderer from './ExplorerColumnsRenderer';
|
||||||
@ -27,62 +24,17 @@ function LeftContainer({
|
|||||||
selectedTracesFields,
|
selectedTracesFields,
|
||||||
setSelectedTracesFields,
|
setSelectedTracesFields,
|
||||||
selectedWidget,
|
selectedWidget,
|
||||||
selectedTime,
|
requestData,
|
||||||
|
setRequestData,
|
||||||
|
isLoadingPanelData,
|
||||||
}: WidgetGraphProps): JSX.Element {
|
}: WidgetGraphProps): JSX.Element {
|
||||||
const { stagedQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
const { stagedQuery } = useQueryBuilder();
|
||||||
const { selectedDashboard } = useDashboard();
|
const { selectedDashboard } = useDashboard();
|
||||||
|
|
||||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||||
AppState,
|
AppState,
|
||||||
GlobalReducer
|
GlobalReducer
|
||||||
>((state) => state.globalTime);
|
>((state) => state.globalTime);
|
||||||
|
|
||||||
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
|
||||||
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
|
|
||||||
return {
|
|
||||||
selectedTime: selectedWidget?.timePreferance,
|
|
||||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
|
||||||
query: stagedQuery || initialQueriesMap.metrics,
|
|
||||||
globalSelectedInterval,
|
|
||||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
|
|
||||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
|
||||||
redirectWithQueryBuilderData(updatedQuery);
|
|
||||||
return {
|
|
||||||
query: updatedQuery,
|
|
||||||
graphType: PANEL_TYPES.LIST,
|
|
||||||
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
|
||||||
globalSelectedInterval,
|
|
||||||
tableParams: {
|
|
||||||
pagination: {
|
|
||||||
offset: 0,
|
|
||||||
limit: updatedQuery.builder.queryData[0].limit || 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (stagedQuery) {
|
|
||||||
setRequestData((prev) => ({
|
|
||||||
...prev,
|
|
||||||
selectedTime: selectedTime.enum || prev.selectedTime,
|
|
||||||
globalSelectedInterval,
|
|
||||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
|
||||||
query: stagedQuery,
|
|
||||||
fillGaps: selectedWidget.fillSpans || false,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [
|
|
||||||
stagedQuery,
|
|
||||||
selectedTime,
|
|
||||||
selectedWidget.fillSpans,
|
|
||||||
globalSelectedInterval,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const queryResponse = useGetQueryRange(
|
const queryResponse = useGetQueryRange(
|
||||||
requestData,
|
requestData,
|
||||||
selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION,
|
selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION,
|
||||||
@ -104,6 +56,7 @@ function LeftContainer({
|
|||||||
queryResponse={queryResponse}
|
queryResponse={queryResponse}
|
||||||
setRequestData={setRequestData}
|
setRequestData={setRequestData}
|
||||||
selectedWidget={selectedWidget}
|
selectedWidget={selectedWidget}
|
||||||
|
isLoadingPanelData={isLoadingPanelData}
|
||||||
/>
|
/>
|
||||||
<QueryContainer className="query-section-left-container">
|
<QueryContainer className="query-section-left-container">
|
||||||
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
|
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
|
||||||
|
@ -7,7 +7,7 @@ import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
|||||||
import { chartHelpMessage } from 'components/facingIssueBtn/util';
|
import { chartHelpMessage } from 'components/facingIssueBtn/util';
|
||||||
import { FeatureKeys } from 'constants/features';
|
import { FeatureKeys } from 'constants/features';
|
||||||
import { QueryParams } from 'constants/query';
|
import { QueryParams } from 'constants/query';
|
||||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
|
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
|
||||||
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
|
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
|
||||||
@ -18,6 +18,8 @@ import useAxiosError from 'hooks/useAxiosError';
|
|||||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||||
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
|
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
|
||||||
import useUrlQuery from 'hooks/useUrlQuery';
|
import useUrlQuery from 'hooks/useUrlQuery';
|
||||||
|
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||||
|
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { defaultTo, isUndefined } from 'lodash-es';
|
import { defaultTo, isUndefined } from 'lodash-es';
|
||||||
import { Check, X } from 'lucide-react';
|
import { Check, X } from 'lucide-react';
|
||||||
@ -38,6 +40,8 @@ import { IField } from 'types/api/logs/fields';
|
|||||||
import { EQueryType } from 'types/common/dashboard';
|
import { EQueryType } from 'types/common/dashboard';
|
||||||
import { DataSource } from 'types/common/queryBuilder';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
import AppReducer from 'types/reducer/app';
|
import AppReducer from 'types/reducer/app';
|
||||||
|
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||||
|
import { getGraphType, getGraphTypeForFormat } from 'utils/getGraphType';
|
||||||
|
|
||||||
import LeftContainer from './LeftContainer';
|
import LeftContainer from './LeftContainer';
|
||||||
import QueryTypeTag from './LeftContainer/QueryTypeTag';
|
import QueryTypeTag from './LeftContainer/QueryTypeTag';
|
||||||
@ -83,6 +87,10 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
const { featureResponse } = useSelector<AppState, AppReducer>(
|
const { featureResponse } = useSelector<AppState, AppReducer>(
|
||||||
(state) => state.app,
|
(state) => state.app,
|
||||||
);
|
);
|
||||||
|
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||||
|
AppState,
|
||||||
|
GlobalReducer
|
||||||
|
>((state) => state.globalTime);
|
||||||
|
|
||||||
const { widgets = [] } = selectedDashboard?.data || {};
|
const { widgets = [] } = selectedDashboard?.data || {};
|
||||||
|
|
||||||
@ -278,6 +286,65 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
|
|
||||||
const handleError = useAxiosError();
|
const handleError = useAxiosError();
|
||||||
|
|
||||||
|
// this loading state is to take care of mismatch in the responses for table and other panels
|
||||||
|
// hence, while the panel type is changing, the query response still holds the older value and the processing logic fails
|
||||||
|
const [isLoadingPanelData, setIsLoadingPanelData] = useState<boolean>(false);
|
||||||
|
|
||||||
|
// request data should be handled by the parent and the child components should consume the same
|
||||||
|
// this has been moved here from the left container
|
||||||
|
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
||||||
|
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
|
||||||
|
return {
|
||||||
|
selectedTime: selectedWidget?.timePreferance,
|
||||||
|
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||||
|
query: stagedQuery || initialQueriesMap.metrics,
|
||||||
|
globalSelectedInterval,
|
||||||
|
formatForWeb:
|
||||||
|
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
|
||||||
|
PANEL_TYPES.TABLE,
|
||||||
|
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
|
||||||
|
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||||
|
redirectWithQueryBuilderData(updatedQuery);
|
||||||
|
return {
|
||||||
|
query: updatedQuery,
|
||||||
|
graphType: PANEL_TYPES.LIST,
|
||||||
|
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
||||||
|
globalSelectedInterval,
|
||||||
|
tableParams: {
|
||||||
|
pagination: {
|
||||||
|
offset: 0,
|
||||||
|
limit: updatedQuery.builder.queryData[0].limit || 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (stagedQuery) {
|
||||||
|
setIsLoadingPanelData(false);
|
||||||
|
setRequestData((prev) => ({
|
||||||
|
...prev,
|
||||||
|
selectedTime: selectedTime.enum || prev.selectedTime,
|
||||||
|
globalSelectedInterval,
|
||||||
|
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||||
|
query: stagedQuery,
|
||||||
|
fillGaps: selectedWidget.fillSpans || false,
|
||||||
|
formatForWeb:
|
||||||
|
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
|
||||||
|
PANEL_TYPES.TABLE,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
|
}, [
|
||||||
|
stagedQuery,
|
||||||
|
selectedTime,
|
||||||
|
selectedWidget.fillSpans,
|
||||||
|
globalSelectedInterval,
|
||||||
|
]);
|
||||||
|
|
||||||
const onClickSaveHandler = useCallback(() => {
|
const onClickSaveHandler = useCallback(() => {
|
||||||
if (!selectedDashboard) {
|
if (!selectedDashboard) {
|
||||||
return;
|
return;
|
||||||
@ -402,6 +469,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
}, [dashboardId]);
|
}, [dashboardId]);
|
||||||
|
|
||||||
const setGraphHandler = (type: PANEL_TYPES): void => {
|
const setGraphHandler = (type: PANEL_TYPES): void => {
|
||||||
|
setIsLoadingPanelData(true);
|
||||||
const updatedQuery = handleQueryChange(type as any, supersetQuery);
|
const updatedQuery = handleQueryChange(type as any, supersetQuery);
|
||||||
setGraphType(type);
|
setGraphType(type);
|
||||||
redirectWithQueryBuilderData(
|
redirectWithQueryBuilderData(
|
||||||
@ -527,6 +595,9 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
|||||||
setSelectedTracesFields={setSelectedTracesFields}
|
setSelectedTracesFields={setSelectedTracesFields}
|
||||||
selectedWidget={selectedWidget}
|
selectedWidget={selectedWidget}
|
||||||
selectedTime={selectedTime}
|
selectedTime={selectedTime}
|
||||||
|
requestData={requestData}
|
||||||
|
setRequestData={setRequestData}
|
||||||
|
isLoadingPanelData={isLoadingPanelData}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</LeftContainerWrapper>
|
</LeftContainerWrapper>
|
||||||
|
@ -24,6 +24,9 @@ export interface WidgetGraphProps {
	selectedWidget: Widgets;
	selectedGraph: PANEL_TYPES;
	selectedTime: timePreferance;
	requestData: GetQueryResultsProps;
	setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
	isLoadingPanelData: boolean;
}

export type WidgetGraphContainerProps = {
@ -34,4 +37,5 @@ export type WidgetGraphContainerProps = {
	setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
	selectedGraph: PANEL_TYPES;
	selectedWidget: Widgets;
	isLoadingPanelData: boolean;
};
@ -4,25 +4,32 @@

Prior to installation, you must ensure your Kubernetes cluster is ready and that you have the necessary permissions to deploy applications. Follow these steps to use Helm for setting up the Collector:

1. **Add the OpenTelemetry Helm repository:**

```bash
helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
```

2. **Prepare the `otel-collector-values.yaml` Configuration**

#### Azure Event Hub Receiver Configuration
If you haven't created the logs Event Hub, you can create one by following the steps in the [Azure Event Hubs documentation](../../bootstrapping/data-ingestion).

and replace the placeholders `<Primary Connection String>` with the primary connection string for your Event Hub, it should look something like this:
#### Azure Event Hub Receiver Configuration

Replace the placeholders `<Primary Connection String>` with the primary connection string for your Event Hub; it should look something like this:

```yaml
connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
```
The Event Hub docs have a step to create a SAS policy for the event hub and copy the connection string.
The Event Hub setup has a step to create a SAS policy for the event hub and copy the connection string.
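
For orientation, a minimal sketch of how that connection string might sit inside `otel-collector-values.yaml` is shown below. The `azureeventhub` receiver name and the `connection`/`format` keys follow the OpenTelemetry collector-contrib receiver; whether receivers live under a top-level `config:` key depends on the chart version, so treat this as an assumption and verify against the chart's documented values.

```yaml
# Sketch only: assumes the collector-contrib azureeventhub receiver and a
# chart layout where the collector configuration lives under `config:`.
config:
  receivers:
    azureeventhub:
      # Primary connection string copied from the Event Hub SAS policy
      connection: "<Primary Connection String>"
      # Interpret incoming records as Azure resource logs
      format: azure
  service:
    pipelines:
      logs:
        receivers: [azureeventhub]
        exporters: [otlp]
```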

#### Azure Monitor Receiver Configuration

You will need to set up a [service principal](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal) with Read permissions to receive data from Azure Monitor.

@ -33,18 +40,6 @@ helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm

4. To find `client_id` and `tenant_id`, go to the [Azure Portal](https://portal.azure.com/) and search for the `Application` you created. You will see the `Application (client) ID` and `Directory (tenant) ID` in the Overview section.

<figure data-zoomable align="center">
  <img
    src="/img/docs/azure-monitoring/service-principal-app-overview.webp"
    alt="Application Overview"
  />
  <figcaption>
    <i>
      Application Overview
    </i>
  </figcaption>
</figure>

5. To find `subscription_id`, follow the steps in [Find Your Subscription](https://learn.microsoft.com/en-us/azure/azure-portal/get-subscription-tenant-id#find-your-azure-subscription) and populate them in the configuration file.

6. Ensure you replace the placeholders `<region>` and `<ingestion-key>` with the appropriate values for your SigNoz Cloud instance.
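
As a rough sketch, the values gathered in steps 4–6 would typically end up in an `azuremonitor` receiver block along the following lines. The field names mirror the collector-contrib `azuremonitor` receiver as an assumption; the placeholders are the ones described above and must be replaced, and the exact schema should be checked against the receiver README for your collector version.

```yaml
# Sketch only: field names assume the collector-contrib azuremonitor receiver.
receivers:
  azuremonitor:
    subscription_id: "<subscription_id>"   # from step 5
    tenant_id: "<tenant_id>"               # Directory (tenant) ID from step 4
    client_id: "<client_id>"               # Application (client) ID from step 4
    client_secret: "<client_secret>"       # secret created for the service principal
    collection_interval: 60s
```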
@ -92,13 +87,15 @@ processors:
  batch: {}
exporters:
  otlp:
    endpoint: "ingest.<region>.signoz.cloud:443"
    endpoint: "ingest.{{REGION}}.signoz.cloud:443"
    tls:
      insecure: false
    headers:
      "signoz-access-token": "<ingestion-key>"
      "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
```

3. **Deploy the OpenTelemetry Collector to your Kubernetes cluster:**

You'll need to prepare a custom configuration file, say `otel-collector-values.yaml`, that matches your environment's specific needs. Replace `<namespace>` with the Kubernetes namespace where you wish to install the Collector.
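
Once the values file is ready, deploying the chart usually comes down to a single `helm install` against the repository added in step 1. The release name and the `--set mode=...` choice below are illustrative assumptions rather than values prescribed by this guide; keep `<namespace>` as your target namespace.

```bash
# Sketch only: release name and collector mode are illustrative.
helm install my-otel-collector open-telemetry/opentelemetry-collector \
  --namespace <namespace> --create-namespace \
  -f otel-collector-values.yaml \
  --set mode=deployment
```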
@ -0,0 +1,37 @@
import { act, render, screen, waitFor } from 'tests/test-utils';

import Members from '../Members';

describe('Organization Settings Page', () => {
	afterEach(() => {
		jest.clearAllMocks();
	});

	it('render list of members', async () => {
		act(() => {
			render(<Members />);
		});

		const title = await screen.findByText(/Members/i);
		expect(title).toBeInTheDocument();

		await waitFor(() => {
			expect(screen.getByText('firstUser@test.io')).toBeInTheDocument(); // first item
			expect(screen.getByText('lastUser@test.io')).toBeInTheDocument(); // last item
		});
	});

	// this is required as our edit/delete logic is dependent on the index and it will break with pagination enabled
	it('render list of members without pagination', async () => {
		render(<Members />);

		await waitFor(() => {
			expect(screen.getByText('firstUser@test.io')).toBeInTheDocument(); // first item
			expect(screen.getByText('lastUser@test.io')).toBeInTheDocument(); // last item

			expect(
				document.querySelector('.ant-table-pagination'),
			).not.toBeInTheDocument();
		});
	});
});
@ -9,7 +9,7 @@ function TablePanelWrapper({
	tableProcessedDataRef,
}: PanelWrapperProps): JSX.Element {
	const panelData =
		queryResponse.data?.payload?.data?.newResult?.data?.result || [];
		(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
	const { thresholds } = widget;
	return (
		<GridTableComponent
@ -1,3 +1,4 @@
export const historyPagination = {
	defaultPageSize: 5,
	hideOnSinglePage: true,
};
@ -334,6 +334,11 @@ export function PlannedDowntimeList({
		}
	}, [downtimeSchedules.error, downtimeSchedules.isError, notifications]);

	const paginationConfig = {
		pageSize: 5,
		showSizeChanger: false,
		hideOnSinglePage: true,
	};
	return (
		<Table<DowntimeSchedulesTableData>
			columns={columns}
@ -342,7 +347,7 @@ export function PlannedDowntimeList({
			dataSource={tableData || []}
			loading={downtimeSchedules.isLoading || downtimeSchedules.isFetching}
			showHeader={false}
			pagination={{ pageSize: 5, showSizeChanger: false }}
			pagination={paginationConfig}
		/>
	);
}
@ -33,10 +33,12 @@ export const getColumnSearchProps = (
		record: ServicesList,
	): boolean => {
		if (record[dataIndex]) {
			return (
				record[dataIndex]
					?.toString()
					.toLowerCase()
					.includes(value.toString().toLowerCase());
					.includes(value.toString().toLowerCase()) || false
			);
		}

		return false;
@ -79,6 +79,11 @@ function ServiceMetricTable({
		}
	}, [services, licenseData, isFetching, isCloudUserVal]);

	const paginationConfig = {
		defaultPageSize: 10,
		showTotal: (total: number, range: number[]): string =>
			`${range[0]}-${range[1]} of ${total} items`,
	};
	return (
		<>
			{RPS > MAX_RPS_LIMIT && (
@ -92,11 +97,7 @@ function ServiceMetricTable({
			<ResourceAttributesFilter />

			<ResizeTable
				pagination={{
				pagination={paginationConfig}
					defaultPageSize: 10,
					showTotal: (total: number, range: number[]): string =>
						`${range[0]}-${range[1]} of ${total} items`,
				}}
				columns={tableColumns}
				loading={isLoading}
				dataSource={services}
@ -36,6 +36,11 @@ function ServiceTraceTable({
		}
	}, [services, licenseData, isFetching, isCloudUserVal]);

	const paginationConfig = {
		defaultPageSize: 10,
		showTotal: (total: number, range: number[]): string =>
			`${range[0]}-${range[1]} of ${total} items`,
	};
	return (
		<>
			{RPS > MAX_RPS_LIMIT && (
@ -49,11 +54,7 @@ function ServiceTraceTable({
			<ResourceAttributesFilter />

			<ResizeTable
				pagination={{
				pagination={paginationConfig}
					defaultPageSize: 10,
					showTotal: (total: number, range: number[]): string =>
						`${range[0]}-${range[1]} of ${total} items`,
				}}
				columns={tableColumns}
				loading={loading}
				dataSource={services}
@ -1,7 +1,10 @@
import getTriggeredApi from 'api/alerts/getTriggered';
import logEvent from 'api/common/logEvent';
import Spinner from 'components/Spinner';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useAxiosError from 'hooks/useAxiosError';
import { isUndefined } from 'lodash-es';
import { useEffect, useRef } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@ -13,6 +16,8 @@ function TriggeredAlerts(): JSX.Element {
		(state) => state.app.user?.userId,
	);

	const hasLoggedEvent = useRef(false); // Track if logEvent has been called

	const handleError = useAxiosError();

	const alertsResponse = useQuery(
@ -29,6 +34,15 @@ function TriggeredAlerts(): JSX.Element {
		},
	);

	useEffect(() => {
		if (!hasLoggedEvent.current && !isUndefined(alertsResponse.data?.payload)) {
			logEvent('Alert: Triggered alert list page visited', {
				number: alertsResponse.data?.payload?.length,
			});
			hasLoggedEvent.current = true;
		}
	}, [alertsResponse.data?.payload]);

	if (alertsResponse.error) {
		return <TriggerComponent allAlerts={[]} />;
	}
@ -12,7 +12,7 @@ import {
} from 'container/TopNav/DateTimeSelectionV2/config';
import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { isEmpty, cloneDeep } from 'lodash-es';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@ -40,6 +40,10 @@ export async function GetMetricQueryRange(
		throw new Error(error);
	}

	if (props.formatForWeb) {
		return response;
	}

	if (response.payload?.data?.result) {
		const v2Range = convertNewDataToOld(response.payload);

@ -76,6 +80,7 @@ export interface GetQueryResultsProps {
	variables?: Record<string, unknown>;
	params?: Record<string, unknown>;
	fillGaps?: boolean;
	formatForWeb?: boolean;
	tableParams?: {
		pagination?: Pagination;
		selectColumns?: any;
@ -16,6 +16,7 @@ export const prepareQueryRangePayload = ({
	query,
	globalSelectedInterval,
	graphType,
	formatForWeb,
	selectedTime,
	tableParams,
	variables = {},
@ -102,6 +103,7 @@ export const prepareQueryRangePayload = ({
			inputFormat: 'ns',
		}),
		variables,
		formatForWeb,
		compositeQuery,
		...restParams,
	};
@ -583,11 +583,11 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
		q.series?.sort((a, b) => {
			let labelA = '';
			let labelB = '';
			a.labelsArray.forEach((lab) => {
			a.labelsArray?.forEach((lab) => {
				labelA += Object.values(lab)[0];
			});

			b.labelsArray.forEach((lab) => {
			b.labelsArray?.forEach((lab) => {
				labelB += Object.values(lab)[0];
			});

220
frontend/src/mocks-server/__mockdata__/members.ts
Normal file
220
frontend/src/mocks-server/__mockdata__/members.ts
Normal file
@ -0,0 +1,220 @@
|
|||||||
|
/* eslint-disable sonarjs/no-duplicate-string */
|
||||||
|
|
||||||
|
export const membersResponse = [
|
||||||
|
{
|
||||||
|
id: '3223a874-5678458745786',
|
||||||
|
name: 'John Doe',
|
||||||
|
email: 'firstUser@test.io',
|
||||||
|
createdAt: 1666357530,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '5e9681b1-5678458745786',
|
||||||
|
name: 'Jane Doe',
|
||||||
|
email: 'johndoe2@test.io',
|
||||||
|
createdAt: 1666365394,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '11e8c55d-5678458745786',
|
||||||
|
name: 'Alex',
|
||||||
|
email: 'blah@test.io',
|
||||||
|
createdAt: 1666366317,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: 'd878012367813286731aab62',
|
||||||
|
role: 'VIEWER',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '2ad2e404-5678458745786',
|
||||||
|
name: 'Tom',
|
||||||
|
email: 'johndoe4@test.io',
|
||||||
|
createdAt: 1673441483,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '6f532456-5678458745786',
|
||||||
|
name: 'Harry',
|
||||||
|
email: 'harry@test.io',
|
||||||
|
createdAt: 1691551672,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'ae22fa73-5678458745786',
|
||||||
|
name: 'Ron',
|
||||||
|
email: 'ron@test.io',
|
||||||
|
createdAt: 1691668239,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '3223a874-5678458745786',
|
||||||
|
name: 'John Doe',
|
||||||
|
email: 'johndoe@test.io',
|
||||||
|
createdAt: 1666357530,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '5e9681b1-5678458745786',
|
||||||
|
name: 'Jane Doe',
|
||||||
|
email: 'johndoe2@test.io',
|
||||||
|
createdAt: 1666365394,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '11e8c55d-5678458745786',
|
||||||
|
name: 'Alex',
|
||||||
|
email: 'blah@test.io',
|
||||||
|
createdAt: 1666366317,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: 'd878012367813286731aab62',
|
||||||
|
role: 'VIEWER',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '2ad2e404-5678458745786',
|
||||||
|
name: 'Tom',
|
||||||
|
email: 'johndoe4@test.io',
|
||||||
|
createdAt: 1673441483,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '6f532456-5678458745786',
|
||||||
|
name: 'Harry',
|
||||||
|
email: 'harry@test.io',
|
||||||
|
createdAt: 1691551672,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'ae22fa73-5678458745786',
|
||||||
|
name: 'Ron',
|
||||||
|
email: 'ron@test.io',
|
||||||
|
createdAt: 1691668239,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '3223a874-5678458745786',
|
||||||
|
name: 'John Doe',
|
||||||
|
email: 'johndoe@test.io',
|
||||||
|
createdAt: 1666357530,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '5e9681b1-5678458745786',
|
||||||
|
name: 'Jane Doe',
|
||||||
|
email: 'johndoe2@test.io',
|
||||||
|
createdAt: 1666365394,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '11e8c55d-5678458745786',
|
||||||
|
name: 'Alex',
|
||||||
|
email: 'blah@test.io',
|
||||||
|
createdAt: 1666366317,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: 'd878012367813286731aab62',
|
||||||
|
role: 'VIEWER',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '2ad2e404-5678458745786',
|
||||||
|
name: 'Tom',
|
||||||
|
email: 'johndoe4@test.io',
|
||||||
|
createdAt: 1673441483,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '6f532456-5678458745786',
|
||||||
|
name: 'Harry',
|
||||||
|
email: 'harry@test.io',
|
||||||
|
createdAt: 1691551672,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'ae22fa73-5678458745786',
|
||||||
|
name: 'Ron',
|
||||||
|
email: 'lastUser@test.io',
|
||||||
|
createdAt: 1691668239,
|
||||||
|
profilePictureURL: '',
|
||||||
|
orgId: '1287612376312867312867',
|
||||||
|
groupId: '5678458745786',
|
||||||
|
role: 'ADMIN',
|
||||||
|
organization: 'Test Inc',
|
||||||
|
flags: null,
|
||||||
|
},
|
||||||
|
];
|
@ -2,6 +2,7 @@ import { rest } from 'msw';

import { billingSuccessResponse } from './__mockdata__/billing';
import { licensesSuccessResponse } from './__mockdata__/licenses';
import { membersResponse } from './__mockdata__/members';
import { queryRangeSuccessResponse } from './__mockdata__/query_range';
import { serviceSuccessResponse } from './__mockdata__/services';
import { topLevelOperationSuccessResponse } from './__mockdata__/top_level_operations';
@ -25,6 +26,9 @@ export const handlers = [
		res(ctx.status(200), ctx.json(topLevelOperationSuccessResponse)),
	),

	rest.get('http://localhost/api/v1/orgUsers/*', (req, res, ctx) =>
		res(ctx.status(200), ctx.json(membersResponse)),
	),
	rest.get(
		'http://localhost/api/v3/autocomplete/attribute_keys',
		(req, res, ctx) => {
@ -46,6 +46,8 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
		},
	];

	const paginationConfig = { pageSize: 20, hideOnSinglePage: true };

	return (
		<div className="integration-data-collected">
			<div className="logs-section">
@ -59,7 +61,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
						index % 2 === 0 ? 'table-row-dark' : ''
					}
					dataSource={logsData}
					pagination={{ pageSize: 20 }}
					pagination={paginationConfig}
					className="logs-section-table"
				/>
			</div>
@ -74,7 +76,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
						index % 2 === 0 ? 'table-row-dark' : ''
					}
					dataSource={metricsData}
					pagination={{ pageSize: 20 }}
					pagination={paginationConfig}
					className="metrics-section-table"
				/>
			</div>
@ -277,6 +277,8 @@ function SaveView(): JSX.Element {
		},
	];

	const paginationConfig = { pageSize: 5, hideOnSinglePage: true };

	return (
		<div className="save-view-container">
			<div className="save-view-content">
@ -303,7 +305,7 @@ function SaveView(): JSX.Element {
				dataSource={dataSource}
				loading={isLoading || isRefetching}
				showHeader={false}
				pagination={{ pageSize: 5 }}
				pagination={paginationConfig}
			/>
		</div>

@ -42,6 +42,15 @@ const mockStored = (role?: string): any =>
			accessJwt: '',
			refreshJwt: '',
		},
		org: [
			{
				createdAt: 0,
				hasOptedUpdates: false,
				id: 'xyz',
				isAnonymous: false,
				name: 'Test Inc. - India',
			},
		],
	},
});

@ -24,6 +24,7 @@ export type QueryRangePayload = {
	start: number;
	step: number;
	variables?: Record<string, unknown>;
	formatForWeb?: boolean;
	[param: string]: unknown;
};
export interface MetricRangePayloadProps {
@ -10,3 +10,6 @@ export const getGraphType = (panelType: PANEL_TYPES): PANEL_TYPES => {
	}
	return panelType;
};

export const getGraphTypeForFormat = (panelType: PANEL_TYPES): PANEL_TYPES =>
	panelType;
@ -6195,11 +6195,11 @@ brace-expansion@^2.0.1:
|
|||||||
balanced-match "^1.0.0"
|
balanced-match "^1.0.0"
|
||||||
|
|
||||||
braces@^3.0.2, braces@~3.0.2:
|
braces@^3.0.2, braces@~3.0.2:
|
||||||
version "3.0.2"
|
version "3.0.3"
|
||||||
resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz"
|
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
|
||||||
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
|
integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
|
||||||
dependencies:
|
dependencies:
|
||||||
fill-range "^7.0.1"
|
fill-range "^7.1.1"
|
||||||
|
|
||||||
broadcast-channel@^3.4.1:
|
broadcast-channel@^3.4.1:
|
||||||
version "3.7.0"
|
version "3.7.0"
|
||||||
@ -8808,10 +8808,10 @@ file-saver@^2.0.2:
|
|||||||
resolved "https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.5.tgz#d61cfe2ce059f414d899e9dd6d4107ee25670c38"
|
resolved "https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.5.tgz#d61cfe2ce059f414d899e9dd6d4107ee25670c38"
|
||||||
integrity sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==
|
integrity sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==
|
||||||
|
|
||||||
fill-range@^7.0.1:
|
fill-range@^7.1.1:
|
||||||
version "7.0.1"
|
version "7.1.1"
|
||||||
resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
|
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
|
||||||
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
|
integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
|
||||||
dependencies:
|
dependencies:
|
||||||
to-regex-range "^5.0.1"
|
to-regex-range "^5.0.1"
|
||||||
|
|
||||||
@ -13705,13 +13705,14 @@ postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21,
|
|||||||
picocolors "^1.0.0"
|
picocolors "^1.0.0"
|
||||||
source-map-js "^1.2.0"
|
source-map-js "^1.2.0"
|
||||||
|
|
||||||
posthog-js@1.140.1:
|
posthog-js@1.142.1:
|
||||||
version "1.140.1"
|
version "1.142.1"
|
||||||
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.140.1.tgz#34efc0d326fa5fcf7950106f350fb4f0e73b2da6"
|
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.142.1.tgz#3b91229732938c5c76b5ee6d410698a267e073e9"
|
||||||
integrity sha512-UeKuAtQSvbzmTCzNVaauku8F194EYwAP33WrRrWZlDlMNbMy7GKcZOgKbr7jZqnha7FlVlHrWk+Rpyr1zCFhPQ==
|
integrity sha512-yqeWTWitlb0sCaH5v6s7UJ+pPspzf/lkzPaSE5pMMXRM2i2KNsMoZEAZqbPCW8fQ8QL6lHs6d8PLjHrvbR288w==
|
||||||
dependencies:
|
dependencies:
|
||||||
fflate "^0.4.8"
|
fflate "^0.4.8"
|
||||||
preact "^10.19.3"
|
preact "^10.19.3"
|
||||||
|
web-vitals "^4.0.1"
|
||||||
|
|
||||||
preact@^10.19.3:
|
preact@^10.19.3:
|
||||||
version "10.22.0"
|
version "10.22.0"
|
||||||
@ -17218,6 +17219,11 @@ web-vitals@^0.2.4:
|
|||||||
resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-0.2.4.tgz"
|
resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-0.2.4.tgz"
|
||||||
integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg==
|
integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg==
|
||||||
|
|
||||||
|
web-vitals@^4.0.1:
|
||||||
|
version "4.2.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-4.2.0.tgz#008949ab79717a68ccaaa3c4371cbc7bbbd78a92"
|
||||||
|
integrity sha512-ohj72kbtVWCpKYMxcbJ+xaOBV3En76hW47j52dG+tEGG36LZQgfFw5yHl9xyjmosy3XUMn8d/GBUAy4YPM839w==
|
||||||
|
|
||||||
web-worker@^1.2.0:
|
web-worker@^1.2.0:
|
||||||
version "1.2.0"
|
version "1.2.0"
|
||||||
resolved "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz"
|
resolved "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz"
|
||||||
@ -17632,14 +17638,14 @@ write-file-atomic@^4.0.2:
|
|||||||
signal-exit "^3.0.7"
|
signal-exit "^3.0.7"
|
||||||
|
|
||||||
ws@^7.3.1, ws@^7.4.6:
|
ws@^7.3.1, ws@^7.4.6:
|
||||||
version "7.5.9"
|
version "7.5.10"
|
||||||
resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz"
|
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
|
||||||
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
|
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
|
||||||
|
|
||||||
ws@^8.13.0:
|
ws@^8.13.0:
|
||||||
version "8.13.0"
|
version "8.17.1"
|
||||||
resolved "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz"
|
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
|
||||||
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
|
integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
|
||||||
|
|
||||||
xhr-request@^1.0.1:
|
xhr-request@^1.0.1:
|
||||||
version "1.1.0"
|
version "1.1.0"
|
||||||
|
2
go.mod
2
go.mod
@ -6,7 +6,7 @@ require (
|
|||||||
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
|
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
|
||||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||||
github.com/SigNoz/signoz-otel-collector v0.102.0
|
github.com/SigNoz/signoz-otel-collector v0.102.1
|
||||||
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
|
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
|
||||||
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
|
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
|
||||||
github.com/antonmedv/expr v1.15.3
|
github.com/antonmedv/expr v1.15.3
|
||||||
|
8
go.sum
8
go.sum
@ -64,8 +64,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
|
|||||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||||
github.com/SigNoz/prometheus v1.11.1 h1:roM8ugYf4UxaeKKujEeBvoX7ybq3IrS+TB26KiRtIJg=
|
github.com/SigNoz/prometheus v1.11.1 h1:roM8ugYf4UxaeKKujEeBvoX7ybq3IrS+TB26KiRtIJg=
|
||||||
github.com/SigNoz/prometheus v1.11.1/go.mod h1:uv4mQwZQtx7y4GQ6EdHOi8Wsk07uHNn2XHd1zM85m6I=
|
github.com/SigNoz/prometheus v1.11.1/go.mod h1:uv4mQwZQtx7y4GQ6EdHOi8Wsk07uHNn2XHd1zM85m6I=
|
||||||
github.com/SigNoz/signoz-otel-collector v0.102.0 h1:v6ap+gdvrKklMwU+M9FJgrn28vN0YxrINl3kvdcLonA=
|
github.com/SigNoz/signoz-otel-collector v0.102.1 h1:RXzs/dA9IMFGi6mXecEFVvShWfilqx5cCEXmzzvVfK0=
|
||||||
github.com/SigNoz/signoz-otel-collector v0.102.0/go.mod h1:kCx5BfzDujq6C0+kotiqLp5COG2ut4Cb039+55rbWE0=
|
github.com/SigNoz/signoz-otel-collector v0.102.1/go.mod h1:ISAXYhZenojCWg6CdDJtPMpfS6Zwc08+uoxH25tc6Y0=
|
||||||
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
|
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
|
||||||
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
|
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
|
||||||
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
|
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
|
||||||
@ -378,8 +378,8 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l
|
|||||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||||
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
|
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
|
||||||
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||||
github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA=
|
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
|
||||||
github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
|
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
|
||||||
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||||
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
|
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
|
||||||
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||||
|
@ -53,7 +53,6 @@ import (
|
|||||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||||
"go.signoz.io/signoz/pkg/query-service/model"
|
"go.signoz.io/signoz/pkg/query-service/model"
|
||||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||||
"go.signoz.io/signoz/pkg/query-service/rules"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
||||||
"go.signoz.io/signoz/pkg/query-service/utils"
|
"go.signoz.io/signoz/pkg/query-service/utils"
|
||||||
)
|
)
|
||||||
@ -1942,7 +1941,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
|||||||
end := time.Now()
|
end := time.Now()
|
||||||
zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
|
zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
|
||||||
searchSpansResult := []model.SearchSpansResult{{
|
searchSpansResult := []model.SearchSpansResult{{
|
||||||
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
|
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
|
||||||
Events: make([][]interface{}, len(searchScanResponses)),
|
Events: make([][]interface{}, len(searchScanResponses)),
|
||||||
IsSubTree: false,
|
IsSubTree: false,
|
||||||
},
|
},
|
||||||
@ -1993,8 +1992,8 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
searchSpansResult[0].StartTimestampMillis = startTime - (durationNano/1000000)
|
searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
|
||||||
searchSpansResult[0].EndTimestampMillis = endTime + (durationNano/1000000)
|
searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)
|
||||||
|
|
||||||
return &searchSpansResult, nil
|
return &searchSpansResult, nil
|
||||||
}
|
}
|
||||||
@ -3420,36 +3419,6 @@ func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
|
|
||||||
alertsInfo := model.AlertsInfo{}
|
|
||||||
// fetch alerts from rules db
|
|
||||||
query := "SELECT data FROM rules"
|
|
||||||
var alertsData []string
|
|
||||||
err := r.localDB.Select(&alertsData, query)
|
|
||||||
if err != nil {
|
|
||||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
|
||||||
return &alertsInfo, err
|
|
||||||
}
|
|
||||||
for _, alert := range alertsData {
|
|
||||||
var rule rules.GettableRule
|
|
||||||
err = json.Unmarshal([]byte(alert), &rule)
|
|
||||||
if err != nil {
|
|
||||||
zap.L().Error("invalid rule data", zap.Error(err))
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if rule.AlertType == "LOGS_BASED_ALERT" {
|
|
||||||
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
|
|
||||||
} else if rule.AlertType == "METRIC_BASED_ALERT" {
|
|
||||||
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
|
|
||||||
} else if rule.AlertType == "TRACES_BASED_ALERT" {
|
|
||||||
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
|
|
||||||
}
|
|
||||||
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
|
|
||||||
}
|
|
||||||
|
|
||||||
return &alertsInfo, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) {
|
func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) {
|
||||||
savedViewsInfo := model.SavedViewsInfo{}
|
savedViewsInfo := model.SavedViewsInfo{}
|
||||||
savedViews, err := explorer.GetViews()
|
savedViews, err := explorer.GetViews()
|
||||||
@ -4434,8 +4403,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
case *time.Time:
|
case *time.Time:
|
||||||
point.Timestamp = v.UnixMilli()
|
point.Timestamp = v.UnixMilli()
|
||||||
case *float64, *float32:
|
case *float64, *float32:
|
||||||
isValidPoint = true
|
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = float64(reflect.ValueOf(v).Elem().Float())
|
point.Value = float64(reflect.ValueOf(v).Elem().Float())
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
|
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
|
||||||
@ -4447,9 +4416,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
case **float64, **float32:
|
case **float64, **float32:
|
||||||
val := reflect.ValueOf(v)
|
val := reflect.ValueOf(v)
|
||||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||||
isValidPoint = true
|
|
||||||
value := reflect.ValueOf(v).Elem().Elem().Float()
|
value := reflect.ValueOf(v).Elem().Elem().Float()
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = value
|
point.Value = value
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||||
@ -4460,8 +4429,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
case *uint, *uint8, *uint64, *uint16, *uint32:
|
case *uint, *uint8, *uint64, *uint16, *uint32:
|
||||||
isValidPoint = true
|
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = float64(reflect.ValueOf(v).Elem().Uint())
|
point.Value = float64(reflect.ValueOf(v).Elem().Uint())
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
|
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
|
||||||
@ -4473,9 +4442,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
case **uint, **uint8, **uint64, **uint16, **uint32:
|
case **uint, **uint8, **uint64, **uint16, **uint32:
|
||||||
val := reflect.ValueOf(v)
|
val := reflect.ValueOf(v)
|
||||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||||
isValidPoint = true
|
|
||||||
value := reflect.ValueOf(v).Elem().Elem().Uint()
|
value := reflect.ValueOf(v).Elem().Elem().Uint()
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = float64(value)
|
point.Value = float64(value)
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||||
@ -4486,8 +4455,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
case *int, *int8, *int16, *int32, *int64:
|
case *int, *int8, *int16, *int32, *int64:
|
||||||
isValidPoint = true
|
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = float64(reflect.ValueOf(v).Elem().Int())
|
point.Value = float64(reflect.ValueOf(v).Elem().Int())
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
|
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
|
||||||
@ -4499,9 +4468,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
|||||||
case **int, **int8, **int16, **int32, **int64:
|
case **int, **int8, **int16, **int32, **int64:
|
||||||
val := reflect.ValueOf(v)
|
val := reflect.ValueOf(v)
|
||||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||||
isValidPoint = true
|
|
||||||
value := reflect.ValueOf(v).Elem().Elem().Int()
|
value := reflect.ValueOf(v).Elem().Elem().Int()
|
||||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||||
|
isValidPoint = true
|
||||||
point.Value = float64(value)
|
point.Value = float64(value)
|
||||||
} else {
|
} else {
|
||||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||||
|
@ -142,6 +142,11 @@ func checkDuplicateString(pipeline []string) bool {
	for _, processor := range pipeline {
		name := processor
		if _, ok := exists[name]; ok {
			zap.L().Error(
				"duplicate processor name detected in generated collector config for log pipelines",
				zap.String("processor", processor),
				zap.Any("pipeline", pipeline),
			)
			return true
		}

@ -5,7 +5,10 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
. "github.com/smartystreets/goconvey/convey"
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||||
|
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
var buildProcessorTestData = []struct {
|
var buildProcessorTestData = []struct {
|
||||||
@ -204,3 +207,89 @@ func TestBuildLogsPipeline(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *testing.T) {
|
||||||
|
require := require.New(t)
|
||||||
|
|
||||||
|
baseConf := []byte(`
|
||||||
|
receivers:
|
||||||
|
memory:
|
||||||
|
id: in-memory-receiver
|
||||||
|
exporters:
|
||||||
|
memory:
|
||||||
|
id: in-memory-exporter
|
||||||
|
service:
|
||||||
|
pipelines:
|
||||||
|
logs:
|
||||||
|
receivers:
|
||||||
|
- memory
|
||||||
|
processors: []
|
||||||
|
exporters:
|
||||||
|
- memory
|
||||||
|
`)
|
||||||
|
|
||||||
|
makeTestPipeline := func(name string, alias string) Pipeline {
|
||||||
|
return Pipeline{
|
||||||
|
OrderId: 1,
|
||||||
|
Name: name,
|
||||||
|
Alias: alias,
|
||||||
|
Enabled: true,
|
||||||
|
Filter: &v3.FilterSet{
|
||||||
|
Operator: "AND",
|
||||||
|
Items: []v3.FilterItem{
|
||||||
|
{
|
||||||
|
Key: v3.AttributeKey{
|
||||||
|
Key: "method",
|
||||||
|
DataType: v3.AttributeKeyDataTypeString,
|
||||||
|
Type: v3.AttributeKeyTypeTag,
|
||||||
|
},
|
||||||
|
Operator: "=",
|
||||||
|
Value: "GET",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Config: []PipelineOperator{
|
||||||
|
{
|
||||||
|
ID: "regex",
|
||||||
|
Type: "regex_parser",
|
||||||
|
Enabled: true,
|
||||||
|
Name: "regex parser",
|
||||||
|
ParseFrom: "attributes.test_regex_target",
|
||||||
|
ParseTo: "attributes",
|
||||||
|
Regex: `^\s*(?P<json_data>{.*})\s*$`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
testPipelines := []Pipeline{
|
||||||
|
makeTestPipeline("test pipeline 1", "pipeline-alias"),
|
||||||
|
makeTestPipeline("test pipeline 2", "pipeline-alias"),
|
||||||
|
}
|
||||||
|
|
||||||
|
recommendedConfYaml, apiErr := GenerateCollectorConfigWithPipelines(
|
||||||
|
baseConf, testPipelines,
|
||||||
|
)
|
||||||
|
require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation: %v", apiErr))
|
||||||
|
|
||||||
|
var recommendedConf map[string]interface{}
|
||||||
|
err := yaml.Unmarshal(recommendedConfYaml, &recommendedConf)
|
||||||
|
require.Nil(err, "couldn't unmarshal recommended config")
|
||||||
|
|
||||||
|
logsProcessors := recommendedConf["service"].(map[string]any)["pipelines"].(map[string]any)["logs"].(map[string]any)["processors"].([]any)
|
||||||
|
|
||||||
|
require.Equal(
|
||||||
|
len(logsProcessors), len(testPipelines),
|
||||||
|
"test pipelines not included in recommended config as expected",
|
||||||
|
)
|
||||||
|
|
||||||
|
recommendedConfYaml2, apiErr := GenerateCollectorConfigWithPipelines(
|
||||||
|
baseConf, testPipelines,
|
||||||
|
)
|
||||||
|
require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation again: %v", apiErr))
|
||||||
|
require.Equal(
|
||||||
|
string(recommendedConfYaml), string(recommendedConfYaml2),
|
||||||
|
"collector config should not change across recommendations for same set of pipelines",
|
||||||
|
)
|
||||||
|
|
||||||
|
}
|
||||||
|
@ -24,7 +24,7 @@ func CollectorConfProcessorName(p Pipeline) string {
|
|||||||
func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
|
func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
|
||||||
processors := map[string]interface{}{}
|
processors := map[string]interface{}{}
|
||||||
names := []string{}
|
names := []string{}
|
||||||
for _, v := range pipelines {
|
for pipelineIdx, v := range pipelines {
|
||||||
if !v.Enabled {
|
if !v.Enabled {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@ -70,6 +70,12 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
|
|||||||
Operators: v.Config,
|
Operators: v.Config,
|
||||||
}
|
}
|
||||||
name := CollectorConfProcessorName(v)
|
name := CollectorConfProcessorName(v)
|
||||||
|
|
||||||
|
// Ensure name is unique
|
||||||
|
if _, nameExists := processors[name]; nameExists {
|
||||||
|
name = fmt.Sprintf("%s-%d", name, pipelineIdx)
|
||||||
|
}
|
||||||
|
|
||||||
processors[name] = processor
|
processors[name] = processor
|
||||||
names = append(names, name)
|
names = append(names, name)
|
||||||
}
|
}
|
||||||
|
@ -803,76 +803,3 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
|
|||||||
_, test2Exists := result[0].Attributes_string["test2"]
|
_, test2Exists := result[0].Attributes_string["test2"]
|
||||||
require.False(test2Exists)
|
require.False(test2Exists)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
|
|
||||||
// TODO(Raj): Remove this after dots are supported
|
|
||||||
|
|
||||||
require := require.New(t)
|
|
||||||
|
|
||||||
testPipeline := Pipeline{
|
|
||||||
OrderId: 1,
|
|
||||||
Name: "pipeline1",
|
|
||||||
Alias: "pipeline1",
|
|
||||||
Enabled: true,
|
|
||||||
Filter: &v3.FilterSet{
|
|
||||||
Operator: "AND",
|
|
||||||
Items: []v3.FilterItem{
|
|
||||||
{
|
|
||||||
Key: v3.AttributeKey{
|
|
||||||
Key: "k8s_deployment_name",
|
|
||||||
DataType: v3.AttributeKeyDataTypeString,
|
|
||||||
Type: v3.AttributeKeyTypeResource,
|
|
||||||
},
|
|
||||||
Operator: "=",
|
|
||||||
Value: "ingress",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Config: []PipelineOperator{
|
|
||||||
{
|
|
||||||
ID: "add",
|
|
||||||
Type: "add",
|
|
||||||
Enabled: true,
|
|
||||||
Name: "add",
|
|
||||||
Field: "attributes.test",
|
|
||||||
Value: "test-value",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
testLogs := []model.SignozLog{{
|
|
||||||
Timestamp: uint64(time.Now().UnixNano()),
|
|
||||||
Body: "test log",
|
|
||||||
Attributes_string: map[string]string{},
|
|
||||||
Resources_string: map[string]string{
|
|
||||||
"k8s_deployment_name": "ingress",
|
|
||||||
},
|
|
||||||
SeverityText: entry.Info.String(),
|
|
||||||
SeverityNumber: uint8(entry.Info),
|
|
||||||
SpanID: "",
|
|
||||||
TraceID: "",
|
|
||||||
}, {
|
|
||||||
Timestamp: uint64(time.Now().UnixNano()),
|
|
||||||
Body: "test log",
|
|
||||||
Attributes_string: map[string]string{},
|
|
||||||
Resources_string: map[string]string{
|
|
||||||
"k8s.deployment.name": "ingress",
|
|
||||||
},
|
|
||||||
SeverityText: entry.Info.String(),
|
|
||||||
SeverityNumber: uint8(entry.Info),
|
|
||||||
SpanID: "",
|
|
||||||
TraceID: "",
|
|
||||||
}}
|
|
||||||
|
|
||||||
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
|
|
||||||
context.Background(),
|
|
||||||
[]Pipeline{testPipeline},
|
|
||||||
testLogs,
|
|
||||||
)
|
|
||||||
require.Nil(err)
|
|
||||||
require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
|
|
||||||
require.Equal(2, len(result))
|
|
||||||
for _, processedLog := range result {
|
|
||||||
require.Equal(processedLog.Attributes_string["test"], "test-value")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -58,8 +58,10 @@ func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {
		res.OrderBy = val[0]
	}
	if val, ok := params[ORDER]; ok {
		if val[0] == ASC || val[0] == DESC {
			res.Order = val[0]
		}
	}
	if val, ok := params["q"]; ok {
		res.Query = val[0]
	}
@ -1,6 +1,8 @@
|
|||||||
package logs
|
package logs
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
. "github.com/smartystreets/goconvey/convey"
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
@@ -432,3 +434,51 @@ func TestGenerateSQLQuery(t *testing.T) {
})
}
}
+
+var parseLogFilterParams = []struct {
+Name string
+ReqParams string
+ExpectedLogFilterParams *model.LogsFilterParams
+}{
+{
+Name: "test with proper order by",
+ReqParams: "order=desc&q=service.name='myservice'&limit=10",
+ExpectedLogFilterParams: &model.LogsFilterParams{
+Limit: 10,
+OrderBy: "timestamp",
+Order: DESC,
+Query: "service.name='myservice'",
+},
+},
+{
+Name: "test with proper order by asc",
+ReqParams: "order=asc&q=service.name='myservice'&limit=10",
+ExpectedLogFilterParams: &model.LogsFilterParams{
+Limit: 10,
+OrderBy: "timestamp",
+Order: ASC,
+Query: "service.name='myservice'",
+},
+},
+{
+Name: "test with incorrect order by",
+ReqParams: "order=undefined&q=service.name='myservice'&limit=10",
+ExpectedLogFilterParams: &model.LogsFilterParams{
+Limit: 10,
+OrderBy: "timestamp",
+Order: DESC,
+Query: "service.name='myservice'",
+},
+},
+}
+
+func TestParseLogFilterParams(t *testing.T) {
+for _, test := range parseLogFilterParams {
+Convey(test.Name, t, func() {
+req := httptest.NewRequest(http.MethodGet, "/logs?"+test.ReqParams, nil)
+params, err := ParseLogFilterParams(req)
+So(err, ShouldBeNil)
+So(params, ShouldEqual, test.ExpectedLogFilterParams)
+})
+}
+}
@@ -73,7 +73,6 @@ type Reader interface {
LiveTailLogsV3(ctx context.Context, query string, timestampStart uint64, idStart string, client *v3.LogsLiveTailClient)

GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error)
-GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error)
GetTotalSpans(ctx context.Context) (uint64, error)
GetTotalLogs(ctx context.Context) (uint64, error)

@@ -265,6 +265,9 @@ type SearchSpanResponseItem struct {
TagMap map[string]string `json:"tagMap"`
Events []string `json:"event"`
RootName string `json:"rootName"`
+StatusMessage string `json:"statusMessage"`
+StatusCodeString string `json:"statusCodeString"`
+SpanKind string `json:"spanKind"`
}

type OtelSpanRef struct {

@@ -301,7 +304,7 @@ func (item *SearchSpanResponseItem) GetValues() []interface{} {
keys = append(keys, k)
values = append(values, v)
}
-returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError}
+returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError, item.StatusMessage, item.StatusCodeString, item.SpanKind}

return returnArray
}
@@ -118,6 +118,12 @@ func easyjson6ff3ac1dDecodeGoSignozIoSignozPkgQueryServiceModel(in *jlexer.Lexer
}
case "rootName":
out.RootName = string(in.String())
+case "statusMessage":
+out.StatusMessage = string(in.String())
+case "statusCodeString":
+out.StatusCodeString = string(in.String())
+case "spanKind":
+out.SpanKind = string(in.String())
default:
in.SkipRecursive()
}

@@ -233,6 +239,21 @@ func easyjson6ff3ac1dEncodeGoSignozIoSignozPkgQueryServiceModel(out *jwriter.Wri
out.RawString(prefix)
out.String(string(in.RootName))
}
+{
+const prefix string = ",\"statusMessage\":"
+out.RawString(prefix)
+out.String(string(in.StatusMessage))
+}
+{
+const prefix string = ",\"statusCodeString\":"
+out.RawString(prefix)
+out.String(string(in.StatusCodeString))
+}
+{
+const prefix string = ",\"spanKind\":"
+out.RawString(prefix)
+out.String(string(in.SpanKind))
+}
out.RawByte('}')
}

@@ -401,7 +401,10 @@ type CompositeQuery struct {
PromQueries map[string]*PromQuery `json:"promQueries,omitempty"`
PanelType PanelType `json:"panelType"`
QueryType QueryType `json:"queryType"`
+// Unit for the time series data shown in the graph
+// This is used in alerts to format the value and threshold
Unit string `json:"unit,omitempty"`
+// FillGaps is used to fill the gaps in the time series data
FillGaps bool `json:"fillGaps,omitempty"`
}

@@ -990,10 +993,16 @@ type QueryRangeResponse struct {

type TableColumn struct {
Name string `json:"name"`
+// QueryName is the name of the query that this column belongs to
+QueryName string `json:"queryName"`
+// IsValueColumn is true if this column is a value column
+// i.e it is the column that contains the actual value that is being plotted
+IsValueColumn bool `json:"isValueColumn"`
}

type TableRow struct {
-Data []interface{} `json:"data"`
+Data map[string]interface{} `json:"data"`
+QueryName string `json:"-"`
}

type Table struct {
@@ -46,6 +46,9 @@ func fillGap(series *v3.Series, start, end, step int64) *v3.Series {

// TODO(srikanthccv): can WITH FILL be perfect substitute for all cases https://clickhouse.com/docs/en/sql-reference/statements/select/order-by#order-by-expr-with-fill-modifier
func FillGaps(results []*v3.Result, params *v3.QueryRangeParamsV3) {
+if params.CompositeQuery.PanelType != v3.PanelTypeGraph {
+return
+}
for _, result := range results {
// A `result` item in `results` contains the query result for individual query.
// If there are no series in the result, we add empty series and `fillGap` adds all zeros
@@ -43,6 +43,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
+PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",

@@ -82,6 +83,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
+PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",

@@ -121,6 +123,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
+PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",

@@ -142,6 +145,39 @@ func TestFillGaps(t *testing.T) {
}),
},
},
+{
+name: "Single series with gaps and panel type is not graph",
+results: []*v3.Result{
+createResult("query1", []*v3.Series{
+createSeries([]v3.Point{
+{Timestamp: 1000, Value: 1.0},
+{Timestamp: 3000, Value: 3.0},
+}),
+}),
+},
+params: &v3.QueryRangeParamsV3{
+Start: 1000,
+End: 5000,
+CompositeQuery: &v3.CompositeQuery{
+PanelType: v3.PanelTypeList,
+BuilderQueries: map[string]*v3.BuilderQuery{
+"query1": {
+QueryName: "query1",
+Expression: "query1",
+StepInterval: 1,
+},
+},
+},
+},
+expected: []*v3.Result{
+createResult("query1", []*v3.Series{
+createSeries([]v3.Point{
+{Timestamp: 1000, Value: 1.0},
+{Timestamp: 3000, Value: 3.0},
+}),
+}),
+},
+},
}

// Execute test cases
@@ -2,6 +2,7 @@ package postprocess

import (
"fmt"
+"math"
"sort"
"strings"

@@ -9,20 +10,21 @@ import (
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

-func getAutoColNameForQuery(queryName string, params *v3.QueryRangeParamsV3) string {
-q := params.CompositeQuery.BuilderQueries[queryName]
-if q.DataSource == v3.DataSourceTraces || q.DataSource == v3.DataSourceLogs {
-if q.AggregateAttribute.Key != "" {
-return fmt.Sprintf("%s(%s)", q.AggregateOperator, q.AggregateAttribute.Key)
+func roundToTwoDecimal(number float64) float64 {
+// Handle very small numbers
+if math.Abs(number) < 0.000001 {
+return 0
}
-return string(q.AggregateOperator)
-} else if q.DataSource == v3.DataSourceMetrics {
-if q.SpaceAggregation != "" && params.Version == "v4" {
-return fmt.Sprintf("%s(%s)", q.SpaceAggregation, q.AggregateAttribute.Key)
+// Determine the number of decimal places to round to
+decimalPlaces := 2
+if math.Abs(number) < 0.01 {
+decimalPlaces = int(math.Ceil(-math.Log10(math.Abs(number)))) + 1
}
-return fmt.Sprintf("%s(%s)", q.AggregateOperator, q.AggregateAttribute.Key)
-}
-return queryName
+// Round to the determined number of decimal places
+scale := math.Pow(10, float64(decimalPlaces))
+return math.Round(number*scale) / scale
}

func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRangeParamsV3) []*v3.Result {
@@ -55,10 +57,10 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
// There will be one column for each label key and one column for each query name
columns := make([]*v3.TableColumn, 0, len(labelKeys)+len(results))
for _, key := range labelKeys {
-columns = append(columns, &v3.TableColumn{Name: key})
+columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
}
for _, result := range results {
-columns = append(columns, &v3.TableColumn{Name: result.QueryName})
+columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
}

// Create a map to store unique rows

@@ -72,8 +74,8 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan

// Create a key for the row based on labels
var keyParts []string
-rowData := make([]interface{}, len(columns))
-for i, key := range labelKeys {
+rowData := make(map[string]interface{}, len(columns))
+for _, key := range labelKeys {
value := "n/a"
for _, labels := range series.LabelsArray {
if v, ok := labels[key]; ok {

@@ -82,21 +84,21 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
}
}
keyParts = append(keyParts, fmt.Sprintf("%s=%s", key, value))
-rowData[i] = value
+rowData[key] = value
}
rowKey := strings.Join(keyParts, ",")

// Get or create the row
row, ok := rowMap[rowKey]
if !ok {
-row = &v3.TableRow{Data: rowData}
+row = &v3.TableRow{Data: rowData, QueryName: result.QueryName}
rowMap[rowKey] = row
}

// Add the value for this query
-for i, col := range columns {
+for _, col := range columns {
if col.Name == result.QueryName {
-row.Data[i] = series.Points[0].Value
+row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
break
}
}

@@ -106,11 +108,6 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
// Convert rowMap to a slice of TableRows
rows := make([]*v3.TableRow, 0, len(rowMap))
for _, row := range rowMap {
-for i, value := range row.Data {
-if value == nil {
-row.Data[i] = "n/a"
-}
-}
rows = append(rows, row)
}

@@ -122,11 +119,15 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
sort.Strings(queryNames)

// Sort rows based on OrderBy from BuilderQueries
-sortRows(rows, columns, params.CompositeQuery.BuilderQueries, queryNames)
+sortRows(rows, params.CompositeQuery.BuilderQueries, queryNames)

-for _, column := range columns {
-if _, exists := params.CompositeQuery.BuilderQueries[column.Name]; exists {
-column.Name = getAutoColNameForQuery(column.Name, params)
+for _, row := range rows {
+for _, col := range columns {
+if col.IsValueColumn {
+if row.Data[col.Name] == nil {
+row.Data[col.Name] = "n/a"
+}
+}
}
}

@@ -141,9 +142,11 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
return []*v3.Result{&tableResult}
}

-func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map[string]*v3.BuilderQuery, queryNames []string) {
+func sortRows(rows []*v3.TableRow, builderQueries map[string]*v3.BuilderQuery, queryNames []string) {
+// use reverse order of queryNames
+for i := len(queryNames) - 1; i >= 0; i-- {
+queryName := queryNames[i]
sort.SliceStable(rows, func(i, j int) bool {
-for _, queryName := range queryNames {
query := builderQueries[queryName]
orderByList := query.OrderBy
if len(orderByList) == 0 {

@@ -155,23 +158,12 @@ func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map
if name == constants.SigNozOrderByValue {
name = queryName
}
-colIndex := -1
-for k, col := range columns {
-if col.Name == name {
-colIndex = k
-break
-}
-}
-if colIndex == -1 {
-continue
-}

-valI := rows[i].Data[colIndex]
-valJ := rows[j].Data[colIndex]
+valI := rows[i].Data[name]
+valJ := rows[j].Data[name]

-// Handle "n/a" values
-if valI == "n/a" && valJ == "n/a" {
-continue
+if valI == nil || valJ == nil {
+return rows[i].QueryName < rows[j].QueryName
}

// Compare based on the data type

@@ -211,9 +203,9 @@ func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map
}
}
}
-}
return false
})
+}
}

func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
@@ -248,11 +240,11 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
// So we create a column for each query name that has at least one point
columns := make([]*v3.TableColumn, 0)
for _, key := range labelKeys {
-columns = append(columns, &v3.TableColumn{Name: key})
+columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
}
for _, result := range results {
if len(result.Series) > 0 && len(result.Series[0].Points) > 0 {
-columns = append(columns, &v3.TableColumn{Name: result.QueryName})
+columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
}
}

@@ -261,8 +253,8 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
for _, series := range result.Series {

// Create a key for the row based on labels
-rowData := make([]interface{}, len(columns))
-for i, key := range labelKeys {
+rowData := make(map[string]interface{}, len(columns))
+for _, key := range labelKeys {
value := "n/a"
for _, labels := range series.LabelsArray {
if v, ok := labels[key]; ok {

@@ -270,16 +262,16 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
break
}
}
-rowData[i] = value
+rowData[key] = value
}

// Get or create the row
-row := &v3.TableRow{Data: rowData}
+row := &v3.TableRow{Data: rowData, QueryName: result.QueryName}

// Add the value for this query
-for i, col := range columns {
+for _, col := range columns {
if col.Name == result.QueryName && len(series.Points) > 0 {
-row.Data[i] = series.Points[0].Value
+row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
break
}
}

@@ -287,6 +279,16 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
}
}

+for _, row := range rows {
+for _, col := range columns {
+if col.IsValueColumn {
+if row.Data[col.Name] == nil {
+row.Data[col.Name] = "n/a"
+}
+}
+}
+}
+
// Create the final result
tableResult := v3.Result{
Table: &v3.Table{
@@ -1,6 +1,7 @@
package postprocess

import (
+"bytes"
"encoding/json"
"reflect"
"testing"

@@ -21,9 +22,9 @@ func TestSortRows(t *testing.T) {
{
name: "Sort by single numeric query, ascending order",
rows: []*v3.TableRow{
-{Data: []interface{}{"service2", 20.0}},
-{Data: []interface{}{"service1", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -34,17 +35,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0}},
-{Data: []interface{}{"service2", 20.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
},
{
name: "Sort by single numeric query, descending order",
rows: []*v3.TableRow{
-{Data: []interface{}{"service2", 20.0}},
-{Data: []interface{}{"service1", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -55,17 +56,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service3", 30.0}},
-{Data: []interface{}{"service2", 20.0}},
-{Data: []interface{}{"service1", 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
},
},
{
name: "Sort by single string query, ascending order",
rows: []*v3.TableRow{
-{Data: []interface{}{"service2", "b"}},
-{Data: []interface{}{"service1", "c"}},
-{Data: []interface{}{"service3", "a"}},
+{Data: map[string]interface{}{"service": "service2", "A": "b"}},
+{Data: map[string]interface{}{"service": "service1", "A": "c"}},
+{Data: map[string]interface{}{"service": "service3", "A": "a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -76,18 +77,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service3", "a"}},
-{Data: []interface{}{"service2", "b"}},
-{Data: []interface{}{"service1", "c"}},
+{Data: map[string]interface{}{"service": "service3", "A": "a"}},
+{Data: map[string]interface{}{"service": "service2", "A": "b"}},
+{Data: map[string]interface{}{"service": "service1", "A": "c"}},
},
},
{
name: "Sort with n/a values",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0, "n/a"}},
-{Data: []interface{}{"service2", "n/a", 15.0}},
-{Data: []interface{}{"service3", 30.0, 25.0}},
-{Data: []interface{}{"service4", "n/a", "n/a"}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
+{Data: map[string]interface{}{"service": "service4"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@@ -100,43 +101,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A", "B"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0, "n/a"}},
-{Data: []interface{}{"service3", 30.0, 25.0}},
-{Data: []interface{}{"service4", "n/a", "n/a"}},
-{Data: []interface{}{"service2", "n/a", 15.0}},
-},
-},
-{
-name: "Sort with different data types",
-rows: []*v3.TableRow{
-{Data: []interface{}{"service1", "string", 10.0, true}},
-{Data: []interface{}{"service2", 20.0, "string", false}},
-{Data: []interface{}{"service3", true, 30.0, "string"}},
-},
-columns: []*v3.TableColumn{
-{Name: "service_name"},
-{Name: "A"},
-{Name: "B"},
-{Name: "C"},
-},
-builderQueries: map[string]*v3.BuilderQuery{
-"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
-"B": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}},
-"C": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
-},
-queryNames: []string{"A", "B", "C"},
-expected: []*v3.TableRow{
-{Data: []interface{}{"service2", 20.0, "string", false}},
-{Data: []interface{}{"service1", "string", 10.0, true}},
-{Data: []interface{}{"service3", true, 30.0, "string"}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
+{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
+{Data: map[string]interface{}{"service": "service4"}},
},
},
{
name: "Sort with SigNozOrderByValue",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -147,44 +123,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service3", 30.0}},
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-},
-},
-{
-name: "Sort by multiple queries with mixed types",
-rows: []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0, "b", true}},
-{Data: []interface{}{"service2", 20.0, "a", false}},
-{Data: []interface{}{"service3", 10.0, "c", true}},
-{Data: []interface{}{"service4", 20.0, "b", false}},
-},
-columns: []*v3.TableColumn{
-{Name: "service_name"},
-{Name: "A"},
-{Name: "B"},
-{Name: "C"},
-},
-builderQueries: map[string]*v3.BuilderQuery{
-"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
-"B": {OrderBy: []v3.OrderBy{{ColumnName: "B", Order: "desc"}}},
-"C": {OrderBy: []v3.OrderBy{{ColumnName: "C", Order: "asc"}}},
-},
-queryNames: []string{"A", "B", "C"},
-expected: []*v3.TableRow{
-{Data: []interface{}{"service3", 10.0, "c", true}},
-{Data: []interface{}{"service1", 10.0, "b", true}},
-{Data: []interface{}{"service4", 20.0, "b", false}},
-{Data: []interface{}{"service2", 20.0, "a", false}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
},
},
{
name: "Sort with all n/a values",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", "n/a", "n/a"}},
-{Data: []interface{}{"service2", "n/a", "n/a"}},
-{Data: []interface{}{"service3", "n/a", "n/a"}},
+{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
+{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
+{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@@ -197,18 +146,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A", "B"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service1", "n/a", "n/a"}},
-{Data: []interface{}{"service2", "n/a", "n/a"}},
-{Data: []interface{}{"service3", "n/a", "n/a"}},
+{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
+{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
+{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
},
},
{
name: "Sort with negative numbers",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", -10.0}},
-{Data: []interface{}{"service2", 20.0}},
-{Data: []interface{}{"service3", -30.0}},
-{Data: []interface{}{"service4", 0.0}},
+{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
+{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -219,19 +168,19 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service3", -30.0}},
-{Data: []interface{}{"service1", -10.0}},
-{Data: []interface{}{"service4", 0.0}},
-{Data: []interface{}{"service2", 20.0}},
+{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
+{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
},
},
{
name: "Sort with mixed case strings",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", "Apple"}},
-{Data: []interface{}{"service2", "banana"}},
-{Data: []interface{}{"service3", "Cherry"}},
-{Data: []interface{}{"service4", "date"}},
+{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
+{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
+{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
+{Data: map[string]interface{}{"service": "service4", "A": "date"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -242,19 +191,19 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service1", "Apple"}},
-{Data: []interface{}{"service3", "Cherry"}},
-{Data: []interface{}{"service2", "banana"}},
-{Data: []interface{}{"service4", "date"}},
+{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
+{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
+{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
+{Data: map[string]interface{}{"service": "service4", "A": "date"}},
},
},
{
name: "Sort with empty strings",
rows: []*v3.TableRow{
-{Data: []interface{}{"service1", ""}},
-{Data: []interface{}{"service2", "b"}},
-{Data: []interface{}{"service3", ""}},
-{Data: []interface{}{"service4", "a"}},
+{Data: map[string]interface{}{"service": "service1", "A": ""}},
+{Data: map[string]interface{}{"service": "service2", "A": "b"}},
+{Data: map[string]interface{}{"service": "service3", "A": ""}},
+{Data: map[string]interface{}{"service": "service4", "A": "a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},

@@ -265,17 +214,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
-{Data: []interface{}{"service1", ""}},
-{Data: []interface{}{"service3", ""}},
-{Data: []interface{}{"service4", "a"}},
-{Data: []interface{}{"service2", "b"}},
+{Data: map[string]interface{}{"service": "service1", "A": ""}},
+{Data: map[string]interface{}{"service": "service3", "A": ""}},
+{Data: map[string]interface{}{"service": "service4", "A": "a"}},
+{Data: map[string]interface{}{"service": "service2", "A": "b"}},
},
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
-sortRows(tt.rows, tt.columns, tt.builderQueries, tt.queryNames)
+sortRows(tt.rows, tt.builderQueries, tt.queryNames)
if !reflect.DeepEqual(tt.rows, tt.expected) {
exp, _ := json.Marshal(tt.expected)
got, _ := json.Marshal(tt.rows)
@@ -287,24 +236,20 @@ func TestSortRows(t *testing.T) {

func TestSortRowsWithEmptyQueries(t *testing.T) {
rows := []*v3.TableRow{
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
-}
-columns := []*v3.TableColumn{
-{Name: "service_name"},
-{Name: "A"},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
builderQueries := map[string]*v3.BuilderQuery{}
queryNames := []string{}

-sortRows(rows, columns, builderQueries, queryNames)
+sortRows(rows, builderQueries, queryNames)

// Expect the original order to be maintained
expected := []*v3.TableRow{
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}

if !reflect.DeepEqual(rows, expected) {

@@ -314,26 +259,22 @@ func TestSortRowsWithEmptyQueries(t *testing.T) {

func TestSortRowsWithInvalidColumnName(t *testing.T) {
rows := []*v3.TableRow{
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
-}
-columns := []*v3.TableColumn{
-{Name: "service_name"},
-{Name: "A"},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
builderQueries := map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: "InvalidColumn", Order: "asc"}}},
}
queryNames := []string{"A"}

-sortRows(rows, columns, builderQueries, queryNames)
+sortRows(rows, builderQueries, queryNames)

// Expect the original order to be maintained
expected := []*v3.TableRow{
-{Data: []interface{}{"service1", 20.0}},
-{Data: []interface{}{"service2", 10.0}},
-{Data: []interface{}{"service3", 30.0}},
+{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
+{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}

if !reflect.DeepEqual(rows, expected) {

@@ -343,27 +284,22 @@ func TestSortRowsWithInvalidColumnName(t *testing.T) {

func TestSortRowsStability(t *testing.T) {
rows := []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0, "a"}},
-{Data: []interface{}{"service2", 10.0, "b"}},
-{Data: []interface{}{"service3", 10.0, "c"}},
-}
-columns := []*v3.TableColumn{
-{Name: "service_name"},
-{Name: "A"},
-{Name: "B"},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
+{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
}
builderQueries := map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
}
queryNames := []string{"A"}

-sortRows(rows, columns, builderQueries, queryNames)
+sortRows(rows, builderQueries, queryNames)

// Expect the original order to be maintained for equal values
expected := []*v3.TableRow{
-{Data: []interface{}{"service1", 10.0, "a"}},
-{Data: []interface{}{"service2", 10.0, "b"}},
-{Data: []interface{}{"service3", 10.0, "c"}},
+{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
+{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
+{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
}

if !reflect.DeepEqual(rows, expected) {
@@ -404,10 +340,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
-{Name: "A"},
+{Name: "A", QueryName: "A", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"frontend", 10.0}},
+{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
},
},
},

@@ -421,7 +357,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Series: []*v3.Series{
{
LabelsArray: []map[string]string{
-{"service": "frontend", "env": "prod"},
+{"service": "frontend"},
+{"env": "prod"},
},
Points: []v3.Point{
{Value: 10.0},

@@ -429,7 +366,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
},
{
LabelsArray: []map[string]string{
-{"service": "backend", "env": "prod"},
+{"service": "backend"},
+{"env": "prod"},
},
Points: []v3.Point{
{Value: 20.0},

@@ -442,7 +380,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Series: []*v3.Series{
{
LabelsArray: []map[string]string{
-{"service": "frontend", "env": "prod"},
+{"service": "frontend"},
+{"env": "prod"},
},
Points: []v3.Point{
{Value: 15.0},

@@ -450,7 +389,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
},
{
LabelsArray: []map[string]string{
-{"service": "backend", "env": "prod"},
+{"service": "backend"},
+{"env": "prod"},
},
Points: []v3.Point{
{Value: 25.0},

@@ -465,14 +405,14 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "env"},
-{Name: "A"},
-{Name: "B"},
+{Name: "A", QueryName: "A", IsValueColumn: true},
+{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"frontend", "prod", 10.0, nil}},
-{Data: []interface{}{"backend", "prod", 20.0, nil}},
-{Data: []interface{}{"frontend", "prod", nil, 15.0}},
-{Data: []interface{}{"backend", "prod", nil, 25.0}},
+{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": 10.0, "B": "n/a"}},
+{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": 20.0, "B": "n/a"}},
+{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": "n/a", "B": 15.0}},
+{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": "n/a", "B": 25.0}},
},
},
},
@@ -514,12 +454,12 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "env"},
-{Name: "A"},
-{Name: "B"},
+{Name: "A", QueryName: "A", IsValueColumn: true},
+{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"frontend", "n/a", 10.0, nil}},
-{Data: []interface{}{"n/a", "prod", nil, 20.0}},
+{Data: map[string]interface{}{"service": "frontend", "env": "n/a", "A": 10.0, "B": "n/a"}},
+{Data: map[string]interface{}{"service": "n/a", "env": "prod", "A": "n/a", "B": 20.0}},
},
},
},

@@ -551,10 +491,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
-{Name: "A"},
+{Name: "A", QueryName: "A", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"frontend", 10.0}},
+{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
},
},
},

@@ -593,11 +533,11 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
-{Name: "B"},
+{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"frontend", nil}},
-{Data: []interface{}{"backend", 20.0}},
+{Data: map[string]interface{}{"service": "frontend", "B": "n/a"}},
+{Data: map[string]interface{}{"service": "backend", "B": 20.0}},
},
},
},

@@ -608,8 +548,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := TransformToTableForClickHouseQueries(tt.input)
-if !reflect.DeepEqual(result, tt.expected) {
-t.Errorf("TransformToTableForClickHouseQueries() = %v, want %v", result, tt.expected)
+exp, _ := json.Marshal(tt.expected)
+got, _ := json.Marshal(result)
+if !bytes.Equal(got, exp) {
+t.Errorf("TransformToTableForClickHouseQueries() = %v, want %v", string(got), string(exp))
}
})
}
@@ -650,19 +592,21 @@ func TestTransformToTableForClickHouseQueriesSorting(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
-{Name: "A"},
-{Name: "B"},
+{Name: "A", QueryName: "A", IsValueColumn: true},
+{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
-{Data: []interface{}{"backend", 20.0, nil}},
-{Data: []interface{}{"frontend", nil, 10.0}},
+{Data: map[string]interface{}{"service": "backend", "A": 20.0, "B": "n/a"}},
+{Data: map[string]interface{}{"service": "frontend", "A": "n/a", "B": 10.0}},
},
},
},
}

result := TransformToTableForClickHouseQueries(input)
-if !reflect.DeepEqual(result, expected) {
-t.Errorf("TransformToTableForClickHouseQueries() sorting test failed. Got %v, want %v", result, expected)
+exp, _ := json.Marshal(expected)
+got, _ := json.Marshal(result)
+if !bytes.Equal(got, exp) {
+t.Errorf("TransformToTableForClickHouseQueries() sorting test failed. Got %v, want %v", string(got), string(exp))
}
}
@@ -53,17 +53,8 @@ func Parse(filters *v3.FilterSet) (string, error) {
return "", fmt.Errorf("operator not supported")
}

-// TODO(Raj): Remove the use of dot replaced alternative when key
-// contains underscore after dots are supported in keys
-names := []string{getName(v.Key)}
-if strings.Contains(v.Key.Key, "_") {
-dotKey := v.Key
-dotKey.Key = strings.Replace(v.Key.Key, "_", ".", -1)
-names = append(names, getName(dotKey))
-}
+name := getName(v.Key)

-filterParts := []string{}
-for _, name := range names {
var filter string

switch v.Operator {

@@ -91,11 +82,6 @@ func Parse(filters *v3.FilterSet) (string, error) {
}
}

-filterParts = append(filterParts, filter)
-}

-filter := strings.Join(filterParts, " || ")

// check if the filter is a correct expression language
_, err := expr.Compile(filter)
if err != nil {
@@ -2,6 +2,7 @@ package rules

import (
"context"
+"encoding/json"
"fmt"
"strconv"
"time"

@@ -9,6 +10,7 @@ import (
"github.com/jmoiron/sqlx"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
+"go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)

@@ -43,6 +45,9 @@ type RuleDB interface {

// GetAllPlannedMaintenance fetches the maintenance definitions from db
GetAllPlannedMaintenance(ctx context.Context) ([]PlannedMaintenance, error)

+// used for internal telemetry
+GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
}

type StoredRule struct {

@@ -295,3 +300,33 @@ func (r *ruleDB) EditPlannedMaintenance(ctx context.Context, maintenance Planned

return "", nil
}
+
+func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
+alertsInfo := model.AlertsInfo{}
+// fetch alerts from rules db
+query := "SELECT data FROM rules"
+var alertsData []string
+err := r.Select(&alertsData, query)
+if err != nil {
+zap.L().Error("Error in processing sql query", zap.Error(err))
+return &alertsInfo, err
+}
+for _, alert := range alertsData {
+var rule GettableRule
+err = json.Unmarshal([]byte(alert), &rule)
+if err != nil {
+zap.L().Error("invalid rule data", zap.Error(err))
+continue
+}
+if rule.AlertType == "LOGS_BASED_ALERT" {
+alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
+} else if rule.AlertType == "METRIC_BASED_ALERT" {
+alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
+} else if rule.AlertType == "TRACES_BASED_ALERT" {
+alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
+}
+alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
+}
+
+return &alertsInfo, nil
+}
@@ -25,6 +25,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils/labels"
)

@@ -112,6 +113,8 @@ func NewManager(o *ManagerOptions) (*Manager, error) {

db := NewRuleDB(o.DBConn)

+telemetry.GetInstance().SetAlertsInfoCallback(db.GetAlertsInfo)
+
m := &Manager{
tasks: map[string]Task{},
rules: map[string]Rule{},
@ -111,13 +111,22 @@ func (r *PromRule) Condition() *RuleCondition {
     return r.ruleCondition
 }
 
+// targetVal returns the target value for the rule condition
+// when the y-axis and target units are non-empty, it
+// converts the target value to the y-axis unit
 func (r *PromRule) targetVal() float64 {
     if r.ruleCondition == nil || r.ruleCondition.Target == nil {
         return 0
     }
 
+    // get the converter for the target unit
     unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit))
-    value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit()))
+
+    // convert the target value to the y-axis unit
+    value := unitConverter.Convert(converter.Value{
+        F: *r.ruleCondition.Target,
+        U: converter.Unit(r.ruleCondition.TargetUnit),
+    }, converter.Unit(r.Unit()))
 
     return value.F
 }
 
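The practical effect of the reworked `targetVal` is unit alignment: if the rule's target is 2500 with a `TargetUnit` of "ms" while the query's y-axis unit (`r.Unit()`) is "s", the value compared against the series is now 2.5 rather than 2500. The sketch below illustrates that conversion with a toy `msToSeconds` helper; it is a stand-in written for this example, not the SigNoz `converter` package.

```go
package main

import "fmt"

// value is a stand-in for converter.Value: a float plus its unit.
type value struct {
	F float64
	U string
}

// msToSeconds is a toy converter covering only the "ms" -> "s" case used
// here; the real converter.FromUnit supports many unit families.
func msToSeconds(v value, target string) value {
	if v.U == "ms" && target == "s" {
		return value{F: v.F / 1000, U: "s"}
	}
	return v // unknown conversions pass through unchanged
}

func main() {
	target := 2500.0 // rule condition target, expressed in TargetUnit
	converted := msToSeconds(value{F: target, U: "ms"}, "s")
	fmt.Println(converted.F) // 2.5 – now comparable to series values plotted in seconds
}
```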
@ -370,8 +379,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
     }
     zap.L().Debug("alerting for series", zap.String("name", r.Name()), zap.Any("series", series))
 
-    thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
-    threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
+    threshold := valueFormatter.Format(r.targetVal(), r.Unit())
 
     tmplData := AlertTemplateData(l, valueFormatter.Format(alertSmpl.F, r.Unit()), threshold)
     // Inject some convenience variables that are easier to remember for users
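Because `targetVal` already returns the threshold in the y-axis unit, the dedicated `thresholdFormatter` is no longer needed: both the sampled value and the threshold are now rendered with `valueFormatter` and `r.Unit()`, so an alert would read, for example, "0.8 s" against a "2.5 s" threshold instead of mixing "0.8 s" with "2500 ms". The same substitution is made in `ThresholdRule` below.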
@ -165,13 +165,22 @@ func (r *ThresholdRule) PreferredChannels() []string {
     return r.preferredChannels
 }
 
+// targetVal returns the target value for the rule condition
+// when the y-axis and target units are non-empty, it
+// converts the target value to the y-axis unit
 func (r *ThresholdRule) targetVal() float64 {
     if r.ruleCondition == nil || r.ruleCondition.Target == nil {
         return 0
     }
 
+    // get the converter for the target unit
     unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit))
-    value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit()))
+
+    // convert the target value to the y-axis unit
+    value := unitConverter.Convert(converter.Value{
+        F: *r.ruleCondition.Target,
+        U: converter.Unit(r.ruleCondition.TargetUnit),
+    }, converter.Unit(r.Unit()))
 
     return value.F
 }
 
|
|||||||
}
|
}
|
||||||
|
|
||||||
value := valueFormatter.Format(smpl.V, r.Unit())
|
value := valueFormatter.Format(smpl.V, r.Unit())
|
||||||
thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
|
threshold := valueFormatter.Format(r.targetVal(), r.Unit())
|
||||||
threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
|
|
||||||
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
|
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
|
||||||
|
|
||||||
tmplData := AlertTemplateData(l, value, threshold)
|
tmplData := AlertTemplateData(l, value, threshold)
|
||||||
|
@ -185,6 +185,12 @@ type Telemetry struct {
|
|||||||
patTokenUser bool
|
patTokenUser bool
|
||||||
countUsers int8
|
countUsers int8
|
||||||
mutex sync.RWMutex
|
mutex sync.RWMutex
|
||||||
|
|
||||||
|
alertsInfoCallback func(ctx context.Context) (*model.AlertsInfo, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Telemetry) SetAlertsInfoCallback(callback func(ctx context.Context) (*model.AlertsInfo, error)) {
|
||||||
|
a.alertsInfoCallback = callback
|
||||||
}
|
}
|
||||||
|
|
||||||
func createTelemetry() {
|
func createTelemetry() {
|
||||||
@ -310,7 +316,7 @@ func createTelemetry() {
|
|||||||
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, user.Email, false, false)
|
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, user.Email, false, false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background())
|
alertsInfo, err := telemetry.alertsInfoCallback(context.Background())
|
||||||
if err == nil {
|
if err == nil {
|
||||||
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
|
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
@ -192,7 +192,7 @@ services:
     <<: *db-depend
 
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@ -205,7 +205,7 @@ services:
       # condition: service_healthy
 
   otel-collector:
-    image: signoz/signoz-otel-collector:0.102.0
+    image: signoz/signoz-otel-collector:0.102.1
     container_name: signoz-otel-collector
     command:
       [