Merge pull request #5418 from SigNoz/release/v0.49.x

Release/v0.49.x
This commit is contained in:
Prashant Shahi 2024-07-03 18:54:14 +05:30 committed by GitHub
commit 858a0cb0de
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
84 changed files with 1939 additions and 726 deletions

View File

@ -347,7 +347,7 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
```bash
kubectl -n sample-application run strzal --image=djbingham/curl \
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
```
**5.1.3 To stop the load generation:**

View File

@ -188,3 +188,4 @@ test:
go test ./pkg/query-service/tests/integration/...
go test ./pkg/query-service/rules/...
go test ./pkg/query-service/collectorsimulator/...
go test ./pkg/query-service/postprocess/...

View File

@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.48.1
image: signoz/query-service:0.49.0
command:
[
"-config=/root/config/prometheus.yml",
@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.102.0
image: signoz/signoz-otel-collector:0.102.1
command:
[
"--config=/etc/otel-collector-config.yaml",
@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.102.0
image: signoz/signoz-schema-migrator:0.102.1
deploy:
restart_policy:
condition: on-failure

View File

@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.102.0
image: signoz/signoz-otel-collector:0.102.1
command:
[
"--config=/etc/otel-collector-config.yaml",

View File

@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.48.1}
image: signoz/query-service:${DOCKER_TAG:-0.49.0}
container_name: signoz-query-service
command:
[
@ -204,7 +204,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.48.1}
image: signoz/frontend:${DOCKER_TAG:-0.49.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@ -216,7 +216,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -230,7 +230,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.1}
container_name: signoz-otel-collector
command:
[

View File

@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.48.1}
image: signoz/query-service:${DOCKER_TAG:-0.49.0}
container_name: signoz-query-service
command:
[
@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.48.1}
image: signoz/frontend:${DOCKER_TAG:-0.49.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.1}
container_name: signoz-otel-collector
command:
[

View File

@ -88,7 +88,7 @@
"lucide-react": "0.379.0",
"mini-css-extract-plugin": "2.4.5",
"papaparse": "5.4.1",
"posthog-js": "1.140.1",
"posthog-js": "1.142.1",
"rc-tween-one": "3.0.6",
"react": "18.2.0",
"react-addons-update": "15.6.3",

View File

@ -5,7 +5,13 @@ import { Button, Dropdown, MenuProps } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useState } from 'react';
function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
function DropDown({
element,
onDropDownItemClick,
}: {
element: JSX.Element[];
onDropDownItemClick?: MenuProps['onClick'];
}): JSX.Element {
const isDarkMode = useIsDarkMode();
const items: MenuProps['items'] = element.map(
@ -23,6 +29,7 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
items,
onMouseEnter: (): void => setDdOpen(true),
onMouseLeave: (): void => setDdOpen(false),
onClick: (item): void => onDropDownItemClick?.(item),
}}
open={isDdOpen}
>
@ -40,4 +47,8 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
);
}
DropDown.defaultProps = {
onDropDownItemClick: (): void => {},
};
export default DropDown;

View File

@ -62,8 +62,6 @@ function RawLogView({
const isDarkMode = useIsDarkMode();
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
const severityText = data.severity_text ? `${data.severity_text} |` : '';
const logType = getLogIndicatorType(data);
const updatedSelecedFields = useMemo(
@ -88,17 +86,16 @@ function RawLogView({
attributesText += ' | ';
}
const text = useMemo(
() =>
const text = useMemo(() => {
const date =
typeof data.timestamp === 'string'
? `${dayjs(data.timestamp).format(
'YYYY-MM-DD HH:mm:ss.SSS',
)} | ${attributesText} ${severityText} ${data.body}`
: `${dayjs(data.timestamp / 1e6).format(
'YYYY-MM-DD HH:mm:ss.SSS',
)} | ${attributesText} ${severityText} ${data.body}`,
[data.timestamp, data.body, severityText, attributesText],
);
? dayjs(data.timestamp)
: dayjs(data.timestamp / 1e6);
return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
data.body
}`;
}, [data.timestamp, data.body, attributesText]);
const handleClickExpand = useCallback(() => {
if (activeContextLog || isReadOnly) return;

View File

@ -2,7 +2,9 @@
import './DynamicColumnTable.syles.scss';
import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd';
import { ColumnGroupType, ColumnType } from 'antd/es/table';
import { ColumnsType } from 'antd/lib/table';
import logEvent from 'api/common/logEvent';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { SlidersHorizontal } from 'lucide-react';
import { memo, useEffect, useState } from 'react';
@ -22,6 +24,7 @@ function DynamicColumnTable({
dynamicColumns,
onDragColumn,
facingIssueBtn,
shouldSendAlertsLogEvent,
...restProps
}: DynamicColumnTableProps): JSX.Element {
const [columnsData, setColumnsData] = useState<ColumnsType | undefined>(
@ -47,11 +50,18 @@ function DynamicColumnTable({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [columns, dynamicColumns]);
const onToggleHandler = (index: number) => (
checked: boolean,
event: React.MouseEvent<HTMLButtonElement>,
): void => {
const onToggleHandler = (
index: number,
column: ColumnGroupType<any> | ColumnType<any>,
) => (checked: boolean, event: React.MouseEvent<HTMLButtonElement>): void => {
event.stopPropagation();
if (shouldSendAlertsLogEvent) {
logEvent('Alert: Column toggled', {
column: column?.title,
action: checked ? 'Enable' : 'Disable',
});
}
setVisibleColumns({
tablesource,
dynamicColumns,
@ -75,7 +85,7 @@ function DynamicColumnTable({
<div>{column.title?.toString()}</div>
<Switch
checked={columnsData?.findIndex((c) => c.key === column.key) !== -1}
onChange={onToggleHandler(index)}
onChange={onToggleHandler(index, column)}
/>
</div>
),

View File

@ -3,6 +3,7 @@
import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import { dragColumnParams } from 'hooks/useDragColumns/configs';
import { set } from 'lodash-es';
import {
SyntheticEvent,
useCallback,
@ -20,6 +21,7 @@ import { ResizeTableProps } from './types';
function ResizeTable({
columns,
onDragColumn,
pagination,
...restProps
}: ResizeTableProps): JSX.Element {
const [columnsData, setColumns] = useState<ColumnsType>([]);
@ -58,14 +60,21 @@ function ResizeTable({
[columnsData, onDragColumn, handleResize],
);
const tableParams = useMemo(
() => ({
const tableParams = useMemo(() => {
const props = {
...restProps,
components: { header: { cell: ResizableHeader } },
columns: mergedColumns,
}),
[mergedColumns, restProps],
);
};
set(
props,
'pagination',
pagination ? { ...pagination, hideOnSinglePage: true } : false,
);
return props;
}, [mergedColumns, pagination, restProps]);
useEffect(() => {
if (columns) {

View File

@ -14,6 +14,7 @@ export interface DynamicColumnTableProps extends TableProps<any> {
dynamicColumns: TableProps<any>['columns'];
onDragColumn?: (fromIndex: number, toIndex: number) => void;
facingIssueBtn?: FacingIssueBtnProps;
shouldSendAlertsLogEvent?: boolean;
}
export type GetVisibleColumnsFunction = (

View File

@ -1,13 +1,15 @@
import { PlusOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import getAll from 'api/channels/getAll';
import logEvent from 'api/common/logEvent';
import Spinner from 'components/Spinner';
import TextToolTip from 'components/TextToolTip';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import useFetch from 'hooks/useFetch';
import history from 'lib/history';
import { useCallback } from 'react';
import { isUndefined } from 'lodash-es';
import { useCallback, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@ -31,6 +33,14 @@ function AlertChannels(): JSX.Element {
const { loading, payload, error, errorMessage } = useFetch(getAll);
useEffect(() => {
if (!isUndefined(payload)) {
logEvent('Alert Channel: Channel list page visited', {
number: payload?.length,
});
}
}, [payload]);
if (error) {
return <Typography>{errorMessage}</Typography>;
}

View File

@ -11,11 +11,12 @@ import testOpsGenie from 'api/channels/testOpsgenie';
import testPagerApi from 'api/channels/testPager';
import testSlackApi from 'api/channels/testSlack';
import testWebhookApi from 'api/channels/testWebhook';
import logEvent from 'api/common/logEvent';
import ROUTES from 'constants/routes';
import FormAlertChannels from 'container/FormAlertChannels';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { useCallback, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import {
@ -43,6 +44,10 @@ function CreateAlertChannels({
const [formInstance] = Form.useForm();
useEffect(() => {
logEvent('Alert Channel: Create channel page visited', {});
}, []);
const [selectedConfig, setSelectedConfig] = useState<
Partial<
SlackChannel &
@ -139,19 +144,25 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [prepareSlackRequest, t, notifications]);
const prepareWebhookRequest = useCallback(() => {
@ -200,19 +211,25 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [prepareWebhookRequest, t, notifications]);
const preparePagerRequest = useCallback(() => {
@ -245,8 +262,8 @@ function CreateAlertChannels({
setSavingState(true);
const request = preparePagerRequest();
if (request) {
try {
try {
if (request) {
const response = await createPagerApi(request);
if (response.statusCode === 200) {
@ -255,20 +272,31 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
} catch (e) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
}
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [t, notifications, preparePagerRequest]);
const prepareOpsgenieRequest = useCallback(
@ -295,19 +323,25 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [prepareOpsgenieRequest, t, notifications]);
const prepareEmailRequest = useCallback(
@ -332,19 +366,25 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [prepareEmailRequest, t, notifications]);
const prepareMsTeamsRequest = useCallback(
@ -370,19 +410,25 @@ function CreateAlertChannels({
description: t('channel_creation_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return { status: 'success', statusMessage: t('channel_creation_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_creation_failed'),
});
return {
status: 'failed',
statusMessage: response.error || t('channel_creation_failed'),
};
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_creation_failed'),
});
return { status: 'failed', statusMessage: t('channel_creation_failed') };
} finally {
setSavingState(false);
}
setSavingState(false);
}, [prepareMsTeamsRequest, t, notifications]);
const onSaveHandler = useCallback(
@ -400,7 +446,15 @@ function CreateAlertChannels({
const functionToCall = functionMapper[value as keyof typeof functionMapper];
if (functionToCall) {
functionToCall();
const result = await functionToCall();
logEvent('Alert Channel: Save channel', {
type: value,
sendResolvedAlert: selectedConfig.send_resolved,
name: selectedConfig.name,
new: 'true',
status: result?.status,
statusMessage: result?.statusMessage,
});
} else {
notifications.error({
message: 'Error',
@ -409,6 +463,7 @@ function CreateAlertChannels({
}
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[
onSlackHandler,
onWebhookHandler,
@ -472,14 +527,25 @@ function CreateAlertChannels({
description: t('channel_test_failed'),
});
}
logEvent('Alert Channel: Test notification', {
type: channelType,
sendResolvedAlert: selectedConfig.send_resolved,
name: selectedConfig.name,
new: 'true',
status:
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
});
} catch (error) {
notifications.error({
message: 'Error',
description: t('channel_test_unexpected'),
});
}
setTestingState(false);
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[
prepareWebhookRequest,
t,

View File

@ -1,4 +1,6 @@
import { Row, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { AlertTypes } from 'types/api/alerts/alertTypes';
@ -34,6 +36,13 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
default:
break;
}
logEvent('Alert: Sample alert link clicked', {
dataSource: ALERTS_DATA_SOURCE_MAP[option],
link: url,
page: 'New alert data source selection page',
});
window.open(url, '_blank');
}
const renderOptions = useMemo(

View File

@ -1,4 +1,5 @@
import { Form, Row } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { QueryParams } from 'constants/query';
import FormAlertRules from 'container/FormAlertRules';
@ -68,6 +69,8 @@ function CreateRules(): JSX.Element {
useEffect(() => {
if (alertType) {
onSelectType(alertType);
} else {
logEvent('Alert: New alert data source selection page visited', {});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [alertType]);

View File

@ -11,6 +11,7 @@ import testOpsgenie from 'api/channels/testOpsgenie';
import testPagerApi from 'api/channels/testPager';
import testSlackApi from 'api/channels/testSlack';
import testWebhookApi from 'api/channels/testWebhook';
import logEvent from 'api/common/logEvent';
import ROUTES from 'constants/routes';
import {
ChannelType,
@ -89,7 +90,7 @@ function EditAlertChannels({
description: t('webhook_url_required'),
});
setSavingState(false);
return;
return { status: 'failed', statusMessage: t('webhook_url_required') };
}
const response = await editSlackApi(prepareSlackRequest());
@ -101,13 +102,17 @@ function EditAlertChannels({
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
return { status: 'success', statusMessage: t('channel_edit_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [prepareSlackRequest, t, notifications, selectedConfig]);
const prepareWebhookRequest = useCallback(() => {
@ -136,13 +141,13 @@ function EditAlertChannels({
if (selectedConfig?.api_url === '') {
showError(t('webhook_url_required'));
setSavingState(false);
return;
return { status: 'failed', statusMessage: t('webhook_url_required') };
}
if (username && (!password || password === '')) {
showError(t('username_no_password'));
setSavingState(false);
return;
return { status: 'failed', statusMessage: t('username_no_password') };
}
const response = await editWebhookApi(prepareWebhookRequest());
@ -154,10 +159,15 @@ function EditAlertChannels({
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
showError(response.error || t('channel_edit_failed'));
return { status: 'success', statusMessage: t('channel_edit_done') };
}
showError(response.error || t('channel_edit_failed'));
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [prepareWebhookRequest, t, notifications, selectedConfig]);
const prepareEmailRequest = useCallback(
@ -181,13 +191,18 @@ function EditAlertChannels({
description: t('channel_edit_done'),
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
return { status: 'success', statusMessage: t('channel_edit_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [prepareEmailRequest, t, notifications]);
const preparePagerRequest = useCallback(
@ -218,7 +233,7 @@ function EditAlertChannels({
description: validationError,
});
setSavingState(false);
return;
return { status: 'failed', statusMessage: validationError };
}
const response = await editPagerApi(preparePagerRequest());
@ -229,13 +244,18 @@ function EditAlertChannels({
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
return { status: 'success', statusMessage: t('channel_edit_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [preparePagerRequest, notifications, selectedConfig, t]);
const prepareOpsgenieRequest = useCallback(
@ -259,7 +279,7 @@ function EditAlertChannels({
description: t('api_key_required'),
});
setSavingState(false);
return;
return { status: 'failed', statusMessage: t('api_key_required') };
}
const response = await editOpsgenie(prepareOpsgenieRequest());
@ -271,13 +291,18 @@ function EditAlertChannels({
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
return { status: 'success', statusMessage: t('channel_edit_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [prepareOpsgenieRequest, t, notifications, selectedConfig]);
const prepareMsTeamsRequest = useCallback(
@ -301,7 +326,7 @@ function EditAlertChannels({
description: t('webhook_url_required'),
});
setSavingState(false);
return;
return { status: 'failed', statusMessage: t('webhook_url_required') };
}
const response = await editMsTeamsApi(prepareMsTeamsRequest());
@ -313,31 +338,46 @@ function EditAlertChannels({
});
history.replace(ROUTES.ALL_CHANNELS);
} else {
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
return { status: 'success', statusMessage: t('channel_edit_done') };
}
notifications.error({
message: 'Error',
description: response.error || t('channel_edit_failed'),
});
setSavingState(false);
return {
status: 'failed',
statusMessage: response.error || t('channel_edit_failed'),
};
}, [prepareMsTeamsRequest, t, notifications, selectedConfig]);
const onSaveHandler = useCallback(
(value: ChannelType) => {
async (value: ChannelType) => {
let result;
if (value === ChannelType.Slack) {
onSlackEditHandler();
result = await onSlackEditHandler();
} else if (value === ChannelType.Webhook) {
onWebhookEditHandler();
result = await onWebhookEditHandler();
} else if (value === ChannelType.Pagerduty) {
onPagerEditHandler();
result = await onPagerEditHandler();
} else if (value === ChannelType.MsTeams) {
onMsTeamsEditHandler();
result = await onMsTeamsEditHandler();
} else if (value === ChannelType.Opsgenie) {
onOpsgenieEditHandler();
result = await onOpsgenieEditHandler();
} else if (value === ChannelType.Email) {
onEmailEditHandler();
result = await onEmailEditHandler();
}
logEvent('Alert Channel: Save channel', {
type: value,
sendResolvedAlert: selectedConfig.send_resolved,
name: selectedConfig.name,
new: 'false',
status: result?.status,
statusMessage: result?.statusMessage,
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[
onSlackEditHandler,
onWebhookEditHandler,
@ -399,6 +439,14 @@ function EditAlertChannels({
description: t('channel_test_failed'),
});
}
logEvent('Alert Channel: Test notification', {
type: channelType,
sendResolvedAlert: selectedConfig.send_resolved,
name: selectedConfig.name,
new: 'false',
status:
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
});
} catch (error) {
notifications.error({
message: 'Error',
@ -407,6 +455,7 @@ function EditAlertChannels({
}
setTestingState(false);
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[
t,
prepareWebhookRequest,

View File

@ -3,6 +3,8 @@ import './FormAlertRules.styles.scss';
import { PlusOutlined } from '@ant-design/icons';
import { Button, Form, Select, Switch, Tooltip } from 'antd';
import getChannels from 'api/channels/getAll';
import logEvent from 'api/common/logEvent';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import useFetch from 'hooks/useFetch';
@ -10,6 +12,7 @@ import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertDef, Labels } from 'types/api/alerts/def';
import AppReducer from 'types/reducer/app';
import { requireErrorMessage } from 'utils/form/requireErrorMessage';
@ -73,9 +76,24 @@ function BasicInfo({
const noChannels = channels.payload?.length === 0;
const handleCreateNewChannels = useCallback(() => {
logEvent('Alert: Create notification channel button clicked', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
ruleId: isNewRule ? 0 : alertDef?.id,
});
window.open(ROUTES.CHANNELS_NEW, '_blank');
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
if (!channels.loading && isNewRule) {
logEvent('Alert: New alert creation page visited', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
numberOfChannels: channels.payload?.length,
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [channels.payload, channels.loading]);
return (
<>
<StepHeading> {t('alert_form_step3')} </StepHeading>

View File

@ -2,6 +2,7 @@ import './QuerySection.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Button, Tabs, Tooltip } from 'antd';
import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
@ -31,6 +32,7 @@ function QuerySection({
runQuery,
alertDef,
panelType,
ruleId,
}: QuerySectionProps): JSX.Element {
// init namespace for translations
const { t } = useTranslation('alerts');
@ -158,7 +160,15 @@ function QuerySection({
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<Button
type="primary"
onClick={runQuery}
onClick={(): void => {
runQuery();
logEvent('Alert: Stage and run query', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
isNewRule: !ruleId || ruleId === 0,
ruleId,
queryType: queryCategory,
});
}}
className="stage-run-query"
icon={<Play size={14} />}
>
@ -228,6 +238,7 @@ interface QuerySectionProps {
runQuery: VoidFunction;
alertDef: AlertDef;
panelType: PANEL_TYPES;
ruleId: number;
}
export default QuerySection;

View File

@ -12,8 +12,10 @@ import {
} from 'antd';
import saveAlertApi from 'api/alerts/save';
import testAlertApi from 'api/alerts/testAlert';
import logEvent from 'api/common/logEvent';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { alertHelpMessage } from 'components/facingIssueBtn/util';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
@ -338,8 +340,13 @@ function FormAlertRules({
return;
}
const postableAlert = memoizedPreparePostData();
setLoading(true);
let logData = {
status: 'error',
statusMessage: t('unexpected_error'),
};
try {
const apiReq =
ruleId && ruleId > 0
@ -349,10 +356,15 @@ function FormAlertRules({
const response = await saveAlertApi(apiReq);
if (response.statusCode === 200) {
logData = {
status: 'success',
statusMessage:
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
};
notifications.success({
message: 'Success',
description:
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
description: logData.statusMessage,
});
// invalidate rule in cache
@ -367,18 +379,42 @@ function FormAlertRules({
history.replace(`${ROUTES.LIST_ALL_ALERT}?${urlQuery.toString()}`);
}, 2000);
} else {
logData = {
status: 'error',
statusMessage: response.error || t('unexpected_error'),
};
notifications.error({
message: 'Error',
description: response.error || t('unexpected_error'),
description: logData.statusMessage,
});
}
} catch (e) {
logData = {
status: 'error',
statusMessage: t('unexpected_error'),
};
notifications.error({
message: 'Error',
description: t('unexpected_error'),
description: logData.statusMessage,
});
}
setLoading(false);
logEvent('Alert: Save alert', {
...logData,
dataSource: ALERTS_DATA_SOURCE_MAP[postableAlert?.alertType as AlertTypes],
channelNames: postableAlert?.preferredChannels,
broadcastToAll: postableAlert?.broadcastToAll,
isNewRule: !ruleId || ruleId === 0,
ruleId,
queryType: currentQuery.queryType,
alertId: postableAlert?.id,
alertName: postableAlert?.alert,
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
isFormValid,
memoizedPreparePostData,
@ -414,6 +450,7 @@ function FormAlertRules({
}
const postableAlert = memoizedPreparePostData();
let statusResponse = { status: 'failed', message: '' };
setLoading(true);
try {
const response = await testAlertApi({ data: postableAlert });
@ -425,25 +462,43 @@ function FormAlertRules({
message: 'Error',
description: t('no_alerts_found'),
});
statusResponse = { status: 'failed', message: t('no_alerts_found') };
} else {
notifications.success({
message: 'Success',
description: t('rule_test_fired'),
});
statusResponse = { status: 'success', message: t('rule_test_fired') };
}
} else {
notifications.error({
message: 'Error',
description: response.error || t('unexpected_error'),
});
statusResponse = {
status: 'failed',
message: response.error || t('unexpected_error'),
};
}
} catch (e) {
notifications.error({
message: 'Error',
description: t('unexpected_error'),
});
statusResponse = { status: 'failed', message: t('unexpected_error') };
}
setLoading(false);
logEvent('Alert: Test notification', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
channelNames: postableAlert?.preferredChannels,
broadcastToAll: postableAlert?.broadcastToAll,
isNewRule: !ruleId || ruleId === 0,
ruleId,
queryType: currentQuery.queryType,
status: statusResponse.status,
statusMessage: statusResponse.message,
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [t, isFormValid, memoizedPreparePostData, notifications]);
const renderBasicInfo = (): JSX.Element => (
@ -513,6 +568,16 @@ function FormAlertRules({
const isRuleCreated = !ruleId || ruleId === 0;
useEffect(() => {
if (!isRuleCreated) {
logEvent('Alert: Edit page visited', {
ruleId,
dataSource: ALERTS_DATA_SOURCE_MAP[alertType as AlertTypes],
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
function handleRedirection(option: AlertTypes): void {
let url = '';
switch (option) {
@ -535,6 +600,13 @@ function FormAlertRules({
default:
break;
}
logEvent('Alert: Check example alert clicked', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
isNewRule: !ruleId || ruleId === 0,
ruleId,
queryType: currentQuery.queryType,
link: url,
});
window.open(url, '_blank');
}
@ -572,6 +644,7 @@ function FormAlertRules({
alertDef={alertDef}
panelType={panelType || PANEL_TYPES.TIME_SERIES}
key={currentQuery.queryType}
ruleId={ruleId}
/>
<RuleOptions

View File

@ -80,6 +80,8 @@ function FullView({
query: updatedQuery,
globalSelectedInterval: globalSelectedTime,
variables: getDashboardVariables(selectedDashboard?.data.variables),
fillGaps: widget.fillSpans,
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;

View File

@ -109,6 +109,7 @@ function GridCardGraph({
globalSelectedInterval,
variables: getDashboardVariables(variables),
fillGaps: widget.fillSpans,
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;

View File

@ -0,0 +1,215 @@
// Fixture: a table-formatted query response for two queries (A and B)
// grouped by `service_name`. Rows where one query has no value for a group
// carry the string placeholder 'n/a' instead of a number.
export const tableDataMultipleQueriesSuccessResponse = {
	// column metadata: the group-by column first, then one value column per query
	columns: [
		{
			name: 'service_name',
			queryName: '',
			isValueColumn: false,
		},
		{
			name: 'A',
			queryName: 'A',
			isValueColumn: true,
		},
		{
			name: 'B',
			queryName: 'B',
			isValueColumn: true,
		},
	],
	// rows wrap their cell values in a `data` envelope (see TableData)
	rows: [
		{
			data: {
				A: 4196.71,
				B: 'n/a',
				service_name: 'demo-app',
			},
		},
		{
			data: {
				A: 500.83,
				B: 'n/a',
				service_name: 'customer',
			},
		},
		{
			data: {
				A: 499.5,
				B: 'n/a',
				service_name: 'mysql',
			},
		},
		{
			data: {
				A: 293.22,
				B: 'n/a',
				service_name: 'frontend',
			},
		},
		{
			data: {
				A: 230.03,
				B: 'n/a',
				service_name: 'driver',
			},
		},
		{
			data: {
				A: 67.09,
				B: 'n/a',
				service_name: 'route',
			},
		},
		{
			data: {
				A: 30.96,
				B: 'n/a',
				service_name: 'redis',
			},
		},
		{
			data: {
				A: 'n/a',
				B: 112.27,
				service_name: 'n/a',
			},
		},
	],
};
// Fixture: a widget query in builder mode with two metric queries —
// query A configures the legend 'p99', query B leaves its legend empty
// (so the column title should fall back to the response column name).
export const widgetQueryWithLegend = {
	clickhouse_sql: [
		{
			name: 'A',
			legend: '',
			disabled: false,
			query: '',
		},
	],
	promql: [
		{
			name: 'A',
			query: '',
			legend: '',
			disabled: false,
		},
	],
	builder: {
		queryData: [
			// query A: p90 space aggregation over signoz_latency, grouped by
			// service_name, with legend 'p99'
			{
				dataSource: 'metrics',
				queryName: 'A',
				aggregateOperator: 'count',
				aggregateAttribute: {
					dataType: 'float64',
					id: 'signoz_latency--float64--ExponentialHistogram--true',
					isColumn: true,
					isJSON: false,
					key: 'signoz_latency',
					type: 'ExponentialHistogram',
				},
				timeAggregation: '',
				spaceAggregation: 'p90',
				functions: [],
				filters: {
					items: [],
					op: 'AND',
				},
				expression: 'A',
				disabled: false,
				stepInterval: 60,
				having: [],
				limit: null,
				orderBy: [],
				groupBy: [
					{
						dataType: 'string',
						isColumn: false,
						isJSON: false,
						key: 'service_name',
						type: 'tag',
						id: 'service_name--string--tag--false',
					},
				],
				legend: 'p99',
				reduceTo: 'avg',
			},
			// query B: rate of system_disk_operations with no group-by and an
			// empty legend
			{
				dataSource: 'metrics',
				queryName: 'B',
				aggregateOperator: 'rate',
				aggregateAttribute: {
					dataType: 'float64',
					id: 'system_disk_operations--float64--Sum--true',
					isColumn: true,
					isJSON: false,
					key: 'system_disk_operations',
					type: 'Sum',
				},
				timeAggregation: 'rate',
				spaceAggregation: 'sum',
				functions: [],
				filters: {
					items: [],
					op: 'AND',
				},
				expression: 'B',
				disabled: false,
				stepInterval: 60,
				having: [],
				limit: null,
				orderBy: [],
				groupBy: [],
				legend: '',
				reduceTo: 'avg',
			},
		],
		queryFormulas: [],
	},
	id: '48ad5a67-9a3c-49d4-a886-d7a34f8b875d',
	queryType: 'builder',
};
// Fixture: the expected dataSource produced from
// tableDataMultipleQueriesSuccessResponse — the rows above with their
// `data` envelope removed, order preserved.
export const expectedOutputWithLegends = {
	dataSource: [
		{
			A: 4196.71,
			B: 'n/a',
			service_name: 'demo-app',
		},
		{
			A: 500.83,
			B: 'n/a',
			service_name: 'customer',
		},
		{
			A: 499.5,
			B: 'n/a',
			service_name: 'mysql',
		},
		{
			A: 293.22,
			B: 'n/a',
			service_name: 'frontend',
		},
		{
			A: 230.03,
			B: 'n/a',
			service_name: 'driver',
		},
		{
			A: 67.09,
			B: 'n/a',
			service_name: 'route',
		},
		{
			A: 30.96,
			B: 'n/a',
			service_name: 'redis',
		},
		{
			A: 'n/a',
			B: 112.27,
			service_name: 'n/a',
		},
	],
};

View File

@ -0,0 +1,42 @@
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { createColumnsAndDataSource, getQueryLegend } from '../utils';
import {
expectedOutputWithLegends,
tableDataMultipleQueriesSuccessResponse,
widgetQueryWithLegend,
} from './response';
describe('Table Panel utils', () => {
	it('createColumnsAndDataSource function', () => {
		const responseData = tableDataMultipleQueriesSuccessResponse;
		const currentQuery = widgetQueryWithLegend as Query;

		const { columns, dataSource } = createColumnsAndDataSource(
			responseData,
			currentQuery,
		);

		// rows should be unwrapped from their `data` envelope, order preserved
		expect(dataSource).toStrictEqual(expectedOutputWithLegends.dataSource);
		// columns must render in the same order as the response columns
		expect(columns[0].title).toBe('service_name');
		// a value column with a configured legend takes the legend as its title
		expect(columns[1].title).toBe('p99');
		// a value column without a legend falls back to the response column name
		expect(columns[2].title).toBe('B');
		// dataIndex must equal the response column name so each row's values
		// map onto the right column
		expect((columns[0] as any).dataIndex).toBe('service_name');
		expect((columns[1] as any).dataIndex).toBe('A');
		expect((columns[2] as any).dataIndex).toBe('B');
	});

	it('getQueryLegend function', () => {
		const currentQuery = widgetQueryWithLegend as Query;

		// query A carries the legend 'p99'
		expect(getQueryLegend(currentQuery, 'A')).toBe('p99');
		// query B has no legend configured, so undefined is expected
		expect(getQueryLegend(currentQuery, 'B')).toBe(undefined);
	});
});

View File

@ -3,10 +3,7 @@ import { Space, Tooltip } from 'antd';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import { Events } from 'constants/events';
import { QueryTable } from 'container/QueryTable';
import {
createTableColumnsFromQuery,
RowData,
} from 'lib/query/createTableColumnsFromQuery';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep, get, isEmpty, set } from 'lodash-es';
import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
@ -14,7 +11,11 @@ import { eventEmitter } from 'utils/getEventEmitter';
import { WrapperStyled } from './styles';
import { GridTableComponentProps } from './types';
import { findMatchingThreshold } from './utils';
import {
createColumnsAndDataSource,
findMatchingThreshold,
TableData,
} from './utils';
function GridTableComponent({
data,
@ -25,28 +26,26 @@ function GridTableComponent({
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);
// create columns and dataSource in the ui friendly structure
// use the query from the widget here to extract the legend information
const { columns, dataSource: originalDataSource } = useMemo(
() =>
createTableColumnsFromQuery({
query,
queryTableData: data,
}),
[data, query],
() => createColumnsAndDataSource((data as unknown) as TableData, query),
[query, data],
);
const createDataInCorrectFormat = useCallback(
(dataSource: RowData[]): RowData[] =>
dataSource.map((d) => {
const finalObject = {};
const keys = Object.keys(d);
keys.forEach((k) => {
const label = get(
columns.find((c) => get(c, 'dataIndex', '') === k) || {},
'title',
'',
// we use the order of the columns here to have similar download as the user view
columns.forEach((k) => {
set(
finalObject,
get(k, 'title', '') as string,
get(d, get(k, 'dataIndex', ''), 'n/a'),
);
if (label) {
set(finalObject, label as string, d[k]);
}
});
return finalObject as RowData;
}),
@ -65,7 +64,11 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
if (columnUnits[k]) {
newValue[k] = getYAxisFormattedValue(String(val[k]), columnUnits[k]);
// the check below takes care of not adding units for rows that have n/a values
newValue[k] =
val[k] !== 'n/a'
? getYAxisFormattedValue(String(val[k]), columnUnits[k])
: val[k];
newValue[`${k}_without_unit`] = val[k];
}
});

View File

@ -1,4 +1,11 @@
import { ColumnsType, ColumnType } from 'antd/es/table';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config';
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { isEmpty, isNaN } from 'lodash-es';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
// Helper function to evaluate the condition based on the operator
function evaluateCondition(
@ -56,3 +63,85 @@ export function findMatchingThreshold(
hasMultipleMatches,
};
}
// Shape of a table-formatted ("formatForWeb") query response consumed by
// createColumnsAndDataSource below.
export interface TableData {
	// column metadata; `isValueColumn` marks columns that hold query results
	// (and are therefore eligible for a legend-based title)
	columns: { name: string; queryName: string; isValueColumn: boolean }[];
	// each row wraps its cell values in a `data` envelope
	rows: { data: any }[];
}
/**
 * Looks up the legend configured for `queryName` in the current query,
 * searching the section that matches the query type (builder queries and
 * formulas, ClickHouse queries, or PromQL queries).
 *
 * @param currentQuery the widget query to search
 * @param queryName the query/formula name whose legend is wanted
 * @returns the configured legend, or undefined when none is set
 */
export function getQueryLegend(
	currentQuery: Query,
	queryName: string,
): string | undefined {
	if (currentQuery.queryType === EQueryType.QUERY_BUILDER) {
		const queryLegend = currentQuery.builder.queryData.find(
			(query) => query.queryName === queryName,
		)?.legend;

		if (queryLegend) {
			return queryLegend;
		}

		// no (or empty) legend on the query itself — fall back to the formulas
		return currentQuery.builder.queryFormulas.find(
			(query) => query.queryName === queryName,
		)?.legend;
	}

	if (currentQuery.queryType === EQueryType.CLICKHOUSE) {
		return currentQuery.clickhouse_sql.find(
			(query) => query.name === queryName,
		)?.legend;
	}

	if (currentQuery.queryType === EQueryType.PROM) {
		return currentQuery.promql.find((query) => query.name === queryName)
			?.legend;
	}

	// unknown query type — no legend available
	return undefined;
}
/**
 * Builds antd table columns and a flat dataSource from a table-formatted
 * query response.
 *
 * - Value columns take their title from the matching query legend when one
 *   is configured; otherwise the column name from the response is used.
 * - Rows are unwrapped from their `data` envelope.
 *
 * @param data table response containing `columns` and `rows`
 * @param currentQuery the widget query, used to resolve legends
 * @param renderColumnCell optional per-column cell renderers keyed by column name
 * @returns `columns` and `dataSource` ready for the antd Table
 */
export function createColumnsAndDataSource(
	data: TableData,
	currentQuery: Query,
	renderColumnCell?: QueryTableProps['renderColumnCell'],
): { columns: ColumnsType<RowData>; dataSource: RowData[] } {
	const columns: ColumnsType<RowData> =
		data.columns?.reduce<ColumnsType<RowData>>((acc, item) => {
			// if the column is a value column then check for an available legend
			const legend = item.isValueColumn
				? getQueryLegend(currentQuery, item.queryName)
				: undefined;

			const column: ColumnType<RowData> = {
				dataIndex: item.name,
				// if no legend present then rely on the column name value
				title: !isEmpty(legend) ? legend : item.name,
				width: QUERY_TABLE_CONFIG.width,
				render: renderColumnCell && renderColumnCell[item.name],
				sorter: (a: RowData, b: RowData): number => {
					// prefer the raw (unit-less) value for numeric comparison
					const valueA = Number(a[`${item.name}_without_unit`] ?? a[item.name]);
					const valueB = Number(b[`${item.name}_without_unit`] ?? b[item.name]);

					if (!isNaN(valueA) && !isNaN(valueB)) {
						return valueA - valueB;
					}

					// fall back to string comparison; coerce with String() so rows
					// mixing numbers with 'n/a' placeholders do not crash by calling
					// localeCompare on a non-string value
					return String(a[item.name] ?? '').localeCompare(
						String(b[item.name] ?? ''),
					);
				},
			};

			return [...acc, column];
		}, []) || [];

	// the rows returned have data encapsulation hence removing the same here
	const dataSource = data.rows?.map((d) => d.data) || [];

	return { columns, dataSource };
}

View File

@ -7,17 +7,20 @@ interface AlertInfoCardProps {
header: string;
subheader: string;
link: string;
onClick: () => void;
}
function AlertInfoCard({
header,
subheader,
link,
onClick,
}: AlertInfoCardProps): JSX.Element {
return (
<div
className="alert-info-card"
onClick={(): void => {
onClick();
window.open(link, '_blank');
}}
>

View File

@ -2,6 +2,7 @@ import './AlertsEmptyState.styles.scss';
import { PlusOutlined } from '@ant-design/icons';
import { Button, Divider, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
@ -10,12 +11,26 @@ import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { DataSource } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';
import AlertInfoCard from './AlertInfoCard';
import { ALERT_CARDS, ALERT_INFO_LINKS } from './alertLinks';
import InfoLinkText from './InfoLinkText';
// Emits an analytics event from the alert empty-state page; `dataSource`
// is attached to the payload only when it is provided.
const alertLogEvents = (
	title: string,
	link: string,
	dataSource?: DataSource,
): void => {
	const baseAttributes = {
		link,
		page: 'Alert empty state page',
	};

	logEvent(
		title,
		dataSource ? { ...baseAttributes, dataSource } : baseAttributes,
	);
};
export function AlertsEmptyState(): JSX.Element {
const { t } = useTranslation('common');
const { role, featureResponse } = useSelector<AppState, AppReducer>(
@ -91,18 +106,33 @@ export function AlertsEmptyState(): JSX.Element {
link="https://youtu.be/xjxNIqiv4_M"
leftIconVisible
rightIconVisible
onClick={(): void =>
alertLogEvents(
'Alert: Video tutorial link clicked',
'https://youtu.be/xjxNIqiv4_M',
)
}
/>
</div>
{ALERT_INFO_LINKS.map((info) => (
<InfoLinkText
key={info.link}
infoText={info.infoText}
link={info.link}
leftIconVisible={info.leftIconVisible}
rightIconVisible={info.rightIconVisible}
/>
))}
{ALERT_INFO_LINKS.map((info) => {
const logEventTriggered = (): void =>
alertLogEvents(
'Alert: Tutorial doc link clicked',
info.link,
info.dataSource,
);
return (
<InfoLinkText
key={info.link}
infoText={info.infoText}
link={info.link}
leftIconVisible={info.leftIconVisible}
rightIconVisible={info.rightIconVisible}
onClick={logEventTriggered}
/>
);
})}
</div>
</section>
<div className="get-started-text">
@ -113,14 +143,23 @@ export function AlertsEmptyState(): JSX.Element {
</Divider>
</div>
{ALERT_CARDS.map((card) => (
<AlertInfoCard
key={card.link}
header={card.header}
subheader={card.subheader}
link={card.link}
/>
))}
{ALERT_CARDS.map((card) => {
const logEventTriggered = (): void =>
alertLogEvents(
'Alert: Sample alert link clicked',
card.link,
card.dataSource,
);
return (
<AlertInfoCard
key={card.link}
header={card.header}
subheader={card.subheader}
link={card.link}
onClick={logEventTriggered}
/>
);
})}
</div>
</div>
);

View File

@ -6,6 +6,7 @@ interface InfoLinkTextProps {
link: string;
leftIconVisible: boolean;
rightIconVisible: boolean;
onClick: () => void;
}
function InfoLinkText({
@ -13,10 +14,12 @@ function InfoLinkText({
link,
leftIconVisible,
rightIconVisible,
onClick,
}: InfoLinkTextProps): JSX.Element {
return (
<Flex
onClick={(): void => {
onClick();
window.open(link, '_blank');
}}
className="info-link-container"

View File

@ -1,3 +1,5 @@
import { DataSource } from 'types/common/queryBuilder';
export const ALERT_INFO_LINKS = [
{
infoText: 'How to create Metrics-based alerts',
@ -5,6 +7,7 @@ export const ALERT_INFO_LINKS = [
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
leftIconVisible: false,
rightIconVisible: true,
dataSource: DataSource.METRICS,
},
{
infoText: 'How to create Log-based alerts',
@ -12,6 +15,7 @@ export const ALERT_INFO_LINKS = [
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
leftIconVisible: false,
rightIconVisible: true,
dataSource: DataSource.LOGS,
},
{
infoText: 'How to create Trace-based alerts',
@ -19,6 +23,7 @@ export const ALERT_INFO_LINKS = [
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
leftIconVisible: false,
rightIconVisible: true,
dataSource: DataSource.TRACES,
},
];
@ -26,24 +31,28 @@ export const ALERT_CARDS = [
{
header: 'Alert on high memory usage',
subheader: "Monitor your host's memory usage",
dataSource: DataSource.METRICS,
link:
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-memory-usage-for-host-goes-above-400-mb-or-any-fixed-memory',
},
{
header: 'Alert on slow external API calls',
subheader: 'Monitor your external API calls',
dataSource: DataSource.TRACES,
link:
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-external-api-latency-p90-is-over-1-second-for-last-5-mins',
},
{
header: 'Alert on high percentage of timeout errors in logs',
subheader: 'Monitor your logs for errors',
dataSource: DataSource.LOGS,
link:
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-percentage-of-redis-timeout-error-logs-greater-than-7-in-last-5-mins',
},
{
header: 'Alert on high error percentage of an endpoint',
subheader: 'Monitor your API endpoint',
dataSource: DataSource.METRICS,
link:
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#3-alert-when-the-error-percentage-for-an-endpoint-exceeds-5',
},

View File

@ -3,6 +3,7 @@ import { PlusOutlined } from '@ant-design/icons';
import { Input, Typography } from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import logEvent from 'api/common/logEvent';
import DropDown from 'components/DropDown/DropDown';
import { listAlertMessage } from 'components/facingIssueBtn/util';
import {
@ -41,7 +42,7 @@ import {
} from './styles';
import Status from './TableComponents/Status';
import ToggleAlertState from './ToggleAlertState';
import { filterAlerts } from './utils';
import { alertActionLogEvent, filterAlerts } from './utils';
const { Search } = Input;
@ -107,12 +108,16 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
}, [notificationsApi, t]);
const onClickNewAlertHandler = useCallback(() => {
logEvent('Alert: New alert button clicked', {
number: allAlertRules?.length,
});
featureResponse
.refetch()
.then(() => {
history.push(ROUTES.ALERTS_NEW);
})
.catch(handleError);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [featureResponse, handleError]);
const onEditHandler = (record: GettableAlert) => (): void => {
@ -321,6 +326,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
width: 10,
render: (id: GettableAlert['id'], record): JSX.Element => (
<DropDown
onDropDownItemClick={(item): void => alertActionLogEvent(item.key, record)}
element={[
<ToggleAlertState
key="1"
@ -356,6 +362,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
});
}
const paginationConfig = {
defaultCurrent: Number(paginationParam) || 1,
};
return (
<>
<SearchContainer>
@ -385,11 +394,10 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
columns={columns}
rowKey="id"
dataSource={data}
shouldSendAlertsLogEvent
dynamicColumns={dynamicColumns}
onChange={handleChange}
pagination={{
defaultCurrent: Number(paginationParam) || 1,
}}
pagination={paginationConfig}
facingIssueBtn={{
attributes: {
screen: 'Alert list page',

View File

@ -1,9 +1,11 @@
import { Space } from 'antd';
import getAll from 'api/alerts/getAll';
import logEvent from 'api/common/logEvent';
import ReleaseNote from 'components/ReleaseNote';
import Spinner from 'components/Spinner';
import { useNotifications } from 'hooks/useNotifications';
import { useEffect } from 'react';
import { isUndefined } from 'lodash-es';
import { useEffect, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { useLocation } from 'react-router-dom';
@ -19,8 +21,19 @@ function ListAlertRules(): JSX.Element {
cacheTime: 0,
});
const logEventCalledRef = useRef(false);
const { notifications } = useNotifications();
useEffect(() => {
if (!logEventCalledRef.current && !isUndefined(data?.payload)) {
logEvent('Alert: List page visited', {
number: data?.payload?.length,
});
logEventCalledRef.current = true;
}
}, [data?.payload]);
useEffect(() => {
if (status === 'error' || (status === 'success' && data.statusCode >= 400)) {
notifications.error({

View File

@ -1,3 +1,6 @@
import logEvent from 'api/common/logEvent';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { GettableAlert } from 'types/api/alerts/get';
export const filterAlerts = (
@ -23,3 +26,32 @@ export const filterAlerts = (
);
});
};
// Logs an analytics event for an alert-list dropdown action. The dropdown
// item key ('0'..'3') is translated to a readable action name; unknown keys
// are logged with an empty action string.
export const alertActionLogEvent = (
	action: string,
	record: GettableAlert,
): void => {
	// dropdown item key → human-readable action label
	const actionLabels: Record<string, string> = {
		'0': 'Enable/Disable',
		'1': 'Edit',
		'2': 'Clone',
		'3': 'Delete',
	};

	logEvent('Alert: Action', {
		ruleId: record.id,
		dataSource: ALERTS_DATA_SOURCE_MAP[record.alertType as AlertTypes],
		name: record.alert,
		action: actionLabels[action] ?? '',
	});
};

View File

@ -609,6 +609,16 @@ function DashboardsList(): JSX.Element {
</>
);
const paginationConfig = data.length > 20 && {
pageSize: 20,
showTotal: showPaginationItem,
showSizeChanger: false,
onChange: (page: any): void => handlePageSizeUpdate(page),
current: Number(sortOrder.pagination),
defaultCurrent: Number(sortOrder.pagination) || 1,
hideOnSinglePage: true,
};
return (
<div className="dashboards-list-container">
<div className="dashboards-list-view-content">
@ -822,16 +832,7 @@ function DashboardsList(): JSX.Element {
showSorterTooltip
loading={isDashboardListLoading || isFilteringDashboards}
showHeader={false}
pagination={
data.length > 20 && {
pageSize: 20,
showTotal: showPaginationItem,
showSizeChanger: false,
onChange: (page): void => handlePageSizeUpdate(page),
current: Number(sortOrder.pagination),
defaultCurrent: Number(sortOrder.pagination) || 1,
}
}
pagination={paginationConfig}
/>
</>
)}

View File

@ -15,6 +15,7 @@ import {
} from 'hooks/useResourceAttribute/utils';
import { useMemo, useState } from 'react';
import { useParams } from 'react-router-dom';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { EQueryType } from 'types/common/dashboard';
import { v4 as uuid } from 'uuid';
@ -93,6 +94,26 @@ function External(): JSX.Element {
[servicename, tagFilterItems],
);
const errorApmToTraceQuery = useGetAPMToTracesQueries({
servicename,
isExternalCall: true,
filters: [
{
id: uuid().slice(0, 8),
key: {
key: 'hasError',
dataType: DataTypes.bool,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'hasError--bool--tag--true',
},
op: 'in',
value: ['true'],
},
],
});
const externalCallRPSWidget = useMemo(
() =>
getWidgetQueryBuilder({
@ -156,7 +177,7 @@ function External(): JSX.Element {
servicename,
selectedTraceTags,
timestamp: selectedTimeStamp,
apmToTraceQuery,
apmToTraceQuery: errorApmToTraceQuery,
})}
>
View Traces

View File

@ -2,8 +2,6 @@ import { Card, Typography } from 'antd';
import Spinner from 'components/Spinner';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { WidgetGraphContainerProps } from 'container/NewWidget/types';
// import useUrlQuery from 'hooks/useUrlQuery';
// import { useDashboard } from 'providers/Dashboard/Dashboard';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
import { NotFoundContainer } from './styles';
@ -14,6 +12,7 @@ function WidgetGraphContainer({
queryResponse,
setRequestData,
selectedWidget,
isLoadingPanelData,
}: WidgetGraphContainerProps): JSX.Element {
if (queryResponse.data && selectedGraph === PANEL_TYPES.BAR) {
const sortedSeriesData = getSortedSeriesData(
@ -38,6 +37,10 @@ function WidgetGraphContainer({
return <Spinner size="large" tip="Loading..." />;
}
if (isLoadingPanelData) {
return <Spinner size="large" tip="Loading..." />;
}
if (
selectedGraph !== PANEL_TYPES.LIST &&
queryResponse.data?.payload.data?.result?.length === 0
@ -59,6 +62,14 @@ function WidgetGraphContainer({
);
}
if (queryResponse.isIdle) {
return (
<NotFoundContainer>
<Typography>No Data</Typography>
</NotFoundContainer>
);
}
return (
<WidgetGraph
selectedWidget={selectedWidget}

View File

@ -17,6 +17,7 @@ function WidgetGraph({
queryResponse,
setRequestData,
selectedWidget,
isLoadingPanelData,
}: WidgetGraphContainerProps): JSX.Element {
const { currentQuery } = useQueryBuilder();
@ -43,6 +44,7 @@ function WidgetGraph({
)}
<WidgetGraphComponent
isLoadingPanelData={isLoadingPanelData}
selectedGraph={selectedGraph}
queryResponse={queryResponse}
setRequestData={setRequestData}

View File

@ -1,18 +1,15 @@
import './LeftContainer.styles.scss';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useState } from 'react';
import { memo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getGraphType } from 'utils/getGraphType';
import { WidgetGraphProps } from '../types';
import ExplorerColumnsRenderer from './ExplorerColumnsRenderer';
@ -27,62 +24,17 @@ function LeftContainer({
selectedTracesFields,
setSelectedTracesFields,
selectedWidget,
selectedTime,
requestData,
setRequestData,
isLoadingPanelData,
}: WidgetGraphProps): JSX.Element {
const { stagedQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const { stagedQuery } = useQueryBuilder();
const { selectedDashboard } = useDashboard();
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
return {
selectedTime: selectedWidget?.timePreferance,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery || initialQueriesMap.metrics,
globalSelectedInterval,
variables: getDashboardVariables(selectedDashboard?.data.variables),
};
}
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
updatedQuery.builder.queryData[0].pageSize = 10;
redirectWithQueryBuilderData(updatedQuery);
return {
query: updatedQuery,
graphType: PANEL_TYPES.LIST,
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
globalSelectedInterval,
tableParams: {
pagination: {
offset: 0,
limit: updatedQuery.builder.queryData[0].limit || 0,
},
},
};
});
useEffect(() => {
if (stagedQuery) {
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
fillGaps: selectedWidget.fillSpans || false,
}));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
stagedQuery,
selectedTime,
selectedWidget.fillSpans,
globalSelectedInterval,
]);
const queryResponse = useGetQueryRange(
requestData,
selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION,
@ -104,6 +56,7 @@ function LeftContainer({
queryResponse={queryResponse}
setRequestData={setRequestData}
selectedWidget={selectedWidget}
isLoadingPanelData={isLoadingPanelData}
/>
<QueryContainer className="query-section-left-container">
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />

View File

@ -7,7 +7,7 @@ import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { chartHelpMessage } from 'components/facingIssueBtn/util';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
@ -18,6 +18,8 @@ import useAxiosError from 'hooks/useAxiosError';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import history from 'lib/history';
import { defaultTo, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
@ -38,6 +40,8 @@ import { IField } from 'types/api/logs/fields';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getGraphType, getGraphTypeForFormat } from 'utils/getGraphType';
import LeftContainer from './LeftContainer';
import QueryTypeTag from './LeftContainer/QueryTypeTag';
@ -83,6 +87,10 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
const { featureResponse } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const { widgets = [] } = selectedDashboard?.data || {};
@ -278,6 +286,65 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
const handleError = useAxiosError();
// this loading state is to take care of mismatch in the responses for table and other panels
// hence while changing the query contains the older value and the processing logic fails
const [isLoadingPanelData, setIsLoadingPanelData] = useState<boolean>(false);
// request data should be handled by the parent and the child components should consume the same
// this has been moved here from the left container
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
return {
selectedTime: selectedWidget?.timePreferance,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery || initialQueriesMap.metrics,
globalSelectedInterval,
formatForWeb:
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
PANEL_TYPES.TABLE,
variables: getDashboardVariables(selectedDashboard?.data.variables),
};
}
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
updatedQuery.builder.queryData[0].pageSize = 10;
redirectWithQueryBuilderData(updatedQuery);
return {
query: updatedQuery,
graphType: PANEL_TYPES.LIST,
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
globalSelectedInterval,
tableParams: {
pagination: {
offset: 0,
limit: updatedQuery.builder.queryData[0].limit || 0,
},
},
};
});
useEffect(() => {
if (stagedQuery) {
setIsLoadingPanelData(false);
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
fillGaps: selectedWidget.fillSpans || false,
formatForWeb:
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
PANEL_TYPES.TABLE,
}));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
stagedQuery,
selectedTime,
selectedWidget.fillSpans,
globalSelectedInterval,
]);
const onClickSaveHandler = useCallback(() => {
if (!selectedDashboard) {
return;
@ -402,6 +469,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
}, [dashboardId]);
const setGraphHandler = (type: PANEL_TYPES): void => {
setIsLoadingPanelData(true);
const updatedQuery = handleQueryChange(type as any, supersetQuery);
setGraphType(type);
redirectWithQueryBuilderData(
@ -527,6 +595,9 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
setSelectedTracesFields={setSelectedTracesFields}
selectedWidget={selectedWidget}
selectedTime={selectedTime}
requestData={requestData}
setRequestData={setRequestData}
isLoadingPanelData={isLoadingPanelData}
/>
)}
</LeftContainerWrapper>

View File

@ -24,6 +24,9 @@ export interface WidgetGraphProps {
selectedWidget: Widgets;
selectedGraph: PANEL_TYPES;
selectedTime: timePreferance;
requestData: GetQueryResultsProps;
setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
isLoadingPanelData: boolean;
}
export type WidgetGraphContainerProps = {
@ -34,4 +37,5 @@ export type WidgetGraphContainerProps = {
setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
selectedGraph: PANEL_TYPES;
selectedWidget: Widgets;
isLoadingPanelData: boolean;
};

View File

@ -4,50 +4,45 @@
Prior to installation, you must ensure your Kubernetes cluster is ready and that you have the necessary permissions to deploy applications. Follow these steps to use Helm for setting up the Collector:
&nbsp;
1. **Add the OpenTelemetry Helm repository:**
```bash
helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
```
&nbsp;
2. **Prepare the `otel-collector-values.yaml` Configuration**
#### Azure Event Hub Receiver Configuration
If you haven't created the logs Event Hub, you can create one by following the steps in the [Azure Event Hubs documentation](../../bootstrapping/data-ingestion).
&nbsp;
and replace the placeholders `<Primary Connection String>` with the primary connection string for your Event Hub, it should look something like this:
#### Azure Event Hub Receiver Configuration
```yaml
connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
```
The Event Hub docs have a step to create a SAS policy for the event hub and copy the connection string.
Replace the placeholders `<Primary Connection String>` with the primary connection string for your Event Hub, it should look something like this:
#### Azure Monitor Receiver Configuration
```yaml
connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
```
The Event Hub setup has a step to create a SAS policy for the event hub and copy the connection string.
You will need to set up a [service principal](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal) with Read permissions to receive data from Azure Monitor.
&nbsp;
1. Follow the steps in the [Create a service principal Azure Doc](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#register-an-application-with-microsoft-entra-id-and-create-a-service-principal) documentation to create a service principal.
You can name it `signoz-central-collector-app`; the redirect URI can be empty.
2. To add read permissions to Azure Monitor, follow the [Assign Role](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#assign-a-role-to-the-application) documentation. The read access can be given to the full subscription.
3. There are multiple ways to authenticate the service principal, we will use the client secret option, follow [Creating a client secret](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#option-3-create-a-new-client-secret) and don't forget to copy the client secret. The secret is used in the configuration file as `client_secret`.
#### Azure Monitor Receiver Configuration
4. To find `client_id` and `tenant_id`, go to the [Azure Portal](https://portal.azure.com/) and search for the `Application` you created. You would see the `Application (client) ID` and `Directory (tenant) ID` in the Overview section.
You will need to set up a [service principal](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal) with Read permissions to receive data from Azure Monitor.
<figure data-zoomable align="center">
<img
src="/img/docs/azure-monitoring/service-principal-app-overview.webp"
alt="Application Overview"
/>
<figcaption>
<i>
Application Overview
</i>
</figcaption>
</figure>
1. Follow the steps in the [Create a service principal Azure Doc](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#register-an-application-with-microsoft-entra-id-and-create-a-service-principal) documentation to create a service principal.
You can name it `signoz-central-collector-app`; the redirect URI can be empty.
2. To add read permissions to Azure Monitor, follow the [Assign Role](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#assign-a-role-to-the-application) documentation. The read access can be given to the full subscription.
3. There are multiple ways to authenticate the service principal, we will use the client secret option, follow [Creating a client secret](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#option-3-create-a-new-client-secret) and don't forget to copy the client secret. The secret is used in the configuration file as `client_secret`.
5. To find `subscription_id`, follow steps in [Find Your Subscription](https://learn.microsoft.com/en-us/azure/azure-portal/get-subscription-tenant-id#find-your-azure-subscription) and populate them in the configuration file.
6. Ensure you replace the placeholders `<region>` and `<ingestion-key>` with the appropriate values for your signoz cloud instance.
4. To find `client_id` and `tenant_id`, go to the [Azure Portal](https://portal.azure.com/) and search for the `Application` you created. You would see the `Application (client) ID` and `Directory (tenant) ID` in the Overview section.
5. To find `subscription_id`, follow steps in [Find Your Subscription](https://learn.microsoft.com/en-us/azure/azure-portal/get-subscription-tenant-id#find-your-azure-subscription) and populate them in the configuration file.
6. Ensure you replace the placeholders `<region>` and `<ingestion-key>` with the appropriate values for your signoz cloud instance.
@ -92,13 +87,15 @@ processors:
batch: {}
exporters:
otlp:
endpoint: "ingest.<region>.signoz.cloud:443"
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "<ingestion-key>"
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
```
&nbsp;
3. **Deploy the OpenTelemetry Collector to your Kubernetes cluster:**
You'll need to prepare a custom configuration file, say `otel-collector-values.yaml`, that matches your environment's specific needs. Replace `<namespace>` with the Kubernetes namespace where you wish to install the Collector.

View File

@ -0,0 +1,37 @@
import { act, render, screen, waitFor } from 'tests/test-utils';
import Members from '../Members';
describe('Organization Settings Page', () => {
	// Boundary entries of the mocked members payload; the suite anchors on
	// these to prove the full list is rendered end-to-end.
	const firstMemberEmail = 'firstUser@test.io';
	const lastMemberEmail = 'lastUser@test.io';

	// Asserts that both boundary members are currently in the document.
	const expectBoundaryMembersVisible = (): void => {
		expect(screen.getByText(firstMemberEmail)).toBeInTheDocument(); // first item
		expect(screen.getByText(lastMemberEmail)).toBeInTheDocument(); // last item
	};

	afterEach(() => {
		jest.clearAllMocks();
	});

	it('render list of members', async () => {
		act(() => {
			render(<Members />);
		});

		const heading = await screen.findByText(/Members/i);
		expect(heading).toBeInTheDocument();

		await waitFor(() => expectBoundaryMembersVisible());
	});

	// this is required as our edit/delete logic is dependent on the index and it will break with pagination enabled
	it('render list of members without pagination', async () => {
		render(<Members />);

		await waitFor(() => {
			expectBoundaryMembersVisible();
			// the antd table must not render its pagination controls
			expect(
				document.querySelector('.ant-table-pagination'),
			).not.toBeInTheDocument();
		});
	});
});

View File

@ -9,7 +9,7 @@ function TablePanelWrapper({
tableProcessedDataRef,
}: PanelWrapperProps): JSX.Element {
const panelData =
queryResponse.data?.payload?.data?.newResult?.data?.result || [];
(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
const { thresholds } = widget;
return (
<GridTableComponent

View File

@ -1,3 +1,4 @@
// Pagination settings for the (alert) history table: 5 rows per page, and the
// pager is hidden when all rows fit on a single page.
// NOTE(review): presumably consumed as an antd Table `pagination` prop — confirm at the call site.
export const historyPagination = {
defaultPageSize: 5,
hideOnSinglePage: true,
};

View File

@ -334,6 +334,11 @@ export function PlannedDowntimeList({
}
}, [downtimeSchedules.error, downtimeSchedules.isError, notifications]);
const paginationConfig = {
pageSize: 5,
showSizeChanger: false,
hideOnSinglePage: true,
};
return (
<Table<DowntimeSchedulesTableData>
columns={columns}
@ -342,7 +347,7 @@ export function PlannedDowntimeList({
dataSource={tableData || []}
loading={downtimeSchedules.isLoading || downtimeSchedules.isFetching}
showHeader={false}
pagination={{ pageSize: 5, showSizeChanger: false }}
pagination={paginationConfig}
/>
);
}

View File

@ -33,10 +33,12 @@ export const getColumnSearchProps = (
record: ServicesList,
): boolean => {
if (record[dataIndex]) {
record[dataIndex]
?.toString()
.toLowerCase()
.includes(value.toString().toLowerCase());
return (
record[dataIndex]
?.toString()
.toLowerCase()
.includes(value.toString().toLowerCase()) || false
);
}
return false;

View File

@ -79,6 +79,11 @@ function ServiceMetricTable({
}
}, [services, licenseData, isFetching, isCloudUserVal]);
const paginationConfig = {
defaultPageSize: 10,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} items`,
};
return (
<>
{RPS > MAX_RPS_LIMIT && (
@ -92,11 +97,7 @@ function ServiceMetricTable({
<ResourceAttributesFilter />
<ResizeTable
pagination={{
defaultPageSize: 10,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} items`,
}}
pagination={paginationConfig}
columns={tableColumns}
loading={isLoading}
dataSource={services}

View File

@ -36,6 +36,11 @@ function ServiceTraceTable({
}
}, [services, licenseData, isFetching, isCloudUserVal]);
const paginationConfig = {
defaultPageSize: 10,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} items`,
};
return (
<>
{RPS > MAX_RPS_LIMIT && (
@ -49,11 +54,7 @@ function ServiceTraceTable({
<ResourceAttributesFilter />
<ResizeTable
pagination={{
defaultPageSize: 10,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} items`,
}}
pagination={paginationConfig}
columns={tableColumns}
loading={loading}
dataSource={services}

View File

@ -1,7 +1,10 @@
import getTriggeredApi from 'api/alerts/getTriggered';
import logEvent from 'api/common/logEvent';
import Spinner from 'components/Spinner';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useAxiosError from 'hooks/useAxiosError';
import { isUndefined } from 'lodash-es';
import { useEffect, useRef } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@ -13,6 +16,8 @@ function TriggeredAlerts(): JSX.Element {
(state) => state.app.user?.userId,
);
const hasLoggedEvent = useRef(false); // Track if logEvent has been called
const handleError = useAxiosError();
const alertsResponse = useQuery(
@ -29,6 +34,15 @@ function TriggeredAlerts(): JSX.Element {
},
);
useEffect(() => {
if (!hasLoggedEvent.current && !isUndefined(alertsResponse.data?.payload)) {
logEvent('Alert: Triggered alert list page visited', {
number: alertsResponse.data?.payload?.length,
});
hasLoggedEvent.current = true;
}
}, [alertsResponse.data?.payload]);
if (alertsResponse.error) {
return <TriggerComponent allAlerts={[]} />;
}

View File

@ -12,7 +12,7 @@ import {
} from 'container/TopNav/DateTimeSelectionV2/config';
import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { isEmpty, cloneDeep } from 'lodash-es';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@ -40,6 +40,10 @@ export async function GetMetricQueryRange(
throw new Error(error);
}
if (props.formatForWeb) {
return response;
}
if (response.payload?.data?.result) {
const v2Range = convertNewDataToOld(response.payload);
@ -76,6 +80,7 @@ export interface GetQueryResultsProps {
variables?: Record<string, unknown>;
params?: Record<string, unknown>;
fillGaps?: boolean;
formatForWeb?: boolean;
tableParams?: {
pagination?: Pagination;
selectColumns?: any;

View File

@ -16,6 +16,7 @@ export const prepareQueryRangePayload = ({
query,
globalSelectedInterval,
graphType,
formatForWeb,
selectedTime,
tableParams,
variables = {},
@ -102,6 +103,7 @@ export const prepareQueryRangePayload = ({
inputFormat: 'ns',
}),
variables,
formatForWeb,
compositeQuery,
...restParams,
};

View File

@ -583,11 +583,11 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
q.series?.sort((a, b) => {
let labelA = '';
let labelB = '';
a.labelsArray.forEach((lab) => {
a.labelsArray?.forEach((lab) => {
labelA += Object.values(lab)[0];
});
b.labelsArray.forEach((lab) => {
b.labelsArray?.forEach((lab) => {
labelB += Object.values(lab)[0];
});

View File

@ -0,0 +1,220 @@
/* eslint-disable sonarjs/no-duplicate-string */
// Mock payload served by the msw handler for `/api/v1/orgUsers/*` in tests.
// Deliberately long (18 entries, with repeated ids/names/emails) so that an
// antd table WOULD paginate if pagination were enabled; the Members tests
// anchor on the boundary emails 'firstUser@test.io' (first entry) and
// 'lastUser@test.io' (last entry) and assert no pagination controls render.
export const membersResponse = [
{
id: '3223a874-5678458745786',
name: 'John Doe',
email: 'firstUser@test.io',
createdAt: 1666357530,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '5e9681b1-5678458745786',
name: 'Jane Doe',
email: 'johndoe2@test.io',
createdAt: 1666365394,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '11e8c55d-5678458745786',
name: 'Alex',
email: 'blah@test.io',
createdAt: 1666366317,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: 'd878012367813286731aab62',
role: 'VIEWER',
organization: 'Test Inc',
flags: null,
},
{
id: '2ad2e404-5678458745786',
name: 'Tom',
email: 'johndoe4@test.io',
createdAt: 1673441483,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '6f532456-5678458745786',
name: 'Harry',
email: 'harry@test.io',
createdAt: 1691551672,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: 'ae22fa73-5678458745786',
name: 'Ron',
email: 'ron@test.io',
createdAt: 1691668239,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '3223a874-5678458745786',
name: 'John Doe',
email: 'johndoe@test.io',
createdAt: 1666357530,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '5e9681b1-5678458745786',
name: 'Jane Doe',
email: 'johndoe2@test.io',
createdAt: 1666365394,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '11e8c55d-5678458745786',
name: 'Alex',
email: 'blah@test.io',
createdAt: 1666366317,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: 'd878012367813286731aab62',
role: 'VIEWER',
organization: 'Test Inc',
flags: null,
},
{
id: '2ad2e404-5678458745786',
name: 'Tom',
email: 'johndoe4@test.io',
createdAt: 1673441483,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '6f532456-5678458745786',
name: 'Harry',
email: 'harry@test.io',
createdAt: 1691551672,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: 'ae22fa73-5678458745786',
name: 'Ron',
email: 'ron@test.io',
createdAt: 1691668239,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '3223a874-5678458745786',
name: 'John Doe',
email: 'johndoe@test.io',
createdAt: 1666357530,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '5e9681b1-5678458745786',
name: 'Jane Doe',
email: 'johndoe2@test.io',
createdAt: 1666365394,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '11e8c55d-5678458745786',
name: 'Alex',
email: 'blah@test.io',
createdAt: 1666366317,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: 'd878012367813286731aab62',
role: 'VIEWER',
organization: 'Test Inc',
flags: null,
},
{
id: '2ad2e404-5678458745786',
name: 'Tom',
email: 'johndoe4@test.io',
createdAt: 1673441483,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: '6f532456-5678458745786',
name: 'Harry',
email: 'harry@test.io',
createdAt: 1691551672,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
{
id: 'ae22fa73-5678458745786',
name: 'Ron',
email: 'lastUser@test.io',
createdAt: 1691668239,
profilePictureURL: '',
orgId: '1287612376312867312867',
groupId: '5678458745786',
role: 'ADMIN',
organization: 'Test Inc',
flags: null,
},
];

View File

@ -2,6 +2,7 @@ import { rest } from 'msw';
import { billingSuccessResponse } from './__mockdata__/billing';
import { licensesSuccessResponse } from './__mockdata__/licenses';
import { membersResponse } from './__mockdata__/members';
import { queryRangeSuccessResponse } from './__mockdata__/query_range';
import { serviceSuccessResponse } from './__mockdata__/services';
import { topLevelOperationSuccessResponse } from './__mockdata__/top_level_operations';
@ -25,6 +26,9 @@ export const handlers = [
res(ctx.status(200), ctx.json(topLevelOperationSuccessResponse)),
),
rest.get('http://localhost/api/v1/orgUsers/*', (req, res, ctx) =>
res(ctx.status(200), ctx.json(membersResponse)),
),
rest.get(
'http://localhost/api/v3/autocomplete/attribute_keys',
(req, res, ctx) => {

View File

@ -46,6 +46,8 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
},
];
const paginationConfig = { pageSize: 20, hideOnSinglePage: true };
return (
<div className="integration-data-collected">
<div className="logs-section">
@ -59,7 +61,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
index % 2 === 0 ? 'table-row-dark' : ''
}
dataSource={logsData}
pagination={{ pageSize: 20 }}
pagination={paginationConfig}
className="logs-section-table"
/>
</div>
@ -74,7 +76,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
index % 2 === 0 ? 'table-row-dark' : ''
}
dataSource={metricsData}
pagination={{ pageSize: 20 }}
pagination={paginationConfig}
className="metrics-section-table"
/>
</div>

View File

@ -277,6 +277,8 @@ function SaveView(): JSX.Element {
},
];
const paginationConfig = { pageSize: 5, hideOnSinglePage: true };
return (
<div className="save-view-container">
<div className="save-view-content">
@ -303,7 +305,7 @@ function SaveView(): JSX.Element {
dataSource={dataSource}
loading={isLoading || isRefetching}
showHeader={false}
pagination={{ pageSize: 5 }}
pagination={paginationConfig}
/>
</div>

View File

@ -42,6 +42,15 @@ const mockStored = (role?: string): any =>
accessJwt: '',
refreshJwt: '',
},
org: [
{
createdAt: 0,
hasOptedUpdates: false,
id: 'xyz',
isAnonymous: false,
name: 'Test Inc. - India',
},
],
},
});

View File

@ -24,6 +24,7 @@ export type QueryRangePayload = {
start: number;
step: number;
variables?: Record<string, unknown>;
formatForWeb?: boolean;
[param: string]: unknown;
};
export interface MetricRangePayloadProps {

View File

@ -10,3 +10,6 @@ export const getGraphType = (panelType: PANEL_TYPES): PANEL_TYPES => {
}
return panelType;
};
export const getGraphTypeForFormat = (panelType: PANEL_TYPES): PANEL_TYPES =>
panelType;

View File

@ -6195,11 +6195,11 @@ brace-expansion@^2.0.1:
balanced-match "^1.0.0"
braces@^3.0.2, braces@~3.0.2:
version "3.0.2"
resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz"
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
version "3.0.3"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
dependencies:
fill-range "^7.0.1"
fill-range "^7.1.1"
broadcast-channel@^3.4.1:
version "3.7.0"
@ -8808,10 +8808,10 @@ file-saver@^2.0.2:
resolved "https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.5.tgz#d61cfe2ce059f414d899e9dd6d4107ee25670c38"
integrity sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
fill-range@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
dependencies:
to-regex-range "^5.0.1"
@ -13705,13 +13705,14 @@ postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21,
picocolors "^1.0.0"
source-map-js "^1.2.0"
posthog-js@1.140.1:
version "1.140.1"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.140.1.tgz#34efc0d326fa5fcf7950106f350fb4f0e73b2da6"
integrity sha512-UeKuAtQSvbzmTCzNVaauku8F194EYwAP33WrRrWZlDlMNbMy7GKcZOgKbr7jZqnha7FlVlHrWk+Rpyr1zCFhPQ==
posthog-js@1.142.1:
version "1.142.1"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.142.1.tgz#3b91229732938c5c76b5ee6d410698a267e073e9"
integrity sha512-yqeWTWitlb0sCaH5v6s7UJ+pPspzf/lkzPaSE5pMMXRM2i2KNsMoZEAZqbPCW8fQ8QL6lHs6d8PLjHrvbR288w==
dependencies:
fflate "^0.4.8"
preact "^10.19.3"
web-vitals "^4.0.1"
preact@^10.19.3:
version "10.22.0"
@ -17218,6 +17219,11 @@ web-vitals@^0.2.4:
resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-0.2.4.tgz"
integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg==
web-vitals@^4.0.1:
version "4.2.0"
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-4.2.0.tgz#008949ab79717a68ccaaa3c4371cbc7bbbd78a92"
integrity sha512-ohj72kbtVWCpKYMxcbJ+xaOBV3En76hW47j52dG+tEGG36LZQgfFw5yHl9xyjmosy3XUMn8d/GBUAy4YPM839w==
web-worker@^1.2.0:
version "1.2.0"
resolved "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz"
@ -17632,14 +17638,14 @@ write-file-atomic@^4.0.2:
signal-exit "^3.0.7"
ws@^7.3.1, ws@^7.4.6:
version "7.5.9"
resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
version "7.5.10"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
ws@^8.13.0:
version "8.13.0"
resolved "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz"
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
version "8.17.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
xhr-request@^1.0.1:
version "1.1.0"

2
go.mod
View File

@ -6,7 +6,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.102.0
github.com/SigNoz/signoz-otel-collector v0.102.1
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3

8
go.sum
View File

@ -64,8 +64,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/prometheus v1.11.1 h1:roM8ugYf4UxaeKKujEeBvoX7ybq3IrS+TB26KiRtIJg=
github.com/SigNoz/prometheus v1.11.1/go.mod h1:uv4mQwZQtx7y4GQ6EdHOi8Wsk07uHNn2XHd1zM85m6I=
github.com/SigNoz/signoz-otel-collector v0.102.0 h1:v6ap+gdvrKklMwU+M9FJgrn28vN0YxrINl3kvdcLonA=
github.com/SigNoz/signoz-otel-collector v0.102.0/go.mod h1:kCx5BfzDujq6C0+kotiqLp5COG2ut4Cb039+55rbWE0=
github.com/SigNoz/signoz-otel-collector v0.102.1 h1:RXzs/dA9IMFGi6mXecEFVvShWfilqx5cCEXmzzvVfK0=
github.com/SigNoz/signoz-otel-collector v0.102.1/go.mod h1:ISAXYhZenojCWg6CdDJtPMpfS6Zwc08+uoxH25tc6Y0=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
@ -378,8 +378,8 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA=
github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=

View File

@ -53,7 +53,6 @@ import (
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/rules"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
)
@ -1942,7 +1941,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
end := time.Now()
zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
searchSpansResult := []model.SearchSpansResult{{
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
Events: make([][]interface{}, len(searchScanResponses)),
IsSubTree: false,
},
@ -1993,8 +1992,8 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
}
}
searchSpansResult[0].StartTimestampMillis = startTime - (durationNano/1000000)
searchSpansResult[0].EndTimestampMillis = endTime + (durationNano/1000000)
searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)
return &searchSpansResult, nil
}
@ -3420,36 +3419,6 @@ func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
}
}
func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
alertsInfo := model.AlertsInfo{}
// fetch alerts from rules db
query := "SELECT data FROM rules"
var alertsData []string
err := r.localDB.Select(&alertsData, query)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return &alertsInfo, err
}
for _, alert := range alertsData {
var rule rules.GettableRule
err = json.Unmarshal([]byte(alert), &rule)
if err != nil {
zap.L().Error("invalid rule data", zap.Error(err))
continue
}
if rule.AlertType == "LOGS_BASED_ALERT" {
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
} else if rule.AlertType == "METRIC_BASED_ALERT" {
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
} else if rule.AlertType == "TRACES_BASED_ALERT" {
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
}
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
}
return &alertsInfo, nil
}
func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) {
savedViewsInfo := model.SavedViewsInfo{}
savedViews, err := explorer.GetViews()
@ -4434,8 +4403,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
case *time.Time:
point.Timestamp = v.UnixMilli()
case *float64, *float32:
isValidPoint = true
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = float64(reflect.ValueOf(v).Elem().Float())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
@ -4447,9 +4416,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
case **float64, **float32:
val := reflect.ValueOf(v)
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
isValidPoint = true
value := reflect.ValueOf(v).Elem().Elem().Float()
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = value
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", value))
@ -4460,8 +4429,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
}
}
case *uint, *uint8, *uint64, *uint16, *uint32:
isValidPoint = true
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
@ -4473,9 +4442,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
case **uint, **uint8, **uint64, **uint16, **uint32:
val := reflect.ValueOf(v)
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
isValidPoint = true
value := reflect.ValueOf(v).Elem().Elem().Uint()
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = float64(value)
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", value))
@ -4486,8 +4455,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
}
}
case *int, *int8, *int16, *int32, *int64:
isValidPoint = true
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = float64(reflect.ValueOf(v).Elem().Int())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
@ -4499,9 +4468,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
case **int, **int8, **int16, **int32, **int64:
val := reflect.ValueOf(v)
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
isValidPoint = true
value := reflect.ValueOf(v).Elem().Elem().Int()
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
isValidPoint = true
point.Value = float64(value)
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", value))

View File

@ -142,6 +142,11 @@ func checkDuplicateString(pipeline []string) bool {
for _, processor := range pipeline {
name := processor
if _, ok := exists[name]; ok {
zap.L().Error(
"duplicate processor name detected in generated collector config for log pipelines",
zap.String("processor", processor),
zap.Any("pipeline", pipeline),
)
return true
}

View File

@ -5,7 +5,10 @@ import (
"testing"
. "github.com/smartystreets/goconvey/convey"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"gopkg.in/yaml.v3"
)
var buildProcessorTestData = []struct {
@ -204,3 +207,89 @@ func TestBuildLogsPipeline(t *testing.T) {
})
}
}
func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *testing.T) {
require := require.New(t)
baseConf := []byte(`
receivers:
memory:
id: in-memory-receiver
exporters:
memory:
id: in-memory-exporter
service:
pipelines:
logs:
receivers:
- memory
processors: []
exporters:
- memory
`)
makeTestPipeline := func(name string, alias string) Pipeline {
return Pipeline{
OrderId: 1,
Name: name,
Alias: alias,
Enabled: true,
Filter: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "method",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
},
Operator: "=",
Value: "GET",
},
},
},
Config: []PipelineOperator{
{
ID: "regex",
Type: "regex_parser",
Enabled: true,
Name: "regex parser",
ParseFrom: "attributes.test_regex_target",
ParseTo: "attributes",
Regex: `^\s*(?P<json_data>{.*})\s*$`,
},
},
}
}
testPipelines := []Pipeline{
makeTestPipeline("test pipeline 1", "pipeline-alias"),
makeTestPipeline("test pipeline 2", "pipeline-alias"),
}
recommendedConfYaml, apiErr := GenerateCollectorConfigWithPipelines(
baseConf, testPipelines,
)
require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation: %v", apiErr))
var recommendedConf map[string]interface{}
err := yaml.Unmarshal(recommendedConfYaml, &recommendedConf)
require.Nil(err, "couldn't unmarshal recommended config")
logsProcessors := recommendedConf["service"].(map[string]any)["pipelines"].(map[string]any)["logs"].(map[string]any)["processors"].([]any)
require.Equal(
len(logsProcessors), len(testPipelines),
"test pipelines not included in recommended config as expected",
)
recommendedConfYaml2, apiErr := GenerateCollectorConfigWithPipelines(
baseConf, testPipelines,
)
require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation again: %v", apiErr))
require.Equal(
string(recommendedConfYaml), string(recommendedConfYaml2),
"collector config should not change across recommendations for same set of pipelines",
)
}

View File

@ -24,7 +24,7 @@ func CollectorConfProcessorName(p Pipeline) string {
func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
processors := map[string]interface{}{}
names := []string{}
for _, v := range pipelines {
for pipelineIdx, v := range pipelines {
if !v.Enabled {
continue
}
@ -70,6 +70,12 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
Operators: v.Config,
}
name := CollectorConfProcessorName(v)
// Ensure name is unique
if _, nameExists := processors[name]; nameExists {
name = fmt.Sprintf("%s-%d", name, pipelineIdx)
}
processors[name] = processor
names = append(names, name)
}

View File

@ -803,76 +803,3 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
_, test2Exists := result[0].Attributes_string["test2"]
require.False(test2Exists)
}
func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
// TODO(Raj): Remove this after dots are supported
require := require.New(t)
testPipeline := Pipeline{
OrderId: 1,
Name: "pipeline1",
Alias: "pipeline1",
Enabled: true,
Filter: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "k8s_deployment_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: "=",
Value: "ingress",
},
},
},
Config: []PipelineOperator{
{
ID: "add",
Type: "add",
Enabled: true,
Name: "add",
Field: "attributes.test",
Value: "test-value",
},
},
}
testLogs := []model.SignozLog{{
Timestamp: uint64(time.Now().UnixNano()),
Body: "test log",
Attributes_string: map[string]string{},
Resources_string: map[string]string{
"k8s_deployment_name": "ingress",
},
SeverityText: entry.Info.String(),
SeverityNumber: uint8(entry.Info),
SpanID: "",
TraceID: "",
}, {
Timestamp: uint64(time.Now().UnixNano()),
Body: "test log",
Attributes_string: map[string]string{},
Resources_string: map[string]string{
"k8s.deployment.name": "ingress",
},
SeverityText: entry.Info.String(),
SeverityNumber: uint8(entry.Info),
SpanID: "",
TraceID: "",
}}
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(),
[]Pipeline{testPipeline},
testLogs,
)
require.Nil(err)
require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
require.Equal(2, len(result))
for _, processedLog := range result {
require.Equal(processedLog.Attributes_string["test"], "test-value")
}
}

View File

@ -58,7 +58,9 @@ func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {
res.OrderBy = val[0]
}
if val, ok := params[ORDER]; ok {
res.Order = val[0]
if val[0] == ASC || val[0] == DESC {
res.Order = val[0]
}
}
if val, ok := params["q"]; ok {
res.Query = val[0]

View File

@ -1,6 +1,8 @@
package logs
import (
"net/http"
"net/http/httptest"
"testing"
. "github.com/smartystreets/goconvey/convey"
@ -432,3 +434,51 @@ func TestGenerateSQLQuery(t *testing.T) {
})
}
}
var parseLogFilterParams = []struct {
Name string
ReqParams string
ExpectedLogFilterParams *model.LogsFilterParams
}{
{
Name: "test with proper order by",
ReqParams: "order=desc&q=service.name='myservice'&limit=10",
ExpectedLogFilterParams: &model.LogsFilterParams{
Limit: 10,
OrderBy: "timestamp",
Order: DESC,
Query: "service.name='myservice'",
},
},
{
Name: "test with proper order by asc",
ReqParams: "order=asc&q=service.name='myservice'&limit=10",
ExpectedLogFilterParams: &model.LogsFilterParams{
Limit: 10,
OrderBy: "timestamp",
Order: ASC,
Query: "service.name='myservice'",
},
},
{
Name: "test with incorrect order by",
ReqParams: "order=undefined&q=service.name='myservice'&limit=10",
ExpectedLogFilterParams: &model.LogsFilterParams{
Limit: 10,
OrderBy: "timestamp",
Order: DESC,
Query: "service.name='myservice'",
},
},
}
func TestParseLogFilterParams(t *testing.T) {
for _, test := range parseLogFilterParams {
Convey(test.Name, t, func() {
req := httptest.NewRequest(http.MethodGet, "/logs?"+test.ReqParams, nil)
params, err := ParseLogFilterParams(req)
So(err, ShouldBeNil)
So(params, ShouldEqual, test.ExpectedLogFilterParams)
})
}
}

View File

@ -73,7 +73,6 @@ type Reader interface {
LiveTailLogsV3(ctx context.Context, query string, timestampStart uint64, idStart string, client *v3.LogsLiveTailClient)
GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error)
GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error)
GetTotalSpans(ctx context.Context) (uint64, error)
GetTotalLogs(ctx context.Context) (uint64, error)

View File

@ -252,19 +252,22 @@ type Event struct {
//easyjson:json
type SearchSpanResponseItem struct {
TimeUnixNano uint64 `json:"timestamp"`
DurationNano int64 `json:"durationNano"`
SpanID string `json:"spanId"`
RootSpanID string `json:"rootSpanId"`
TraceID string `json:"traceId"`
HasError bool `json:"hasError"`
Kind int32 `json:"kind"`
ServiceName string `json:"serviceName"`
Name string `json:"name"`
References []OtelSpanRef `json:"references,omitempty"`
TagMap map[string]string `json:"tagMap"`
Events []string `json:"event"`
RootName string `json:"rootName"`
TimeUnixNano uint64 `json:"timestamp"`
DurationNano int64 `json:"durationNano"`
SpanID string `json:"spanId"`
RootSpanID string `json:"rootSpanId"`
TraceID string `json:"traceId"`
HasError bool `json:"hasError"`
Kind int32 `json:"kind"`
ServiceName string `json:"serviceName"`
Name string `json:"name"`
References []OtelSpanRef `json:"references,omitempty"`
TagMap map[string]string `json:"tagMap"`
Events []string `json:"event"`
RootName string `json:"rootName"`
StatusMessage string `json:"statusMessage"`
StatusCodeString string `json:"statusCodeString"`
SpanKind string `json:"spanKind"`
}
type OtelSpanRef struct {
@ -301,7 +304,7 @@ func (item *SearchSpanResponseItem) GetValues() []interface{} {
keys = append(keys, k)
values = append(values, v)
}
returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError}
returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError, item.StatusMessage, item.StatusCodeString, item.SpanKind}
return returnArray
}

View File

@ -118,6 +118,12 @@ func easyjson6ff3ac1dDecodeGoSignozIoSignozPkgQueryServiceModel(in *jlexer.Lexer
}
case "rootName":
out.RootName = string(in.String())
case "statusMessage":
out.StatusMessage = string(in.String())
case "statusCodeString":
out.StatusCodeString = string(in.String())
case "spanKind":
out.SpanKind = string(in.String())
default:
in.SkipRecursive()
}
@ -233,6 +239,21 @@ func easyjson6ff3ac1dEncodeGoSignozIoSignozPkgQueryServiceModel(out *jwriter.Wri
out.RawString(prefix)
out.String(string(in.RootName))
}
{
const prefix string = ",\"statusMessage\":"
out.RawString(prefix)
out.String(string(in.StatusMessage))
}
{
const prefix string = ",\"statusCodeString\":"
out.RawString(prefix)
out.String(string(in.StatusCodeString))
}
{
const prefix string = ",\"spanKind\":"
out.RawString(prefix)
out.String(string(in.SpanKind))
}
out.RawByte('}')
}

View File

@ -401,8 +401,11 @@ type CompositeQuery struct {
PromQueries map[string]*PromQuery `json:"promQueries,omitempty"`
PanelType PanelType `json:"panelType"`
QueryType QueryType `json:"queryType"`
Unit string `json:"unit,omitempty"`
FillGaps bool `json:"fillGaps,omitempty"`
// Unit for the time series data shown in the graph
// This is used in alerts to format the value and threshold
Unit string `json:"unit,omitempty"`
// FillGaps is used to fill the gaps in the time series data
FillGaps bool `json:"fillGaps,omitempty"`
}
func (c *CompositeQuery) EnabledQueries() int {
@ -990,10 +993,16 @@ type QueryRangeResponse struct {
type TableColumn struct {
Name string `json:"name"`
// QueryName is the name of the query that this column belongs to
QueryName string `json:"queryName"`
// IsValueColumn is true if this column is a value column
// i.e it is the column that contains the actual value that is being plotted
IsValueColumn bool `json:"isValueColumn"`
}
type TableRow struct {
Data []interface{} `json:"data"`
Data map[string]interface{} `json:"data"`
QueryName string `json:"-"`
}
type Table struct {

View File

@ -46,6 +46,9 @@ func fillGap(series *v3.Series, start, end, step int64) *v3.Series {
// TODO(srikanthccv): can WITH FILL be perfect substitute for all cases https://clickhouse.com/docs/en/sql-reference/statements/select/order-by#order-by-expr-with-fill-modifier
func FillGaps(results []*v3.Result, params *v3.QueryRangeParamsV3) {
if params.CompositeQuery.PanelType != v3.PanelTypeGraph {
return
}
for _, result := range results {
// A `result` item in `results` contains the query result for individual query.
// If there are no series in the result, we add empty series and `fillGap` adds all zeros

View File

@ -43,6 +43,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",
@ -82,6 +83,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",
@ -121,6 +123,7 @@ func TestFillGaps(t *testing.T) {
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeGraph,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",
@ -142,6 +145,39 @@ func TestFillGaps(t *testing.T) {
}),
},
},
{
name: "Single series with gaps and panel type is not graph",
results: []*v3.Result{
createResult("query1", []*v3.Series{
createSeries([]v3.Point{
{Timestamp: 1000, Value: 1.0},
{Timestamp: 3000, Value: 3.0},
}),
}),
},
params: &v3.QueryRangeParamsV3{
Start: 1000,
End: 5000,
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeList,
BuilderQueries: map[string]*v3.BuilderQuery{
"query1": {
QueryName: "query1",
Expression: "query1",
StepInterval: 1,
},
},
},
},
expected: []*v3.Result{
createResult("query1", []*v3.Series{
createSeries([]v3.Point{
{Timestamp: 1000, Value: 1.0},
{Timestamp: 3000, Value: 3.0},
}),
}),
},
},
}
// Execute test cases

View File

@ -2,6 +2,7 @@ package postprocess
import (
"fmt"
"math"
"sort"
"strings"
@ -9,20 +10,21 @@ import (
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
func getAutoColNameForQuery(queryName string, params *v3.QueryRangeParamsV3) string {
q := params.CompositeQuery.BuilderQueries[queryName]
if q.DataSource == v3.DataSourceTraces || q.DataSource == v3.DataSourceLogs {
if q.AggregateAttribute.Key != "" {
return fmt.Sprintf("%s(%s)", q.AggregateOperator, q.AggregateAttribute.Key)
}
return string(q.AggregateOperator)
} else if q.DataSource == v3.DataSourceMetrics {
if q.SpaceAggregation != "" && params.Version == "v4" {
return fmt.Sprintf("%s(%s)", q.SpaceAggregation, q.AggregateAttribute.Key)
}
return fmt.Sprintf("%s(%s)", q.AggregateOperator, q.AggregateAttribute.Key)
func roundToTwoDecimal(number float64) float64 {
// Handle very small numbers
if math.Abs(number) < 0.000001 {
return 0
}
return queryName
// Determine the number of decimal places to round to
decimalPlaces := 2
if math.Abs(number) < 0.01 {
decimalPlaces = int(math.Ceil(-math.Log10(math.Abs(number)))) + 1
}
// Round to the determined number of decimal places
scale := math.Pow(10, float64(decimalPlaces))
return math.Round(number*scale) / scale
}
func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRangeParamsV3) []*v3.Result {
@ -55,10 +57,10 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
// There will be one column for each label key and one column for each query name
columns := make([]*v3.TableColumn, 0, len(labelKeys)+len(results))
for _, key := range labelKeys {
columns = append(columns, &v3.TableColumn{Name: key})
columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
}
for _, result := range results {
columns = append(columns, &v3.TableColumn{Name: result.QueryName})
columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
}
// Create a map to store unique rows
@ -72,8 +74,8 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
// Create a key for the row based on labels
var keyParts []string
rowData := make([]interface{}, len(columns))
for i, key := range labelKeys {
rowData := make(map[string]interface{}, len(columns))
for _, key := range labelKeys {
value := "n/a"
for _, labels := range series.LabelsArray {
if v, ok := labels[key]; ok {
@ -82,21 +84,21 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
}
}
keyParts = append(keyParts, fmt.Sprintf("%s=%s", key, value))
rowData[i] = value
rowData[key] = value
}
rowKey := strings.Join(keyParts, ",")
// Get or create the row
row, ok := rowMap[rowKey]
if !ok {
row = &v3.TableRow{Data: rowData}
row = &v3.TableRow{Data: rowData, QueryName: result.QueryName}
rowMap[rowKey] = row
}
// Add the value for this query
for i, col := range columns {
for _, col := range columns {
if col.Name == result.QueryName {
row.Data[i] = series.Points[0].Value
row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
break
}
}
@ -106,11 +108,6 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
// Convert rowMap to a slice of TableRows
rows := make([]*v3.TableRow, 0, len(rowMap))
for _, row := range rowMap {
for i, value := range row.Data {
if value == nil {
row.Data[i] = "n/a"
}
}
rows = append(rows, row)
}
@ -122,11 +119,15 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
sort.Strings(queryNames)
// Sort rows based on OrderBy from BuilderQueries
sortRows(rows, columns, params.CompositeQuery.BuilderQueries, queryNames)
sortRows(rows, params.CompositeQuery.BuilderQueries, queryNames)
for _, column := range columns {
if _, exists := params.CompositeQuery.BuilderQueries[column.Name]; exists {
column.Name = getAutoColNameForQuery(column.Name, params)
for _, row := range rows {
for _, col := range columns {
if col.IsValueColumn {
if row.Data[col.Name] == nil {
row.Data[col.Name] = "n/a"
}
}
}
}
@ -141,9 +142,11 @@ func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRan
return []*v3.Result{&tableResult}
}
func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map[string]*v3.BuilderQuery, queryNames []string) {
sort.SliceStable(rows, func(i, j int) bool {
for _, queryName := range queryNames {
func sortRows(rows []*v3.TableRow, builderQueries map[string]*v3.BuilderQuery, queryNames []string) {
// use reverse order of queryNames
for i := len(queryNames) - 1; i >= 0; i-- {
queryName := queryNames[i]
sort.SliceStable(rows, func(i, j int) bool {
query := builderQueries[queryName]
orderByList := query.OrderBy
if len(orderByList) == 0 {
@ -155,23 +158,12 @@ func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map
if name == constants.SigNozOrderByValue {
name = queryName
}
colIndex := -1
for k, col := range columns {
if col.Name == name {
colIndex = k
break
}
}
if colIndex == -1 {
continue
}
valI := rows[i].Data[colIndex]
valJ := rows[j].Data[colIndex]
valI := rows[i].Data[name]
valJ := rows[j].Data[name]
// Handle "n/a" values
if valI == "n/a" && valJ == "n/a" {
continue
if valI == nil || valJ == nil {
return rows[i].QueryName < rows[j].QueryName
}
// Compare based on the data type
@ -211,9 +203,9 @@ func sortRows(rows []*v3.TableRow, columns []*v3.TableColumn, builderQueries map
}
}
}
}
return false
})
return false
})
}
}
func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
@ -248,11 +240,11 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
// So we create a column for each query name that has at least one point
columns := make([]*v3.TableColumn, 0)
for _, key := range labelKeys {
columns = append(columns, &v3.TableColumn{Name: key})
columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
}
for _, result := range results {
if len(result.Series) > 0 && len(result.Series[0].Points) > 0 {
columns = append(columns, &v3.TableColumn{Name: result.QueryName})
columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
}
}
@ -261,8 +253,8 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
for _, series := range result.Series {
// Create a key for the row based on labels
rowData := make([]interface{}, len(columns))
for i, key := range labelKeys {
rowData := make(map[string]interface{}, len(columns))
for _, key := range labelKeys {
value := "n/a"
for _, labels := range series.LabelsArray {
if v, ok := labels[key]; ok {
@ -270,16 +262,16 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
break
}
}
rowData[i] = value
rowData[key] = value
}
// Get or create the row
row := &v3.TableRow{Data: rowData}
row := &v3.TableRow{Data: rowData, QueryName: result.QueryName}
// Add the value for this query
for i, col := range columns {
for _, col := range columns {
if col.Name == result.QueryName && len(series.Points) > 0 {
row.Data[i] = series.Points[0].Value
row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
break
}
}
@ -287,6 +279,16 @@ func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
}
}
for _, row := range rows {
for _, col := range columns {
if col.IsValueColumn {
if row.Data[col.Name] == nil {
row.Data[col.Name] = "n/a"
}
}
}
}
// Create the final result
tableResult := v3.Result{
Table: &v3.Table{

View File

@ -1,6 +1,7 @@
package postprocess
import (
"bytes"
"encoding/json"
"reflect"
"testing"
@ -21,9 +22,9 @@ func TestSortRows(t *testing.T) {
{
name: "Sort by single numeric query, ascending order",
rows: []*v3.TableRow{
{Data: []interface{}{"service2", 20.0}},
{Data: []interface{}{"service1", 10.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -34,17 +35,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service1", 10.0}},
{Data: []interface{}{"service2", 20.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
},
{
name: "Sort by single numeric query, descending order",
rows: []*v3.TableRow{
{Data: []interface{}{"service2", 20.0}},
{Data: []interface{}{"service1", 10.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -55,17 +56,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service3", 30.0}},
{Data: []interface{}{"service2", 20.0}},
{Data: []interface{}{"service1", 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
},
},
{
name: "Sort by single string query, ascending order",
rows: []*v3.TableRow{
{Data: []interface{}{"service2", "b"}},
{Data: []interface{}{"service1", "c"}},
{Data: []interface{}{"service3", "a"}},
{Data: map[string]interface{}{"service": "service2", "A": "b"}},
{Data: map[string]interface{}{"service": "service1", "A": "c"}},
{Data: map[string]interface{}{"service": "service3", "A": "a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -76,18 +77,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service3", "a"}},
{Data: []interface{}{"service2", "b"}},
{Data: []interface{}{"service1", "c"}},
{Data: map[string]interface{}{"service": "service3", "A": "a"}},
{Data: map[string]interface{}{"service": "service2", "A": "b"}},
{Data: map[string]interface{}{"service": "service1", "A": "c"}},
},
},
{
name: "Sort with n/a values",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", 10.0, "n/a"}},
{Data: []interface{}{"service2", "n/a", 15.0}},
{Data: []interface{}{"service3", 30.0, 25.0}},
{Data: []interface{}{"service4", "n/a", "n/a"}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
{Data: map[string]interface{}{"service": "service4"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -100,43 +101,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A", "B"},
expected: []*v3.TableRow{
{Data: []interface{}{"service1", 10.0, "n/a"}},
{Data: []interface{}{"service3", 30.0, 25.0}},
{Data: []interface{}{"service4", "n/a", "n/a"}},
{Data: []interface{}{"service2", "n/a", 15.0}},
},
},
{
name: "Sort with different data types",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", "string", 10.0, true}},
{Data: []interface{}{"service2", 20.0, "string", false}},
{Data: []interface{}{"service3", true, 30.0, "string"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
{Name: "A"},
{Name: "B"},
{Name: "C"},
},
builderQueries: map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
"B": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}},
"C": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
},
queryNames: []string{"A", "B", "C"},
expected: []*v3.TableRow{
{Data: []interface{}{"service2", 20.0, "string", false}},
{Data: []interface{}{"service1", "string", 10.0, true}},
{Data: []interface{}{"service3", true, 30.0, "string"}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
{Data: map[string]interface{}{"service": "service4"}},
},
},
{
name: "Sort with SigNozOrderByValue",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -147,44 +123,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service3", 30.0}},
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
},
},
{
name: "Sort by multiple queries with mixed types",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", 10.0, "b", true}},
{Data: []interface{}{"service2", 20.0, "a", false}},
{Data: []interface{}{"service3", 10.0, "c", true}},
{Data: []interface{}{"service4", 20.0, "b", false}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
{Name: "A"},
{Name: "B"},
{Name: "C"},
},
builderQueries: map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
"B": {OrderBy: []v3.OrderBy{{ColumnName: "B", Order: "desc"}}},
"C": {OrderBy: []v3.OrderBy{{ColumnName: "C", Order: "asc"}}},
},
queryNames: []string{"A", "B", "C"},
expected: []*v3.TableRow{
{Data: []interface{}{"service3", 10.0, "c", true}},
{Data: []interface{}{"service1", 10.0, "b", true}},
{Data: []interface{}{"service4", 20.0, "b", false}},
{Data: []interface{}{"service2", 20.0, "a", false}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
},
},
{
name: "Sort with all n/a values",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", "n/a", "n/a"}},
{Data: []interface{}{"service2", "n/a", "n/a"}},
{Data: []interface{}{"service3", "n/a", "n/a"}},
{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -197,18 +146,18 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A", "B"},
expected: []*v3.TableRow{
{Data: []interface{}{"service1", "n/a", "n/a"}},
{Data: []interface{}{"service2", "n/a", "n/a"}},
{Data: []interface{}{"service3", "n/a", "n/a"}},
{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
},
},
{
name: "Sort with negative numbers",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", -10.0}},
{Data: []interface{}{"service2", 20.0}},
{Data: []interface{}{"service3", -30.0}},
{Data: []interface{}{"service4", 0.0}},
{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -219,19 +168,19 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service3", -30.0}},
{Data: []interface{}{"service1", -10.0}},
{Data: []interface{}{"service4", 0.0}},
{Data: []interface{}{"service2", 20.0}},
{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
},
},
{
name: "Sort with mixed case strings",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", "Apple"}},
{Data: []interface{}{"service2", "banana"}},
{Data: []interface{}{"service3", "Cherry"}},
{Data: []interface{}{"service4", "date"}},
{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
{Data: map[string]interface{}{"service": "service4", "A": "date"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -242,19 +191,19 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service1", "Apple"}},
{Data: []interface{}{"service3", "Cherry"}},
{Data: []interface{}{"service2", "banana"}},
{Data: []interface{}{"service4", "date"}},
{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
{Data: map[string]interface{}{"service": "service4", "A": "date"}},
},
},
{
name: "Sort with empty strings",
rows: []*v3.TableRow{
{Data: []interface{}{"service1", ""}},
{Data: []interface{}{"service2", "b"}},
{Data: []interface{}{"service3", ""}},
{Data: []interface{}{"service4", "a"}},
{Data: map[string]interface{}{"service": "service1", "A": ""}},
{Data: map[string]interface{}{"service": "service2", "A": "b"}},
{Data: map[string]interface{}{"service": "service3", "A": ""}},
{Data: map[string]interface{}{"service": "service4", "A": "a"}},
},
columns: []*v3.TableColumn{
{Name: "service_name"},
@ -265,17 +214,17 @@ func TestSortRows(t *testing.T) {
},
queryNames: []string{"A"},
expected: []*v3.TableRow{
{Data: []interface{}{"service1", ""}},
{Data: []interface{}{"service3", ""}},
{Data: []interface{}{"service4", "a"}},
{Data: []interface{}{"service2", "b"}},
{Data: map[string]interface{}{"service": "service1", "A": ""}},
{Data: map[string]interface{}{"service": "service3", "A": ""}},
{Data: map[string]interface{}{"service": "service4", "A": "a"}},
{Data: map[string]interface{}{"service": "service2", "A": "b"}},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
sortRows(tt.rows, tt.columns, tt.builderQueries, tt.queryNames)
sortRows(tt.rows, tt.builderQueries, tt.queryNames)
if !reflect.DeepEqual(tt.rows, tt.expected) {
exp, _ := json.Marshal(tt.expected)
got, _ := json.Marshal(tt.rows)
@ -287,24 +236,20 @@ func TestSortRows(t *testing.T) {
func TestSortRowsWithEmptyQueries(t *testing.T) {
rows := []*v3.TableRow{
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
{Data: []interface{}{"service3", 30.0}},
}
columns := []*v3.TableColumn{
{Name: "service_name"},
{Name: "A"},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
builderQueries := map[string]*v3.BuilderQuery{}
queryNames := []string{}
sortRows(rows, columns, builderQueries, queryNames)
sortRows(rows, builderQueries, queryNames)
// Expect the original order to be maintained
expected := []*v3.TableRow{
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
if !reflect.DeepEqual(rows, expected) {
@ -314,26 +259,22 @@ func TestSortRowsWithEmptyQueries(t *testing.T) {
func TestSortRowsWithInvalidColumnName(t *testing.T) {
rows := []*v3.TableRow{
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
{Data: []interface{}{"service3", 30.0}},
}
columns := []*v3.TableColumn{
{Name: "service_name"},
{Name: "A"},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
builderQueries := map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: "InvalidColumn", Order: "asc"}}},
}
queryNames := []string{"A"}
sortRows(rows, columns, builderQueries, queryNames)
sortRows(rows, builderQueries, queryNames)
// Expect the original order to be maintained
expected := []*v3.TableRow{
{Data: []interface{}{"service1", 20.0}},
{Data: []interface{}{"service2", 10.0}},
{Data: []interface{}{"service3", 30.0}},
{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
}
if !reflect.DeepEqual(rows, expected) {
@ -343,27 +284,22 @@ func TestSortRowsWithInvalidColumnName(t *testing.T) {
func TestSortRowsStability(t *testing.T) {
rows := []*v3.TableRow{
{Data: []interface{}{"service1", 10.0, "a"}},
{Data: []interface{}{"service2", 10.0, "b"}},
{Data: []interface{}{"service3", 10.0, "c"}},
}
columns := []*v3.TableColumn{
{Name: "service_name"},
{Name: "A"},
{Name: "B"},
{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
}
builderQueries := map[string]*v3.BuilderQuery{
"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
}
queryNames := []string{"A"}
sortRows(rows, columns, builderQueries, queryNames)
sortRows(rows, builderQueries, queryNames)
// Expect the original order to be maintained for equal values
expected := []*v3.TableRow{
{Data: []interface{}{"service1", 10.0, "a"}},
{Data: []interface{}{"service2", 10.0, "b"}},
{Data: []interface{}{"service3", 10.0, "c"}},
{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
}
if !reflect.DeepEqual(rows, expected) {
@ -404,10 +340,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "A"},
{Name: "A", QueryName: "A", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"frontend", 10.0}},
{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
},
},
},
@ -421,7 +357,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Series: []*v3.Series{
{
LabelsArray: []map[string]string{
{"service": "frontend", "env": "prod"},
{"service": "frontend"},
{"env": "prod"},
},
Points: []v3.Point{
{Value: 10.0},
@ -429,7 +366,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
},
{
LabelsArray: []map[string]string{
{"service": "backend", "env": "prod"},
{"service": "backend"},
{"env": "prod"},
},
Points: []v3.Point{
{Value: 20.0},
@ -442,7 +380,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Series: []*v3.Series{
{
LabelsArray: []map[string]string{
{"service": "frontend", "env": "prod"},
{"service": "frontend"},
{"env": "prod"},
},
Points: []v3.Point{
{Value: 15.0},
@ -450,7 +389,8 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
},
{
LabelsArray: []map[string]string{
{"service": "backend", "env": "prod"},
{"service": "backend"},
{"env": "prod"},
},
Points: []v3.Point{
{Value: 25.0},
@ -465,14 +405,14 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "env"},
{Name: "A"},
{Name: "B"},
{Name: "A", QueryName: "A", IsValueColumn: true},
{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"frontend", "prod", 10.0, nil}},
{Data: []interface{}{"backend", "prod", 20.0, nil}},
{Data: []interface{}{"frontend", "prod", nil, 15.0}},
{Data: []interface{}{"backend", "prod", nil, 25.0}},
{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": 10.0, "B": "n/a"}},
{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": 20.0, "B": "n/a"}},
{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": "n/a", "B": 15.0}},
{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": "n/a", "B": 25.0}},
},
},
},
@ -514,12 +454,12 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "env"},
{Name: "A"},
{Name: "B"},
{Name: "A", QueryName: "A", IsValueColumn: true},
{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"frontend", "n/a", 10.0, nil}},
{Data: []interface{}{"n/a", "prod", nil, 20.0}},
{Data: map[string]interface{}{"service": "frontend", "env": "n/a", "A": 10.0, "B": "n/a"}},
{Data: map[string]interface{}{"service": "n/a", "env": "prod", "A": "n/a", "B": 20.0}},
},
},
},
@ -551,10 +491,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "A"},
{Name: "A", QueryName: "A", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"frontend", 10.0}},
{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
},
},
},
@ -593,11 +533,11 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "B"},
{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"frontend", nil}},
{Data: []interface{}{"backend", 20.0}},
{Data: map[string]interface{}{"service": "frontend", "B": "n/a"}},
{Data: map[string]interface{}{"service": "backend", "B": 20.0}},
},
},
},
@ -608,8 +548,10 @@ func TestTransformToTableForClickHouseQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := TransformToTableForClickHouseQueries(tt.input)
if !reflect.DeepEqual(result, tt.expected) {
t.Errorf("TransformToTableForClickHouseQueries() = %v, want %v", result, tt.expected)
exp, _ := json.Marshal(tt.expected)
got, _ := json.Marshal(result)
if !bytes.Equal(got, exp) {
t.Errorf("TransformToTableForClickHouseQueries() = %v, want %v", string(got), string(exp))
}
})
}
@ -650,19 +592,21 @@ func TestTransformToTableForClickHouseQueriesSorting(t *testing.T) {
Table: &v3.Table{
Columns: []*v3.TableColumn{
{Name: "service"},
{Name: "A"},
{Name: "B"},
{Name: "A", QueryName: "A", IsValueColumn: true},
{Name: "B", QueryName: "B", IsValueColumn: true},
},
Rows: []*v3.TableRow{
{Data: []interface{}{"backend", 20.0, nil}},
{Data: []interface{}{"frontend", nil, 10.0}},
{Data: map[string]interface{}{"service": "backend", "A": 20.0, "B": "n/a"}},
{Data: map[string]interface{}{"service": "frontend", "A": "n/a", "B": 10.0}},
},
},
},
}
result := TransformToTableForClickHouseQueries(input)
if !reflect.DeepEqual(result, expected) {
t.Errorf("TransformToTableForClickHouseQueries() sorting test failed. Got %v, want %v", result, expected)
exp, _ := json.Marshal(expected)
got, _ := json.Marshal(result)
if !bytes.Equal(got, exp) {
t.Errorf("TransformToTableForClickHouseQueries() sorting test failed. Got %v, want %v", string(got), string(exp))
}
}

View File

@ -53,49 +53,35 @@ func Parse(filters *v3.FilterSet) (string, error) {
return "", fmt.Errorf("operator not supported")
}
// TODO(Raj): Remove the use of dot replaced alternative when key
// contains underscore after dots are supported in keys
names := []string{getName(v.Key)}
if strings.Contains(v.Key.Key, "_") {
dotKey := v.Key
dotKey.Key = strings.Replace(v.Key.Key, "_", ".", -1)
names = append(names, getName(dotKey))
}
name := getName(v.Key)
filterParts := []string{}
for _, name := range names {
var filter string
var filter string
switch v.Operator {
// uncomment following lines when new version of expr is used
// case v3.FilterOperatorIn, v3.FilterOperatorNotIn:
// filter = fmt.Sprintf("%s %s list%s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
switch v.Operator {
// uncomment following lines when new version of expr is used
// case v3.FilterOperatorIn, v3.FilterOperatorNotIn:
// filter = fmt.Sprintf("%s %s list%s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
default:
filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
default:
filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
if v.Operator == v3.FilterOperatorContains || v.Operator == v3.FilterOperatorNotContains {
// `contains` and `ncontains` should be case insensitive to match how they work when querying logs.
filter = fmt.Sprintf(
"lower(%s) %s lower(%s)",
name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value),
)
}
// Avoid running operators on nil values
if v.Operator != v3.FilterOperatorEqual && v.Operator != v3.FilterOperatorNotEqual {
filter = fmt.Sprintf("%s != nil && %s", name, filter)
}
if v.Operator == v3.FilterOperatorContains || v.Operator == v3.FilterOperatorNotContains {
// `contains` and `ncontains` should be case insensitive to match how they work when querying logs.
filter = fmt.Sprintf(
"lower(%s) %s lower(%s)",
name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value),
)
}
filterParts = append(filterParts, filter)
// Avoid running operators on nil values
if v.Operator != v3.FilterOperatorEqual && v.Operator != v3.FilterOperatorNotEqual {
filter = fmt.Sprintf("%s != nil && %s", name, filter)
}
}
filter := strings.Join(filterParts, " || ")
// check if the filter is a correct expression language
_, err := expr.Compile(filter)
if err != nil {

View File

@ -2,6 +2,7 @@ package rules
import (
"context"
"encoding/json"
"fmt"
"strconv"
"time"
@ -9,6 +10,7 @@ import (
"github.com/jmoiron/sqlx"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@ -43,6 +45,9 @@ type RuleDB interface {
// GetAllPlannedMaintenance fetches the maintenance definitions from db
GetAllPlannedMaintenance(ctx context.Context) ([]PlannedMaintenance, error)
// used for internal telemetry
GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
}
type StoredRule struct {
@ -295,3 +300,33 @@ func (r *ruleDB) EditPlannedMaintenance(ctx context.Context, maintenance Planned
return "", nil
}
func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
alertsInfo := model.AlertsInfo{}
// fetch alerts from rules db
query := "SELECT data FROM rules"
var alertsData []string
err := r.Select(&alertsData, query)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return &alertsInfo, err
}
for _, alert := range alertsData {
var rule GettableRule
err = json.Unmarshal([]byte(alert), &rule)
if err != nil {
zap.L().Error("invalid rule data", zap.Error(err))
continue
}
if rule.AlertType == "LOGS_BASED_ALERT" {
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
} else if rule.AlertType == "METRIC_BASED_ALERT" {
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
} else if rule.AlertType == "TRACES_BASED_ALERT" {
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
}
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
}
return &alertsInfo, nil
}

View File

@ -25,6 +25,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils/labels"
)
@ -112,6 +113,8 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
db := NewRuleDB(o.DBConn)
telemetry.GetInstance().SetAlertsInfoCallback(db.GetAlertsInfo)
m := &Manager{
tasks: map[string]Task{},
rules: map[string]Rule{},

View File

@ -111,13 +111,22 @@ func (r *PromRule) Condition() *RuleCondition {
return r.ruleCondition
}
// targetVal returns the target value for the rule condition
// when the y-axis and target units are non-empty, it
// converts the target value to the y-axis unit
func (r *PromRule) targetVal() float64 {
if r.ruleCondition == nil || r.ruleCondition.Target == nil {
return 0
}
// get the converter for the target unit
unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit))
value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit()))
// convert the target value to the y-axis unit
value := unitConverter.Convert(converter.Value{
F: *r.ruleCondition.Target,
U: converter.Unit(r.ruleCondition.TargetUnit),
}, converter.Unit(r.Unit()))
return value.F
}
@ -370,8 +379,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
}
zap.L().Debug("alerting for series", zap.String("name", r.Name()), zap.Any("series", series))
thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
threshold := valueFormatter.Format(r.targetVal(), r.Unit())
tmplData := AlertTemplateData(l, valueFormatter.Format(alertSmpl.F, r.Unit()), threshold)
// Inject some convenience variables that are easier to remember for users

View File

@ -165,13 +165,22 @@ func (r *ThresholdRule) PreferredChannels() []string {
return r.preferredChannels
}
// targetVal returns the target value for the rule condition
// when the y-axis and target units are non-empty, it
// converts the target value to the y-axis unit
func (r *ThresholdRule) targetVal() float64 {
if r.ruleCondition == nil || r.ruleCondition.Target == nil {
return 0
}
// get the converter for the target unit
unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit))
value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit()))
// convert the target value to the y-axis unit
value := unitConverter.Convert(converter.Value{
F: *r.ruleCondition.Target,
U: converter.Unit(r.ruleCondition.TargetUnit),
}, converter.Unit(r.Unit()))
return value.F
}
@ -874,8 +883,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
}
value := valueFormatter.Format(smpl.V, r.Unit())
thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
threshold := valueFormatter.Format(r.targetVal(), r.Unit())
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
tmplData := AlertTemplateData(l, value, threshold)

View File

@ -185,6 +185,12 @@ type Telemetry struct {
patTokenUser bool
countUsers int8
mutex sync.RWMutex
alertsInfoCallback func(ctx context.Context) (*model.AlertsInfo, error)
}
func (a *Telemetry) SetAlertsInfoCallback(callback func(ctx context.Context) (*model.AlertsInfo, error)) {
a.alertsInfoCallback = callback
}
func createTelemetry() {
@ -310,7 +316,7 @@ func createTelemetry() {
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, user.Email, false, false)
}
}
alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background())
alertsInfo, err := telemetry.alertsInfoCallback(context.Background())
if err == nil {
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
if err == nil {

View File

@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:0.102.0
image: signoz/signoz-otel-collector:0.102.1
container_name: signoz-otel-collector
command:
[