Merge remote-tracking branch 'upstream/develop' into feat/logs

nityanandagohain committed 0807a0ae26 on 2022-08-04 17:32:45 +05:30
77 changed files with 1463 additions and 648 deletions

.github/CODEOWNERS
View File

@@ -4,4 +4,4 @@
 * @ankitnayan
 /frontend/ @palashgdev @pranshuchittora
 /deploy/ @prashant-shahi
-/pkg/query-service/ @srikanthccv @makeavish @nityanandagohain
+/pkg/query-service/ @srikanthccv

View File

@@ -5,9 +5,11 @@ receivers:
 # otel-collector internal metrics
 - job_name: "otel-collector"
 scrape_interval: 60s
-static_configs:
-- targets:
-- otel-collector:8888
+dns_sd_configs:
+- names:
+- 'tasks.otel-collector'
+type: 'A'
+port: 8888
 # otel-collector-metrics internal metrics
 - job_name: "otel-collector-metrics"
 scrape_interval: 60s
@@ -17,9 +19,11 @@ receivers:
 # SigNoz span metrics
 - job_name: "signozspanmetrics-collector"
 scrape_interval: 60s
-static_configs:
-- targets:
-- otel-collector:8889
+dns_sd_configs:
+- names:
+- 'tasks.otel-collector'
+type: 'A'
+port: 8889
 processors:
 batch:

View File

@@ -1,4 +1,11 @@
 {
+"target_missing": "Please enter a threshold to proceed",
+"rule_test_fired": "Test notification sent successfully",
+"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
+"button_testrule": "Test Notification",
+"label_channel_select": "Notification Channels",
+"placeholder_channel_select": "select one or more channels",
+"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
 "preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
 "preview_chart_threshold_label": "Threshold",
 "placeholder_label_key_pair": "Click here to enter a label (key value pairs)",

View File

@@ -1,4 +1,14 @@
 {
+"channel_delete_unexp_error": "Something went wrong",
+"channel_delete_success": "Channel Deleted Successfully",
+"column_channel_name": "Name",
+"column_channel_type": "Type",
+"column_channel_action": "Action",
+"column_channel_edit": "Edit",
+"button_new_channel": "New Alert Channel",
+"tooltip_notification_channels": "More details on how to setting notification channels",
+"sending_channels_note": "The alerts will be sent to all the configured channels.",
+"loading_channels_message": "Loading Channels..",
 "page_title_create": "New Notification Channels",
 "page_title_edit": "Edit Notification Channels",
 "button_save_channel": "Save",

View File

@@ -1,4 +1,11 @@
 {
+"target_missing": "Please enter a threshold to proceed",
+"rule_test_fired": "Test notification sent successfully",
+"no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.",
+"button_testrule": "Test Notification",
+"label_channel_select": "Notification Channels",
+"placeholder_channel_select": "select one or more channels",
+"channel_select_tooltip": "Leave empty to send this alert on all the configured channels",
 "preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.",
 "preview_chart_threshold_label": "Threshold",
 "placeholder_label_key_pair": "Click here to enter a label (key value pairs)",

View File

@@ -1,4 +1,14 @@
 {
+"channel_delete_unexp_error": "Something went wrong",
+"channel_delete_success": "Channel Deleted Successfully",
+"column_channel_name": "Name",
+"column_channel_type": "Type",
+"column_channel_action": "Action",
+"column_channel_edit": "Edit",
+"button_new_channel": "New Alert Channel",
+"tooltip_notification_channels": "More details on how to setting notification channels",
+"sending_channels_note": "The alerts will be sent to all the configured channels.",
+"loading_channels_message": "Loading Channels..",
 "page_title_create": "New Notification Channels",
 "page_title_edit": "Edit Notification Channels",
 "button_save_channel": "Save",

View File

@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/patch';
const patch = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.patch(`/rules/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default patch;
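
For reference, a minimal sketch of how this helper can be invoked; the caller name and the rule id are illustrative, and ToggleAlertState later in this diff calls it with the same shape.

import patchAlert from 'api/alerts/patch';

// Hypothetical caller: flips a rule's disabled flag via PATCH /rules/{id}.
async function setRuleDisabled(id: number, disabled: boolean): Promise<boolean> {
  const response = await patchAlert({ id, data: { disabled } });
  // On statusCode 200, response.payload carries the updated rule returned by the API.
  return response.statusCode === 200;
}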

View File

@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/testAlert';
const testAlert = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/testRule', {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default testAlert;

View File

@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/metrics/getTopLevelOperations';
const getTopLevelOperations = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post(`/service/top_level_operations`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data[props.service],
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getTopLevelOperations;
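
A note on the shape this helper assumes: the /service/top_level_operations endpoint appears to return a map keyed by service name, and the helper hands back only the entry for props.service. An illustrative, made-up example of that shape:

// Assumed response body for /service/top_level_operations (service and operation names invented).
const exampleResponse: Record<string, string[]> = {
  frontend: ['HTTP GET /dispatch', 'HTTP GET /config'],
  driver: ['FindNearest'],
};
// For props.service === 'frontend', the returned payload would be exampleResponse.frontend.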

View File

@@ -2,13 +2,13 @@ import axios from 'api';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
 import { ErrorResponse, SuccessResponse } from 'types/api';
-import { PayloadProps, Props } from 'types/api/metrics/getTopEndPoints';
+import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
-const getTopEndPoints = async (
+const getTopOperations = async (
 props: Props,
 ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
 try {
-const response = await axios.post(`/service/top_endpoints`, {
+const response = await axios.post(`/service/top_operations`, {
 start: `${props.start}`,
 end: `${props.end}`,
 service: props.service,
@@ -26,4 +26,4 @@ const getTopEndPoints = async (
 }
 };
-export default getTopEndPoints;
+export default getTopOperations;

View File

@@ -5,6 +5,7 @@ import ROUTES from 'constants/routes';
 import useComponentPermission from 'hooks/useComponentPermission';
 import history from 'lib/history';
 import React, { useCallback, useState } from 'react';
+import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import { generatePath } from 'react-router-dom';
 import { AppState } from 'store/reducers';
@@ -14,6 +15,7 @@ import AppReducer from 'types/reducer/app';
 import Delete from './Delete';
 function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
+const { t } = useTranslation(['channels']);
 const [notifications, Element] = notification.useNotification();
 const [channels, setChannels] = useState<Channels[]>(allChannels);
 const { role } = useSelector<AppState, AppReducer>((state) => state.app);
@@ -29,12 +31,12 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
 const columns: ColumnsType<Channels> = [
 {
-title: 'Name',
+title: t('column_channel_name'),
 dataIndex: 'name',
 key: 'name',
 },
 {
-title: 'Type',
+title: t('column_channel_type'),
 dataIndex: 'type',
 key: 'type',
 },
@@ -42,14 +44,14 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
 if (action) {
 columns.push({
-title: 'Action',
+title: t('column_channel_action'),
 dataIndex: 'id',
 key: 'action',
 align: 'center',
 render: (id: string): JSX.Element => (
 <>
 <Button onClick={(): void => onClickEditHandler(id)} type="link">
-Edit
+{t('column_channel_edit')}
 </Button>
 <Delete id={id} setChannels={setChannels} notifications={notifications} />
 </>

View File

@@ -1,29 +1,31 @@
 import { Button } from 'antd';
 import { NotificationInstance } from 'antd/lib/notification';
-import deleteAlert from 'api/channels/delete';
+import deleteChannel from 'api/channels/delete';
 import React, { useState } from 'react';
+import { useTranslation } from 'react-i18next';
 import { Channels } from 'types/api/channels/getAll';
 function Delete({ notifications, setChannels, id }: DeleteProps): JSX.Element {
+const { t } = useTranslation(['channels']);
 const [loading, setLoading] = useState(false);
 const onClickHandler = async (): Promise<void> => {
 try {
 setLoading(true);
-const response = await deleteAlert({
+const response = await deleteChannel({
 id,
 });
 if (response.statusCode === 200) {
 notifications.success({
 message: 'Success',
-description: 'Channel Deleted Successfully',
+description: t('channel_delete_success'),
 });
 setChannels((preChannels) => preChannels.filter((e) => e.id !== id));
 } else {
 notifications.error({
 message: 'Error',
-description: response.error || 'Something went wrong',
+description: response.error || t('channel_delete_unexp_error'),
 });
 }
 setLoading(false);
@@ -31,7 +33,9 @@ function Delete({ notifications, setChannels, id }: DeleteProps): JSX.Element {
 notifications.error({
 message: 'Error',
 description:
-error instanceof Error ? error.toString() : 'Something went wrong',
+error instanceof Error
+? error.toString()
+: t('channel_delete_unexp_error'),
 });
 setLoading(false);
 }

View File

@@ -8,16 +8,18 @@ import useComponentPermission from 'hooks/useComponentPermission';
 import useFetch from 'hooks/useFetch';
 import history from 'lib/history';
 import React, { useCallback } from 'react';
+import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
 import AppReducer from 'types/reducer/app';
 import AlertChannelsComponent from './AlertChannels';
-import { Button, ButtonContainer } from './styles';
+import { Button, ButtonContainer, RightActionContainer } from './styles';
 const { Paragraph } = Typography;
 function AlertChannels(): JSX.Element {
+const { t } = useTranslation(['channels']);
 const { role } = useSelector<AppState, AppReducer>((state) => state.app);
 const [addNewChannelPermission] = useComponentPermission(
 ['add_new_channel'],
@@ -34,28 +36,28 @@ function AlertChannels(): JSX.Element {
 }
 if (loading || payload === undefined) {
-return <Spinner tip="Loading Channels.." height="90vh" />;
+return <Spinner tip={t('loading_channels_message')} height="90vh" />;
 }
 return (
 <>
 <ButtonContainer>
 <Paragraph ellipsis type="secondary">
-The latest added channel is used as the default channel for sending alerts
+{t('sending_channels_note')}
 </Paragraph>
-<div>
+<RightActionContainer>
 <TextToolTip
-text="More details on how to setting notification channels"
+text={t('tooltip_notification_channels')}
 url="https://signoz.io/docs/userguide/alerts-management/#setting-notification-channel"
 />
 {addNewChannelPermission && (
 <Button onClick={onToggleHandler} icon={<PlusOutlined />}>
-New Alert Channel
+{t('button_new_channel')}
 </Button>
 )}
-</div>
+</RightActionContainer>
 </ButtonContainer>
 <AlertChannelsComponent allChannels={payload} />

View File

@@ -1,6 +1,13 @@
 import { Button as ButtonComponent } from 'antd';
 import styled from 'styled-components';
+export const RightActionContainer = styled.div`
+&&& {
+display: flex;
+align-items: center;
+}
+`;
 export const ButtonContainer = styled.div`
 &&& {
 display: flex;

View File

@@ -4,9 +4,12 @@ import React from 'react';
 import { useTranslation } from 'react-i18next';
 import { AlertDef, Labels } from 'types/api/alerts/def';
+import ChannelSelect from './ChannelSelect';
 import LabelSelect from './labels';
 import {
+ChannelSelectTip,
 FormContainer,
+FormItemMedium,
 InputSmall,
 SeveritySelect,
 StepHeading,
@@ -80,7 +83,7 @@ function BasicInfo({ alertDef, setAlertDef }: BasicInfoProps): JSX.Element {
 }}
 />
 </FormItem>
-<FormItem label={t('field_labels')}>
+<FormItemMedium label={t('field_labels')}>
 <LabelSelect
 onSetLabels={(l: Labels): void => {
 setAlertDef({
@@ -92,7 +95,19 @@ function BasicInfo({ alertDef, setAlertDef }: BasicInfoProps): JSX.Element {
 }}
 initialValues={alertDef.labels}
 />
-</FormItem>
+</FormItemMedium>
+<FormItemMedium label="Notification Channels">
+<ChannelSelect
+currentValue={alertDef.preferredChannels}
+onSelectChannels={(s: string[]): void => {
+setAlertDef({
+...alertDef,
+preferredChannels: s,
+});
+}}
+/>
+<ChannelSelectTip> {t('channel_select_tooltip')}</ChannelSelectTip>
+</FormItemMedium>
 </FormContainer>
 </>
 );

View File

@@ -0,0 +1,70 @@
import { notification, Select } from 'antd';
import getChannels from 'api/channels/getAll';
import useFetch from 'hooks/useFetch';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { StyledSelect } from './styles';
export interface ChannelSelectProps {
currentValue?: string[];
onSelectChannels: (s: string[]) => void;
}
function ChannelSelect({
currentValue,
onSelectChannels,
}: ChannelSelectProps): JSX.Element | null {
// init namespace for translations
const { t } = useTranslation('alerts');
const { loading, payload, error, errorMessage } = useFetch(getChannels);
const handleChange = (value: string[]): void => {
onSelectChannels(value);
};
if (error && errorMessage !== '') {
notification.error({
message: 'Error',
description: errorMessage,
});
}
const renderOptions = (): React.ReactNode[] => {
const children: React.ReactNode[] = [];
if (loading || payload === undefined || payload.length === 0) {
return children;
}
payload.forEach((o) => {
children.push(
<Select.Option key={o.id} value={o.name}>
{o.name}
</Select.Option>,
);
});
return children;
};
return (
<StyledSelect
status={error ? 'error' : ''}
mode="multiple"
style={{ width: '100%' }}
placeholder={t('placeholder_channel_select')}
value={currentValue}
onChange={(value): void => {
handleChange(value as string[]);
}}
optionLabelProp="label"
>
{renderOptions()}
</StyledSelect>
);
}
ChannelSelect.defaultProps = {
currentValue: [],
};
export default ChannelSelect;
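
A minimal usage sketch for the new component, mirroring the props BasicInfo passes in earlier in this diff; the parent component, its local state, and the relative import path are illustrative.

import React, { useState } from 'react';

import ChannelSelect from './ChannelSelect';

// Hypothetical parent: keeps the selected channel names in local state.
function PreferredChannels(): JSX.Element {
  const [channels, setChannels] = useState<string[]>([]);
  return (
    <ChannelSelect currentValue={channels} onSelectChannels={setChannels} />
  );
}

export default PreferredChannels;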

View File

@@ -0,0 +1,6 @@
import { Select } from 'antd';
import styled from 'styled-components';
export const StyledSelect = styled(Select)`
border-radius: 4px;
`;

View File

@@ -21,7 +21,7 @@ export interface ChartPreviewProps {
 selectedTime?: timePreferenceType;
 selectedInterval?: Time;
 headline?: JSX.Element;
-threshold?: number;
+threshold?: number | undefined;
 }
 function ChartPreview({
@@ -35,7 +35,7 @@
 }: ChartPreviewProps): JSX.Element | null {
 const { t } = useTranslation('alerts');
 const staticLine: StaticLineProps | undefined =
-threshold && threshold > 0
+threshold !== undefined
 ? {
 yMin: threshold,
 yMax: threshold,
@@ -117,7 +117,7 @@ ChartPreview.defaultProps = {
 selectedTime: 'GLOBAL_TIME',
 selectedInterval: '5min',
 headline: undefined,
-threshold: 0,
+threshold: undefined,
 };
 export default ChartPreview;

View File

@@ -156,7 +156,7 @@ function RuleOptions({
 ...alertDef,
 condition: {
 ...alertDef.condition,
-target: (value as number) || undefined,
+target: value as number,
 },
 });
 }}

View File

@@ -1,6 +1,7 @@
 import { ExclamationCircleOutlined, SaveOutlined } from '@ant-design/icons';
 import { FormInstance, Modal, notification, Typography } from 'antd';
 import saveAlertApi from 'api/alerts/save';
+import testAlertApi from 'api/alerts/testAlert';
 import ROUTES from 'constants/routes';
 import QueryTypeTag from 'container/NewWidget/LeftContainer/QueryTypeTag';
 import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
@@ -143,10 +144,74 @@ function FormAlertRules({
 });
 }
 };
+const validatePromParams = useCallback((): boolean => {
+let retval = true;
+if (queryCategory !== EQueryType.PROM) return retval;
+if (!promQueries || Object.keys(promQueries).length === 0) {
+notification.error({
+message: 'Error',
+description: t('promql_required'),
+});
+return false;
+}
+Object.keys(promQueries).forEach((key) => {
+if (promQueries[key].query === '') {
+notification.error({
+message: 'Error',
+description: t('promql_required'),
+});
+retval = false;
+}
+});
+return retval;
+}, [t, promQueries, queryCategory]);
+const validateQBParams = useCallback((): boolean => {
+let retval = true;
+if (queryCategory !== EQueryType.QUERY_BUILDER) return true;
+if (!metricQueries || Object.keys(metricQueries).length === 0) {
+notification.error({
+message: 'Error',
+description: t('condition_required'),
+});
+return false;
+}
+if (!alertDef.condition?.target) {
+notification.error({
+message: 'Error',
+description: t('target_missing'),
+});
+return false;
+}
+Object.keys(metricQueries).forEach((key) => {
+if (metricQueries[key].metricName === '') {
+notification.error({
+message: 'Error',
+description: t('metricname_missing', { where: metricQueries[key].name }),
+});
+retval = false;
+}
+});
+Object.keys(formulaQueries).forEach((key) => {
+if (formulaQueries[key].expression === '') {
+notification.error({
+message: 'Error',
+description: t('expression_missing', formulaQueries[key].name),
+});
+retval = false;
+}
+});
+return retval;
+}, [t, alertDef, queryCategory, metricQueries, formulaQueries]);
 const isFormValid = useCallback((): boolean => {
-let retval = true;
 if (!alertDef.alert || alertDef.alert === '') {
 notification.error({
 message: 'Error',
@@ -155,57 +220,14 @@
 return false;
 }
-if (
+if (!validatePromParams()) {
-queryCategory === EQueryType.PROM &&
-(!promQueries || Object.keys(promQueries).length === 0)
-) {
-notification.error({
-message: 'Error',
-description: t('promql_required'),
-});
 return false;
 }
-if (
+return validateQBParams();
-(queryCategory === EQueryType.QUERY_BUILDER && !metricQueries) ||
+}, [t, validateQBParams, alertDef, validatePromParams]);
-Object.keys(metricQueries).length === 0
-) {
-notification.error({
-message: 'Error',
-description: t('condition_required'),
-});
-return false;
-}
-if (queryCategory === EQueryType.QUERY_BUILDER) {
-Object.keys(metricQueries).forEach((key) => {
-if (metricQueries[key].metricName === '') {
-retval = false;
-notification.error({
-message: 'Error',
-description: t('metricname_missing', { where: metricQueries[key].name }),
-});
-}
-});
-Object.keys(formulaQueries).forEach((key) => {
-if (formulaQueries[key].expression === '') {
-retval = false;
-notification.error({
-message: 'Error',
-description: t('expression_missing', formulaQueries[key].name),
-});
-}
-});
-}
-return retval;
-}, [t, alertDef, queryCategory, metricQueries, formulaQueries, promQueries]);
-const saveRule = useCallback(async () => {
-if (!isFormValid()) {
-return;
-}
+const preparePostData = (): AlertDef => {
 const postableAlert: AlertDef = {
 ...alertDef,
 source: window?.location.toString(),
@@ -220,6 +242,22 @@
 },
 },
 };
+return postableAlert;
+};
+const memoizedPreparePostData = useCallback(preparePostData, [
+queryCategory,
+alertDef,
+metricQueries,
+formulaQueries,
+promQueries,
+]);
+const saveRule = useCallback(async () => {
+if (!isFormValid()) {
+return;
+}
+const postableAlert = memoizedPreparePostData();
 setLoading(true);
 try {
@@ -250,24 +288,13 @@
 });
 }
 } catch (e) {
-console.log('save alert api failed:', e);
 notification.error({
 message: 'Error',
 description: t('unexpected_error'),
 });
 }
 setLoading(false);
-}, [
+}, [t, isFormValid, ruleId, ruleCache, memoizedPreparePostData]);
-t,
-isFormValid,
-queryCategory,
-ruleId,
-alertDef,
-metricQueries,
-formulaQueries,
-promQueries,
-ruleCache,
-]);
 const onSaveHandler = useCallback(async () => {
 const content = (
@@ -288,6 +315,44 @@
 });
 }, [t, saveRule, queryCategory]);
+const onTestRuleHandler = useCallback(async () => {
+if (!isFormValid()) {
+return;
+}
+const postableAlert = memoizedPreparePostData();
+setLoading(true);
+try {
+const response = await testAlertApi({ data: postableAlert });
+if (response.statusCode === 200) {
+const { payload } = response;
+if (payload?.alertCount === 0) {
+notification.error({
+message: 'Error',
+description: t('no_alerts_found'),
+});
+} else {
+notification.success({
+message: 'Success',
+description: t('rule_test_fired'),
+});
+}
+} else {
+notification.error({
+message: 'Error',
+description: response.error || t('unexpected_error'),
+});
+}
+} catch (e) {
+notification.error({
+message: 'Error',
+description: t('unexpected_error'),
+});
+}
+setLoading(false);
+}, [t, isFormValid, memoizedPreparePostData]);
 const renderBasicInfo = (): JSX.Element => (
 <BasicInfo alertDef={alertDef} setAlertDef={setAlertDef} />
 );
@@ -354,6 +419,14 @@ function FormAlertRules({
 >
 {ruleId > 0 ? t('button_savechanges') : t('button_createrule')}
 </ActionButton>
+<ActionButton
+loading={loading || false}
+type="default"
+onClick={onTestRuleHandler}
+>
+{' '}
+{t('button_testrule')}
+</ActionButton>
 <ActionButton
 disabled={loading || false}
 type="default"

View File

@@ -8,8 +8,7 @@ interface SearchContainerProps {
 }
 export const SearchContainer = styled.div<SearchContainerProps>`
-width: 70%;
+border-radius: 4px;
-border-radisu: 4px;
 background: ${({ isDarkMode }): string => (isDarkMode ? '#000' : '#fff')};
 flex: 1;
 display: flex;

View File

@@ -1,4 +1,15 @@
-import { Button, Card, Col, Form, Input, InputNumber, Row, Select } from 'antd';
+import {
+Button,
+Card,
+Col,
+Form,
+Input,
+InputNumber,
+Row,
+Select,
+Typography,
+} from 'antd';
+import FormItem from 'antd/lib/form/FormItem';
 import TextArea from 'antd/lib/input/TextArea';
 import styled from 'styled-components';
@@ -67,7 +78,7 @@ export const InlineSelect = styled(Select)`
 `;
 export const SeveritySelect = styled(Select)`
-width: 15% !important;
+width: 25% !important;
 `;
 export const InputSmall = styled(Input)`
@@ -99,3 +110,11 @@ export const ThresholdInput = styled(InputNumber)`
 export const TextareaMedium = styled(TextArea)`
 width: 70%;
 `;
+export const FormItemMedium = styled(FormItem)`
+width: 70%;
+`;
+export const ChannelSelectTip = styled(Typography.Text)`
+color: hsla(0, 0%, 100%, 0.3);
+`;

View File

@@ -1,10 +1,11 @@
-import { Button } from 'antd';
 import { NotificationInstance } from 'antd/lib/notification/index';
 import deleteAlerts from 'api/alerts/delete';
 import { State } from 'hooks/useFetch';
 import React, { useState } from 'react';
 import { PayloadProps as DeleteAlertPayloadProps } from 'types/api/alerts/delete';
-import { Alerts } from 'types/api/alerts/getAll';
+import { GettableAlert } from 'types/api/alerts/get';
+import { ColumnButton } from './styles';
 function DeleteAlert({
 id,
@@ -72,20 +73,20 @@ function DeleteAlert({
 };
 return (
-<Button
+<ColumnButton
 disabled={deleteAlertState.loading || false}
 loading={deleteAlertState.loading || false}
 onClick={(): Promise<void> => onDeleteHandler(id)}
 type="link"
 >
 Delete
-</Button>
+</ColumnButton>
 );
 }
 interface DeleteAlertProps {
-id: Alerts['id'];
+id: GettableAlert['id'];
-setData: React.Dispatch<React.SetStateAction<Alerts[]>>;
+setData: React.Dispatch<React.SetStateAction<GettableAlert[]>>;
 notifications: NotificationInstance;
 }

View File

@@ -1,6 +1,6 @@
 /* eslint-disable react/display-name */
 import { PlusOutlined } from '@ant-design/icons';
-import { notification, Tag, Typography } from 'antd';
+import { notification, Typography } from 'antd';
 import Table, { ColumnsType } from 'antd/lib/table';
 import TextToolTip from 'components/TextToolTip';
 import ROUTES from 'constants/routes';
@@ -13,15 +13,16 @@ import { UseQueryResult } from 'react-query';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
 import { ErrorResponse, SuccessResponse } from 'types/api';
-import { Alerts } from 'types/api/alerts/getAll';
+import { GettableAlert } from 'types/api/alerts/get';
 import AppReducer from 'types/reducer/app';
 import DeleteAlert from './DeleteAlert';
-import { Button, ButtonContainer } from './styles';
+import { Button, ButtonContainer, ColumnButton, StyledTag } from './styles';
 import Status from './TableComponents/Status';
+import ToggleAlertState from './ToggleAlertState';
 function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
-const [data, setData] = useState<Alerts[]>(allAlertRules || []);
+const [data, setData] = useState<GettableAlert[]>(allAlertRules || []);
 const { t } = useTranslation('common');
 const { role } = useSelector<AppState, AppReducer>((state) => state.app);
 const [addNewAlert, action] = useComponentPermission(
@@ -53,22 +54,27 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 history.push(`${ROUTES.EDIT_ALERTS}?ruleId=${id}`);
 };
-const columns: ColumnsType<Alerts> = [
+const columns: ColumnsType<GettableAlert> = [
 {
 title: 'Status',
 dataIndex: 'state',
 key: 'state',
 sorter: (a, b): number =>
-b.labels.severity.length - a.labels.severity.length,
+(b.state ? b.state.charCodeAt(0) : 1000) -
+(a.state ? a.state.charCodeAt(0) : 1000),
 render: (value): JSX.Element => <Status status={value} />,
 },
 {
 title: 'Alert Name',
 dataIndex: 'alert',
 key: 'name',
-sorter: (a, b): number => a.name.charCodeAt(0) - b.name.charCodeAt(0),
+sorter: (a, b): number =>
+(a.alert ? a.alert.charCodeAt(0) : 1000) -
+(b.alert ? b.alert.charCodeAt(0) : 1000),
 render: (value, record): JSX.Element => (
-<Typography.Link onClick={(): void => onEditHandler(record.id.toString())}>
+<Typography.Link
+onClick={(): void => onEditHandler(record.id ? record.id.toString() : '')}
+>
 {value}
 </Typography.Link>
 ),
@@ -78,7 +84,8 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 dataIndex: 'labels',
 key: 'severity',
 sorter: (a, b): number =>
-a.labels.severity.length - b.labels.severity.length,
+(a.labels ? a.labels.severity.length : 0) -
+(b.labels ? b.labels.severity.length : 0),
 render: (value): JSX.Element => {
 const objectKeys = Object.keys(value);
 const withSeverityKey = objectKeys.find((e) => e === 'severity') || '';
@@ -92,6 +99,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 dataIndex: 'labels',
 key: 'tags',
 align: 'center',
+width: 350,
 render: (value): JSX.Element => {
 const objectKeys = Object.keys(value);
 const withOutSeverityKeys = objectKeys.filter((e) => e !== 'severity');
@@ -104,9 +112,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 <>
 {withOutSeverityKeys.map((e) => {
 return (
-<Tag key={e} color="magenta">
+<StyledTag key={e} color="magenta">
 {e}: {value[e]}
-</Tag>
+</StyledTag>
 );
 })}
 </>
@@ -120,14 +128,19 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 title: 'Action',
 dataIndex: 'id',
 key: 'action',
-render: (id: Alerts['id']): JSX.Element => {
+render: (id: GettableAlert['id'], record): JSX.Element => {
 return (
 <>
-<DeleteAlert notifications={notifications} setData={setData} id={id} />
+<ToggleAlertState disabled={record.disabled} setData={setData} id={id} />
-<Button onClick={(): void => onEditHandler(id.toString())} type="link">
+<ColumnButton
+onClick={(): void => onEditHandler(id.toString())}
+type="link"
+>
 Edit
-</Button>
+</ColumnButton>
+<DeleteAlert notifications={notifications} setData={setData} id={id} />
 </>
 );
 },
@@ -159,8 +172,10 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 }
 interface ListAlertProps {
-allAlertRules: Alerts[];
+allAlertRules: GettableAlert[];
-refetch: UseQueryResult<ErrorResponse | SuccessResponse<Alerts[]>>['refetch'];
+refetch: UseQueryResult<
+ErrorResponse | SuccessResponse<GettableAlert[]>
+>['refetch'];
 }
 export default ListAlert;

View File

@@ -1,6 +1,6 @@
 import { Tag } from 'antd';
 import React from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { GettableAlert } from 'types/api/alerts/get';
 function Status({ status }: StatusProps): JSX.Element {
 switch (status) {
@@ -16,14 +16,18 @@ function Status({ status }: StatusProps): JSX.Element {
 return <Tag color="red">Firing</Tag>;
 }
+case 'disabled': {
+return <Tag>Disabled</Tag>;
+}
 default: {
-return <Tag color="default">Unknown Status</Tag>;
+return <Tag color="default">Unknown</Tag>;
 }
 }
 }
 interface StatusProps {
-status: Alerts['state'];
+status: GettableAlert['state'];
 }
 export default Status;

View File

@@ -0,0 +1,108 @@
import { notification } from 'antd';
import patchAlert from 'api/alerts/patch';
import { State } from 'hooks/useFetch';
import React, { useState } from 'react';
import { GettableAlert } from 'types/api/alerts/get';
import { PayloadProps as PatchPayloadProps } from 'types/api/alerts/patch';
import { ColumnButton } from './styles';
function ToggleAlertState({
id,
disabled,
setData,
}: ToggleAlertStateProps): JSX.Element {
const [apiStatus, setAPIStatus] = useState<State<PatchPayloadProps>>({
error: false,
errorMessage: '',
loading: false,
success: false,
payload: undefined,
});
const defaultErrorMessage = 'Something went wrong';
const onToggleHandler = async (
id: number,
disabled: boolean,
): Promise<void> => {
try {
setAPIStatus((state) => ({
...state,
loading: true,
}));
const response = await patchAlert({
id,
data: {
disabled,
},
});
if (response.statusCode === 200) {
setData((state) => {
return state.map((alert) => {
if (alert.id === id) {
return {
...alert,
disabled: response.payload.disabled,
state: response.payload.state,
};
}
return alert;
});
});
setAPIStatus((state) => ({
...state,
loading: false,
payload: response.payload,
}));
notification.success({
message: 'Success',
});
} else {
setAPIStatus((state) => ({
...state,
loading: false,
error: true,
errorMessage: response.error || defaultErrorMessage,
}));
notification.error({
message: response.error || defaultErrorMessage,
});
}
} catch (error) {
setAPIStatus((state) => ({
...state,
loading: false,
error: true,
errorMessage: defaultErrorMessage,
}));
notification.error({
message: defaultErrorMessage,
});
}
};
return (
<ColumnButton
disabled={apiStatus.loading || false}
loading={apiStatus.loading || false}
onClick={(): Promise<void> => onToggleHandler(id, !disabled)}
type="link"
>
{disabled ? 'Enable' : 'Disable'}
</ColumnButton>
);
}
interface ToggleAlertStateProps {
id: GettableAlert['id'];
disabled: boolean;
setData: React.Dispatch<React.SetStateAction<GettableAlert[]>>;
}
export default ToggleAlertState;

View File

@@ -1,4 +1,4 @@
-import { Button as ButtonComponent } from 'antd';
+import { Button as ButtonComponent, Tag } from 'antd';
 import styled from 'styled-components';
 export const ButtonContainer = styled.div`
@@ -12,6 +12,20 @@ export const ButtonContainer = styled.div`
 export const Button = styled(ButtonComponent)`
 &&& {
-margin-left: 1rem;
+margin-left: 1em;
+}
+`;
+export const ColumnButton = styled(ButtonComponent)`
+&&& {
+padding-left: 0;
+padding-right: 0;
+margin-right: 1.5em;
+}
+`;
+export const StyledTag = styled(Tag)`
+&&& {
+white-space: normal;
 }
 `;

View File

@@ -15,7 +15,7 @@ import { PromQLWidgets } from 'types/api/dashboard/getAll';
 import MetricReducer from 'types/reducer/metrics';
 import { Card, Col, GraphContainer, GraphTitle, Row } from '../styles';
-import TopEndpointsTable from '../TopEndpointsTable';
+import TopOperationsTable from '../TopOperationsTable';
 import { Button } from './styles';
 function Application({ getWidget }: DashboardProps): JSX.Element {
@@ -23,11 +23,13 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 const selectedTimeStamp = useRef(0);
 const {
-topEndPoints,
+topOperations,
 serviceOverview,
 resourceAttributePromQLQuery,
 resourceAttributeQueries,
+topLevelOperations,
 } = useSelector<AppState, MetricReducer>((state) => state.metrics);
+const operationsRegex = topLevelOperations.join('|');
 const selectedTraceTags: string = JSON.stringify(
 convertRawQueriesToTraceSelectedTags(resourceAttributeQueries, 'array') || [],
@@ -107,7 +109,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 <Button
 type="default"
 size="small"
-id="Application_button"
+id="Service_button"
 onClick={(): void => {
 onTracePopupClick(selectedTimeStamp.current);
 }}
@@ -115,13 +117,13 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 View Traces
 </Button>
 <Card>
-<GraphTitle>Application latency</GraphTitle>
+<GraphTitle>Latency</GraphTitle>
 <GraphContainer>
 <Graph
 onClickHandler={(ChartEvent, activeElements, chart, data): void => {
-onClickHandler(ChartEvent, activeElements, chart, data, 'Application');
+onClickHandler(ChartEvent, activeElements, chart, data, 'Service');
 }}
-name="application_latency"
+name="service_latency"
 type="line"
 data={{
 datasets: [
@@ -175,7 +177,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 <Button
 type="default"
 size="small"
-id="Request_button"
+id="Rate_button"
 onClick={(): void => {
 onTracePopupClick(selectedTimeStamp.current);
 }}
@@ -183,21 +185,21 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 View Traces
 </Button>
 <Card>
-<GraphTitle>Requests</GraphTitle>
+<GraphTitle>Rate (ops/s)</GraphTitle>
 <GraphContainer>
 <FullView
-name="request_per_sec"
+name="operations_per_sec"
 fullViewOptions={false}
 onClickHandler={(event, element, chart, data): void => {
-onClickHandler(event, element, chart, data, 'Request');
+onClickHandler(event, element, chart, data, 'Rate');
 }}
 widget={getWidget([
 {
-query: `sum(rate(signoz_latency_count{service_name="${servicename}", span_kind="SPAN_KIND_SERVER"${resourceAttributePromQLQuery}}[5m]))`,
+query: `sum(rate(signoz_latency_count{service_name="${servicename}", operation=~"${operationsRegex}"${resourceAttributePromQLQuery}}[5m]))`,
-legend: 'Requests',
+legend: 'Operations',
 },
 ])}
-yAxisUnit="reqps"
+yAxisUnit="ops"
 />
 </GraphContainer>
 </Card>
@@ -227,7 +229,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 }}
 widget={getWidget([
 {
-query: `max(sum(rate(signoz_calls_total{service_name="${servicename}", span_kind="SPAN_KIND_SERVER", status_code="STATUS_CODE_ERROR"${resourceAttributePromQLQuery}}[5m]) OR rate(signoz_calls_total{service_name="${servicename}", span_kind="SPAN_KIND_SERVER", http_status_code=~"5.."${resourceAttributePromQLQuery}}[5m]))*100/sum(rate(signoz_calls_total{service_name="${servicename}", span_kind="SPAN_KIND_SERVER"${resourceAttributePromQLQuery}}[5m]))) < 1000 OR vector(0)`,
+query: `max(sum(rate(signoz_calls_total{service_name="${servicename}", operation=~"${operationsRegex}", status_code="STATUS_CODE_ERROR"${resourceAttributePromQLQuery}}[5m]) OR rate(signoz_calls_total{service_name="${servicename}", operation=~"${operationsRegex}", http_status_code=~"5.."${resourceAttributePromQLQuery}}[5m]))*100/sum(rate(signoz_calls_total{service_name="${servicename}", operation=~"${operationsRegex}"${resourceAttributePromQLQuery}}[5m]))) < 1000 OR vector(0)`,
 legend: 'Error Percentage',
 },
 ])}
@@ -239,7 +241,7 @@ function Application({ getWidget }: DashboardProps): JSX.Element {
 <Col span={12}>
 <Card>
-<TopEndpointsTable data={topEndPoints} />
+<TopOperationsTable data={topOperations} />
 </Card>
 </Col>
 </Row>
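
For context, a small illustration (with invented operation names, and the resource-attribute clause omitted) of what the operationsRegex interpolation above produces inside the PromQL selector:

// Illustrative only: mirrors the join('|') and template usage shown above.
const topLevelOperations = ['HTTP GET /dispatch', 'HTTP GET /config'];
const operationsRegex = topLevelOperations.join('|');
const servicename = 'frontend';
const query = `sum(rate(signoz_latency_count{service_name="${servicename}", operation=~"${operationsRegex}"}[5m]))`;
// => sum(rate(signoz_latency_count{service_name="frontend", operation=~"HTTP GET /dispatch|HTTP GET /config"}[5m]))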

View File

@@ -11,7 +11,7 @@ import { AppState } from 'store/reducers';
 import { GlobalReducer } from 'types/reducer/globalTime';
 import MetricReducer from 'types/reducer/metrics';
-function TopEndpointsTable(props: TopEndpointsTableProps): JSX.Element {
+function TopOperationsTable(props: TopOperationsTableProps): JSX.Element {
 const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
 (state) => state.globalTime,
 );
@@ -85,7 +85,7 @@ function TopEndpointsTable(props: TopEndpointsTableProps): JSX.Element {
 title: 'Number of Calls',
 dataIndex: 'numCalls',
 key: 'numCalls',
-sorter: (a: TopEndpointListItem, b: TopEndpointListItem): number =>
+sorter: (a: TopOperationListItem, b: TopOperationListItem): number =>
 a.numCalls - b.numCalls,
 },
 ];
@@ -94,7 +94,7 @@ function TopEndpointsTable(props: TopEndpointsTableProps): JSX.Element {
 <Table
 showHeader
 title={(): string => {
-return 'Top Endpoints';
+return 'Key Operations';
 }}
 tableLayout="fixed"
 dataSource={data}
@@ -104,7 +104,7 @@ function TopEndpointsTable(props: TopEndpointsTableProps): JSX.Element {
 );
 }
-interface TopEndpointListItem {
+interface TopOperationListItem {
 p50: number;
 p95: number;
 p99: number;
@@ -112,10 +112,10 @@ interface TopEndpointListItem {
 name: string;
 }
-type DataProps = TopEndpointListItem;
+type DataProps = TopOperationListItem;
-interface TopEndpointsTableProps {
+interface TopOperationsTableProps {
-data: TopEndpointListItem[];
+data: TopOperationListItem[];
 }
-export default TopEndpointsTable;
+export default TopOperationsTable;

View File

@@ -56,14 +56,14 @@ function Metrics(): JSX.Element {
 render: (value: number): string => (value / 1000000).toFixed(2),
 },
 {
-title: 'Error Rate (% of requests)',
+title: 'Error Rate (% of total)',
 dataIndex: 'errorRate',
 key: 'errorRate',
 sorter: (a: DataProps, b: DataProps): number => a.errorRate - b.errorRate,
 render: (value: number): string => value.toFixed(2),
 },
 {
-title: 'Requests Per Second',
+title: 'Operations Per Second',
 dataIndex: 'callRate',
 key: 'callRate',
 sorter: (a: DataProps, b: DataProps): number => a.callRate - b.callRate,

View File

@@ -42,8 +42,9 @@ export interface Option {
 }
 export const ServiceMapOptions: Option[] = [
-{ value: '1min', label: 'Last 1 min' },
 { value: '5min', label: 'Last 5 min' },
+{ value: '15min', label: 'Last 15 min' },
+{ value: '30min', label: 'Last 30 min' },
 ];
 export const getDefaultOption = (route: string): Time => {
export const getDefaultOption = (route: string): Time => { export const getDefaultOption = (route: string): Time => {

View File

@@ -2,7 +2,7 @@
 import type { SelectProps } from 'antd';
 import { Tag } from 'antd';
 import React, { useCallback, useMemo } from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import { Container, Select } from './styles';

View File

@@ -2,7 +2,7 @@ import { Tag, Typography } from 'antd';
 import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
 import getFormattedDate from 'lib/getFormatedDate';
 import React from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import Status from '../TableComponents/AlertStatus';
 import { TableCell, TableRow } from './styles';

View File

@@ -1,7 +1,7 @@
 import { MinusSquareOutlined, PlusSquareOutlined } from '@ant-design/icons';
 import { Tag } from 'antd';
 import React, { useState } from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import ExapandableRow from './ExapandableRow';
 import { IconContainer, StatusContainer, TableCell, TableRow } from './styles';

View File

@@ -1,6 +1,6 @@
 import groupBy from 'lodash-es/groupBy';
 import React, { useMemo } from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import { Value } from '../Filter';
 import { FilterAlerts } from '../utils';

View File

@@ -5,7 +5,7 @@ import AlertStatus from 'container/TriggeredAlerts/TableComponents/AlertStatus';
 import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
 import getFormattedDate from 'lib/getFormatedDate';
 import React from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import { Value } from './Filter';
 import { FilterAlerts } from './utils';

View File

@@ -1,7 +1,7 @@
 import getTriggeredApi from 'api/alerts/getTriggered';
 import useInterval from 'hooks/useInterval';
 import React, { useState } from 'react';
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import Filter, { Value } from './Filter';
 import FilteredTable from './FilteredTable';

View File

@@ -1,4 +1,4 @@
-import { Alerts } from 'types/api/alerts/getAll';
+import { Alerts } from 'types/api/alerts/getTriggered';
 import { Value } from './Filter';

View File

@@ -45,6 +45,9 @@ interface graphLink {
 source: string;
 target: string;
 value: number;
+callRate: number;
+errorRate: number;
+p99: number;
 }
 export interface graphDataType {
 nodes: graphNode[];
@@ -96,16 +99,16 @@ function ServiceMap(props: ServiceMapProps): JSX.Element {
 const graphData = { nodes, links };
 return (
 <Container>
-<SelectService
+{/* <SelectService
-services={serviceMap.services}
+services={serviceMap.items}
 zoomToService={zoomToService}
 zoomToDefault={zoomToDefault}
-/>
+/> */}
 <ForceGraph2D
 ref={fgRef}
 cooldownTicks={100}
 graphData={graphData}
-nodeLabel={getTooltip}
+linkLabel={getTooltip}
 linkAutoColorBy={(d) => d.target}
 linkDirectionalParticles="value"
 linkDirectionalParticleSpeed={(d) => d.value}
@@ -124,7 +127,7 @@ function ServiceMap(props: ServiceMapProps): JSX.Element {
 ctx.fillStyle = isDarkMode ? '#ffffff' : '#000000';
 ctx.fillText(label, node.x, node.y);
 }}
-onNodeClick={(node) => {
+onLinkHover={(node) => {
 const tooltip = document.querySelector('.graph-tooltip');
 if (tooltip && node) {
 tooltip.innerHTML = getTooltip(node);

View File

@@ -1,12 +1,13 @@
 /*eslint-disable*/
 //@ts-nocheck
-import { cloneDeep, find, maxBy, uniq, uniqBy } from 'lodash-es';
+import { cloneDeep, find, maxBy, uniq, uniqBy, groupBy, sumBy } from 'lodash-es';
 import { graphDataType } from './ServiceMap';
 const MIN_WIDTH = 10;
 const MAX_WIDTH = 20;
 const DEFAULT_FONT_SIZE = 6;
 export const getDimensions = (num, highest) => {
 const percentage = (num / highest) * 100;
 const width = (percentage * (MAX_WIDTH - MIN_WIDTH)) / 100 + MIN_WIDTH;
@@ -18,19 +19,30 @@ export const getDimensions = (num, highest) => {
 };
 export const getGraphData = (serviceMap, isDarkMode): graphDataType => {
-const { items, services } = serviceMap;
+const { items } = serviceMap;
+const services = Object.values(groupBy(items, 'child')).map((e) => {
+return {
+serviceName: e[0].child,
+errorRate: sumBy(e, 'errorRate'),
+callRate: sumBy(e, 'callRate'),
+}
+});
 const highestCallCount = maxBy(items, (e) => e?.callCount)?.callCount;
 const highestCallRate = maxBy(services, (e) => e?.callRate)?.callRate;
 const divNum = Number(
 String(1).padEnd(highestCallCount.toString().length, '0'),
 );
 const links = cloneDeep(items).map((node) => {
-const { parent, child, callCount } = node;
+const { parent, child, callCount, callRate, errorRate, p99 } = node;
 return {
 source: parent,
 target: child,
 value: (100 - callCount / divNum) * 0.03,
+callRate,
+errorRate,
+p99,
 };
 });
 const uniqParent = uniqBy(cloneDeep(items), 'parent').map((e) => e.parent);
@@ -47,15 +59,10 @@ export const getGraphData = (serviceMap, isDarkMode): graphDataType => {
 width: MIN_WIDTH,
 color,
 nodeVal: MIN_WIDTH,
-callRate: 0,
-errorRate: 0,
-p99: 0,
 };
 }
 if (service.errorRate > 0) {
 color = isDarkMode ? '#DB836E' : '#F98989';
-} else if (service.fourXXRate > 0) {
-color = isDarkMode ? '#C79931' : '#F9DA7B';
 }
 const { fontSize, width } = getDimensions(service.callRate, highestCallRate);
 return {
@@ -65,9 +72,6 @@ export const getGraphData = (serviceMap, isDarkMode): graphDataType => {
 width,
 color,
 nodeVal: width,
-callRate: service.callRate.toFixed(2),
-errorRate: service.errorRate,
-p99: service.p99,
 };
 });
 return {
@@ -90,25 +94,31 @@ export const getZoomPx = (): number => {
 return 190;
 };
-export const getTooltip = (node: {
+const getRound2DigitsAfterDecimal = (num: number) => {
+if (num === 0) {
+return 0;
+}
+return num.toFixed(20).match(/^-?\d*\.?0*\d{0,2}/)[0];
+}
+export const getTooltip = (link: {
 p99: number;
 errorRate: number;
 callRate: number;
 id: string;
 }) => {
 return `<div style="color:#333333;padding:12px;background: white;border-radius: 2px;">
-<div style="font-weight:bold; margin-bottom:16px;">${node.id}</div>
 <div class="keyval">
 <div class="key">P99 latency:</div>
-<div class="val">${node.p99 / 1000000}ms</div>
+<div class="val">${getRound2DigitsAfterDecimal(link.p99/ 1000000)}ms</div>
 </div>
 <div class="keyval">
 <div class="key">Request:</div>
-<div class="val">${node.callRate}/sec</div>
+<div class="val">${getRound2DigitsAfterDecimal(link.callRate)}/sec</div>
 </div>
 <div class="keyval">
 <div class="key">Error Rate:</div>
-<div class="val">${node.errorRate}%</div>
+<div class="val">${getRound2DigitsAfterDecimal(link.errorRate)}%</div>
 </div>
 </div>`;
 };

View File

@ -3,7 +3,8 @@
// import getExternalError from 'api/metrics/getExternalError'; // import getExternalError from 'api/metrics/getExternalError';
// import getExternalService from 'api/metrics/getExternalService'; // import getExternalService from 'api/metrics/getExternalService';
import getServiceOverview from 'api/metrics/getServiceOverview'; import getServiceOverview from 'api/metrics/getServiceOverview';
import getTopEndPoints from 'api/metrics/getTopEndPoints'; import getTopLevelOperations from 'api/metrics/getTopLevelOperations';
import getTopOperations from 'api/metrics/getTopOperations';
import { AxiosError } from 'axios'; import { AxiosError } from 'axios';
import GetMinMax from 'lib/getMinMax'; import GetMinMax from 'lib/getMinMax';
import getStep from 'lib/getStep'; import getStep from 'lib/getStep';
@ -46,7 +47,8 @@ export const GetInitialData = (
// getExternalErrorResponse, // getExternalErrorResponse,
// getExternalServiceResponse, // getExternalServiceResponse,
getServiceOverviewResponse, getServiceOverviewResponse,
getTopEndPointsResponse, getTopOperationsResponse,
getTopLevelOperationsResponse,
] = await Promise.all([ ] = await Promise.all([
// getDBOverView({ // getDBOverView({
// ...props, // ...props,
@ -67,12 +69,15 @@ export const GetInitialData = (
step: getStep({ start: minTime, end: maxTime, inputFormat: 'ns' }), step: getStep({ start: minTime, end: maxTime, inputFormat: 'ns' }),
selectedTags: props.selectedTags, selectedTags: props.selectedTags,
}), }),
getTopEndPoints({ getTopOperations({
end: maxTime, end: maxTime,
service: props.serviceName, service: props.serviceName,
start: minTime, start: minTime,
selectedTags: props.selectedTags, selectedTags: props.selectedTags,
}), }),
getTopLevelOperations({
service: props.serviceName,
}),
]); ]);
if ( if (
@ -81,7 +86,8 @@ export const GetInitialData = (
// getExternalErrorResponse.statusCode === 200 && // getExternalErrorResponse.statusCode === 200 &&
// getExternalServiceResponse.statusCode === 200 && // getExternalServiceResponse.statusCode === 200 &&
getServiceOverviewResponse.statusCode === 200 && getServiceOverviewResponse.statusCode === 200 &&
getTopEndPointsResponse.statusCode === 200 getTopOperationsResponse.statusCode === 200 &&
getTopLevelOperationsResponse.statusCode === 200
) { ) {
dispatch({ dispatch({
type: 'GET_INTIAL_APPLICATION_DATA', type: 'GET_INTIAL_APPLICATION_DATA',
@ -91,7 +97,8 @@ export const GetInitialData = (
// externalError: getExternalErrorResponse.payload, // externalError: getExternalErrorResponse.payload,
// externalService: getExternalServiceResponse.payload, // externalService: getExternalServiceResponse.payload,
serviceOverview: getServiceOverviewResponse.payload, serviceOverview: getServiceOverviewResponse.payload,
topEndPoints: getTopEndPointsResponse.payload, topOperations: getTopOperationsResponse.payload,
topLevelOperations: getTopLevelOperationsResponse.payload,
}, },
}); });
} else { } else {
@ -99,8 +106,9 @@ export const GetInitialData = (
type: 'GET_INITIAL_APPLICATION_ERROR', type: 'GET_INITIAL_APPLICATION_ERROR',
payload: { payload: {
errorMessage: errorMessage:
getTopEndPointsResponse.error || getTopOperationsResponse.error ||
getServiceOverviewResponse.error || getServiceOverviewResponse.error ||
getTopLevelOperationsResponse.error ||
// getExternalServiceResponse.error || // getExternalServiceResponse.error ||
// getExternalErrorResponse.error || // getExternalErrorResponse.error ||
// getExternalAverageDurationResponse.error || // getExternalAverageDurationResponse.error ||

View File

@ -6,26 +6,16 @@ import { ActionTypes } from './types';
export interface ServiceMapStore { export interface ServiceMapStore {
items: ServicesMapItem[]; items: ServicesMapItem[];
services: ServicesItem[];
loading: boolean; loading: boolean;
} }
export interface ServicesItem {
serviceName: string;
p99: number;
avgDuration: number;
numCalls: number;
callRate: number;
numErrors: number;
errorRate: number;
num4XX: number;
fourXXRate: number;
}
export interface ServicesMapItem { export interface ServicesMapItem {
parent: string; parent: string;
child: string; child: string;
callCount: number; callCount: number;
callRate: number;
errorRate: number;
p99: number;
} }
export interface ServiceMapItemAction { export interface ServiceMapItemAction {
@ -33,11 +23,6 @@ export interface ServiceMapItemAction {
payload: ServicesMapItem[]; payload: ServicesMapItem[];
} }
export interface ServicesAction {
type: ActionTypes.getServices;
payload: ServicesItem[];
}
export interface ServiceMapLoading { export interface ServiceMapLoading {
type: ActionTypes.serviceMapLoading; type: ActionTypes.serviceMapLoading;
payload: { payload: {
@ -55,19 +40,13 @@ export const getDetailedServiceMapItems = (globalTime: GlobalTime) => {
end, end,
tags: [], tags: [],
}; };
const [serviceMapDependenciesResponse, response] = await Promise.all([ const [dependencyGraphResponse] = await Promise.all([
api.post<ServicesMapItem[]>(`/serviceMapDependencies`, serviceMapPayload), api.post<ServicesMapItem[]>(`/dependency_graph`, serviceMapPayload),
api.post<ServicesItem[]>(`/services`, serviceMapPayload),
]); ]);
dispatch<ServicesAction>({
type: ActionTypes.getServices,
payload: response.data,
});
dispatch<ServiceMapItemAction>({ dispatch<ServiceMapItemAction>({
type: ActionTypes.getServiceMapItems, type: ActionTypes.getServiceMapItems,
payload: serviceMapDependenciesResponse.data, payload: dependencyGraphResponse.data,
}); });
dispatch<ServiceMapLoading>({ dispatch<ServiceMapLoading>({

View File

@ -1,8 +1,4 @@
import { import { ServiceMapItemAction, ServiceMapLoading } from './serviceMap';
ServiceMapItemAction,
ServiceMapLoading,
ServicesAction,
} from './serviceMap';
import { GetUsageDataAction } from './usage'; import { GetUsageDataAction } from './usage';
export enum ActionTypes { export enum ActionTypes {
@ -17,6 +13,5 @@ export enum ActionTypes {
export type Action = export type Action =
| GetUsageDataAction | GetUsageDataAction
| ServicesAction
| ServiceMapItemAction | ServiceMapItemAction
| ServiceMapLoading; | ServiceMapLoading;

View File

@ -21,7 +21,7 @@ const InitialValue: InitialValueTypes = {
services: [], services: [],
dbOverView: [], dbOverView: [],
externalService: [], externalService: [],
topEndPoints: [], topOperations: [],
externalAverageDuration: [], externalAverageDuration: [],
externalError: [], externalError: [],
serviceOverview: [], serviceOverview: [],
@ -29,6 +29,7 @@ const InitialValue: InitialValueTypes = {
resourceAttributePromQLQuery: resourceAttributesQueryToPromQL( resourceAttributePromQLQuery: resourceAttributesQueryToPromQL(
GetResourceAttributeQueriesFromURL() || [], GetResourceAttributeQueriesFromURL() || [],
), ),
topLevelOperations: [],
}; };
const metrics = ( const metrics = (
@ -88,22 +89,24 @@ const metrics = (
case GET_INTIAL_APPLICATION_DATA: { case GET_INTIAL_APPLICATION_DATA: {
const { const {
// dbOverView, // dbOverView,
topEndPoints, topOperations,
serviceOverview, serviceOverview,
// externalService, // externalService,
// externalAverageDuration, // externalAverageDuration,
// externalError, // externalError,
topLevelOperations,
} = action.payload; } = action.payload;
return { return {
...state, ...state,
// dbOverView, // dbOverView,
topEndPoints, topOperations,
serviceOverview, serviceOverview,
// externalService, // externalService,
// externalAverageDuration, // externalAverageDuration,
// externalError, // externalError,
metricsApplicationLoading: false, metricsApplicationLoading: false,
topLevelOperations,
}; };
} }

View File

@ -2,7 +2,6 @@ import { Action, ActionTypes, ServiceMapStore } from 'store/actions';
const initialState: ServiceMapStore = { const initialState: ServiceMapStore = {
items: [], items: [],
services: [],
loading: true, loading: true,
}; };
@ -16,11 +15,6 @@ export const ServiceMapReducer = (
...state, ...state,
items: action.payload, items: action.payload,
}; };
case ActionTypes.getServices:
return {
...state,
services: action.payload,
};
case ActionTypes.serviceMapLoading: { case ActionTypes.serviceMapLoading: {
return { return {
...state, ...state,

View File

@ -5,7 +5,7 @@
import { IResourceAttributeQuery } from 'container/MetricsApplication/ResourceAttributesFilter/types'; import { IResourceAttributeQuery } from 'container/MetricsApplication/ResourceAttributesFilter/types';
import { ServicesList } from 'types/api/metrics/getService'; import { ServicesList } from 'types/api/metrics/getService';
import { ServiceOverview } from 'types/api/metrics/getServiceOverview'; import { ServiceOverview } from 'types/api/metrics/getServiceOverview';
import { TopEndPoints } from 'types/api/metrics/getTopEndPoints'; import { TopOperations } from 'types/api/metrics/getTopOperations';
export const GET_SERVICE_LIST_SUCCESS = 'GET_SERVICE_LIST_SUCCESS'; export const GET_SERVICE_LIST_SUCCESS = 'GET_SERVICE_LIST_SUCCESS';
export const GET_SERVICE_LIST_LOADING_START = 'GET_SERVICE_LIST_LOADING_START'; export const GET_SERVICE_LIST_LOADING_START = 'GET_SERVICE_LIST_LOADING_START';
@ -38,12 +38,13 @@ export interface GetServiceListError {
export interface GetInitialApplicationData { export interface GetInitialApplicationData {
type: typeof GET_INTIAL_APPLICATION_DATA; type: typeof GET_INTIAL_APPLICATION_DATA;
payload: { payload: {
topEndPoints: TopEndPoints[]; topOperations: TopOperations[];
// dbOverView: DBOverView[]; // dbOverView: DBOverView[];
// externalService: ExternalService[]; // externalService: ExternalService[];
// externalAverageDuration: ExternalAverageDuration[]; // externalAverageDuration: ExternalAverageDuration[];
// externalError: ExternalError[]; // externalError: ExternalError[];
serviceOverview: ServiceOverview[]; serviceOverview: ServiceOverview[];
topLevelOperations: string[];
}; };
} }

View File

@ -18,6 +18,8 @@ export interface AlertDef {
annotations?: Labels; annotations?: Labels;
evalWindow?: string; evalWindow?: string;
source?: string; source?: string;
disabled?: boolean;
preferredChannels?: string[];
} }
export interface RuleCondition { export interface RuleCondition {

View File

@ -1,7 +1,7 @@
import { Alerts } from './getAll'; import { AlertDef } from './def';
export interface Props { export interface Props {
id: Alerts['id']; id: AlertDef['id'];
} }
export interface PayloadProps { export interface PayloadProps {

View File

@ -4,6 +4,13 @@ export interface Props {
id: AlertDef['id']; id: AlertDef['id'];
} }
export interface GettableAlert extends AlertDef {
id: number;
alert: string;
state: string;
disabled: boolean;
}
export type PayloadProps = { export type PayloadProps = {
data: AlertDef; data: GettableAlert;
}; };

View File

@ -1,32 +1,3 @@
export interface Alerts { import { GettableAlert } from './get';
labels: AlertsLabel;
annotations: {
description: string;
summary: string;
[key: string]: string;
};
state: string;
name: string;
id: number;
endsAt: string;
fingerprint: string;
generatorURL: string;
receivers: Receivers[];
startsAt: string;
status: {
inhibitedBy: [];
silencedBy: [];
state: string;
};
updatedAt: string;
}
interface Receivers { export type PayloadProps = GettableAlert[];
name: string;
}
interface AlertsLabel {
[key: string]: string;
}
export type PayloadProps = Alerts[];

View File

@ -1,4 +1,4 @@
import { Alerts } from './getAll'; import { AlertDef } from './def';
export interface Props { export interface Props {
silenced: boolean; silenced: boolean;
@ -7,8 +7,8 @@ export interface Props {
[key: string]: string | boolean; [key: string]: string | boolean;
} }
export interface Group { export interface Group {
alerts: Alerts[]; alerts: AlertDef[];
label: Alerts['labels']; label: AlertDef['labels'];
receiver: { receiver: {
[key: string]: string; [key: string]: string;
}; };

View File

@ -1,4 +1,33 @@
import { Alerts } from './getAll'; export interface Alerts {
labels: AlertsLabel;
annotations: {
description: string;
summary: string;
[key: string]: string;
};
state: string;
name: string;
id: number;
endsAt: string;
fingerprint: string;
generatorURL: string;
receivers: Receivers[];
startsAt: string;
status: {
inhibitedBy: [];
silencedBy: [];
state: string;
};
updatedAt: string;
}
interface Receivers {
name: string;
}
interface AlertsLabel {
[key: string]: string;
}
export interface Props { export interface Props {
silenced: boolean; silenced: boolean;

View File

@ -0,0 +1,12 @@
import { GettableAlert } from './get';
export type PayloadProps = GettableAlert;
export interface PatchProps {
disabled?: boolean;
}
export interface Props {
id?: number;
data: PatchProps;
}

View File

@ -0,0 +1,10 @@
import { AlertDef } from 'types/api/alerts/def';
export interface Props {
data: AlertDef;
}
export interface PayloadProps {
alertCount: number;
message: string;
}

View File

@ -0,0 +1,7 @@
export type TopLevelOperations = string[];
export interface Props {
service: string;
}
export type PayloadProps = TopLevelOperations;

View File

@ -1,6 +1,6 @@
import { Tags } from 'types/reducer/trace'; import { Tags } from 'types/reducer/trace';
export interface TopEndPoints { export interface TopOperations {
name: string; name: string;
numCalls: number; numCalls: number;
p50: number; p50: number;
@ -15,4 +15,4 @@ export interface Props {
selectedTags: Tags[]; selectedTags: Tags[];
} }
export type PayloadProps = TopEndPoints[]; export type PayloadProps = TopOperations[];

View File

@ -5,7 +5,7 @@ import { ExternalError } from 'types/api/metrics/getExternalError';
import { ExternalService } from 'types/api/metrics/getExternalService'; import { ExternalService } from 'types/api/metrics/getExternalService';
import { ServicesList } from 'types/api/metrics/getService'; import { ServicesList } from 'types/api/metrics/getService';
import { ServiceOverview } from 'types/api/metrics/getServiceOverview'; import { ServiceOverview } from 'types/api/metrics/getServiceOverview';
import { TopEndPoints } from 'types/api/metrics/getTopEndPoints'; import { TopOperations } from 'types/api/metrics/getTopOperations';
interface MetricReducer { interface MetricReducer {
services: ServicesList[]; services: ServicesList[];
@ -15,12 +15,13 @@ interface MetricReducer {
errorMessage: string; errorMessage: string;
dbOverView: DBOverView[]; dbOverView: DBOverView[];
externalService: ExternalService[]; externalService: ExternalService[];
topEndPoints: TopEndPoints[]; topOperations: TopOperations[];
externalAverageDuration: ExternalAverageDuration[]; externalAverageDuration: ExternalAverageDuration[];
externalError: ExternalError[]; externalError: ExternalError[];
serviceOverview: ServiceOverview[]; serviceOverview: ServiceOverview[];
resourceAttributeQueries: IResourceAttributeQuery[]; resourceAttributeQueries: IResourceAttributeQuery[];
resourceAttributePromQLQuery: string; resourceAttributePromQLQuery: string;
topLevelOperations: string[];
} }
export default MetricReducer; export default MetricReducer;

View File

@ -20,7 +20,7 @@ RUN go mod download -x
# Add the sources and proceed with build # Add the sources and proceed with build
ADD . . ADD . .
RUN go build -a -ldflags "-linkmode external -extldflags '-static' -s -w $LD_FLAGS" -o ./bin/query-service ./main.go RUN go build -tags timetzdata -a -ldflags "-linkmode external -extldflags '-static' -s -w $LD_FLAGS" -o ./bin/query-service ./main.go
RUN chmod +x ./bin/query-service RUN chmod +x ./bin/query-service
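Context for the new `-tags timetzdata` build flag above: it compiles Go's embedded IANA timezone database into the static binary, so `time.LoadLocation` keeps working in container images that ship no `/usr/share/zoneinfo`. A minimal sketch of the equivalent source-level approach (a hypothetical standalone program, not part of this commit):

```go
package main

import (
	"fmt"
	"time"

	// Importing time/tzdata embeds the timezone database, which is what
	// building with -tags timetzdata achieves without touching the source.
	_ "time/tzdata"
)

func main() {
	loc, err := time.LoadLocation("Asia/Kolkata")
	if err != nil {
		panic(err) // would fail in a scratch image without embedded tzdata
	}
	fmt.Println(time.Now().In(loc).Format(time.RFC3339))
}
```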

View File

@ -18,21 +18,24 @@ const (
) )
const ( const (
defaultDatasource string = "tcp://localhost:9000" defaultDatasource string = "tcp://localhost:9000"
defaultTraceDB string = "signoz_traces" defaultTraceDB string = "signoz_traces"
defaultOperationsTable string = "signoz_operations" defaultOperationsTable string = "signoz_operations"
defaultIndexTable string = "signoz_index_v2" defaultIndexTable string = "signoz_index_v2"
defaultErrorTable string = "signoz_error_index_v2" defaultErrorTable string = "signoz_error_index_v2"
defaulDurationTable string = "durationSortMV" defaultDurationTable string = "durationSortMV"
defaultSpansTable string = "signoz_spans" defaultUsageExplorerTable string = "usage_explorer"
defaultLogsDB string = "signoz_logs" defaultSpansTable string = "signoz_spans"
defaultLogsTable string = "logs" defaultDependencyGraphTable string = "dependency_graph_minutes"
defaultLogAttributeKeysTable string = "logs_atrribute_keys" defaultTopLevelOperationsTable string = "top_level_operations"
defaultLogResourceKeysTable string = "logs_resource_keys" defaultLogsDB string = "signoz_logs"
defaultLiveTailRefreshSeconds int = 10 defaultLogsTable string = "logs"
defaultWriteBatchDelay time.Duration = 5 * time.Second defaultLogAttributeKeysTable string = "logs_atrribute_keys"
defaultWriteBatchSize int = 10000 defaultLogResourceKeysTable string = "logs_resource_keys"
defaultEncoding Encoding = EncodingJSON defaultLiveTailRefreshSeconds int = 10
defaultWriteBatchDelay time.Duration = 5 * time.Second
defaultWriteBatchSize int = 10000
defaultEncoding Encoding = EncodingJSON
) )
const ( const (
@ -48,24 +51,27 @@ const (
// NamespaceConfig is Clickhouse's internal configuration data // NamespaceConfig is Clickhouse's internal configuration data
type namespaceConfig struct { type namespaceConfig struct {
namespace string namespace string
Enabled bool Enabled bool
Datasource string Datasource string
TraceDB string TraceDB string
OperationsTable string OperationsTable string
IndexTable string IndexTable string
DurationTable string DurationTable string
SpansTable string UsageExplorerTable string
ErrorTable string SpansTable string
LogsDB string ErrorTable string
LogsTable string DependencyGraphTable string
LogsAttributeKeysTable string TopLevelOperationsTable string
LogsResourceKeysTable string LogsDB string
LiveTailRefreshSeconds int LogsTable string
WriteBatchDelay time.Duration LogsAttributeKeysTable string
WriteBatchSize int LogsResourceKeysTable string
Encoding Encoding LiveTailRefreshSeconds int
Connector Connector WriteBatchDelay time.Duration
WriteBatchSize int
Encoding Encoding
Connector Connector
} }
// Connector defines how to connect to the database // Connector defines how to connect to the database
@ -112,24 +118,27 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
options := &Options{ options := &Options{
primary: &namespaceConfig{ primary: &namespaceConfig{
namespace: primaryNamespace, namespace: primaryNamespace,
Enabled: true, Enabled: true,
Datasource: datasource, Datasource: datasource,
TraceDB: defaultTraceDB, TraceDB: defaultTraceDB,
OperationsTable: defaultOperationsTable, OperationsTable: defaultOperationsTable,
IndexTable: defaultIndexTable, IndexTable: defaultIndexTable,
ErrorTable: defaultErrorTable, ErrorTable: defaultErrorTable,
DurationTable: defaulDurationTable, DurationTable: defaultDurationTable,
SpansTable: defaultSpansTable, UsageExplorerTable: defaultUsageExplorerTable,
LogsDB: defaultLogsDB, SpansTable: defaultSpansTable,
LogsTable: defaultLogsTable, DependencyGraphTable: defaultDependencyGraphTable,
LogsAttributeKeysTable: defaultLogAttributeKeysTable, TopLevelOperationsTable: defaultTopLevelOperationsTable,
LogsResourceKeysTable: defaultLogResourceKeysTable, LogsDB: defaultLogsDB,
LiveTailRefreshSeconds: defaultLiveTailRefreshSeconds, LogsTable: defaultLogsTable,
WriteBatchDelay: defaultWriteBatchDelay, LogsAttributeKeysTable: defaultLogAttributeKeysTable,
WriteBatchSize: defaultWriteBatchSize, LogsResourceKeysTable: defaultLogResourceKeysTable,
Encoding: defaultEncoding, LiveTailRefreshSeconds: defaultLiveTailRefreshSeconds,
Connector: defaultConnector, WriteBatchDelay: defaultWriteBatchDelay,
WriteBatchSize: defaultWriteBatchSize,
Encoding: defaultEncoding,
Connector: defaultConnector,
}, },
others: make(map[string]*namespaceConfig, len(otherNamespaces)), others: make(map[string]*namespaceConfig, len(otherNamespaces)),
} }

View File

@ -48,16 +48,17 @@ import (
) )
const ( const (
primaryNamespace = "clickhouse" primaryNamespace = "clickhouse"
archiveNamespace = "clickhouse-archive" archiveNamespace = "clickhouse-archive"
signozTraceDBName = "signoz_traces" signozTraceDBName = "signoz_traces"
signozDurationMVTable = "durationSort" signozDurationMVTable = "durationSort"
signozSpansTable = "signoz_spans" signozUsageExplorerTable = "usage_explorer"
signozErrorIndexTable = "signoz_error_index_v2" signozSpansTable = "signoz_spans"
signozTraceTableName = "signoz_index_v2" signozErrorIndexTable = "signoz_error_index_v2"
signozMetricDBName = "signoz_metrics" signozTraceTableName = "signoz_index_v2"
signozSampleTableName = "samples_v2" signozMetricDBName = "signoz_metrics"
signozTSTableName = "time_series_v2" signozSampleTableName = "samples_v2"
signozTSTableName = "time_series_v2"
minTimespanForProgressiveSearch = time.Hour minTimespanForProgressiveSearch = time.Hour
minTimespanForProgressiveSearchMargin = time.Minute minTimespanForProgressiveSearchMargin = time.Minute
@ -76,20 +77,23 @@ var (
// SpanWriter for reading spans from ClickHouse // SpanWriter for reading spans from ClickHouse
type ClickHouseReader struct { type ClickHouseReader struct {
db clickhouse.Conn db clickhouse.Conn
localDB *sqlx.DB localDB *sqlx.DB
traceDB string traceDB string
operationsTable string operationsTable string
durationTable string durationTable string
indexTable string indexTable string
errorTable string errorTable string
spansTable string usageExplorerTable string
logsDB string spansTable string
logsTable string dependencyGraphTable string
logsAttributeKeys string topLevelOperationsTable string
logsResourceKeys string logsDB string
queryEngine *promql.Engine logsTable string
remoteStorage *remote.Storage logsAttributeKeys string
logsResourceKeys string
queryEngine *promql.Engine
remoteStorage *remote.Storage
promConfigFile string promConfigFile string
promConfig *config.Config promConfig *config.Config
@ -118,21 +122,24 @@ func NewReader(localDB *sqlx.DB, configFile string) *ClickHouseReader {
} }
return &ClickHouseReader{ return &ClickHouseReader{
db: db, db: db,
localDB: localDB, localDB: localDB,
traceDB: options.primary.TraceDB, traceDB: options.primary.TraceDB,
alertManager: alertManager, alertManager: alertManager,
operationsTable: options.primary.OperationsTable, operationsTable: options.primary.OperationsTable,
indexTable: options.primary.IndexTable, indexTable: options.primary.IndexTable,
errorTable: options.primary.ErrorTable, errorTable: options.primary.ErrorTable,
durationTable: options.primary.DurationTable, usageExplorerTable: options.primary.UsageExplorerTable,
spansTable: options.primary.SpansTable, durationTable: options.primary.DurationTable,
logsDB: options.primary.LogsDB, spansTable: options.primary.SpansTable,
logsTable: options.primary.LogsTable, dependencyGraphTable: options.primary.DependencyGraphTable,
logsAttributeKeys: options.primary.LogsAttributeKeysTable, topLevelOperationsTable: options.primary.TopLevelOperationsTable,
logsResourceKeys: options.primary.LogsResourceKeysTable, logsDB: options.primary.LogsDB,
liveTailRefreshSeconds: options.primary.LiveTailRefreshSeconds, logsTable: options.primary.LogsTable,
promConfigFile: configFile, logsAttributeKeys: options.primary.LogsAttributeKeysTable,
logsResourceKeys: options.primary.LogsResourceKeysTable,
liveTailRefreshSeconds: options.primary.LiveTailRefreshSeconds,
promConfigFile: configFile,
} }
} }
@ -386,14 +393,21 @@ func (r *ClickHouseReader) GetChannel(id string) (*model.ChannelItem, *model.Api
idInt, _ := strconv.Atoi(id) idInt, _ := strconv.Atoi(id)
channel := model.ChannelItem{} channel := model.ChannelItem{}
query := fmt.Sprintf("SELECT id, created_at, updated_at, name, type, data data FROM notification_channels WHERE id=%d", idInt) query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels WHERE id=? "
err := r.localDB.Get(&channel, query) stmt, err := r.localDB.Preparex(query)
zap.S().Info(query) zap.S().Info(query, idInt)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Debug("Error in preparing sql query for GetChannel : ", err)
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
err = stmt.Get(&channel, idInt)
if err != nil {
zap.S().Debug(fmt.Sprintf("Error in getting channel with id=%d : ", idInt), err)
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
} }
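The GetChannel change above swaps `fmt.Sprintf` interpolation for a prepared statement with a bound parameter. A minimal sketch of that pattern with sqlx (the struct, tags, and package name are illustrative stand-ins, not the project's actual model types):

```go
package store

import "github.com/jmoiron/sqlx"

// Channel is a hypothetical row type; the real code scans into model.ChannelItem.
type Channel struct {
	ID   int    `db:"id"`
	Name string `db:"name"`
	Type string `db:"type"`
	Data string `db:"data"`
}

// getChannelByID binds id as a query parameter instead of formatting it into
// the SQL string, avoiding injection-prone string building.
func getChannelByID(db *sqlx.DB, id int) (*Channel, error) {
	stmt, err := db.Preparex("SELECT id, name, type, data FROM notification_channels WHERE id=?")
	if err != nil {
		return nil, err
	}
	defer stmt.Close()

	var ch Channel
	if err := stmt.Get(&ch, id); err != nil {
		return nil, err
	}
	return &ch, nil
}
```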
@ -662,103 +676,153 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro
return &services, nil return &services, nil
} }
func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError) {
operations := map[string][]string{}
query := fmt.Sprintf(`SELECT DISTINCT name, serviceName FROM %s.%s`, r.traceDB, r.topLevelOperationsTable)
rows, err := r.db.Query(ctx, query)
if err != nil {
zap.S().Error("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
}
defer rows.Close()
for rows.Next() {
var name, serviceName string
if err := rows.Scan(&name, &serviceName); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("Error in reading data")}
}
if _, ok := operations[serviceName]; !ok {
operations[serviceName] = []string{}
}
operations[serviceName] = append(operations[serviceName], name)
}
return &operations, nil
}
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) { func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
if r.indexTable == "" { if r.indexTable == "" {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable} return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
} }
topLevelOps, apiErr := r.GetTopLevelOperations(ctx)
if apiErr != nil {
return nil, apiErr
}
serviceItems := []model.ServiceItem{} serviceItems := []model.ServiceItem{}
var wg sync.WaitGroup
// limit the number of concurrent queries to not overload the clickhouse server
sem := make(chan struct{}, 10)
var mtx sync.RWMutex
query := fmt.Sprintf("SELECT serviceName, quantile(0.99)(durationNano) as p99, avg(durationNano) as avgDuration, count(*) as numCalls FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2'", r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)) for svc, ops := range *topLevelOps {
args := []interface{}{} sem <- struct{}{}
args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args) wg.Add(1)
if errStatus != nil { go func(svc string, ops []string) {
return nil, errStatus defer wg.Done()
defer func() { <-sem }()
var serviceItem model.ServiceItem
var numErrors uint64
query := fmt.Sprintf(
`SELECT
quantile(0.99)(durationNano) as p99,
avg(durationNano) as avgDuration,
count(*) as numCalls
FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable,
)
errorQuery := fmt.Sprintf(
`SELECT
count(*) as numErrors
FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.traceDB, r.indexTable,
)
args := []interface{}{}
args = append(args,
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("serviceName", svc),
clickhouse.Named("names", ops),
)
args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil {
zap.S().Error("Error in processing sql query: ", errStatus)
return
}
err := r.db.QueryRow(
ctx,
query,
args...,
).ScanStruct(&serviceItem)
if err != nil {
zap.S().Error("Error in processing sql query: ", err)
return
}
err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
if err != nil {
zap.S().Error("Error in processing sql query: ", err)
return
}
serviceItem.ServiceName = svc
serviceItem.NumErrors = numErrors
mtx.Lock()
serviceItems = append(serviceItems, serviceItem)
mtx.Unlock()
}(svc, ops)
} }
query += " GROUP BY serviceName ORDER BY p99 DESC" wg.Wait()
err := r.db.Select(ctx, &serviceItems, query, args...)
zap.S().Info(query) for idx := range serviceItems {
serviceItems[idx].CallRate = float64(serviceItems[idx].NumCalls) / float64(queryParams.Period)
if err != nil { serviceItems[idx].ErrorRate = float64(serviceItems[idx].NumErrors) * 100 / float64(serviceItems[idx].NumCalls)
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
} }
////////////////// Below block gets 5xx of services
serviceErrorItems := []model.ServiceItem{}
query = fmt.Sprintf("SELECT serviceName, count(*) as numErrors FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND (statusCode>=500 OR statusCode=2)", r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))
args = []interface{}{}
args, errStatus = buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil {
return nil, errStatus
}
query += " GROUP BY serviceName"
err = r.db.Select(ctx, &serviceErrorItems, query, args...)
zap.S().Info(query)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
}
m5xx := make(map[string]uint64)
for j := range serviceErrorItems {
m5xx[serviceErrorItems[j].ServiceName] = serviceErrorItems[j].NumErrors
}
///////////////////////////////////////////
////////////////// Below block gets 4xx of services
service4xxItems := []model.ServiceItem{}
query = fmt.Sprintf("SELECT serviceName, count(*) as num4xx FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND statusCode>=400 AND statusCode<500", r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))
args = []interface{}{}
args, errStatus = buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil {
return nil, errStatus
}
query += " GROUP BY serviceName"
err = r.db.Select(ctx, &service4xxItems, query, args...)
zap.S().Info(query)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
}
m4xx := make(map[string]uint64)
for j := range service4xxItems {
m4xx[service4xxItems[j].ServiceName] = service4xxItems[j].Num4XX
}
for i := range serviceItems {
if val, ok := m5xx[serviceItems[i].ServiceName]; ok {
serviceItems[i].NumErrors = val
}
if val, ok := m4xx[serviceItems[i].ServiceName]; ok {
serviceItems[i].Num4XX = val
}
serviceItems[i].CallRate = float64(serviceItems[i].NumCalls) / float64(queryParams.Period)
serviceItems[i].FourXXRate = float64(serviceItems[i].Num4XX) * 100 / float64(serviceItems[i].NumCalls)
serviceItems[i].ErrorRate = float64(serviceItems[i].NumErrors) * 100 / float64(serviceItems[i].NumCalls)
}
return &serviceItems, nil return &serviceItems, nil
} }
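The rewritten GetServices above fans out one query per service while capping concurrency with a buffered-channel semaphore and collecting results under a mutex. A stripped-down sketch of that pattern (the fetch function, service names, and limit are illustrative):

```go
package main

import (
	"fmt"
	"sync"
)

// fetchStats stands in for the per-service ClickHouse queries.
func fetchStats(svc string) string {
	return "stats for " + svc
}

func main() {
	services := []string{"frontend", "cartservice", "checkout"}

	var (
		wg      sync.WaitGroup
		mtx     sync.Mutex
		results []string
		sem     = make(chan struct{}, 10) // at most 10 queries in flight
	)

	for _, svc := range services {
		sem <- struct{}{} // acquire a slot before spawning
		wg.Add(1)
		go func(svc string) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot

			res := fetchStats(svc)

			mtx.Lock()
			results = append(results, res)
			mtx.Unlock()
		}(svc)
	}
	wg.Wait()
	fmt.Println(results)
}
```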
func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError) { func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError) {
topLevelOps, apiErr := r.GetTopLevelOperations(ctx)
if apiErr != nil {
return nil, apiErr
}
ops, ok := (*topLevelOps)[queryParams.ServiceName]
if !ok {
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("Service not found")}
}
namedArgs := []interface{}{
clickhouse.Named("interval", strconv.Itoa(int(queryParams.StepSeconds/60))),
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("serviceName", queryParams.ServiceName),
clickhouse.Named("names", ops),
}
serviceOverviewItems := []model.ServiceOverviewItem{} serviceOverviewItems := []model.ServiceOverviewItem{}
query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, quantile(0.99)(durationNano) as p99, quantile(0.95)(durationNano) as p95,quantile(0.50)(durationNano) as p50, count(*) as numCalls FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND serviceName='%s'", strconv.Itoa(int(queryParams.StepSeconds/60)), r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName) query := fmt.Sprintf(`
SELECT
toStartOfInterval(timestamp, INTERVAL @interval minute) as time,
quantile(0.99)(durationNano) as p99,
quantile(0.95)(durationNano) as p95,
quantile(0.50)(durationNano) as p50,
count(*) as numCalls
FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable,
)
args := []interface{}{} args := []interface{}{}
args = append(args, namedArgs...)
args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args) args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil { if errStatus != nil {
return nil, errStatus return nil, errStatus
@ -766,17 +830,25 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
query += " GROUP BY time ORDER BY time DESC" query += " GROUP BY time ORDER BY time DESC"
err := r.db.Select(ctx, &serviceOverviewItems, query, args...) err := r.db.Select(ctx, &serviceOverviewItems, query, args...)
zap.S().Info(query) zap.S().Debug(query)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Error("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
} }
serviceErrorItems := []model.ServiceErrorItem{} serviceErrorItems := []model.ServiceErrorItem{}
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, count(*) as numErrors FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND serviceName='%s' AND hasError=true", strconv.Itoa(int(queryParams.StepSeconds/60)), r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName) query = fmt.Sprintf(`
SELECT
toStartOfInterval(timestamp, INTERVAL @interval minute) as time,
count(*) as numErrors
FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.traceDB, r.indexTable,
)
args = []interface{}{} args = []interface{}{}
args = append(args, namedArgs...)
args, errStatus = buildQueryWithTagParams(ctx, queryParams.Tags, &query, args) args, errStatus = buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil { if errStatus != nil {
return nil, errStatus return nil, errStatus
@ -784,10 +856,10 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
query += " GROUP BY time ORDER BY time DESC" query += " GROUP BY time ORDER BY time DESC"
err = r.db.Select(ctx, &serviceErrorItems, query, args...) err = r.db.Select(ctx, &serviceErrorItems, query, args...)
zap.S().Info(query) zap.S().Debug(query)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Error("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
} }
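Several of the rewritten queries above replace fmt.Sprintf-ed literals with `@name` placeholders bound via `clickhouse.Named`. A minimal sketch of that binding style with clickhouse-go v2 (connection address, column aliases, and the timestamp values are placeholders mirroring the diff, not a definitive implementation):

```go
package main

import (
	"context"
	"log"

	"github.com/ClickHouse/clickhouse-go/v2"
)

// overviewRow maps result columns to fields via ch struct tags.
type overviewRow struct {
	P99      float64 `ch:"p99"`
	NumCalls uint64  `ch:"numCalls"`
}

func main() {
	conn, err := clickhouse.Open(&clickhouse.Options{Addr: []string{"localhost:9000"}})
	if err != nil {
		log.Fatal(err)
	}

	var rows []overviewRow
	query := `SELECT quantile(0.99)(durationNano) as p99, count(*) as numCalls
	          FROM signoz_traces.signoz_index_v2
	          WHERE serviceName = @serviceName AND timestamp >= @start AND timestamp <= @end`

	// Values are bound by name; the driver substitutes @serviceName, @start and @end.
	err = conn.Select(context.Background(), &rows, query,
		clickhouse.Named("serviceName", "frontend"),
		clickhouse.Named("start", "1650000000000000000"),
		clickhouse.Named("end", "1650003600000000000"),
	)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(rows)
}
```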
@ -1528,45 +1600,67 @@ func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model.
return &cleanedTagValues, nil return &cleanedTagValues, nil
} }
func (r *ClickHouseReader) GetTopEndpoints(ctx context.Context, queryParams *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, *model.ApiError) { func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError) {
var topEndpointsItems []model.TopEndpointsItem namedArgs := []interface{}{
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("serviceName", queryParams.ServiceName),
}
query := fmt.Sprintf("SELECT quantile(0.5)(durationNano) as p50, quantile(0.95)(durationNano) as p95, quantile(0.99)(durationNano) as p99, COUNT(1) as numCalls, name FROM %s.%s WHERE timestamp >= '%s' AND timestamp <= '%s' AND kind='2' and serviceName='%s'", r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName) var topOperationsItems []model.TopOperationsItem
query := fmt.Sprintf(`
SELECT
quantile(0.5)(durationNano) as p50,
quantile(0.95)(durationNano) as p95,
quantile(0.99)(durationNano) as p99,
COUNT(*) as numCalls,
name
FROM %s.%s
WHERE serviceName = @serviceName AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable,
)
args := []interface{}{} args := []interface{}{}
args = append(args, namedArgs...)
args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args) args, errStatus := buildQueryWithTagParams(ctx, queryParams.Tags, &query, args)
if errStatus != nil { if errStatus != nil {
return nil, errStatus return nil, errStatus
} }
query += " GROUP BY name" query += " GROUP BY name ORDER BY p99 DESC LIMIT 10"
err := r.db.Select(ctx, &topEndpointsItems, query, args...) err := r.db.Select(ctx, &topOperationsItems, query, args...)
zap.S().Info(query) zap.S().Debug(query)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Error("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
} }
if topEndpointsItems == nil { if topOperationsItems == nil {
topEndpointsItems = []model.TopEndpointsItem{} topOperationsItems = []model.TopOperationsItem{}
} }
return &topEndpointsItems, nil return &topOperationsItems, nil
} }
func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetUsageParams) (*[]model.UsageItem, error) { func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetUsageParams) (*[]model.UsageItem, error) {
var usageItems []model.UsageItem var usageItems []model.UsageItem
namedArgs := []interface{}{
clickhouse.Named("interval", queryParams.StepHour),
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
}
var query string var query string
if len(queryParams.ServiceName) != 0 { if len(queryParams.ServiceName) != 0 {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d HOUR) as time, count(1) as count FROM %s.%s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' GROUP BY time ORDER BY time ASC", queryParams.StepHour, r.traceDB, r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)) namedArgs = append(namedArgs, clickhouse.Named("serviceName", queryParams.ServiceName))
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE service_name=@serviceName AND timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.traceDB, r.usageExplorerTable)
} else { } else {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d HOUR) as time, count(1) as count FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s' GROUP BY time ORDER BY time ASC", queryParams.StepHour, r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.traceDB, r.usageExplorerTable)
} }
err := r.db.Select(ctx, &usageItems, query) err := r.db.Select(ctx, &usageItems, query, namedArgs...)
zap.S().Info(query) zap.S().Info(query)
@ -1626,48 +1720,50 @@ func interfaceArrayToStringArray(array []interface{}) []string {
return strArray return strArray
} }
func (r *ClickHouseReader) GetServiceMapDependencies(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) { func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
serviceMapDependencyItems := []model.ServiceMapDependencyItem{}
query := fmt.Sprintf(`SELECT spanID, parentSpanID, serviceName FROM %s.%s WHERE timestamp>='%s' AND timestamp<='%s'`, r.traceDB, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)) response := []model.ServiceMapDependencyResponseItem{}
err := r.db.Select(ctx, &serviceMapDependencyItems, query) args := []interface{}{}
args = append(args,
clickhouse.Named("start", uint64(queryParams.Start.Unix())),
clickhouse.Named("end", uint64(queryParams.End.Unix())),
clickhouse.Named("duration", uint64(queryParams.End.Unix()-queryParams.Start.Unix())),
)
zap.S().Info(query) query := fmt.Sprintf(`
WITH
quantilesMergeState(0.5, 0.75, 0.9, 0.95, 0.99)(duration_quantiles_state) AS duration_quantiles_state,
finalizeAggregation(duration_quantiles_state) AS result
SELECT
src as parent,
dest as child,
result[1] AS p50,
result[2] AS p75,
result[3] AS p90,
result[4] AS p95,
result[5] AS p99,
sum(total_count) as callCount,
sum(total_count)/ @duration AS callRate,
sum(error_count)/sum(total_count) as errorRate
FROM %s.%s
WHERE toUInt64(toDateTime(timestamp)) >= @start AND toUInt64(toDateTime(timestamp)) <= @end
GROUP BY
src,
dest`,
r.traceDB, r.dependencyGraphTable,
)
zap.S().Debug(query, args)
err := r.db.Select(ctx, &response, query, args...)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Error("Error in processing sql query: ", err)
return nil, fmt.Errorf("Error in processing sql query") return nil, fmt.Errorf("Error in processing sql query")
} }
serviceMap := make(map[string]*model.ServiceMapDependencyResponseItem) return &response, nil
spanId2ServiceNameMap := make(map[string]string)
for i := range serviceMapDependencyItems {
spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId] = serviceMapDependencyItems[i].ServiceName
}
for i := range serviceMapDependencyItems {
parent2childServiceName := spanId2ServiceNameMap[serviceMapDependencyItems[i].ParentSpanId] + "-" + spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId]
if _, ok := serviceMap[parent2childServiceName]; !ok {
serviceMap[parent2childServiceName] = &model.ServiceMapDependencyResponseItem{
Parent: spanId2ServiceNameMap[serviceMapDependencyItems[i].ParentSpanId],
Child: spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId],
CallCount: 1,
}
} else {
serviceMap[parent2childServiceName].CallCount++
}
}
retMe := make([]model.ServiceMapDependencyResponseItem, 0, len(serviceMap))
for _, dependency := range serviceMap {
if dependency.Parent == "" {
continue
}
retMe = append(retMe, *dependency)
}
return &retMe, nil
} }
func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, queryParams *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError) { func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, queryParams *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError) {
@ -1907,7 +2003,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context,
switch params.Type { switch params.Type {
case constants.TraceTTL: case constants.TraceTTL:
tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable} tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable, signozTraceDBName + "." + signozUsageExplorerTable, signozTraceDBName + "." + defaultDependencyGraphTable}
for _, tableName = range tableNameArray { for _, tableName = range tableNameArray {
statusItem, err := r.checkTTLStatusItem(ctx, tableName) statusItem, err := r.checkTTLStatusItem(ctx, tableName)
if err != nil { if err != nil {
@ -2253,7 +2349,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa
switch ttlParams.Type { switch ttlParams.Type {
case constants.TraceTTL: case constants.TraceTTL:
tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable} tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable, signozTraceDBName + "." + signozUsageExplorerTable, signozTraceDBName + "." + defaultDependencyGraphTable}
status, err := r.setTTLQueryStatus(ctx, tableNameArray) status, err := r.setTTLQueryStatus(ctx, tableNameArray)
if err != nil { if err != nil {
return nil, err return nil, err

View File

@ -311,6 +311,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
router.HandleFunc("/api/v1/rules", EditAccess(aH.createRule)).Methods(http.MethodPost) router.HandleFunc("/api/v1/rules", EditAccess(aH.createRule)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/rules/{id}", EditAccess(aH.editRule)).Methods(http.MethodPut) router.HandleFunc("/api/v1/rules/{id}", EditAccess(aH.editRule)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/rules/{id}", EditAccess(aH.deleteRule)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/rules/{id}", EditAccess(aH.deleteRule)).Methods(http.MethodDelete)
router.HandleFunc("/api/v1/rules/{id}", EditAccess(aH.patchRule)).Methods(http.MethodPatch)
router.HandleFunc("/api/v1/dashboards", ViewAccess(aH.getDashboards)).Methods(http.MethodGet) router.HandleFunc("/api/v1/dashboards", ViewAccess(aH.getDashboards)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dashboards", EditAccess(aH.createDashboards)).Methods(http.MethodPost) router.HandleFunc("/api/v1/dashboards", EditAccess(aH.createDashboards)).Methods(http.MethodPost)
@ -323,10 +324,11 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
router.HandleFunc("/api/v1/services", ViewAccess(aH.getServices)).Methods(http.MethodPost) router.HandleFunc("/api/v1/services", ViewAccess(aH.getServices)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/services/list", aH.getServicesList).Methods(http.MethodGet) router.HandleFunc("/api/v1/services/list", aH.getServicesList).Methods(http.MethodGet)
router.HandleFunc("/api/v1/service/overview", ViewAccess(aH.getServiceOverview)).Methods(http.MethodPost) router.HandleFunc("/api/v1/service/overview", ViewAccess(aH.getServiceOverview)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/service/top_endpoints", ViewAccess(aH.getTopEndpoints)).Methods(http.MethodPost) router.HandleFunc("/api/v1/service/top_operations", ViewAccess(aH.getTopOperations)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/service/top_level_operations", ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", ViewAccess(aH.searchTraces)).Methods(http.MethodGet) router.HandleFunc("/api/v1/traces/{traceId}", ViewAccess(aH.searchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/usage", ViewAccess(aH.getUsage)).Methods(http.MethodGet) router.HandleFunc("/api/v1/usage", ViewAccess(aH.getUsage)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/serviceMapDependencies", ViewAccess(aH.serviceMapDependencies)).Methods(http.MethodPost) router.HandleFunc("/api/v1/dependency_graph", ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/ttl", AdminAccess(aH.setTTL)).Methods(http.MethodPost) router.HandleFunc("/api/v1/settings/ttl", AdminAccess(aH.setTTL)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/ttl", ViewAccess(aH.getTTL)).Methods(http.MethodGet) router.HandleFunc("/api/v1/settings/ttl", ViewAccess(aH.getTTL)).Methods(http.MethodGet)
@ -786,6 +788,28 @@ func (aH *APIHandler) deleteRule(w http.ResponseWriter, r *http.Request) {
} }
// patchRule updates only requested changes in the rule
func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
defer r.Body.Close()
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("msg: error in getting req body of patch rule API\n", "\t error:", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
gettableRule, err := aH.ruleManager.PatchRule(string(body), id)
if err != nil {
respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
aH.respond(w, gettableRule)
}
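With the PATCH route registered above, a rule can now be partially updated, e.g. toggled via the `disabled` field the frontend's PatchProps sends. A hedged sketch of exercising it from a Go client (host, port, rule id, and auth handling are placeholders):

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Placeholder host and rule id; a real call also needs the usual auth headers.
	url := "http://localhost:8080/api/v1/rules/1"
	body := []byte(`{"disabled": true}`)

	req, err := http.NewRequest(http.MethodPatch, url, bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```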
func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"] id := mux.Vars(r)["id"]
@ -1082,14 +1106,14 @@ func (aH *APIHandler) submitFeedback(w http.ResponseWriter, r *http.Request) {
} }
func (aH *APIHandler) getTopEndpoints(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) getTopOperations(w http.ResponseWriter, r *http.Request) {
query, err := parseGetTopEndpointsRequest(r) query, err := parseGetTopOperationsRequest(r)
if aH.handleError(w, err, http.StatusBadRequest) { if aH.handleError(w, err, http.StatusBadRequest) {
return return
} }
result, apiErr := (*aH.reader).GetTopEndpoints(r.Context(), query) result, apiErr := (*aH.reader).GetTopOperations(r.Context(), query)
if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) { if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
return return
@ -1131,6 +1155,17 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request)
} }
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
result, apiErr := (*aH.reader).GetTopLevelOperations(r.Context())
if apiErr != nil {
respondError(w, apiErr, nil)
return
}
aH.writeJSON(w, r, result)
}
func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServicesRequest(r) query, err := parseGetServicesRequest(r)
@ -1152,14 +1187,14 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
aH.writeJSON(w, r, result) aH.writeJSON(w, r, result)
} }
func (aH *APIHandler) serviceMapDependencies(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) dependencyGraph(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServicesRequest(r) query, err := parseGetServicesRequest(r)
if aH.handleError(w, err, http.StatusBadRequest) { if aH.handleError(w, err, http.StatusBadRequest) {
return return
} }
result, err := (*aH.reader).GetServiceMapDependencies(r.Context(), query) result, err := (*aH.reader).GetDependencyGraph(r.Context(), query)
if aH.handleError(w, err, http.StatusBadRequest) { if aH.handleError(w, err, http.StatusBadRequest) {
return return
} }

View File

@ -32,8 +32,8 @@ func parseUser(r *http.Request) (*model.User, error) {
return &user, nil return &user, nil
} }
func parseGetTopEndpointsRequest(r *http.Request) (*model.GetTopEndpointsParams, error) { func parseGetTopOperationsRequest(r *http.Request) (*model.GetTopOperationsParams, error) {
var postData *model.GetTopEndpointsParams var postData *model.GetTopOperationsParams
err := json.NewDecoder(r.Body).Decode(&postData) err := json.NewDecoder(r.Body).Decode(&postData)
if err != nil { if err != nil {

View File

@ -140,7 +140,7 @@ func (s *Server) createPrivateServer(api *APIHandler) (*http.Server, error) {
//todo(amol): find out a way to add exact domain or //todo(amol): find out a way to add exact domain or
// ip here for alert manager // ip here for alert manager
AllowedOrigins: []string{"*"}, AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT"}, AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type"}, AllowedHeaders: []string{"Accept", "Authorization", "Content-Type"},
}) })
@ -166,7 +166,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
c := cors.New(cors.Options{ c := cors.New(cors.Options{
AllowedOrigins: []string{"*"}, AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "OPTIONS"}, AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"}, AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"},
}) })

View File

@ -23,4 +23,4 @@ scrape_configs:
remote_read: remote_read:
- url: tcp://localhost:9001/?database=signoz_metrics - url: tcp://localhost:9000/?database=signoz_metrics

View File

@ -11,7 +11,7 @@ require (
github.com/gorilla/mux v1.8.0 github.com/gorilla/mux v1.8.0
github.com/gosimple/slug v1.10.0 github.com/gosimple/slug v1.10.0
github.com/jmoiron/sqlx v1.3.4 github.com/jmoiron/sqlx v1.3.4
github.com/json-iterator/go v1.1.10 github.com/json-iterator/go v1.1.12
github.com/mattn/go-sqlite3 v1.14.8 github.com/mattn/go-sqlite3 v1.14.8
github.com/minio/minio-go/v6 v6.0.57 github.com/minio/minio-go/v6 v6.0.57
github.com/oklog/oklog v0.3.2 github.com/oklog/oklog v0.3.2
@ -92,7 +92,7 @@ require (
github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 // indirect github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 // indirect
github.com/oklog/run v1.1.0 // indirect github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v0.3.1-0.20170117200651-66bb6560562f // indirect github.com/oklog/ulid v0.3.1-0.20170117200651-66bb6560562f // indirect


@ -295,6 +295,8 @@ github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBv
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
@ -343,6 +345,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 h1:F9x/1yl3T2AeKLr2AMdilSD8+f9bvMnNN8VS5iDtovc= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 h1:F9x/1yl3T2AeKLr2AMdilSD8+f9bvMnNN8VS5iDtovc=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/oklog/oklog v0.2.3-0.20170918173356-f857583a70c3/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/oklog v0.2.3-0.20170918173356-f857583a70c3/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=


@ -40,6 +40,8 @@ type Alert struct {
StartsAt time.Time `json:"startsAt,omitempty"` StartsAt time.Time `json:"startsAt,omitempty"`
EndsAt time.Time `json:"endsAt,omitempty"` EndsAt time.Time `json:"endsAt,omitempty"`
GeneratorURL string `json:"generatorURL,omitempty"` GeneratorURL string `json:"generatorURL,omitempty"`
Receivers []string `json:"receivers,omitempty"`
} }
// Name returns the name of the alert. It is equivalent to the "alertname" label. // Name returns the name of the alert. It is equivalent to the "alertname" label.
@ -53,7 +55,7 @@ func (a *Alert) Hash() uint64 {
} }
func (a *Alert) String() string { func (a *Alert) String() string {
s := fmt.Sprintf("%s[%s]", a.Name(), fmt.Sprintf("%016x", a.Hash())[:7]) s := fmt.Sprintf("%s[%s][%s]", a.Name(), fmt.Sprintf("%016x", a.Hash())[:7], a.Receivers)
if a.Resolved() { if a.Resolved() {
return s + "[resolved]" return s + "[resolved]"
} }


@ -20,11 +20,13 @@ type Reader interface {
GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError) GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError) GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError) GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError)
GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError)
GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError)
GetTopEndpoints(ctx context.Context, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, *model.ApiError) GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError)
GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error) GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
GetServicesList(ctx context.Context) (*[]string, error) GetServicesList(ctx context.Context) (*[]string, error)
GetServiceMapDependencies(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
GetTTL(ctx context.Context, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError) GetTTL(ctx context.Context, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
// GetDisks returns a list of disks configured in the underlying DB. It is supported by // GetDisks returns a list of disks configured in the underlying DB. It is supported by


@ -135,7 +135,7 @@ type MetricAutocompleteTagParams struct {
TagKey string TagKey string
} }
type GetTopEndpointsParams struct { type GetTopOperationsParams struct {
StartTime string `json:"start"` StartTime string `json:"start"`
EndTime string `json:"end"` EndTime string `json:"end"`
ServiceName string `json:"service"` ServiceName string `json:"service"`


@ -3,6 +3,7 @@ package model
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"math"
"strconv" "strconv"
"time" "time"
@ -206,19 +207,13 @@ func (item *SearchSpanReponseItem) GetValues() []interface{} {
return returnArray return returnArray
} }
type ServiceMapDependencyItem struct {
SpanId string `json:"spanId,omitempty" ch:"spanID"`
ParentSpanId string `json:"parentSpanId,omitempty" ch:"parentSpanID"`
ServiceName string `json:"serviceName,omitempty" ch:"serviceName"`
}
type UsageItem struct { type UsageItem struct {
Time time.Time `json:"time,omitempty" ch:"time"` Time time.Time `json:"time,omitempty" ch:"time"`
Timestamp uint64 `json:"timestamp" ch:"timestamp"` Timestamp uint64 `json:"timestamp" ch:"timestamp"`
Count uint64 `json:"count" ch:"count"` Count uint64 `json:"count" ch:"count"`
} }
type TopEndpointsItem struct { type TopOperationsItem struct {
Percentile50 float64 `json:"p50" ch:"p50"` Percentile50 float64 `json:"p50" ch:"p50"`
Percentile95 float64 `json:"p95" ch:"p95"` Percentile95 float64 `json:"p95" ch:"p95"`
Percentile99 float64 `json:"p99" ch:"p99"` Percentile99 float64 `json:"p99" ch:"p99"`
@ -233,10 +228,18 @@ type TagFilters struct {
type TagValues struct { type TagValues struct {
TagValues string `json:"tagValues" ch:"tagValues"` TagValues string `json:"tagValues" ch:"tagValues"`
} }
type ServiceMapDependencyResponseItem struct { type ServiceMapDependencyResponseItem struct {
Parent string `json:"parent,omitempty" ch:"parent"` Parent string `json:"parent" ch:"parent"`
Child string `json:"child,omitempty" ch:"child"` Child string `json:"child" ch:"child"`
CallCount int `json:"callCount,omitempty" ch:"callCount"` CallCount uint64 `json:"callCount" ch:"callCount"`
CallRate float64 `json:"callRate" ch:"callRate"`
ErrorRate float64 `json:"errorRate" ch:"errorRate"`
P99 float64 `json:"p99" ch:"p99"`
P95 float64 `json:"p95" ch:"p95"`
P90 float64 `json:"p90" ch:"p90"`
P75 float64 `json:"p75" ch:"p75"`
P50 float64 `json:"p50" ch:"p50"`
} }
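
To make the widened response item concrete, here is a hedged sketch of what a single dependency-graph edge now serializes to; the import path matches the one used elsewhere in this diff, and the numbers are made up.

package main

import (
	"encoding/json"
	"fmt"

	"go.signoz.io/query-service/model"
)

func main() {
	// one edge of the service dependency graph, now carrying call rate,
	// error rate and latency percentiles alongside the raw call count
	edge := model.ServiceMapDependencyResponseItem{
		Parent:    "frontend",
		Child:     "driver",
		CallCount: 120,
		CallRate:  2.0,
		ErrorRate: 0.5,
		P99:       830.5,
		P95:       410.2,
		P90:       322.7,
		P75:       201.3,
		P50:       98.4,
	}
	out, _ := json.Marshal(edge)
	fmt.Println(string(out))
	// {"parent":"frontend","child":"driver","callCount":120,"callRate":2,"errorRate":0.5,...}
}
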
type GetFilteredSpansAggregatesResponse struct { type GetFilteredSpansAggregatesResponse struct {
@ -462,3 +465,30 @@ type LogsAggregatesDBResponseItem struct {
Value float64 `ch:"value"` Value float64 `ch:"value"`
GroupBy string `ch:"groupBy"` GroupBy string `ch:"groupBy"`
} }
// MarshalJSON implements json.Marshaler.
func (s *ServiceItem) MarshalJSON() ([]byte, error) {
// If a service didn't send any data in the last interval duration,
// its values such as the 99th percentile come back as NaN, and
// JSON encoding doesn't support NaN.
// We still want to show the service in the UI, so we replace NaN with 0.
type Alias ServiceItem
if math.IsInf(s.AvgDuration, 0) || math.IsNaN(s.AvgDuration) {
s.AvgDuration = 0
}
if math.IsInf(s.CallRate, 0) || math.IsNaN(s.CallRate) {
s.CallRate = 0
}
if math.IsInf(s.ErrorRate, 0) || math.IsNaN(s.ErrorRate) {
s.ErrorRate = 0
}
if math.IsInf(s.Percentile99, 0) || math.IsNaN(s.Percentile99) {
s.Percentile99 = 0
}
return json.Marshal(&struct {
*Alias
}{
Alias: (*Alias)(s),
})
}
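
The reason for the custom marshaler is easy to reproduce: encoding/json returns an error for NaN and Inf values. Below is a minimal, self-contained sketch of the same Alias pattern, using a hypothetical cut-down struct rather than the real model.ServiceItem.

package main

import (
	"encoding/json"
	"fmt"
	"math"
)

// serviceItem is a hypothetical, reduced stand-in for model.ServiceItem.
type serviceItem struct {
	ServiceName  string  `json:"serviceName"`
	Percentile99 float64 `json:"p99"`
	CallRate     float64 `json:"callRate"`
}

func (s *serviceItem) MarshalJSON() ([]byte, error) {
	// Alias has the same fields but none of the methods, so the
	// json.Marshal call below does not recurse into this function.
	type Alias serviceItem
	if math.IsNaN(s.Percentile99) || math.IsInf(s.Percentile99, 0) {
		s.Percentile99 = 0
	}
	if math.IsNaN(s.CallRate) || math.IsInf(s.CallRate, 0) {
		s.CallRate = 0
	}
	return json.Marshal((*Alias)(s))
}

func main() {
	// plain NaN is rejected outright by encoding/json
	if _, err := json.Marshal(math.NaN()); err != nil {
		fmt.Println(err) // json: unsupported value: NaN
	}

	// the sanitizing marshaler degrades NaN to 0 instead of failing
	b, _ := json.Marshal(&serviceItem{ServiceName: "frontend", Percentile99: math.NaN(), CallRate: 12.5})
	fmt.Println(string(b)) // {"serviceName":"frontend","p99":0,"callRate":12.5}
}
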


@ -2,12 +2,18 @@ package rules
import ( import (
"encoding/json" "encoding/json"
"fmt"
"github.com/pkg/errors" "github.com/pkg/errors"
"go.signoz.io/query-service/model" "go.signoz.io/query-service/model"
"go.signoz.io/query-service/utils/labels" "go.signoz.io/query-service/utils/labels"
"net/url"
"strings"
"time" "time"
) )
// this file contains common structs and methods used by
// rule engine
// how long before re-sending the alert // how long before re-sending the alert
const resolvedRetention = 15 * time.Minute const resolvedRetention = 15 * time.Minute
@ -41,6 +47,7 @@ const (
StateInactive AlertState = iota StateInactive AlertState = iota
StatePending StatePending
StateFiring StateFiring
StateDisabled
) )
func (s AlertState) String() string { func (s AlertState) String() string {
@ -51,6 +58,8 @@ func (s AlertState) String() string {
return "pending" return "pending"
case StateFiring: case StateFiring:
return "firing" return "firing"
case StateDisabled:
return "disabled"
} }
panic(errors.Errorf("unknown alert state: %d", s)) panic(errors.Errorf("unknown alert state: %d", s))
} }
@ -63,6 +72,9 @@ type Alert struct {
GeneratorURL string GeneratorURL string
// list of preferred receivers, e.g. slack
Receivers []string
Value float64 Value float64
ActiveAt time.Time ActiveAt time.Time
FiredAt time.Time FiredAt time.Time
@ -71,7 +83,6 @@ type Alert struct {
ValidUntil time.Time ValidUntil time.Time
} }
// todo(amol): need to review this with ankit
func (a *Alert) needsSending(ts time.Time, resendDelay time.Duration) bool { func (a *Alert) needsSending(ts time.Time, resendDelay time.Duration) bool {
if a.State == StatePending { if a.State == StatePending {
return false return false
@ -198,3 +209,30 @@ func (d *Duration) UnmarshalJSON(b []byte) error {
return errors.New("invalid duration") return errors.New("invalid duration")
} }
} }
// prepareRuleGeneratorURL creates an appropriate URL
// for the rule. The URL is sent in Slack messages as well as
// to other systems and allows backtracking to the rule definition
// from those third-party systems.
func prepareRuleGeneratorURL(ruleId string, source string) string {
if source == "" {
return source
}
// check if source is a valid url
_, err := url.Parse(source)
if err != nil {
return ""
}
// since we capture window.location when a new rule is created,
// we end up with a rule source like host:port/alerts/new. In this case
// we want to replace "new" with the rule id parameter
hasNew := strings.LastIndex(source, "new")
if hasNew > -1 {
ruleURL := fmt.Sprintf("%sedit?ruleId=%s", source[0:hasNew], ruleId)
return ruleURL
}
return source
}
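
A minimal test sketch of the rewrite described above, assuming it sits in a _test.go file of this rules package; the rule id and URLs are illustrative.

package rules

import "testing"

func TestPrepareRuleGeneratorURL(t *testing.T) {
	// window.location captured at creation time ends in /alerts/new and
	// should be rewritten to the edit view of the given rule id
	got := prepareRuleGeneratorURL("12", "http://localhost:3301/alerts/new")
	if got != "http://localhost:3301/alerts/edit?ruleId=12" {
		t.Fatalf("unexpected generator URL: %s", got)
	}

	// a source without the "new" suffix is passed through unchanged
	got = prepareRuleGeneratorURL("12", "http://localhost:3301/alerts")
	if got != "http://localhost:3301/alerts" {
		t.Fatalf("unexpected generator URL: %s", got)
	}
}
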


@ -30,9 +30,13 @@ type PostableRule struct {
Labels map[string]string `yaml:"labels,omitempty" json:"labels,omitempty"` Labels map[string]string `yaml:"labels,omitempty" json:"labels,omitempty"`
Annotations map[string]string `yaml:"annotations,omitempty" json:"annotations,omitempty"` Annotations map[string]string `yaml:"annotations,omitempty" json:"annotations,omitempty"`
Disabled bool `json:"disabled"`
// Source captures the source url where rule has been created // Source captures the source url where rule has been created
Source string `json:"source,omitempty"` Source string `json:"source,omitempty"`
PreferredChannels []string `json:"preferredChannels,omitempty"`
// legacy // legacy
Expr string `yaml:"expr,omitempty" json:"expr,omitempty"` Expr string `yaml:"expr,omitempty" json:"expr,omitempty"`
OldYaml string `json:"yaml,omitempty"` OldYaml string `json:"yaml,omitempty"`
@ -43,16 +47,23 @@ func ParsePostableRule(content []byte) (*PostableRule, []error) {
} }
func parsePostableRule(content []byte, kind string) (*PostableRule, []error) { func parsePostableRule(content []byte, kind string) (*PostableRule, []error) {
rule := PostableRule{} return parseIntoRule(PostableRule{}, content, kind)
}
// parseIntoRule loads the content (data) into PostableRule and also
// validates the end result
func parseIntoRule(initRule PostableRule, content []byte, kind string) (*PostableRule, []error) {
rule := &initRule
var err error var err error
if kind == "json" { if kind == "json" {
if err = json.Unmarshal(content, &rule); err != nil { if err = json.Unmarshal(content, rule); err != nil {
zap.S().Debugf("postable rule content", string(content), "\t kind:", kind) zap.S().Debugf("postable rule content", string(content), "\t kind:", kind)
return nil, []error{fmt.Errorf("failed to load json")} return nil, []error{fmt.Errorf("failed to load json")}
} }
} else if kind == "yaml" { } else if kind == "yaml" {
if err = yaml.Unmarshal(content, &rule); err != nil { if err = yaml.Unmarshal(content, rule); err != nil {
zap.S().Debugf("postable rule content", string(content), "\t kind:", kind) zap.S().Debugf("postable rule content", string(content), "\t kind:", kind)
return nil, []error{fmt.Errorf("failed to load yaml")} return nil, []error{fmt.Errorf("failed to load yaml")}
} }
@ -105,7 +116,8 @@ func parsePostableRule(content []byte, kind string) (*PostableRule, []error) {
if errs := rule.Validate(); len(errs) > 0 { if errs := rule.Validate(); len(errs) > 0 {
return nil, errs return nil, errs
} }
return &rule, []error{}
return rule, []error{}
} }
func isValidLabelName(ln string) bool { func isValidLabelName(ln string) bool {
@ -213,18 +225,7 @@ type GettableRules struct {
// GettableRule has info for an alerting rules. // GettableRule has info for an alerting rules.
type GettableRule struct { type GettableRule struct {
Labels map[string]string `json:"labels"` Id string `json:"id"`
Annotations map[string]string `json:"annotations"` State string `json:"state"`
State string `json:"state"` PostableRule
Alert string `json:"alert"`
// Description string `yaml:"description,omitempty" json:"description,omitempty"`
Id string `json:"id"`
RuleType RuleType `yaml:"ruleType,omitempty" json:"ruleType,omitempty"`
EvalWindow Duration `yaml:"evalWindow,omitempty" json:"evalWindow,omitempty"`
Frequency Duration `yaml:"frequency,omitempty" json:"frequency,omitempty"`
RuleCondition RuleCondition `yaml:"condition,omitempty" json:"condition,omitempty"`
// ActiveAt *time.Time `json:"activeAt,omitempty"`
// Value float64 `json:"value"`
} }


@ -29,8 +29,16 @@ func ruleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0] return strings.Split(n, "-groupname")[0]
} }
func prepareTaskName(ruleId int64) string { func prepareTaskName(ruleId interface{}) string {
return fmt.Sprintf("%d-groupname", ruleId) switch ruleId.(type) {
case int, int64:
return fmt.Sprintf("%d-groupname", ruleId)
case string:
return fmt.Sprintf("%s-groupname", ruleId)
default:
return fmt.Sprintf("%v-groupname", ruleId)
}
} }
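
For completeness, a small test sketch (again assuming a _test.go file in this package) showing that both the legacy int64 ids and the string ids used by the newer code paths yield the same task name:

package rules

import "testing"

func TestPrepareTaskName(t *testing.T) {
	// legacy callers pass numeric rule ids
	if got := prepareTaskName(int64(7)); got != "7-groupname" {
		t.Fatalf("unexpected task name for int64 id: %s", got)
	}
	// newer callers such as PatchRule pass the id as a string
	if got := prepareTaskName("7"); got != "7-groupname" {
		t.Fatalf("unexpected task name for string id: %s", got)
	}
}
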
// ManagerOptions bundles options for the Manager. // ManagerOptions bundles options for the Manager.
@ -170,10 +178,11 @@ func (m *Manager) initiate() error {
continue continue
} }
} }
if !parsedRule.Disabled {
err := m.addTask(parsedRule, taskName) err := m.addTask(parsedRule, taskName)
if err != nil { if err != nil {
zap.S().Errorf("failed to load the rule definition (%s): %v", taskName, err) zap.S().Errorf("failed to load the rule definition (%s): %v", taskName, err)
}
} }
} }
@ -206,7 +215,7 @@ func (m *Manager) Stop() {
// EditRuleDefinition writes the rule definition to the // EditRuleDefinition writes the rule definition to the
// datastore and also updates the rule executor // datastore and also updates the rule executor
func (m *Manager) EditRule(ruleStr string, id string) error { func (m *Manager) EditRule(ruleStr string, id string) error {
// todo(amol): fetch recent rule from db first
parsedRule, errs := ParsePostableRule([]byte(ruleStr)) parsedRule, errs := ParsePostableRule([]byte(ruleStr))
if len(errs) > 0 { if len(errs) > 0 {
@ -221,16 +230,9 @@ func (m *Manager) EditRule(ruleStr string, id string) error {
} }
if !m.opts.DisableRules { if !m.opts.DisableRules {
err = m.editTask(parsedRule, taskName) return m.syncRuleStateWithTask(taskName, parsedRule)
if err != nil {
// todo(amol): using tx with sqllite3 is gets
// database locked. need to research and resolve this
//tx.Rollback()
return err
}
} }
// return tx.Commit()
return nil return nil
} }
@ -249,8 +251,7 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error {
// it to finish the current iteration. Then copy it into the new group. // it to finish the current iteration. Then copy it into the new group.
oldTask, ok := m.tasks[taskName] oldTask, ok := m.tasks[taskName]
if !ok { if !ok {
zap.S().Errorf("msg:", "rule task not found, edit task failed", "\t task name:", taskName) zap.S().Warnf("msg:", "rule task not found, a new task will be created ", "\t task name:", taskName)
return errors.New("rule task not found, edit task failed")
} }
delete(m.tasks, taskName) delete(m.tasks, taskName)
@ -281,10 +282,7 @@ func (m *Manager) DeleteRule(id string) error {
taskName := prepareTaskName(int64(idInt)) taskName := prepareTaskName(int64(idInt))
if !m.opts.DisableRules { if !m.opts.DisableRules {
if err := m.deleteTask(taskName); err != nil { m.deleteTask(taskName)
zap.S().Errorf("msg: ", "failed to unload the rule task from memory, please retry", "\t ruleid: ", id)
return err
}
} }
if _, _, err := m.ruleDB.DeleteRuleTx(id); err != nil { if _, _, err := m.ruleDB.DeleteRuleTx(id); err != nil {
@ -295,7 +293,7 @@ func (m *Manager) DeleteRule(id string) error {
return nil return nil
} }
func (m *Manager) deleteTask(taskName string) error { func (m *Manager) deleteTask(taskName string) {
m.mtx.Lock() m.mtx.Lock()
defer m.mtx.Unlock() defer m.mtx.Unlock()
@ -305,11 +303,8 @@ func (m *Manager) deleteTask(taskName string) error {
delete(m.tasks, taskName) delete(m.tasks, taskName)
delete(m.rules, ruleIdFromTaskName(taskName)) delete(m.rules, ruleIdFromTaskName(taskName))
} else { } else {
zap.S().Errorf("msg:", "rule not found for deletion", "\t name:", taskName) zap.S().Info("msg: ", "rule not found for deletion", "\t name:", taskName)
return fmt.Errorf("rule not found")
} }
return nil
} }
// CreateRule stores rule def into db and also // CreateRule stores rule def into db and also
@ -386,12 +381,7 @@ func (m *Manager) prepareTask(acquireLock bool, r *PostableRule, taskName string
// create a threshold rule // create a threshold rule
tr, err := NewThresholdRule( tr, err := NewThresholdRule(
ruleId, ruleId,
r.Alert, r,
r.RuleCondition,
time.Duration(r.EvalWindow),
r.Labels,
r.Annotations,
r.Source,
) )
if err != nil { if err != nil {
@ -411,14 +401,8 @@ func (m *Manager) prepareTask(acquireLock bool, r *PostableRule, taskName string
// create promql rule // create promql rule
pr, err := NewPromRule( pr, err := NewPromRule(
ruleId, ruleId,
r.Alert, r,
r.RuleCondition,
time.Duration(r.EvalWindow),
r.Labels,
r.Annotations,
// required as promql engine works with logger and not zap
log.With(m.logger, "alert", r.Alert), log.With(m.logger, "alert", r.Alert),
r.Source,
) )
if err != nil { if err != nil {
@ -526,6 +510,7 @@ func (m *Manager) prepareNotifyFunc() NotifyFunc {
Labels: alert.Labels, Labels: alert.Labels,
Annotations: alert.Annotations, Annotations: alert.Annotations,
GeneratorURL: generatorURL, GeneratorURL: generatorURL,
Receivers: alert.Receivers,
} }
if !alert.ResolvedAt.IsZero() { if !alert.ResolvedAt.IsZero() {
a.EndsAt = alert.ResolvedAt a.EndsAt = alert.ResolvedAt
@ -555,6 +540,9 @@ func (m *Manager) ListRuleStates() (*GettableRules, error) {
// fetch rules from DB // fetch rules from DB
storedRules, err := m.ruleDB.GetStoredRules() storedRules, err := m.ruleDB.GetStoredRules()
if err != nil {
return nil, err
}
// initiate response object // initiate response object
resp := make([]*GettableRule, 0) resp := make([]*GettableRule, 0)
@ -571,7 +559,8 @@ func (m *Manager) ListRuleStates() (*GettableRules, error) {
// fetch state of rule from memory // fetch state of rule from memory
if rm, ok := m.rules[ruleResponse.Id]; !ok { if rm, ok := m.rules[ruleResponse.Id]; !ok {
zap.S().Warnf("msg:", "invalid rule id found while fetching list of rules", "\t err:", err, "\t rule_id:", ruleResponse.Id) ruleResponse.State = StateDisabled.String()
ruleResponse.Disabled = true
} else { } else {
ruleResponse.State = rm.State().String() ruleResponse.State = rm.State().String()
} }
@ -593,3 +582,104 @@ func (m *Manager) GetRule(id string) (*GettableRule, error) {
r.Id = fmt.Sprintf("%d", s.Id) r.Id = fmt.Sprintf("%d", s.Id)
return r, nil return r, nil
} }
// syncRuleStateWithTask ensures that the state of a stored rule matches
// the task state. For example - if a stored rule is disabled, then
// there is no task running against it.
func (m *Manager) syncRuleStateWithTask(taskName string, rule *PostableRule) error {
if rule.Disabled {
// check if rule has any task running
if _, ok := m.tasks[taskName]; ok {
// delete task from memory
m.deleteTask(taskName)
}
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has no task, start one
if err := m.addTask(rule, taskName); err != nil {
return err
}
}
}
return nil
}
// PatchRule supports attribute level changes to the rule definition unlike
// EditRule, which updates entire rule definition in the DB.
// the process:
// - get the latest rule from db
// - overwrite the patch attributes received in the input (ruleStr)
// - re-deploy or undeploy task as necessary
// - update the patched rule in the DB
func (m *Manager) PatchRule(ruleStr string, ruleId string) (*GettableRule, error) {
if ruleId == "" {
return nil, fmt.Errorf("id is mandatory for patching rule")
}
taskName := prepareTaskName(ruleId)
// retrieve rule from DB
storedJSON, err := m.ruleDB.GetStoredRule(ruleId)
if err != nil {
zap.S().Errorf("msg:", "failed to get stored rule with given id", "\t error:", err)
return nil, err
}
// storedRule holds the current stored rule from DB
storedRule := PostableRule{}
if err := json.Unmarshal([]byte(storedJSON.Data), &storedRule); err != nil {
zap.S().Errorf("msg:", "failed to get unmarshal stored rule with given id", "\t error:", err)
return nil, err
}
// patchedRule is combo of stored rule and patch received in the request
patchedRule, errs := parseIntoRule(storedRule, []byte(ruleStr), "json")
if len(errs) > 0 {
zap.S().Errorf("failed to parse rules:", errs)
// just one rule is being parsed so expect just one error
return nil, errs[0]
}
// deploy or un-deploy task according to patched (new) rule state
if err := m.syncRuleStateWithTask(taskName, patchedRule); err != nil {
zap.S().Errorf("failed to sync stored rule state with the task")
return nil, err
}
// prepare rule json to write to update db
patchedRuleBytes, err := json.Marshal(patchedRule)
if err != nil {
return nil, err
}
// write updated rule to db
if _, _, err = m.ruleDB.EditRuleTx(string(patchedRuleBytes), ruleId); err != nil {
// write failed, rollback task state
// restore task state from the stored rule
if err := m.syncRuleStateWithTask(taskName, &storedRule); err != nil {
zap.S().Errorf("msg: ", "failed to restore rule after patch failure", "\t error:", err)
}
return nil, err
}
// prepare http response
response := GettableRule{
Id: ruleId,
PostableRule: *patchedRule,
}
// fetch state of rule from memory
if rm, ok := m.rules[ruleId]; !ok {
response.State = StateDisabled.String()
response.Disabled = true
} else {
response.State = rm.State().String()
}
return &response, nil
}
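
A hedged sketch of how a caller is expected to drive PatchRule, for instance to pause a rule without resending its full definition; the m variable and the rule id are assumptions for illustration, not taken from this diff. Because the stored rule is used as the initial value for parseIntoRule, any field absent from the patch keeps its stored value.

// somewhere that already holds a *rules.Manager, e.g. an API handler
patched, err := m.PatchRule(`{"disabled": true}`, "12")
if err != nil {
	zap.S().Errorf("msg:", "failed to patch rule", "\t error:", err)
	return
}
// the running task has been stopped and the response reflects it
zap.S().Debugf("msg:", "rule state after patch:", patched.State) // "disabled"
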


@ -29,6 +29,8 @@ type PromRule struct {
labels plabels.Labels labels plabels.Labels
annotations plabels.Labels annotations plabels.Labels
preferredChannels []string
mtx sync.Mutex mtx sync.Mutex
evaluationDuration time.Duration evaluationDuration time.Duration
evaluationTimestamp time.Time evaluationTimestamp time.Time
@ -45,38 +47,37 @@ type PromRule struct {
func NewPromRule( func NewPromRule(
id string, id string,
name string, postableRule *PostableRule,
ruleCondition *RuleCondition,
evalWindow time.Duration,
labels, annotations map[string]string,
logger log.Logger, logger log.Logger,
source string,
) (*PromRule, error) { ) (*PromRule, error) {
if int64(evalWindow) == 0 { if postableRule.RuleCondition == nil {
evalWindow = 5 * time.Minute
}
if ruleCondition == nil {
return nil, fmt.Errorf("no rule condition") return nil, fmt.Errorf("no rule condition")
} else if !ruleCondition.IsValid() { } else if !postableRule.RuleCondition.IsValid() {
return nil, fmt.Errorf("invalid rule condition") return nil, fmt.Errorf("invalid rule condition")
} }
zap.S().Info("msg:", "creating new alerting rule", "\t name:", name, "\t condition:", ruleCondition.String()) p := PromRule{
id: id,
name: postableRule.Alert,
source: postableRule.Source,
ruleCondition: postableRule.RuleCondition,
evalWindow: time.Duration(postableRule.EvalWindow),
labels: plabels.FromMap(postableRule.Labels),
annotations: plabels.FromMap(postableRule.Annotations),
preferredChannels: postableRule.PreferredChannels,
health: HealthUnknown,
active: map[uint64]*Alert{},
logger: logger,
}
return &PromRule{ if int64(p.evalWindow) == 0 {
id: id, p.evalWindow = 5 * time.Minute
name: name, }
source: source,
ruleCondition: ruleCondition, zap.S().Info("msg:", "creating new alerting rule", "\t name:", p.name, "\t condition:", p.ruleCondition.String())
evalWindow: evalWindow,
labels: plabels.FromMap(labels), return &p, nil
annotations: plabels.FromMap(annotations),
health: HealthUnknown,
active: map[uint64]*Alert{},
logger: logger,
}, nil
} }
func (r *PromRule) Name() string { func (r *PromRule) Name() string {
@ -96,7 +97,11 @@ func (r *PromRule) Type() RuleType {
} }
func (r *PromRule) GeneratorURL() string { func (r *PromRule) GeneratorURL() string {
return r.source return prepareRuleGeneratorURL(r.ID(), r.source)
}
func (r *PromRule) PreferredChannels() []string {
return r.preferredChannels
} }
func (r *PromRule) SetLastError(err error) { func (r *PromRule) SetLastError(err error) {
@ -382,6 +387,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
State: StatePending, State: StatePending,
Value: smpl.V, Value: smpl.V,
GeneratorURL: r.GeneratorURL(), GeneratorURL: r.GeneratorURL(),
Receivers: r.preferredChannels,
} }
} }
@ -392,6 +398,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
if alert, ok := r.active[h]; ok && alert.State != StateInactive { if alert, ok := r.active[h]; ok && alert.State != StateInactive {
alert.Value = a.Value alert.Value = a.Value
alert.Annotations = a.Annotations alert.Annotations = a.Annotations
alert.Receivers = r.preferredChannels
continue continue
} }
@ -429,11 +436,12 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
func (r *PromRule) String() string { func (r *PromRule) String() string {
ar := PostableRule{ ar := PostableRule{
Alert: r.name, Alert: r.name,
RuleCondition: r.ruleCondition, RuleCondition: r.ruleCondition,
EvalWindow: Duration(r.evalWindow), EvalWindow: Duration(r.evalWindow),
Labels: r.labels.Map(), Labels: r.labels.Map(),
Annotations: r.annotations.Map(), Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
} }
byt, err := yaml.Marshal(ar) byt, err := yaml.Marshal(ar)


@ -19,6 +19,8 @@ type Rule interface {
State() AlertState State() AlertState
ActiveAlerts() []*Alert ActiveAlerts() []*Alert
PreferredChannels() []string
Eval(context.Context, time.Time, *Queriers) (interface{}, error) Eval(context.Context, time.Time, *Queriers) (interface{}, error)
String() string String() string
// Query() string // Query() string


@ -17,6 +17,9 @@ import (
"go.signoz.io/query-service/utils/times" "go.signoz.io/query-service/utils/times"
) )
// this file contains all the methods and structs
// related to go templating in rule labels and annotations
type tmplQueryRecord struct { type tmplQueryRecord struct {
Labels map[string]string Labels map[string]string
Value float64 Value float64

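
To illustrate what the templating file above enables, here is a self-contained sketch using only text/template from the standard library; the real expander wires series labels and the sampled value into rule annotations, and its exact template syntax may differ from this stand-in.

package main

import (
	"os"
	"text/template"
)

func main() {
	// data in the same shape as the tmplQueryRecord fields shown above:
	// the labels of the matching series plus the evaluated sample value
	data := struct {
		Labels map[string]string
		Value  float64
	}{
		Labels: map[string]string{"service": "frontend"},
		Value:  0.42,
	}
	tmpl := template.Must(template.New("annotation").Parse(
		"error rate for {{ .Labels.service }} is {{ .Value }}"))
	// prints: error rate for frontend is 0.42
	_ = tmpl.Execute(os.Stdout, data)
}
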

@ -32,6 +32,7 @@ type ThresholdRule struct {
labels labels.Labels labels labels.Labels
annotations labels.Labels annotations labels.Labels
preferredChannels []string
mtx sync.Mutex mtx sync.Mutex
evaluationDuration time.Duration evaluationDuration time.Duration
evaluationTimestamp time.Time evaluationTimestamp time.Time
@ -46,37 +47,35 @@ type ThresholdRule struct {
func NewThresholdRule( func NewThresholdRule(
id string, id string,
name string, p *PostableRule,
ruleCondition *RuleCondition,
evalWindow time.Duration,
l, a map[string]string,
source string,
) (*ThresholdRule, error) { ) (*ThresholdRule, error) {
if int64(evalWindow) == 0 { if p.RuleCondition == nil {
evalWindow = 5 * time.Minute
}
if ruleCondition == nil {
return nil, fmt.Errorf("no rule condition") return nil, fmt.Errorf("no rule condition")
} else if !ruleCondition.IsValid() { } else if !p.RuleCondition.IsValid() {
return nil, fmt.Errorf("invalid rule condition") return nil, fmt.Errorf("invalid rule condition")
} }
zap.S().Info("msg:", "creating new alerting rule", "\t name:", name, "\t condition:", ruleCondition.String()) t := ThresholdRule{
id: id,
name: p.Alert,
source: p.Source,
ruleCondition: p.RuleCondition,
evalWindow: time.Duration(p.EvalWindow),
labels: labels.FromMap(p.Labels),
annotations: labels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
health: HealthUnknown,
active: map[uint64]*Alert{},
}
return &ThresholdRule{ if int64(t.evalWindow) == 0 {
id: id, t.evalWindow = 5 * time.Minute
name: name, }
source: source,
ruleCondition: ruleCondition,
evalWindow: evalWindow,
labels: labels.FromMap(l),
annotations: labels.FromMap(a),
health: HealthUnknown, zap.S().Info("msg:", "creating new alerting rule", "\t name:", t.name, "\t condition:", t.ruleCondition.String(), "\t generatorURL:", t.GeneratorURL())
active: map[uint64]*Alert{},
}, nil return &t, nil
} }
func (r *ThresholdRule) Name() string { func (r *ThresholdRule) Name() string {
@ -92,7 +91,11 @@ func (r *ThresholdRule) Condition() *RuleCondition {
} }
func (r *ThresholdRule) GeneratorURL() string { func (r *ThresholdRule) GeneratorURL() string {
return r.source return prepareRuleGeneratorURL(r.ID(), r.source)
}
func (r *ThresholdRule) PreferredChannels() []string {
return r.preferredChannels
} }
func (r *ThresholdRule) target() *float64 { func (r *ThresholdRule) target() *float64 {
@ -231,9 +234,9 @@ func (r *ThresholdRule) GetEvaluationTimestamp() time.Time {
// State returns the maximum state of alert instances for this rule. // State returns the maximum state of alert instances for this rule.
// StateFiring > StatePending > StateInactive // StateFiring > StatePending > StateInactive
func (r *ThresholdRule) State() AlertState { func (r *ThresholdRule) State() AlertState {
r.mtx.Lock() r.mtx.Lock()
defer r.mtx.Unlock() defer r.mtx.Unlock()
maxState := StateInactive maxState := StateInactive
for _, a := range r.active { for _, a := range r.active {
if a.State > maxState { if a.State > maxState {
@ -477,6 +480,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
} }
} }
} }
zap.S().Debugf("ruleid:", r.ID(), "\t resultmap(potential alerts):", len(resultMap))
for _, sample := range resultMap { for _, sample := range resultMap {
// check alert rule condition before dumping results // check alert rule condition before dumping results
@ -484,7 +488,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
result = append(result, sample) result = append(result, sample)
} }
} }
zap.S().Debugf("ruleid:", r.ID(), "\t result (found alerts):", len(result))
return result, nil return result, nil
} }
@ -613,6 +617,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
State: StatePending, State: StatePending,
Value: smpl.V, Value: smpl.V,
GeneratorURL: r.GeneratorURL(), GeneratorURL: r.GeneratorURL(),
Receivers: r.preferredChannels,
} }
} }
@ -626,6 +631,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
alert.Value = a.Value alert.Value = a.Value
alert.Annotations = a.Annotations alert.Annotations = a.Annotations
alert.Receivers = r.preferredChannels
continue continue
} }
@ -663,11 +669,12 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
func (r *ThresholdRule) String() string { func (r *ThresholdRule) String() string {
ar := PostableRule{ ar := PostableRule{
Alert: r.name, Alert: r.name,
RuleCondition: r.ruleCondition, RuleCondition: r.ruleCondition,
EvalWindow: Duration(r.evalWindow), EvalWindow: Duration(r.evalWindow),
Labels: r.labels.Map(), Labels: r.labels.Map(),
Annotations: r.annotations.Map(), Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
} }
byt, err := yaml.Marshal(ar) byt, err := yaml.Marshal(ar)