Mirror of https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz (synced 2025-08-12 04:49:01 +08:00)
Commit 96e3d00e74
@@ -146,7 +146,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.37.2
+    image: signoz/query-service:0.38.0
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:0.37.2
+    image: signoz/frontend:0.38.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -164,7 +164,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.37.2}
+    image: signoz/query-service:${DOCKER_TAG:-0.38.0}
     container_name: signoz-query-service
     command:
       [
@@ -203,7 +203,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.37.2}
+    image: signoz/frontend:${DOCKER_TAG:-0.38.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -152,9 +152,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
 	router.HandleFunc("/api/v2/metrics/query_range", am.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)

 	// PAT APIs
-	router.HandleFunc("/api/v1/pat", am.OpenAccess(ah.createPAT)).Methods(http.MethodPost)
-	router.HandleFunc("/api/v1/pat", am.OpenAccess(ah.getPATs)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v1/pat/{id}", am.OpenAccess(ah.deletePAT)).Methods(http.MethodDelete)
+	router.HandleFunc("/api/v1/pat", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/pat", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/pat/{id}", am.AdminAccess(ah.deletePAT)).Methods(http.MethodDelete)

 	router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
@@ -40,6 +40,7 @@ type billingDetails struct {
 		BillingPeriodEnd   int64   `json:"billingPeriodEnd"`
 		Details            details `json:"details"`
 		Discount           float64 `json:"discount"`
+		SubscriptionStatus string  `json:"subscriptionStatus"`
 	} `json:"data"`
 }

@@ -62,6 +62,7 @@
   "button_cancel": "No",
   "field_promql_expr": "PromQL Expression",
   "field_alert_name": "Alert Name",
+  "field_notification_channel": "Notification Channel",
   "field_alert_desc": "Alert Description",
   "field_labels": "Labels",
   "field_severity": "Severity",
@@ -100,7 +101,7 @@
   "user_guide_ch_step3a": "Set alert severity, name and descriptions",
   "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
   "user_tooltip_more_help": "More details on how to create alerts",
-  "choose_alert_type": "Choose a type for the alert:",
+  "choose_alert_type": "Choose a type for the alert",
   "metric_based_alert": "Metric based Alert",
   "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
   "log_based_alert": "Log-based Alert",
@@ -54,6 +54,7 @@
   "field_promql_expr": "PromQL Expression",
   "field_alert_name": "Alert Name",
   "field_alert_desc": "Alert Description",
+  "field_notification_channel": "Notification Channel",
   "field_labels": "Labels",
   "field_severity": "Severity",
   "option_critical": "Critical",
@@ -63,6 +63,7 @@
   "field_promql_expr": "PromQL Expression",
   "field_alert_name": "Alert Name",
   "field_alert_desc": "Alert Description",
+  "field_notification_channel": "Notification Channel",
   "field_labels": "Labels",
   "field_severity": "Severity",
   "option_critical": "Critical",
@@ -100,7 +101,7 @@
   "user_guide_ch_step3a": "Set alert severity, name and descriptions",
   "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
   "user_tooltip_more_help": "More details on how to create alerts",
-  "choose_alert_type": "Choose a type for the alert:",
+  "choose_alert_type": "Choose a type for the alert",
   "metric_based_alert": "Metric based Alert",
   "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
   "log_based_alert": "Log-based Alert",
@@ -54,6 +54,7 @@
   "field_promql_expr": "PromQL Expression",
   "field_alert_name": "Alert Name",
   "field_alert_desc": "Alert Description",
+  "field_notification_channel": "Notification Channel",
   "field_labels": "Labels",
   "field_severity": "Severity",
   "option_critical": "Critical",
@@ -73,12 +73,19 @@ function ResizeTable({
       }
     }
   }, [columns]);

+  const paginationConfig = {
+    hideOnSinglePage: true,
+    showTotal: (total: number, range: number[]): string =>
+      `${range[0]}-${range[1]} of ${total} items`,
+    ...tableParams.pagination,
+  };
+
   return onDragColumn ? (
     <ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>
-      <Table {...tableParams} />
+      <Table {...tableParams} pagination={paginationConfig} />
     </ReactDragListView.DragColumn>
   ) : (
-    <Table {...tableParams} />
+    <Table {...tableParams} pagination={paginationConfig} />
   );
 }

@@ -1,4 +1,4 @@
-import { Row } from 'antd';
+import { Row, Typography } from 'antd';
 import { useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
 import { AlertTypes } from 'types/api/alerts/alertTypes';
@@ -33,7 +33,14 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {

   return (
     <SelectTypeContainer>
-      <h3> {t('choose_alert_type')} </h3>
+      <Typography.Title
+        level={4}
+        style={{
+          padding: '0 8px',
+        }}
+      >
+        {t('choose_alert_type')}
+      </Typography.Title>
       <Row>{renderOptions}</Row>
     </SelectTypeContainer>
   );
@@ -1,4 +1,5 @@
-import { Form, Select } from 'antd';
+import { Form, Select, Switch } from 'antd';
+import { useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { AlertDef, Labels } from 'types/api/alerts/def';
 import { requireErrorMessage } from 'utils/form/requireErrorMessage';
@@ -7,7 +8,6 @@ import { popupContainer } from 'utils/selectPopupContainer';
 import ChannelSelect from './ChannelSelect';
 import LabelSelect from './labels';
 import {
-  ChannelSelectTip,
   FormContainer,
   FormItemMedium,
   InputSmall,
@@ -19,14 +19,41 @@
 const { Option } = Select;

 interface BasicInfoProps {
+  isNewRule: boolean;
   alertDef: AlertDef;
   setAlertDef: (a: AlertDef) => void;
 }

-function BasicInfo({ alertDef, setAlertDef }: BasicInfoProps): JSX.Element {
-  // init namespace for translations
+function BasicInfo({
+  isNewRule,
+  alertDef,
+  setAlertDef,
+}: BasicInfoProps): JSX.Element {
   const { t } = useTranslation('alerts');

+  const [
+    shouldBroadCastToAllChannels,
+    setShouldBroadCastToAllChannels,
+  ] = useState(false);
+
+  useEffect(() => {
+    const hasPreferredChannels =
+      (alertDef.preferredChannels && alertDef.preferredChannels.length > 0) ||
+      isNewRule;
+
+    setShouldBroadCastToAllChannels(!hasPreferredChannels);
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, []);
+
+  const handleBroadcastToAllChannels = (shouldBroadcast: boolean): void => {
+    setShouldBroadCastToAllChannels(shouldBroadcast);
+
+    setAlertDef({
+      ...alertDef,
+      broadcastToAll: shouldBroadcast,
+    });
+  };
+
   return (
     <>
       <StepHeading> {t('alert_form_step3')} </StepHeading>
@@ -105,18 +132,38 @@ function BasicInfo({ alertDef, setAlertDef }: BasicInfoProps): JSX.Element {
           initialValues={alertDef.labels}
         />
       </FormItemMedium>
-      <FormItemMedium label="Notification Channels">
-        <ChannelSelect
-          currentValue={alertDef.preferredChannels}
-          onSelectChannels={(preferredChannels): void => {
-            setAlertDef({
-              ...alertDef,
-              preferredChannels,
-            });
-          }}
-        />
-        <ChannelSelectTip> {t('channel_select_tooltip')}</ChannelSelectTip>
-      </FormItemMedium>
+      <FormItemMedium
+        name="alert_all_configured_channels"
+        label="Alert all the configured channels"
+      >
+        <Switch
+          checked={shouldBroadCastToAllChannels}
+          onChange={handleBroadcastToAllChannels}
+        />
+      </FormItemMedium>
+
+      {!shouldBroadCastToAllChannels && (
+        <FormItemMedium
+          label="Notification Channels"
+          name="notification_channels"
+          required
+          rules={[
+            { required: true, message: requireErrorMessage(t('field_alert_name')) },
+          ]}
+        >
+          <ChannelSelect
+            disabled={shouldBroadCastToAllChannels}
+            currentValue={alertDef.preferredChannels}
+            onSelectChannels={(preferredChannels): void => {
+              setAlertDef({
+                ...alertDef,
+                preferredChannels,
+              });
+            }}
+          />
+        </FormItemMedium>
+      )}
     </FormContainer>
   </>
 );
@@ -8,11 +8,13 @@ import { useTranslation } from 'react-i18next';
 import { StyledSelect } from './styles';

 export interface ChannelSelectProps {
+  disabled?: boolean;
   currentValue?: string[];
   onSelectChannels: (s: string[]) => void;
 }

 function ChannelSelect({
+  disabled,
   currentValue,
   onSelectChannels,
 }: ChannelSelectProps): JSX.Element | null {
@@ -52,6 +54,7 @@ function ChannelSelect({
   };
   return (
     <StyledSelect
+      disabled={disabled}
       status={error ? 'error' : ''}
       mode="multiple"
       style={{ width: '100%' }}
@@ -68,6 +71,7 @@ function ChannelSelect({
 }

 ChannelSelect.defaultProps = {
+  disabled: false,
   currentValue: [],
 };
 export default ChannelSelect;
@@ -53,6 +53,7 @@ import {
 import UserGuide from './UserGuide';
 import { getSelectedQueryOptions } from './utils';

+// eslint-disable-next-line sonarjs/cognitive-complexity
 function FormAlertRules({
   alertType,
   formInstance,
@@ -78,6 +79,8 @@ function FormAlertRules({
   // use query client
   const ruleCache = useQueryClient();

+  const isNewRule = ruleId === 0;
+
   const [loading, setLoading] = useState(false);

   // alertDef holds the form values to be posted
@@ -108,8 +111,17 @@ function FormAlertRules({
   useShareBuilderUrl(sq);

   useEffect(() => {
-    setAlertDef(initialValue);
-  }, [initialValue]);
+    const broadcastToSpecificChannels =
+      (initialValue &&
+        initialValue.preferredChannels &&
+        initialValue.preferredChannels.length > 0) ||
+      isNewRule;
+
+    setAlertDef({
+      ...initialValue,
+      broadcastToAll: !broadcastToSpecificChannels,
+    });
+  }, [initialValue, isNewRule]);

   useEffect(() => {
     // Set selectedQueryName based on the length of queryOptions
@@ -243,6 +255,7 @@ function FormAlertRules({
   const preparePostData = (): AlertDef => {
     const postableAlert: AlertDef = {
       ...alertDef,
+      preferredChannels: alertDef.broadcastToAll ? [] : alertDef.preferredChannels,
       alertType,
       source: window?.location.toString(),
       ruleType:
@@ -386,7 +399,11 @@ function FormAlertRules({
   }, [t, isFormValid, memoizedPreparePostData, notifications]);

   const renderBasicInfo = (): JSX.Element => (
-    <BasicInfo alertDef={alertDef} setAlertDef={setAlertDef} />
+    <BasicInfo
+      alertDef={alertDef}
+      setAlertDef={setAlertDef}
+      isNewRule={isNewRule}
+    />
   );

   const renderQBChartPreview = (): JSX.Element => (
@@ -421,8 +438,6 @@ function FormAlertRules({
     />
   );

-  const isNewRule = ruleId === 0;
-
   const isAlertNameMissing = !formInstance.getFieldValue('alert');

   const isAlertAvialableToSave =
@@ -442,6 +457,10 @@ function FormAlertRules({
     }));
   };

+  const isChannelConfigurationValid =
+    alertDef?.broadcastToAll ||
+    (alertDef.preferredChannels && alertDef.preferredChannels.length > 0);
+
   return (
     <>
       {Element}
@@ -489,7 +508,11 @@ function FormAlertRules({
             type="primary"
             onClick={onSaveHandler}
             icon={<SaveOutlined />}
-            disabled={isAlertNameMissing || isAlertAvialableToSave}
+            disabled={
+              isAlertNameMissing ||
+              isAlertAvialableToSave ||
+              !isChannelConfigurationValid
+            }
           >
             {isNewRule ? t('button_createrule') : t('button_savechanges')}
           </ActionButton>
@@ -497,6 +520,7 @@ function FormAlertRules({

           <ActionButton
             loading={loading || false}
+            disabled={isAlertNameMissing || !isChannelConfigurationValid}
             type="default"
             onClick={onTestRuleHandler}
           >
@@ -124,6 +124,7 @@ function WidgetGraphComponent({
       if (setSelectedDashboard && updatedDashboard.payload) {
         setSelectedDashboard(updatedDashboard.payload);
       }
+      setDeleteModal(false);
       featureResponse.refetch();
     },
     onError: () => {
@@ -255,6 +256,7 @@ function WidgetGraphComponent({
         destroyOnClose
         onCancel={onDeleteModelHandler}
         open={deleteModal}
+        confirmLoading={updateDashboardMutation.isLoading}
         title="Delete"
         height="10vh"
         onOk={onDeleteHandler}
@@ -1,8 +1,10 @@
 .fullscreen-grid-container {
   overflow: auto;
+  margin-top: 1rem;

   .react-grid-layout {
     border: none !important;
+    margin-top: 0;
   }
 }

@@ -13,3 +15,9 @@
     height: calc(100% - 30px);
   }
 }
+
+.lightMode {
+  .fullscreen-grid-container {
+    background-color: rgb(250, 250, 250);
+  }
+}
@@ -55,7 +55,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {

   const isDarkMode = useIsDarkMode();

-  const [dashboardLayout, setDashboardLayout] = useState(layouts);
+  const [dashboardLayout, setDashboardLayout] = useState<Layout[]>([]);

   const updateDashboardMutation = useUpdateDashboard();

@@ -77,6 +77,10 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
     userRole,
   );

+  useEffect(() => {
+    setDashboardLayout(layouts);
+  }, [layouts]);
+
   const onSaveHandler = (): void => {
     if (!selectedDashboard) return;

@@ -1,21 +1,14 @@
 import { useDashboard } from 'providers/Dashboard/Dashboard';
 import { useCallback } from 'react';
-import { Layout } from 'react-grid-layout';
-
-import { EMPTY_WIDGET_LAYOUT } from './config';
 import GraphLayoutContainer from './GridCardLayout';

 function GridGraph(): JSX.Element {
-  const { handleToggleDashboardSlider, setLayouts } = useDashboard();
+  const { handleToggleDashboardSlider } = useDashboard();

   const onEmptyWidgetHandler = useCallback(() => {
     handleToggleDashboardSlider(true);
-    setLayouts((preLayout: Layout[]) => [
-      EMPTY_WIDGET_LAYOUT,
-      ...(preLayout || []),
-    ]);
-  }, [handleToggleDashboardSlider, setLayouts]);
+  }, [handleToggleDashboardSlider]);

   return <GraphLayoutContainer onAddPanelHandler={onEmptyWidgetHandler} />;
 }
@@ -0,0 +1,5 @@
+.delete-modal {
+  .ant-modal-confirm-body {
+    align-items: center;
+  }
+}
@@ -1,3 +1,5 @@
+import './DeleteButton.styles.scss';
+
 import { DeleteOutlined, ExclamationCircleOutlined } from '@ant-design/icons';
 import { Modal, Tooltip, Typography } from 'antd';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
@@ -64,6 +66,7 @@ function DeleteButton({
       okText: 'Delete',
       okButtonProps: { danger: true },
       centered: true,
+      className: 'delete-modal',
     });
   }, [modal, name, deleteDashboardMutation, notifications, t, queryClient]);

@@ -6,3 +6,11 @@
   text-overflow: ellipsis;
   color: gray;
 }
+
+.variable-item {
+  .variable-select {
+    .ant-select-dropdown {
+      max-width: 300px;
+    }
+  }
+}
@@ -5,15 +5,16 @@ import { WarningOutlined } from '@ant-design/icons';
 import { Input, Popover, Select, Tooltip, Typography } from 'antd';
 import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import useDebounce from 'hooks/useDebounce';
 import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
 import sortValues from 'lib/dashbaordVariables/sortVariableValues';
+import { debounce } from 'lodash-es';
 import map from 'lodash-es/map';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
 import { memo, useEffect, useMemo, useState } from 'react';
 import { useQuery } from 'react-query';
 import { IDashboardVariable } from 'types/api/dashboard/getAll';
 import { VariableResponseProps } from 'types/api/dashboard/variables/query';
+import { popupContainer } from 'utils/selectPopupContainer';

 import { variablePropsToPayloadVariables } from '../utils';
 import { SelectItemStyle, VariableContainer, VariableValue } from './styles';
@@ -44,6 +45,7 @@ const getSelectValue = (
   return selectedValue?.toString() || '';
 };

+// eslint-disable-next-line sonarjs/cognitive-complexity
 function VariableItem({
   variableData,
   existingVariables,
@@ -55,24 +57,8 @@ function VariableItem({
     [],
   );

-  const [variableValue, setVaribleValue] = useState(
-    variableData?.selectedValue?.toString() || '',
-  );
-
-  const debouncedVariableValue = useDebounce(variableValue, 500);
-
   const [errorMessage, setErrorMessage] = useState<null | string>(null);

-  useEffect(() => {
-    const { selectedValue } = variableData;
-
-    if (selectedValue) {
-      setVaribleValue(selectedValue?.toString());
-    }
-
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [variableData]);
-
   const getDependentVariables = (queryValue: string): string[] => {
     const matches = queryValue.match(variableRegexPattern);

@@ -92,7 +78,12 @@ function VariableItem({
     const variableName = variableData.name || '';

     dependentVariables?.forEach((element) => {
-      dependentVariablesStr += `${element}${existingVariables[element]?.selectedValue}`;
+      const [, variable] =
+        Object.entries(existingVariables).find(
+          ([, value]) => value.name === element,
+        ) || [];
+
+      dependentVariablesStr += `${element}${variable?.selectedValue}`;
     });

     const variableKey = dependentVariablesStr.replace(/\s/g, '');
@@ -204,6 +195,9 @@ function VariableItem({
     }
   };

+  // do not debounce the above function as we do not need debounce in select variables
+  const debouncedHandleChange = debounce(handleChange, 500);
+
   const { selectedValue } = variableData;
   const selectedValueStringified = useMemo(() => getSelectValue(selectedValue), [
     selectedValue,
@@ -219,14 +213,6 @@ function VariableItem({
     : undefined;
   const enableSelectAll = variableData.multiSelect && variableData.showALLOption;

-  useEffect(() => {
-    if (debouncedVariableValue !== variableData?.selectedValue?.toString()) {
-      handleChange(debouncedVariableValue);
-    }
-
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [debouncedVariableValue]);
-
   useEffect(() => {
     // Fetch options for CUSTOM Type
     if (variableData.type === 'CUSTOM') {
@@ -240,7 +226,7 @@ function VariableItem({
       placement="top"
       title={isDashboardLocked ? 'Dashboard is locked' : ''}
     >
-      <VariableContainer>
+      <VariableContainer className="variable-item">
         <Typography.Text className="variable-name" ellipsis>
           ${variableData.name}
         </Typography.Text>
@@ -250,9 +236,10 @@ function VariableItem({
             placeholder="Enter value"
             disabled={isDashboardLocked}
             bordered={false}
-            value={variableValue}
+            key={variableData.selectedValue?.toString()}
+            defaultValue={variableData.selectedValue?.toString()}
             onChange={(e): void => {
-              setVaribleValue(e.target.value || '');
+              debouncedHandleChange(e.target.value || '');
             }}
             style={{
               width:
@@ -263,18 +250,25 @@ function VariableItem({
           !errorMessage &&
           optionsData && (
             <Select
-              value={selectValue}
+              key={
+                selectValue && Array.isArray(selectValue)
+                  ? selectValue.join(' ')
+                  : selectValue || variableData.id
+              }
+              defaultValue={selectValue}
               onChange={handleChange}
               bordered={false}
               placeholder="Select value"
+              placement="bottomRight"
               mode={mode}
               dropdownMatchSelectWidth={false}
               style={SelectItemStyle}
               loading={isLoading}
-              showArrow
               showSearch
               data-testid="variable-select"
+              className="variable-select"
               disabled={isDashboardLocked}
+              getPopupContainer={popupContainer}
             >
               {enableSelectAll && (
                 <Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
@@ -307,7 +307,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
               disabled={isSaveDisabled}
               onClick={onSaveDashboard}
             >
-              Save
+              Save Changes
             </Button>
           </Tooltip>
         )}
@@ -316,13 +316,14 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
           <Button
             type="primary"
             data-testid="new-widget-save"
+            loading={updateDashboardMutation.isLoading}
             disabled={isSaveDisabled}
             onClick={onSaveDashboard}
           >
-            Save
+            Save Changes
           </Button>
         )}
-        <Button onClick={onClickDiscardHandler}>Discard</Button>
+        <Button onClick={onClickDiscardHandler}>Discard Changes</Button>
       </ButtonContainer>

       <PanelContainer>
@@ -385,6 +386,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
         closable
         onCancel={closeModal}
         onOk={onClickSaveHandler}
+        confirmLoading={updateDashboardMutation.isLoading}
         centered
         open={saveModal}
         width={600}
@@ -25,7 +25,7 @@ export const LeftContainerWrapper = styled(Col)`

 export const ButtonContainer = styled.div`
   display: flex;
-  gap: 1rem;
+  gap: 8px;
   margin-bottom: 1rem;
   justify-content: flex-end;
 `;
@@ -150,6 +150,20 @@ function QueryBuilderSearch({
       (item) => item.key as BaseAutocompleteData,
     );

+    // Avoid updating query with onChange at the bottom of this useEffect
+    // if there are no `tags` that need to be normalized after receiving
+    // the latest `sourceKeys`.
+    //
+    // Executing the following logic for empty tags leads to emptying
+    // out of `query` via `onChange`.
+    // `tags` can contain stale empty value while being updated by `useTag`
+    // which maintains it as a state and updates it via useEffect when props change.
+    // This was observed when pipeline filters were becoming empty after
+    // returning from logs explorer.
+    if ((tags?.length || 0) < 1) {
+      return;
+    }
+
     initialTagFilters.items = tags.map((tag, index) => {
       const isJsonTrue = query.filters?.items[index]?.key?.isJSON;

@@ -74,6 +74,8 @@ function SideNav({
     isCurrentVersionError,
   } = useSelector<AppState, AppReducer>((state) => state.app);

+  const [licenseTag, setLicenseTag] = useState('');
+
   const userSettingsMenuItem = {
     key: ROUTES.MY_SETTINGS,
     label: user?.name || 'User',
@@ -239,6 +241,18 @@ function SideNav({
     }
   };

+  useEffect(() => {
+    if (!isFetching) {
+      if (isCloudUserVal) {
+        setLicenseTag('Cloud');
+      } else if (isEnterprise) {
+        setLicenseTag('Enterprise');
+      } else {
+        setLicenseTag('Free');
+      }
+    }
+  }, [isCloudUserVal, isEnterprise, isFetching]);
+
   return (
     <div className={cx('sideNav', collapsed ? 'collapsed' : '')}>
       <div className="brand">
@@ -257,7 +271,7 @@ function SideNav({

         {!collapsed && (
           <>
-            <div className="license tag">{!isEnterprise ? 'Free' : 'Enterprise'}</div>
+            {!isFetching && <div className="license tag">{licenseTag}</div>}

             <ToggleButton
               checked={isDarkMode}
@@ -12,6 +12,7 @@ describe('CustomDateTimeModal', () => {
         visible
         onCreate={handleCreate}
         onCancel={handleCancel}
+        setCustomDTPickerVisible={jest.fn()}
       />,
     );
   });
@@ -1,6 +1,6 @@
 import { DatePicker, Modal } from 'antd';
 import dayjs, { Dayjs } from 'dayjs';
-import { useState } from 'react';
+import { Dispatch, SetStateAction, useState } from 'react';

 export type DateTimeRangeType = [Dayjs | null, Dayjs | null] | null;

@@ -10,12 +10,12 @@ function CustomDateTimeModal({
   visible,
   onCreate,
   onCancel,
+  setCustomDTPickerVisible,
 }: CustomDateTimeModalProps): JSX.Element {
   const [selectedDate, setDateTime] = useState<DateTimeRangeType>();

   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   const onModalOkHandler = (date_time: any): void => {
-    onCreate(date_time);
     setDateTime(date_time);
   };

|
|||||||
};
|
};
|
||||||
|
|
||||||
const onOk = (): void => {
|
const onOk = (): void => {
|
||||||
if (selectedDate) onCreate(selectedDate);
|
if (selectedDate) {
|
||||||
|
onCreate(selectedDate);
|
||||||
|
setCustomDTPickerVisible(false);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -42,7 +45,6 @@ function CustomDateTimeModal({
|
|||||||
allowClear
|
allowClear
|
||||||
onOk={onModalOkHandler}
|
onOk={onModalOkHandler}
|
||||||
showTime
|
showTime
|
||||||
onCalendarChange={onModalOkHandler}
|
|
||||||
/>
|
/>
|
||||||
</Modal>
|
</Modal>
|
||||||
);
|
);
|
||||||
@ -52,6 +54,7 @@ interface CustomDateTimeModalProps {
|
|||||||
visible: boolean;
|
visible: boolean;
|
||||||
onCreate: (dateTimeRange: DateTimeRangeType) => void;
|
onCreate: (dateTimeRange: DateTimeRangeType) => void;
|
||||||
onCancel: () => void;
|
onCancel: () => void;
|
||||||
|
setCustomDTPickerVisible: Dispatch<SetStateAction<boolean>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export default CustomDateTimeModal;
|
export default CustomDateTimeModal;
|
||||||
|
@ -216,7 +216,6 @@ function DateTimeSelection({
|
|||||||
if (dateTimeRange !== null) {
|
if (dateTimeRange !== null) {
|
||||||
const [startTimeMoment, endTimeMoment] = dateTimeRange;
|
const [startTimeMoment, endTimeMoment] = dateTimeRange;
|
||||||
if (startTimeMoment && endTimeMoment) {
|
if (startTimeMoment && endTimeMoment) {
|
||||||
setCustomDTPickerVisible(false);
|
|
||||||
updateTimeInterval('custom', [
|
updateTimeInterval('custom', [
|
||||||
startTimeMoment?.toDate().getTime() || 0,
|
startTimeMoment?.toDate().getTime() || 0,
|
||||||
endTimeMoment?.toDate().getTime() || 0,
|
endTimeMoment?.toDate().getTime() || 0,
|
||||||
@ -352,6 +351,7 @@ function DateTimeSelection({
|
|||||||
onCancel={(): void => {
|
onCancel={(): void => {
|
||||||
setCustomDTPickerVisible(false);
|
setCustomDTPickerVisible(false);
|
||||||
}}
|
}}
|
||||||
|
setCustomDTPickerVisible={setCustomDTPickerVisible}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
@ -13,6 +13,8 @@ function ServiceMap({ fgRef, serviceMap }: any): JSX.Element {
|
|||||||
|
|
||||||
const graphData = { nodes, links };
|
const graphData = { nodes, links };
|
||||||
|
|
||||||
|
let zoomLevel = 1;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<ForceGraph2D
|
<ForceGraph2D
|
||||||
ref={fgRef}
|
ref={fgRef}
|
||||||
@ -23,8 +25,9 @@ function ServiceMap({ fgRef, serviceMap }: any): JSX.Element {
|
|||||||
linkDirectionalParticles="value"
|
linkDirectionalParticles="value"
|
||||||
linkDirectionalParticleSpeed={(d) => d.value}
|
linkDirectionalParticleSpeed={(d) => d.value}
|
||||||
nodeCanvasObject={(node, ctx) => {
|
nodeCanvasObject={(node, ctx) => {
|
||||||
const label = transformLabel(node.id);
|
const label = transformLabel(node.id, zoomLevel);
|
||||||
const { fontSize } = node;
|
let { fontSize } = node;
|
||||||
|
fontSize = (fontSize * 3) / zoomLevel;
|
||||||
ctx.font = `${fontSize}px Roboto`;
|
ctx.font = `${fontSize}px Roboto`;
|
||||||
const { width } = node;
|
const { width } = node;
|
||||||
|
|
||||||
@ -43,6 +46,9 @@ function ServiceMap({ fgRef, serviceMap }: any): JSX.Element {
|
|||||||
tooltip.innerHTML = getTooltip(node);
|
tooltip.innerHTML = getTooltip(node);
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
|
onZoom={(zoom) => {
|
||||||
|
zoomLevel = zoom.k;
|
||||||
|
}}
|
||||||
nodePointerAreaPaint={(node, color, ctx) => {
|
nodePointerAreaPaint={(node, color, ctx) => {
|
||||||
ctx.fillStyle = color;
|
ctx.fillStyle = color;
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
|
@ -59,6 +59,7 @@ export const getGraphData = (serviceMap, isDarkMode): graphDataType => {
|
|||||||
width: MIN_WIDTH,
|
width: MIN_WIDTH,
|
||||||
color,
|
color,
|
||||||
nodeVal: MIN_WIDTH,
|
nodeVal: MIN_WIDTH,
|
||||||
|
name: node,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
if (service.errorRate > 0) {
|
if (service.errorRate > 0) {
|
||||||
@ -72,6 +73,7 @@ export const getGraphData = (serviceMap, isDarkMode): graphDataType => {
|
|||||||
width,
|
width,
|
||||||
color,
|
color,
|
||||||
nodeVal: width,
|
nodeVal: width,
|
||||||
|
name: node,
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
return {
|
return {
|
||||||
@ -123,9 +125,12 @@ export const getTooltip = (link: {
|
|||||||
</div>`;
|
</div>`;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const transformLabel = (label: string) => {
|
export const transformLabel = (label: string, zoomLevel: number) => {
|
||||||
const MAX_LENGTH = 13;
|
//? 13 is the minimum label length. Scaling factor of 0.9 which is slightly less than 1
|
||||||
const MAX_SHOW = 10;
|
//? ensures smoother zoom transitions, gradually increasing MAX_LENGTH, displaying more of the label as
|
||||||
|
//? zooming in.
|
||||||
|
const MAX_LENGTH = 13 * (zoomLevel / 0.9);
|
||||||
|
const MAX_SHOW = MAX_LENGTH - 3;
|
||||||
if (label.length > MAX_LENGTH) {
|
if (label.length > MAX_LENGTH) {
|
||||||
return `${label.slice(0, MAX_SHOW)}...`;
|
return `${label.slice(0, MAX_SHOW)}...`;
|
||||||
}
|
}
|
||||||
|
@ -344,7 +344,7 @@ function SignUp({ version }: SignUpProps): JSX.Element {
|
|||||||
placeholder={t('placeholder_firstname')}
|
placeholder={t('placeholder_firstname')}
|
||||||
required
|
required
|
||||||
id="signupFirstName"
|
id="signupFirstName"
|
||||||
disabled={isDetailsDisable}
|
disabled={isDetailsDisable && form.getFieldValue('firstName')}
|
||||||
/>
|
/>
|
||||||
</FormContainer.Item>
|
</FormContainer.Item>
|
||||||
</div>
|
</div>
|
||||||
|
@ -21,6 +21,7 @@ export interface AlertDef {
|
|||||||
source?: string;
|
source?: string;
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
preferredChannels?: string[];
|
preferredChannels?: string[];
|
||||||
|
broadcastToAll?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface RuleCondition {
|
export interface RuleCondition {
|
||||||
|
new file: pkg/query-service/app/having.go (90 lines)
@@ -0,0 +1,90 @@
package app

import (
	"strings"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// applyHavingClause applies the having clause to the result
// each query has its own having clause
// there can be multiple having clauses for each query
func applyHavingClause(result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
	for _, result := range result {
		builderQueries := queryRangeParams.CompositeQuery.BuilderQueries

		if builderQueries != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics) {
			havingClause := builderQueries[result.QueryName].Having

			for i := 0; i < len(result.Series); i++ {
				for j := 0; j < len(result.Series[i].Points); j++ {
					if !evaluateHavingClause(havingClause, result.Series[i].Points[j].Value) {
						result.Series[i].Points = append(result.Series[i].Points[:j], result.Series[i].Points[j+1:]...)
						j--
					}
				}
			}
		}
	}
}

func evaluateHavingClause(having []v3.Having, value float64) bool {
	if len(having) == 0 {
		return true
	}

	for _, h := range having {
		switch h.Operator {
		case v3.HavingOperatorEqual:
			if value == h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorNotEqual:
			if value != h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorGreaterThan:
			if value > h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorGreaterThanOrEq:
			if value >= h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorLessThan:
			if value < h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorLessThanOrEq:
			if value <= h.Value.(float64) {
				return true
			}
		case v3.HavingOperatorIn, v3.HavingOperator(strings.ToLower(string(v3.HavingOperatorIn))):
			values, ok := h.Value.([]interface{})
			if !ok {
				return false
			}
			for _, v := range values {
				if value == v.(float64) {
					return true
				}
			}
		case v3.HavingOperatorNotIn, v3.HavingOperator(strings.ToLower(string(v3.HavingOperatorNotIn))):
			values, ok := h.Value.([]interface{})
			if !ok {
				return true
			}
			found := false
			for _, v := range values {
				if value == v.(float64) {
					found = true
					break
				}
			}
			if !found {
				return true
			}
		}
	}
	return false
}
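Note (illustrative sketch, not part of the commit): evaluateHavingClause treats the entries of a Having slice as an OR — a point is kept as soon as any single condition matches its value, and dropped only when none do. A minimal same-package test sketch of that behaviour, assuming only the v3.Having fields used in the file above:

package app

import (
	"testing"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// Sketch only: checks the OR semantics of evaluateHavingClause.
// 0.5 satisfies "> 0.4", 0.1 satisfies "== 0.1", and 0.3 satisfies
// neither condition, so it would be filtered out of the series.
func TestEvaluateHavingClauseOrSemantics(t *testing.T) {
	having := []v3.Having{
		{Operator: v3.HavingOperatorGreaterThan, Value: 0.4},
		{Operator: v3.HavingOperatorEqual, Value: 0.1},
	}

	cases := map[float64]bool{
		0.5: true,  // passes "> 0.4"
		0.1: true,  // passes "== 0.1"
		0.3: false, // matches neither condition
	}

	for value, want := range cases {
		if got := evaluateHavingClause(having, value); got != want {
			t.Errorf("evaluateHavingClause(%v) = %v, want %v", value, got, want)
		}
	}
}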
new file: pkg/query-service/app/having_test.go (283 lines)
@@ -0,0 +1,283 @@
package app

import (
	"testing"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestApplyHavingCaluse(t *testing.T) {
	type testCase struct {
		name    string
		results []*v3.Result
		params  *v3.QueryRangeParamsV3
		want    []*v3.Result
	}

	testCases := []testCase{
		{
			name: "test having equal to",
			results: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.4},
								{Value: 0.3},
								{Value: 0.2},
								{Value: 0.1},
							},
						},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {
							DataSource: v3.DataSourceMetrics,
							Having: []v3.Having{
								{
									Operator: v3.HavingOperatorEqual,
									Value:    0.3,
								},
							},
						},
					},
				},
			},
			want: []*v3.Result{
				{
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.3},
							},
						},
					},
				},
			},
		},
		{
			name: "test having `in`",
			results: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.4},
								{Value: 0.3},
								{Value: 0.2},
								{Value: 0.1},
							},
						},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {
							DataSource: v3.DataSourceMetrics,
							Having: []v3.Having{
								{
									Operator: v3.HavingOperatorIn,
									Value:    []interface{}{0.3, 0.4},
								},
							},
						},
					},
				},
			},
			want: []*v3.Result{
				{
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.4},
								{Value: 0.3},
							},
						},
					},
				},
			},
		},
		{
			name: "test having `not in` and multiple results",
			results: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.4},
								{Value: 0.3},
								{Value: 0.2},
								{Value: 0.1},
							},
						},
					},
				},
				{
					QueryName: "B",
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.4},
								{Value: 0.3},
								{Value: 0.2},
								{Value: 0.1},
							},
						},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {
							DataSource: v3.DataSourceMetrics,
							Having: []v3.Having{
								{
									Operator: v3.HavingOperatorNotIn,
									Value:    []interface{}{0.3, 0.4},
								},
							},
						},
						"B": {
							DataSource: v3.DataSourceMetrics,
							Having: []v3.Having{
								{
									Operator: v3.HavingOperatorNotIn,
									Value:    []interface{}{0.1},
								},
							},
						},
					},
				},
			},
			want: []*v3.Result{
				{
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.2},
								{Value: 0.1},
							},
						},
					},
				},
				{
					Series: []*v3.Series{
						{
							Points: []v3.Point{
								{Value: 0.5},
								{Value: 0.4},
								{Value: 0.3},
								{Value: 0.2},
							},
						},
					},
				},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {

			applyHavingClause(tc.results, tc.params)

			got := tc.results

			if len(got) != len(tc.want) {
				t.Errorf("got %v, want %v", got, tc.want)
			}

			for i := range got {
				if len(got[i].Series) != len(tc.want[i].Series) {
					t.Errorf("got %v, want %v", got, tc.want)
				}

				for j := range got[i].Series {
					if len(got[i].Series[j].Points) != len(tc.want[i].Series[j].Points) {
						t.Errorf("got %v, want %v", len(got[i].Series[j].Points), len(tc.want[i].Series[j].Points))
					}

					for k := range got[i].Series[j].Points {
						if got[i].Series[j].Points[k].Value != tc.want[i].Series[j].Points[k].Value {
							t.Errorf("got %v, want %v", got, tc.want)
						}
					}
				}
			}
		})
	}
}
@ -8,7 +8,6 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"sort"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
@@ -3107,78 +3106,6 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
 	aH.queryRangeV3(r.Context(), queryRangeParams, w, r)
 }
 
-func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
-	// apply limit if any for metrics
-	// use the grouping set points to apply the limit
-
-	for _, result := range results {
-		builderQueries := queryRangeParams.CompositeQuery.BuilderQueries
-
-		if builderQueries != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics ||
-			result.QueryName != builderQueries[result.QueryName].Expression) {
-			limit := builderQueries[result.QueryName].Limit
-
-			orderByList := builderQueries[result.QueryName].OrderBy
-			if limit >= 0 {
-				if len(orderByList) == 0 {
-					// If no orderBy is specified, sort by value in descending order
-					orderByList = []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}
-				}
-				sort.SliceStable(result.Series, func(i, j int) bool {
-					for _, orderBy := range orderByList {
-						if orderBy.ColumnName == constants.SigNozOrderByValue {
-
-							// For table type queries (we rely on the fact that one value for row), sort
-							// based on final aggregation value
-							if len(result.Series[i].Points) == 1 && len(result.Series[j].Points) == 1 {
-								if orderBy.Order == "asc" {
-									return result.Series[i].Points[0].Value < result.Series[j].Points[0].Value
-								} else if orderBy.Order == "desc" {
-									return result.Series[i].Points[0].Value > result.Series[j].Points[0].Value
-								}
-							}
-
-							// For graph type queries, sort based on GroupingSetsPoint
-							if result.Series[i].GroupingSetsPoint == nil || result.Series[j].GroupingSetsPoint == nil {
-								// Handle nil GroupingSetsPoint, if needed
-								// Here, we assume non-nil values are always less than nil values
-								return result.Series[i].GroupingSetsPoint != nil
-							}
-							if orderBy.Order == "asc" {
-								return result.Series[i].GroupingSetsPoint.Value < result.Series[j].GroupingSetsPoint.Value
-							} else if orderBy.Order == "desc" {
-								return result.Series[i].GroupingSetsPoint.Value > result.Series[j].GroupingSetsPoint.Value
-							}
-						} else {
-							// Sort based on Labels map
-							labelI, existsI := result.Series[i].Labels[orderBy.ColumnName]
-							labelJ, existsJ := result.Series[j].Labels[orderBy.ColumnName]
-
-							if !existsI || !existsJ {
-								// Handle missing labels, if needed
-								// Here, we assume non-existent labels are always less than existing ones
-								return existsI
-							}
-
-							if orderBy.Order == "asc" {
-								return strings.Compare(labelI, labelJ) < 0
-							} else if orderBy.Order == "desc" {
-								return strings.Compare(labelI, labelJ) > 0
-							}
-						}
-					}
-					// Preserve original order if no matching orderBy is found
-					return i < j
-				})
-
-				if limit > 0 && len(result.Series) > int(limit) {
-					result.Series = result.Series[:limit]
-				}
-			}
-		}
-	}
-}
-
 func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) {
 
 	// get the param from url and add it to body
@@ -3295,6 +3222,10 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
 		return
 	}
 
+	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+		postProcessResult(result, queryRangeParams)
+	}
+
 	resp := v3.QueryRangeResponse{
 		Result: result,
 	}
@@ -3322,3 +3253,48 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
 
 	aH.queryRangeV4(r.Context(), queryRangeParams, w, r)
 }
+
+// postProcessResult applies having clause, metric limit, reduce function to the result
+// This function is effective for metrics data source for now, but it can be extended to other data sources
+// if needed
+// Much of this work can be done in the ClickHouse query, but we decided to do it here because:
+// 1. Effective use of caching
+// 2. Easier to add new functions
+func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
+	// Having clause is not part of the clickhouse query, so we need to apply it here
+	// It's not included in the query because it doesn't work nicely with caching
+	// With this change, if you have a query with a having clause, and then you change the having clause
+	// to something else, the query will still be cached.
+	applyHavingClause(result, queryRangeParams)
+	// We apply the metric limit here because it's not part of the clickhouse query
+	// The limit in the context of the time series query is the number of time series
+	// So for the limit to work, we need to know what series to keep and what to discard
+	// For us to know that, we need to execute the query first, and then apply the limit
+	// which we found expensive, because we are executing the query twice on the same data
+	// So we decided to apply the limit here, after the query is executed
+	// The function is named applyMetricLimit because it only applies to metrics data source
+	// In traces and logs, the limit is achieved using subqueries
+	applyMetricLimit(result, queryRangeParams)
+	// Each series in the result produces N number of points, where N is (end - start) / step
+	// For the panel type table, we need to show one point for each series in the row
+	// We do that by applying a reduce function to each series
+	applyReduceTo(result, queryRangeParams)
+	// We apply the functions here because it's easier to add new functions
+	applyFunctions(result, queryRangeParams)
+}
+
+// applyFunctions applies functions for each query in the composite query
+// The functions can be more than one, and they are applied in the order they are defined
+func applyFunctions(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
+	for idx, result := range results {
+		builderQueries := queryRangeParams.CompositeQuery.BuilderQueries
+
+		if builderQueries != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics) {
+			functions := builderQueries[result.QueryName].Functions
+
+			for _, function := range functions {
+				results[idx] = queryBuilder.ApplyFunction(function, result)
+			}
+		}
+	}
+}
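For orientation, a minimal sketch of how this hook is wired inside `queryRangeV4`: post-processing only runs for builder queries, and the processed results are what end up in the response. The variable names (`result`, `queryRangeParams`) mirror the handler code above; everything else here is illustrative, not part of the change itself.

// Sketch only: inside queryRangeV4, after the querier has returned `result`.
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
    // having -> metric limit -> reduce -> functions, in the order applied by postProcessResult
    postProcessResult(result, queryRangeParams)
}
resp := v3.QueryRangeResponse{Result: result}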
@@ -8,9 +8,7 @@ import (
 	"strings"
 	"testing"
 
-	"go.signoz.io/signoz/pkg/query-service/constants"
 	"go.signoz.io/signoz/pkg/query-service/model"
-	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
 )
 
 func TestPrepareQuery(t *testing.T) {
@@ -132,619 +130,3 @@ func TestPrepareQuery(t *testing.T) {
 		})
 	}
 }
-
-func TestApplyLimitOnMetricResult(t *testing.T) {
-	cases := []struct {
-		name           string
-		inputResult    []*v3.Result
-		params         *v3.QueryRangeParamsV3
-		expectedResult []*v3.Result
-	}{
-		{
-			name: "test limit 1 without order", // top most (latency/error) as default
-			inputResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-			params: &v3.QueryRangeParamsV3{
-				Start: 1689220036000, End: 1689220096000, Step: 60,
-				CompositeQuery: &v3.CompositeQuery{
-					BuilderQueries: map[string]*v3.BuilderQuery{
-						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1},
-					},
-					QueryType: v3.QueryTypeBuilder,
-					PanelType: v3.PanelTypeGraph,
-				},
-			},
-			expectedResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-					},
-				},
-			},
-		},
-		{
-			name: "test limit with order asc",
-			inputResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-			params: &v3.QueryRangeParamsV3{
-				Start: 1689220036000, End: 1689220096000, Step: 60,
-				CompositeQuery: &v3.CompositeQuery{
-					BuilderQueries: map[string]*v3.BuilderQuery{
-						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
-					},
-					QueryType: v3.QueryTypeBuilder,
-					PanelType: v3.PanelTypeGraph,
-				},
-			},
-			expectedResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-		},
-		{
-			name: "test data source not metrics",
-			inputResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 69}, {Timestamp: 1689220096000, Value: 240}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 154.5}},
-						{Labels: map[string]string{"service_name": "redis"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 420}, {Timestamp: 1689220096000, Value: 260}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 340}},
-					},
-				},
-			},
-			params: &v3.QueryRangeParamsV3{
-				Start: 1689220036000, End: 1689220096000, Step: 60,
-				CompositeQuery: &v3.CompositeQuery{
-					BuilderQueries: map[string]*v3.BuilderQuery{
-						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "service_name"}, DataSource: v3.DataSourceTraces, AggregateOperator: v3.AggregateOperatorSum, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
-					},
-					QueryType: v3.QueryTypeBuilder,
-					PanelType: v3.PanelTypeGraph,
-				},
-			},
-			expectedResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 69}, {Timestamp: 1689220096000, Value: 240}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 154.5}},
-						{Labels: map[string]string{"service_name": "redis"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 420}, {Timestamp: 1689220096000, Value: 260}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 340}},
-					},
-				},
-			},
-		},
-		{
-			// ["GET /api/v1/health", "DELETE /api/v1/health"] so result should be ["DELETE /api/v1/health"] although it has lower value
-			name: "test limit with operation asc",
-			inputResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-			params: &v3.QueryRangeParamsV3{
-				Start: 1689220036000, End: 1689220096000, Step: 60,
-				CompositeQuery: &v3.CompositeQuery{
-					BuilderQueries: map[string]*v3.BuilderQuery{
-						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: "operation", Order: "asc"}}},
-					},
-					QueryType: v3.QueryTypeBuilder,
-					PanelType: v3.PanelTypeGraph,
-				},
-			},
-			expectedResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-		},
-		{
-			name: "test limit with multiple order by labels",
-			inputResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health", "status_code": "200", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "301", "priority": "P1"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "400", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "200", "priority": "P1"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-			params: &v3.QueryRangeParamsV3{
-				Start: 1689220036000, End: 1689220096000, Step: 60,
-				CompositeQuery: &v3.CompositeQuery{
-					BuilderQueries: map[string]*v3.BuilderQuery{
-						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation"}, {Key: "status_code"}, {Key: "priority"}}, Limit: 2, OrderBy: []v3.OrderBy{{ColumnName: "priority", Order: "asc"}, {ColumnName: "status_code", Order: "desc"}}},
-					},
-					QueryType: v3.QueryTypeBuilder,
-					PanelType: v3.PanelTypeGraph,
-				},
-			},
-			expectedResult: []*v3.Result{
-				{
-					QueryName: "A",
-					Series: []*v3.Series{
-						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health", "status_code": "200", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
-						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "400", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
-					},
-				},
-			},
-		},
-	}
-
-	for _, c := range cases {
-		t.Run(c.name, func(t *testing.T) {
-			result := c.inputResult
-			applyMetricLimit(result, c.params)
-			if len(result) != len(c.expectedResult) {
-				t.Errorf("expected result length: %d, but got: %d", len(c.expectedResult), len(result))
-			}
-			for i, r := range result {
-				if r.QueryName != c.expectedResult[i].QueryName {
-					t.Errorf("expected query name: %s, but got: %s", c.expectedResult[i].QueryName, r.QueryName)
-				}
-				if len(r.Series) != len(c.expectedResult[i].Series) {
-					t.Errorf("expected series length: %d, but got: %d", len(c.expectedResult[i].Series), len(r.Series))
-				}
-				for j, s := range r.Series {
-					if len(s.Points) != len(c.expectedResult[i].Series[j].Points) {
-						t.Errorf("expected points length: %d, but got: %d", len(c.expectedResult[i].Series[j].Points), len(s.Points))
-					}
-					for k, p := range s.Points {
-						if p.Timestamp != c.expectedResult[i].Series[j].Points[k].Timestamp {
-							t.Errorf("expected point timestamp: %d, but got: %d", c.expectedResult[i].Series[j].Points[k].Timestamp, p.Timestamp)
-						}
-						if p.Value != c.expectedResult[i].Series[j].Points[k].Value {
-							t.Errorf("expected point value: %f, but got: %f", c.expectedResult[i].Series[j].Points[k].Value, p.Value)
-						}
-					}
-				}
-			}
-		})
-	}
-}
pkg/query-service/app/limit.go (new file, 81 lines)
@@ -0,0 +1,81 @@
package app

import (
	"sort"
	"strings"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// applyMetricLimit applies limit to the metrics query results
func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
	// apply limit if any for metrics
	// use the grouping set points to apply the limit

	for _, result := range results {
		builderQueries := queryRangeParams.CompositeQuery.BuilderQueries

		if builderQueries != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics) {
			limit := builderQueries[result.QueryName].Limit

			orderByList := builderQueries[result.QueryName].OrderBy
			if limit > 0 {
				if len(orderByList) == 0 {
					// If no orderBy is specified, sort by value in descending order
					orderByList = []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}
				}
				sort.SliceStable(result.Series, func(i, j int) bool {
					for _, orderBy := range orderByList {
						if orderBy.ColumnName == constants.SigNozOrderByValue {

							// For table type queries (we rely on the fact that there is one value per row), sort
							// based on the final aggregation value
							if len(result.Series[i].Points) == 1 && len(result.Series[j].Points) == 1 {
								if orderBy.Order == "asc" {
									return result.Series[i].Points[0].Value < result.Series[j].Points[0].Value
								} else if orderBy.Order == "desc" {
									return result.Series[i].Points[0].Value > result.Series[j].Points[0].Value
								}
							}

							// For graph type queries, sort based on GroupingSetsPoint
							if result.Series[i].GroupingSetsPoint == nil || result.Series[j].GroupingSetsPoint == nil {
								// Handle nil GroupingSetsPoint, if needed
								// Here, we assume non-nil values are always less than nil values
								return result.Series[i].GroupingSetsPoint != nil
							}
							if orderBy.Order == "asc" {
								return result.Series[i].GroupingSetsPoint.Value < result.Series[j].GroupingSetsPoint.Value
							} else if orderBy.Order == "desc" {
								return result.Series[i].GroupingSetsPoint.Value > result.Series[j].GroupingSetsPoint.Value
							}
						} else {
							// Sort based on the Labels map
							labelI, existsI := result.Series[i].Labels[orderBy.ColumnName]
							labelJ, existsJ := result.Series[j].Labels[orderBy.ColumnName]

							if !existsI || !existsJ {
								// Handle missing labels, if needed
								// Here, we assume non-existent labels are always less than existing ones
								return existsI
							}

							if orderBy.Order == "asc" {
								return strings.Compare(labelI, labelJ) < 0
							} else if orderBy.Order == "desc" {
								return strings.Compare(labelI, labelJ) > 0
							}
						}
					}
					// Preserve original order if no matching orderBy is found
					return i < j
				})

				if limit > 0 && len(result.Series) > int(limit) {
					result.Series = result.Series[:limit]
				}
			}
		}
	}
}
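A small, hypothetical walk-through of the default ordering above: with `Limit: 1` and no explicit `OrderBy`, series are sorted by their grouping-set value in descending order, so the higher-valued series is the one that survives truncation. The labels and values below are invented for illustration and are not part of the file.

// Hypothetical input: two series whose grouping-set values are 19.3 and 8.83.
res := []*v3.Result{{
    QueryName: "A",
    Series: []*v3.Series{
        {Labels: map[string]string{"service_name": "frontend"}, GroupingSetsPoint: &v3.Point{Value: 19.3}},
        {Labels: map[string]string{"service_name": "route"}, GroupingSetsPoint: &v3.Point{Value: 8.83}},
    },
}}
params := &v3.QueryRangeParamsV3{CompositeQuery: &v3.CompositeQuery{
    BuilderQueries: map[string]*v3.BuilderQuery{
        "A": {QueryName: "A", Expression: "A", DataSource: v3.DataSourceMetrics, Limit: 1},
    },
}}
applyMetricLimit(res, params)
// res[0].Series now holds only the "frontend" series: the default order is value descending.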
pkg/query-service/app/limit_test.go (new file, 624 lines)
@@ -0,0 +1,624 @@
package app

import (
	"testing"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestApplyLimitOnMetricResult(t *testing.T) {
	cases := []struct {
		name           string
		inputResult    []*v3.Result
		params         *v3.QueryRangeParamsV3
		expectedResult []*v3.Result
	}{
		{
			name: "test limit 1 without order", // top most (latency/error) as default
			inputResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				Start: 1689220036000, End: 1689220096000, Step: 60,
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1},
					},
					QueryType: v3.QueryTypeBuilder,
					PanelType: v3.PanelTypeGraph,
				},
			},
			expectedResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
					},
				},
			},
		},
		{
			name: "test limit with order asc",
			inputResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				Start: 1689220036000, End: 1689220096000, Step: 60,
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
					},
					QueryType: v3.QueryTypeBuilder,
					PanelType: v3.PanelTypeGraph,
				},
			},
			expectedResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "route"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
		},
		{
			name: "test data source not metrics",
			inputResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 69}, {Timestamp: 1689220096000, Value: 240}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 154.5}},
						{Labels: map[string]string{"service_name": "redis"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 420}, {Timestamp: 1689220096000, Value: 260}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 340}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				Start: 1689220036000, End: 1689220096000, Step: 60,
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "service_name"}, DataSource: v3.DataSourceTraces, AggregateOperator: v3.AggregateOperatorSum, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
					},
					QueryType: v3.QueryTypeBuilder,
					PanelType: v3.PanelTypeGraph,
				},
			},
			expectedResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 69}, {Timestamp: 1689220096000, Value: 240}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 154.5}},
						{Labels: map[string]string{"service_name": "redis"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 420}, {Timestamp: 1689220096000, Value: 260}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 340}},
					},
				},
			},
		},
		{
			// ["GET /api/v1/health", "DELETE /api/v1/health"] so result should be ["DELETE /api/v1/health"] although it has lower value
			name: "test limit with operation asc",
			inputResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				Start: 1689220036000, End: 1689220096000, Step: 60,
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}}, Limit: 1, OrderBy: []v3.OrderBy{{ColumnName: "operation", Order: "asc"}}},
					},
					QueryType: v3.QueryTypeBuilder,
					PanelType: v3.PanelTypeGraph,
				},
			},
			expectedResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
		},
		{
			name: "test limit with multiple order by labels",
			inputResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health", "status_code": "200", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "301", "priority": "P1"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "400", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "200", "priority": "P1"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				Start: 1689220036000, End: 1689220096000, Step: 60,
				CompositeQuery: &v3.CompositeQuery{
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "signo_calls_total"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorSumRate, Expression: "A", GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation"}, {Key: "status_code"}, {Key: "priority"}}, Limit: 2, OrderBy: []v3.OrderBy{{ColumnName: "priority", Order: "asc"}, {ColumnName: "status_code", Order: "desc"}}},
					},
					QueryType: v3.QueryTypeBuilder,
					PanelType: v3.PanelTypeGraph,
				},
			},
			expectedResult: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Labels: map[string]string{"service_name": "frontend", "operation": "GET /api/v1/health", "status_code": "200", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 19.2}, {Timestamp: 1689220096000, Value: 19.5}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 19.3}},
						{Labels: map[string]string{"service_name": "route", "operation": "DELETE /api/v1/health", "status_code": "400", "priority": "P0"}, Points: []v3.Point{{Timestamp: 1689220036000, Value: 8.83}, {Timestamp: 1689220096000, Value: 8.83}}, GroupingSetsPoint: &v3.Point{Timestamp: 0, Value: 8.83}},
					},
				},
			},
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			result := c.inputResult
			applyMetricLimit(result, c.params)
			if len(result) != len(c.expectedResult) {
				t.Errorf("expected result length: %d, but got: %d", len(c.expectedResult), len(result))
			}
			for i, r := range result {
				if r.QueryName != c.expectedResult[i].QueryName {
					t.Errorf("expected query name: %s, but got: %s", c.expectedResult[i].QueryName, r.QueryName)
				}
				if len(r.Series) != len(c.expectedResult[i].Series) {
					t.Errorf("expected series length: %d, but got: %d", len(c.expectedResult[i].Series), len(r.Series))
				}
				for j, s := range r.Series {
					if len(s.Points) != len(c.expectedResult[i].Series[j].Points) {
						t.Errorf("expected points length: %d, but got: %d", len(c.expectedResult[i].Series[j].Points), len(s.Points))
					}
					for k, p := range s.Points {
						if p.Timestamp != c.expectedResult[i].Series[j].Points[k].Timestamp {
							t.Errorf("expected point timestamp: %d, but got: %d", c.expectedResult[i].Series[j].Points[k].Timestamp, p.Timestamp)
						}
						if p.Value != c.expectedResult[i].Series[j].Points[k].Value {
							t.Errorf("expected point value: %f, but got: %f", c.expectedResult[i].Series[j].Points[k].Value, p.Value)
						}
					}
				}
			}
		})
	}
}
@@ -24,7 +24,10 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
 	groupBy := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
 	orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
 
-	if mq.Quantile != 0 {
+	var quantile float64
+
+	if v3.IsPercentileOperator(mq.SpaceAggregation) {
+		quantile = v3.GetPercentileFromOperator(mq.SpaceAggregation)
 		// If quantile is set, we need to group by le
 		// and set the space aggregation to sum
 		// and time aggregation to rate
@@ -57,8 +60,8 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
 		return "", err
 	}
 
-	if mq.Quantile != 0 {
-		query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, mq.Quantile, query, groupBy, orderBy)
+	if quantile != 0 {
+		query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, quantile, query, groupBy, orderBy)
 	}
 
 	return query, nil
@ -336,9 +336,9 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
|
|||||||
DataType: v3.AttributeKeyDataTypeString,
|
DataType: v3.AttributeKeyDataTypeString,
|
||||||
Type: v3.AttributeKeyTypeTag,
|
Type: v3.AttributeKeyTypeTag,
|
||||||
}},
|
}},
|
||||||
Expression: "A",
|
Expression: "A",
|
||||||
Disabled: false,
|
Disabled: false,
|
||||||
Quantile: 0.99,
|
SpaceAggregation: v3.SpaceAggregationPercentile99,
|
||||||
},
|
},
|
||||||
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
|
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
|
||||||
},
|
},
|
||||||
@ -366,9 +366,9 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Expression: "A",
|
Expression: "A",
|
||||||
Disabled: false,
|
Disabled: false,
|
||||||
Quantile: 0.99,
|
SpaceAggregation: v3.SpaceAggregationPercentile99,
|
||||||
},
|
},
|
||||||
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
|
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
|
||||||
},
|
},
|
||||||
@ -418,9 +418,9 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
|
|||||||
DataType: v3.AttributeKeyDataTypeString,
|
DataType: v3.AttributeKeyDataTypeString,
|
||||||
Type: v3.AttributeKeyTypeTag,
|
Type: v3.AttributeKeyTypeTag,
|
||||||
}},
|
}},
|
||||||
Expression: "A",
|
Expression: "A",
|
||||||
Disabled: false,
|
Disabled: false,
|
||||||
Quantile: 0.99,
|
SpaceAggregation: v3.SpaceAggregationPercentile99,
|
||||||
},
|
},
|
||||||
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
|
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
|
||||||
},
|
},
|
||||||
@ -448,9 +448,9 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Expression: "A",
|
Expression: "A",
|
||||||
Disabled: false,
|
Disabled: false,
|
||||||
Quantile: 0.99,
|
SpaceAggregation: v3.SpaceAggregationPercentile99,
|
||||||
},
|
},
|
||||||
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
|
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
|
||||||
},
|
},
|
||||||
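In short, a p99 latency query that previously set Quantile: 0.99 now selects the percentile through the space-aggregation field. A minimal sketch of the relevant BuilderQuery fields (values illustrative, unrelated fields omitted):

	q := v3.BuilderQuery{
		DataSource:       v3.DataSourceMetrics,
		Expression:       "A",
		Disabled:         false,
		SpaceAggregation: v3.SpaceAggregationPercentile99, // replaces the removed Quantile: 0.99
	}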
286  pkg/query-service/app/queryBuilder/functions.go  Normal file
@ -0,0 +1,286 @@
package queryBuilder

import (
	"math"
	"sort"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// funcCutOffMin cuts off values below the threshold and replaces them with NaN
func funcCutOffMin(result *v3.Result, threshold float64) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			if point.Value < threshold {
				point.Value = math.NaN()
			}
			series.Points[idx] = point
		}
	}
	return result
}

// funcCutOffMax cuts off values above the threshold and replaces them with NaN
func funcCutOffMax(result *v3.Result, threshold float64) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			if point.Value > threshold {
				point.Value = math.NaN()
			}
			series.Points[idx] = point
		}
	}
	return result
}

// funcClampMin cuts off values below the threshold and replaces them with the threshold
func funcClampMin(result *v3.Result, threshold float64) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			if point.Value < threshold {
				point.Value = threshold
			}
			series.Points[idx] = point
		}
	}
	return result
}

// funcClampMax cuts off values above the threshold and replaces them with the threshold
func funcClampMax(result *v3.Result, threshold float64) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			if point.Value > threshold {
				point.Value = threshold
			}
			series.Points[idx] = point
		}
	}
	return result
}

// funcAbsolute returns the absolute value of each point
func funcAbsolute(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			point.Value = math.Abs(point.Value)
			series.Points[idx] = point
		}
	}
	return result
}

// funcLog2 returns the log2 of each point
func funcLog2(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			point.Value = math.Log2(point.Value)
			series.Points[idx] = point
		}
	}
	return result
}

// funcLog10 returns the log10 of each point
func funcLog10(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		for idx, point := range series.Points {
			point.Value = math.Log10(point.Value)
			series.Points[idx] = point
		}
	}
	return result
}

// funcCumSum returns the cumulative sum for each point in a series
func funcCumSum(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		var sum float64
		for idx, point := range series.Points {
			if !math.IsNaN(point.Value) {
				sum += point.Value
			}
			point.Value = sum
			series.Points[idx] = point
		}
	}
	return result
}

func funcEWMA(result *v3.Result, alpha float64) *v3.Result {
	for _, series := range result.Series {
		var ewma float64
		var initialized bool

		for i, point := range series.Points {
			if !initialized {
				if !math.IsNaN(point.Value) {
					// Initialize EWMA with the first non-NaN value
					ewma = point.Value
					initialized = true
				}
				// Continue until the EWMA is initialized
				continue
			}

			if !math.IsNaN(point.Value) {
				// Update EWMA with the current value
				ewma = alpha*point.Value + (1-alpha)*ewma
			}
			// Set the EWMA value for the current point
			series.Points[i].Value = ewma
		}
	}
	return result
}

// funcMedian3 returns the median of 3 points for each point in a series
func funcMedian3(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		median3 := make([]float64, 0)
		for i := 1; i < len(series.Points)-1; i++ {
			values := make([]float64, 0, 3)

			// Add non-NaN values to the slice
			for j := -1; j <= 1; j++ {
				if !math.IsNaN(series.Points[i+j].Value) {
					values = append(values, series.Points[i+j].Value)
				}
			}

			// Handle the case where there are not enough values to calculate a median
			if len(values) == 0 {
				median3 = append(median3, math.NaN())
				continue
			}

			median3 = append(median3, median(values))
		}

		// Set the median3 values for the series
		for i := 1; i < len(series.Points)-1; i++ {
			series.Points[i].Value = median3[i-1]
		}
	}
	return result
}

// funcMedian5 returns the median of 5 points for each point in a series
func funcMedian5(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		median5 := make([]float64, 0)
		for i := 2; i < len(series.Points)-2; i++ {
			values := make([]float64, 0, 5)

			// Add non-NaN values to the slice
			for j := -2; j <= 2; j++ {
				if !math.IsNaN(series.Points[i+j].Value) {
					values = append(values, series.Points[i+j].Value)
				}
			}

			// Handle the case where there are not enough values to calculate a median
			if len(values) == 0 {
				median5 = append(median5, math.NaN())
				continue
			}

			median5 = append(median5, median(values))
		}

		// Set the median5 values for the series
		for i := 2; i < len(series.Points)-2; i++ {
			series.Points[i].Value = median5[i-2]
		}
	}
	return result
}

// funcMedian7 returns the median of 7 points for each point in a series
func funcMedian7(result *v3.Result) *v3.Result {
	for _, series := range result.Series {
		median7 := make([]float64, 0)
		for i := 3; i < len(series.Points)-3; i++ {
			values := make([]float64, 0, 7)

			// Add non-NaN values to the slice
			for j := -3; j <= 3; j++ {
				if !math.IsNaN(series.Points[i+j].Value) {
					values = append(values, series.Points[i+j].Value)
				}
			}

			// Handle the case where there are not enough values to calculate a median
			if len(values) == 0 {
				median7 = append(median7, math.NaN())
				continue
			}

			median7 = append(median7, median(values))
		}

		// Set the median7 values for the series
		for i := 3; i < len(series.Points)-3; i++ {
			series.Points[i].Value = median7[i-3]
		}
	}
	return result
}

func median(values []float64) float64 {
	sort.Float64s(values)
	medianIndex := len(values) / 2
	if len(values)%2 == 0 {
		return (values[medianIndex-1] + values[medianIndex]) / 2
	}
	return values[medianIndex]
}

func ApplyFunction(fn v3.Function, result *v3.Result) *v3.Result {

	switch fn.Name {
	case v3.FunctionNameCutOffMin, v3.FunctionNameCutOffMax, v3.FunctionNameClampMin, v3.FunctionNameClampMax:
		threshold, ok := fn.Args[0].(float64)
		if !ok {
			return result
		}
		switch fn.Name {
		case v3.FunctionNameCutOffMin:
			return funcCutOffMin(result, threshold)
		case v3.FunctionNameCutOffMax:
			return funcCutOffMax(result, threshold)
		case v3.FunctionNameClampMin:
			return funcClampMin(result, threshold)
		case v3.FunctionNameClampMax:
			return funcClampMax(result, threshold)
		}
	case v3.FunctionNameAbsolute:
		return funcAbsolute(result)
	case v3.FunctionNameLog2:
		return funcLog2(result)
	case v3.FunctionNameLog10:
		return funcLog10(result)
	case v3.FunctionNameCumSum:
		return funcCumSum(result)
	case v3.FunctionNameEWMA3, v3.FunctionNameEWMA5, v3.FunctionNameEWMA7:
		alpha, ok := fn.Args[0].(float64)
		if !ok {
			// alpha = 2 / (n + 1) where n is the window size
			if fn.Name == v3.FunctionNameEWMA3 {
				alpha = 0.5 // 2 / (3 + 1)
			} else if fn.Name == v3.FunctionNameEWMA5 {
				alpha = 1 / float64(3) // 2 / (5 + 1)
			} else if fn.Name == v3.FunctionNameEWMA7 {
				alpha = 0.25 // 2 / (7 + 1)
			}
		}
		return funcEWMA(result, alpha)
	case v3.FunctionNameMedian3:
		return funcMedian3(result)
	case v3.FunctionNameMedian5:
		return funcMedian5(result)
	case v3.FunctionNameMedian7:
		return funcMedian7(result)
	}
	return result
}
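For readers skimming the diff, here is a minimal illustrative snippet (not part of the commit) showing how these helpers compose. It assumes it lives in the queryBuilder package alongside ApplyFunction, the wrapper name exampleApplyPipeline is hypothetical, and the values mirror the cutOffMin-then-cumSum case from the tests below.

	// Illustrative sketch: evaluate a panel's function pipeline in order.
	func exampleApplyPipeline() *v3.Result {
		result := &v3.Result{Series: []*v3.Series{
			{Points: []v3.Point{{Value: 0.5}, {Value: 0.2}, {Value: 0.1}, {Value: 0.4}, {Value: 0.3}}},
		}}
		fns := []v3.Function{
			{Name: v3.FunctionNameCutOffMin, Args: []interface{}{0.3}}, // points below 0.3 become NaN
			{Name: v3.FunctionNameCumSum},                              // NaN points are skipped by the running sum
		}
		for _, fn := range fns {
			result = ApplyFunction(fn, result)
		}
		// result.Series[0].Points is now 0.5, 0.5, 0.5, 0.9, 1.2
		return result
	}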
604  pkg/query-service/app/queryBuilder/functions_test.go  Normal file
@ -0,0 +1,604 @@
package queryBuilder

import (
	"math"
	"testing"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestFuncCutOffMin(t *testing.T) {
	type args struct {
		result    *v3.Result
		threshold float64
	}
	tests := []struct {
		name string
		args args
		want *v3.Result
	}{
		{
			name: "test funcCutOffMin",
			args: args{
				result:    &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
				threshold: 0.3,
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: math.NaN()}, {Value: math.NaN()}}}}},
		},
		{
			name: "test funcCutOffMin with threshold 0",
			args: args{
				result:    &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
				threshold: 0,
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
		},
	}

	for _, tt := range tests {
		newResult := funcCutOffMin(tt.args.result, tt.args.threshold)
		for j, series := range newResult.Series {
			for k, point := range series.Points {
				if math.IsNaN(tt.want.Series[j].Points[k].Value) {
					if !math.IsNaN(point.Value) {
						t.Errorf("funcCutOffMin() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
					}
					continue
				}
				if point.Value != tt.want.Series[j].Points[k].Value {
					t.Errorf("funcCutOffMin() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
				}
			}
		}
	}
}

func TestFuncCutOffMax(t *testing.T) {
	type args struct {
		result    *v3.Result
		threshold float64
	}
	tests := []struct {
		name string
		args args
		want *v3.Result
	}{
		{
			name: "test funcCutOffMax",
			args: args{
				result:    &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
				threshold: 0.3,
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: math.NaN()}, {Value: math.NaN()}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
		},
		{
			name: "test funcCutOffMax with threshold 0",
			args: args{
				result:    &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}}}},
				threshold: 0,
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: math.NaN()}, {Value: math.NaN()}, {Value: math.NaN()}, {Value: math.NaN()}, {Value: math.NaN()}}}}},
		},
	}

	for _, tt := range tests {
		newResult := funcCutOffMax(tt.args.result, tt.args.threshold)
		for j, series := range newResult.Series {
			for k, point := range series.Points {
				if math.IsNaN(tt.want.Series[j].Points[k].Value) {
					if !math.IsNaN(point.Value) {
						t.Errorf("funcCutOffMax() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
					}
					continue
				}
				if point.Value != tt.want.Series[j].Points[k].Value {
					t.Errorf("funcCutOffMax() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
				}
			}
		}
	}
}

func TestCutOffMinCumSum(t *testing.T) {
	type args struct {
		result    *v3.Result
		threshold float64
	}
	tests := []struct {
		name string
		args args
		want *v3.Result
	}{
		{
			name: "test funcCutOffMin followed by funcCumulativeSum",
			args: args{
				result:    &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.2}, {Value: 0.1}, {Value: 0.4}, {Value: 0.3}}}}},
				threshold: 0.3,
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Value: 0.5}, {Value: 0.5}, {Value: 0.5}, {Value: 0.9}, {Value: 1.2}}}}},
		},
	}

	for _, tt := range tests {
		newResult := funcCutOffMin(tt.args.result, tt.args.threshold)
		newResult = funcCumSum(newResult)
		for j, series := range newResult.Series {
			for k, point := range series.Points {
				if math.IsNaN(tt.want.Series[j].Points[k].Value) {
					if !math.IsNaN(point.Value) {
						t.Errorf("funcCutOffMin() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
					}
					continue
				}
				if point.Value != tt.want.Series[j].Points[k].Value {
					t.Errorf("funcCutOffMin() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
				}
			}
		}
	}
}

func TestFuncMedian3(t *testing.T) {
	type args struct {
		result *v3.Result
	}

	tests := []struct {
		name string
		args args
		want *v3.Result
	}{
		{
			name: "Values",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 5}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: 8}, {Timestamp: 4, Value: 2}, {Timestamp: 5, Value: 7}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 5}, {Timestamp: 2, Value: 5}, {Timestamp: 3, Value: 3}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}}}}},
		},
		{
			name: "NaNHandling",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: math.NaN()}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: math.NaN()}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 9}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: math.NaN()}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: 5}, {Timestamp: 4, Value: 8}, {Timestamp: 5, Value: 9}}}}},
		},
		{
			name: "UniformValues",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 7}, {Timestamp: 2, Value: 7}, {Timestamp: 3, Value: 7}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 7}, {Timestamp: 2, Value: 7}, {Timestamp: 3, Value: 7}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}}}}},
		},
		{
			name: "SingleValueSeries",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 9}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 9}}}}},
		},
		{
			name: "EmptySeries",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{}}}},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := funcMedian3(tt.args.result)
			for j, series := range got.Series {
				for k, point := range series.Points {
					if point.Value != tt.want.Series[j].Points[k].Value && !math.IsNaN(tt.want.Series[j].Points[k].Value) {
						t.Errorf("funcMedian3() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
					}
				}
			}
		})
	}
}

func TestFuncMedian5(t *testing.T) {
	type args struct {
		result *v3.Result
	}

	tests := []struct {
		name string
		args args
		want *v3.Result
	}{
		{
			name: "Values",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 5}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: 8}, {Timestamp: 4, Value: 2}, {Timestamp: 5, Value: 7}, {Timestamp: 6, Value: 9}, {Timestamp: 7, Value: 1}, {Timestamp: 8, Value: 4}, {Timestamp: 9, Value: 6}, {Timestamp: 10, Value: 10}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 5}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: 5}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}, {Timestamp: 6, Value: 4}, {Timestamp: 7, Value: 6}, {Timestamp: 8, Value: 6}, {Timestamp: 9, Value: 6}, {Timestamp: 10, Value: 10}}}}},
		},
		{
			name: "NaNHandling",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: math.NaN()}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: math.NaN()}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 9}, {Timestamp: 6, Value: 1}, {Timestamp: 7, Value: 4}, {Timestamp: 8, Value: 6}, {Timestamp: 9, Value: 10}, {Timestamp: 10, Value: 2}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: math.NaN()}, {Timestamp: 2, Value: 3}, {Timestamp: 3, Value: 7}, {Timestamp: 4, Value: 5}, {Timestamp: 5, Value: 5.5}, {Timestamp: 6, Value: 6}, {Timestamp: 7, Value: 6}, {Timestamp: 8, Value: 4}, {Timestamp: 9, Value: 10}, {Timestamp: 10, Value: 2}}}}},
		},
		{
			name: "UniformValues",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 7}, {Timestamp: 2, Value: 7}, {Timestamp: 3, Value: 7}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 7}, {Timestamp: 2, Value: 7}, {Timestamp: 3, Value: 7}, {Timestamp: 4, Value: 7}, {Timestamp: 5, Value: 7}}}}},
		},
		{
			name: "SingleValueSeries",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 9}}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1, Value: 9}}}}},
		},
		{
			name: "EmptySeries",
			args: args{
				result: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{}}}},
			},
			want: &v3.Result{Series: []*v3.Series{{Points: []v3.Point{}}}},
		},
	}

	for _, tt := range tests {
		got := funcMedian5(tt.args.result)
		for j, series := range got.Series {
			for k, point := range series.Points {
				if point.Value != tt.want.Series[j].Points[k].Value && !math.IsNaN(tt.want.Series[j].Points[k].Value) {
					t.Errorf("funcMedian5() = %v, want %v", point.Value, tt.want.Series[j].Points[k].Value)
				}
			}
		}
	}
}
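One detail worth noting about these tests: expected NaN points are compared with math.IsNaN rather than ==, because NaN is never equal to itself in Go (as in IEEE 754 generally). A two-line illustration:

	fmt.Println(math.NaN() == math.NaN()) // false
	fmt.Println(math.IsNaN(math.NaN()))   // true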
71  pkg/query-service/app/reduce_to.go  Normal file
@ -0,0 +1,71 @@
package app

import (
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// applyReduceTo applies the reduceTo operator to each series
// and returns a new series with the reduced value
// reduceTo can be one of the following:
// - last
// - sum
// - avg
// - min
// - max
func applyReduceTo(result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
	for _, result := range result {
		builderQueries := queryRangeParams.CompositeQuery.BuilderQueries

		// reduceTo is only applicable for metrics data source
		// and for table and value panels
		if builderQueries[result.QueryName] != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics &&
			(queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable || queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeValue)) {
			reduceTo := builderQueries[result.QueryName].ReduceTo

			switch reduceTo {
			case v3.ReduceToOperatorLast:
				for i := 0; i < len(result.Series); i++ {
					if len(result.Series[i].Points) > 0 {
						result.Series[i].Points = []v3.Point{result.Series[i].Points[len(result.Series[i].Points)-1]}
					}
				}
			case v3.ReduceToOperatorSum:
				for i := 0; i < len(result.Series); i++ {
					var sum float64
					for j := 0; j < len(result.Series[i].Points); j++ {
						sum += result.Series[i].Points[j].Value
					}
					result.Series[i].Points = []v3.Point{{Value: sum}}
				}
			case v3.ReduceToOperatorAvg:
				for i := 0; i < len(result.Series); i++ {
					var sum float64
					for j := 0; j < len(result.Series[i].Points); j++ {
						sum += result.Series[i].Points[j].Value
					}
					result.Series[i].Points = []v3.Point{{Value: sum / float64(len(result.Series[i].Points))}}
				}
			case v3.ReduceToOperatorMin:
				for i := 0; i < len(result.Series); i++ {
					var min float64
					for j := 0; j < len(result.Series[i].Points); j++ {
						if j == 0 || result.Series[i].Points[j].Value < min {
							min = result.Series[i].Points[j].Value
						}
					}
					result.Series[i].Points = []v3.Point{{Value: min}}
				}
			case v3.ReduceToOperatorMax:
				for i := 0; i < len(result.Series); i++ {
					var max float64
					for j := 0; j < len(result.Series[i].Points); j++ {
						if j == 0 || result.Series[i].Points[j].Value > max {
							max = result.Series[i].Points[j].Value
						}
					}
					result.Series[i].Points = []v3.Point{{Value: max}}
				}
			}
		}
	}
}
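A small usage sketch, illustrative only and assumed to sit in the same app package since applyReduceTo is unexported; the wrapper name exampleReduceToAvg is hypothetical, and the shape mirrors what reduce_to_test.go below exercises.

	// Illustrative sketch: collapse a metrics series to a single average value for a value panel.
	func exampleReduceToAvg() []*v3.Result {
		results := []*v3.Result{
			{QueryName: "A", Series: []*v3.Series{{Points: []v3.Point{{Value: 10}, {Value: 20}, {Value: 30}}}}},
		}
		params := &v3.QueryRangeParamsV3{
			CompositeQuery: &v3.CompositeQuery{
				PanelType: v3.PanelTypeValue,
				BuilderQueries: map[string]*v3.BuilderQuery{
					"A": {DataSource: v3.DataSourceMetrics, ReduceTo: v3.ReduceToOperatorAvg},
				},
			},
		}
		applyReduceTo(results, params)
		// results[0].Series[0].Points now holds a single point with Value 20
		return results
	}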
99  pkg/query-service/app/reduce_to_test.go  Normal file
@ -0,0 +1,99 @@
package app

import (
	"testing"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestApplyReduceTo(t *testing.T) {
	type testCase struct {
		name    string
		results []*v3.Result
		params  *v3.QueryRangeParamsV3
		want    []*v3.Result
	}

	testCases := []testCase{
		{
			name: "test reduce to",
			results: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Points: []v3.Point{{Value: 0.5}, {Value: 0.4}, {Value: 0.3}, {Value: 0.2}, {Value: 0.1}}},
					},
				},
			},
			params: &v3.QueryRangeParamsV3{
				CompositeQuery: &v3.CompositeQuery{
					PanelType: v3.PanelTypeValue,
					BuilderQueries: map[string]*v3.BuilderQuery{
						"A": {
							DataSource: v3.DataSourceMetrics,
							ReduceTo:   v3.ReduceToOperatorSum,
						},
					},
				},
			},
			want: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{Points: []v3.Point{{Value: 1.5}}},
					},
				},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			applyReduceTo(tc.results, tc.params)
			got := tc.results

			for _, gotResult := range got {
				for _, wantResult := range tc.want {
					if gotResult.QueryName == wantResult.QueryName {
						if len(gotResult.Series) != len(wantResult.Series) {
							t.Errorf("got %v, want %v", gotResult.Series, wantResult.Series)
						} else {
							for i, gotSeries := range gotResult.Series {
								for j, gotPoint := range gotSeries.Points {
									if gotPoint.Value != wantResult.Series[i].Points[j].Value {
										t.Errorf("got %v, want %v", gotPoint.Value, wantResult.Series[i].Points[j].Value)
									}
								}
							}
						}
					}
				}
			}
		})
	}
}
@ -6,6 +6,7 @@ import (
	"fmt"
	"sort"
	"strconv"
+	"strings"
	"time"

	"github.com/google/uuid"
@ -474,18 +475,94 @@ func (t TimeAggregation) IsRateOperator() bool {
type SpaceAggregation string

const (
	SpaceAggregationUnspecified  SpaceAggregation = ""
	SpaceAggregationSum          SpaceAggregation = "sum"
	SpaceAggregationAvg          SpaceAggregation = "avg"
	SpaceAggregationMin          SpaceAggregation = "min"
	SpaceAggregationMax          SpaceAggregation = "max"
	SpaceAggregationCount        SpaceAggregation = "count"
	SpaceAggregationPercentile50 SpaceAggregation = "percentile_50"
	SpaceAggregationPercentile75 SpaceAggregation = "percentile_75"
	SpaceAggregationPercentile90 SpaceAggregation = "percentile_90"
	SpaceAggregationPercentile95 SpaceAggregation = "percentile_95"
	SpaceAggregationPercentile99 SpaceAggregation = "percentile_99"
)

func IsPercentileOperator(operator SpaceAggregation) bool {
	switch operator {
	case SpaceAggregationPercentile50,
		SpaceAggregationPercentile75,
		SpaceAggregationPercentile90,
		SpaceAggregationPercentile95,
		SpaceAggregationPercentile99:
		return true
	default:
		return false
	}
}

func GetPercentileFromOperator(operator SpaceAggregation) float64 {
	// This could be done with a map, but it's just easier to read this way
	switch operator {
	case SpaceAggregationPercentile50:
		return 0.5
	case SpaceAggregationPercentile75:
		return 0.75
	case SpaceAggregationPercentile90:
		return 0.9
	case SpaceAggregationPercentile95:
		return 0.95
	case SpaceAggregationPercentile99:
		return 0.99
	default:
		return 0
	}
}

type FunctionName string

const (
	FunctionNameCutOffMin FunctionName = "cutOffMin"
	FunctionNameCutOffMax FunctionName = "cutOffMax"
	FunctionNameClampMin  FunctionName = "clampMin"
	FunctionNameClampMax  FunctionName = "clampMax"
	FunctionNameAbsolute  FunctionName = "absolute"
	FunctionNameLog2      FunctionName = "log2"
	FunctionNameLog10     FunctionName = "log10"
	FunctionNameCumSum    FunctionName = "cumSum"
	FunctionNameEWMA3     FunctionName = "ewma3"
	FunctionNameEWMA5     FunctionName = "ewma5"
	FunctionNameEWMA7     FunctionName = "ewma7"
	FunctionNameMedian3   FunctionName = "median3"
	FunctionNameMedian5   FunctionName = "median5"
	FunctionNameMedian7   FunctionName = "median7"
)

func (f FunctionName) Validate() error {
	switch f {
	case FunctionNameCutOffMin,
		FunctionNameCutOffMax,
		FunctionNameClampMin,
		FunctionNameClampMax,
		FunctionNameAbsolute,
		FunctionNameLog2,
		FunctionNameLog10,
		FunctionNameCumSum,
		FunctionNameEWMA3,
		FunctionNameEWMA5,
		FunctionNameEWMA7,
		FunctionNameMedian3,
		FunctionNameMedian5,
		FunctionNameMedian7:
		return nil
	default:
		return fmt.Errorf("invalid function name: %s", f)
	}
}

type Function struct {
-	Category string        `json:"category"`
-	Name     string        `json:"name"`
+	Name     FunctionName  `json:"name"`
	Args []interface{} `json:"args,omitempty"`
}

type BuilderQuery struct {
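A short illustrative sketch (not part of the commit) of how the two new helpers are expected to be used together when a percentile space aggregation is selected; the 0.990 literal in the generated ClickHouse SQL shown in the tests above comes from exactly this mapping.

	agg := v3.SpaceAggregationPercentile99
	if v3.IsPercentileOperator(agg) {
		quantile := v3.GetPercentileFromOperator(agg) // 0.99
		expr := fmt.Sprintf("histogramQuantile(..., %.3f)", quantile)
		_ = expr // "histogramQuantile(..., 0.990)"
	}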
@ -509,7 +586,6 @@ type BuilderQuery struct {
	SelectColumns    []AttributeKey   `json:"selectColumns,omitempty"`
	TimeAggregation  TimeAggregation  `json:"timeAggregation,omitempty"`
	SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"`
-	Quantile         float64          `json:"quantile,omitempty"`
	Functions        []Function       `json:"functions,omitempty"`
}

@ -528,7 +604,7 @@ func (b *BuilderQuery) Validate() error {
		return fmt.Errorf("data source is invalid: %w", err)
	}
	if b.DataSource == DataSourceMetrics {
-		if b.TimeAggregation == TimeAggregationUnspecified && b.Quantile == 0 {
+		if b.TimeAggregation == TimeAggregationUnspecified {
			if err := b.AggregateOperator.Validate(); err != nil {
				return fmt.Errorf("aggregate operator is invalid: %w", err)
			}
@ -562,6 +638,14 @@ func (b *BuilderQuery) Validate() error {
		}
	}

	if b.Having != nil {
		for _, having := range b.Having {
			if err := having.Operator.Validate(); err != nil {
				return fmt.Errorf("having operator is invalid: %w", err)
			}
		}
	}

	for _, selectColumn := range b.SelectColumns {
		if err := selectColumn.Validate(); err != nil {
			return fmt.Errorf("select column is invalid %w", err)
@ -571,6 +655,15 @@ func (b *BuilderQuery) Validate() error {
	if b.Expression == "" {
		return fmt.Errorf("expression is required")
	}

	if len(b.Functions) > 0 {
		for _, function := range b.Functions {
			if err := function.Name.Validate(); err != nil {
				return fmt.Errorf("function name is invalid: %w", err)
			}
		}
	}

	return nil
}

@ -655,10 +748,43 @@ type OrderBy struct {
	IsColumn bool `json:"-"`
}

// See HAVING_OPERATORS in queryBuilder.ts

type HavingOperator string

const (
	HavingOperatorEqual           HavingOperator = "="
	HavingOperatorNotEqual        HavingOperator = "!="
	HavingOperatorGreaterThan     HavingOperator = ">"
	HavingOperatorGreaterThanOrEq HavingOperator = ">="
	HavingOperatorLessThan        HavingOperator = "<"
	HavingOperatorLessThanOrEq    HavingOperator = "<="
	HavingOperatorIn              HavingOperator = "IN"
	HavingOperatorNotIn           HavingOperator = "NOT_IN"
)

func (h HavingOperator) Validate() error {
	switch h {
	case HavingOperatorEqual,
		HavingOperatorNotEqual,
		HavingOperatorGreaterThan,
		HavingOperatorGreaterThanOrEq,
		HavingOperatorLessThan,
		HavingOperatorLessThanOrEq,
		HavingOperatorIn,
		HavingOperatorNotIn,
		HavingOperator(strings.ToLower(string(HavingOperatorIn))),
		HavingOperator(strings.ToLower(string(HavingOperatorNotIn))):
		return nil
	default:
		return fmt.Errorf("invalid having operator: %s", h)
	}
}

type Having struct {
	ColumnName string      `json:"columnName"`
-	Operator   string         `json:"op"`
+	Operator   HavingOperator `json:"op"`
	Value      interface{} `json:"value"`
}

func (h *Having) CacheKey() string {
Loading…
x
Reference in New Issue
Block a user