Merge branch 'develop' into release/v0.47.x

This commit is contained in:
Prashant Shahi 2024-06-05 19:43:32 +05:30
commit a6e005e3a2
29 changed files with 505 additions and 172 deletions

View File

@ -1,25 +1,33 @@
/* eslint-disable prefer-destructuring */
import './CodeCopyBtn.scss'; import './CodeCopyBtn.scss';
import { CheckOutlined, CopyOutlined } from '@ant-design/icons'; import { CheckOutlined, CopyOutlined } from '@ant-design/icons';
import cx from 'classnames'; import cx from 'classnames';
import { useState } from 'react'; import React, { useState } from 'react';
export default function CodeCopyBtn({ function CodeCopyBtn({
children, children,
onCopyClick,
}: { }: {
children: React.ReactNode; children: React.ReactNode;
onCopyClick?: (additionalInfo?: Record<string, unknown>) => void;
}): JSX.Element { }): JSX.Element {
const [isSnippetCopied, setIsSnippetCopied] = useState(false); const [isSnippetCopied, setIsSnippetCopied] = useState(false);
const handleClick = (): void => { const handleClick = (): void => {
let copiedText = '';
if (children && Array.isArray(children)) { if (children && Array.isArray(children)) {
setIsSnippetCopied(true); setIsSnippetCopied(true);
navigator.clipboard.writeText(children[0].props.children[0]).finally(() => { navigator.clipboard.writeText(children[0].props.children[0]).finally(() => {
copiedText = (children[0].props.children[0] as string).slice(0, 200); // slicing is done due to the limitation in accepted char length in attributes
setTimeout(() => { setTimeout(() => {
setIsSnippetCopied(false); setIsSnippetCopied(false);
}, 1000); }, 1000);
}); });
copiedText = (children[0].props.children[0] as string).slice(0, 200);
} }
onCopyClick?.({ copiedText });
}; };
return ( return (
@ -30,3 +38,9 @@ export default function CodeCopyBtn({
</div> </div>
); );
} }
CodeCopyBtn.defaultProps = {
onCopyClick: (): void => {},
};
export default CodeCopyBtn;

View File

@ -2,6 +2,8 @@
/* eslint-disable react/jsx-props-no-spreading */ /* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable @typescript-eslint/explicit-function-return-type */ /* eslint-disable @typescript-eslint/explicit-function-return-type */
import logEvent from 'api/common/logEvent';
import { isEmpty } from 'lodash-es';
import ReactMarkdown from 'react-markdown'; import ReactMarkdown from 'react-markdown';
import { CodeProps } from 'react-markdown/lib/ast-to-react'; import { CodeProps } from 'react-markdown/lib/ast-to-react';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
@ -15,10 +17,28 @@ interface LinkProps {
children: React.ReactElement; children: React.ReactElement;
} }
function Pre({ children }: { children: React.ReactNode }): JSX.Element { function Pre({
children,
elementDetails,
trackCopyAction,
}: {
children: React.ReactNode;
trackCopyAction: boolean;
elementDetails: Record<string, unknown>;
}): JSX.Element {
const { trackingTitle = '', ...rest } = elementDetails;
const handleClick = (additionalInfo?: Record<string, unknown>): void => {
const trackingData = { ...rest, copiedContent: additionalInfo };
if (trackCopyAction && !isEmpty(trackingTitle)) {
logEvent(trackingTitle as string, trackingData);
}
};
return ( return (
<pre className="code-snippet-container"> <pre className="code-snippet-container">
<CodeCopyBtn>{children}</CodeCopyBtn> <CodeCopyBtn onCopyClick={handleClick}>{children}</CodeCopyBtn>
{children} {children}
</pre> </pre>
); );
@ -83,9 +103,13 @@ function CustomTag({ color }: { color: string }): JSX.Element {
function MarkdownRenderer({ function MarkdownRenderer({
markdownContent, markdownContent,
variables, variables,
trackCopyAction,
elementDetails,
}: { }: {
markdownContent: any; markdownContent: any;
variables: any; variables: any;
trackCopyAction?: boolean;
elementDetails?: Record<string, unknown>;
}): JSX.Element { }): JSX.Element {
const interpolatedMarkdown = interpolateMarkdown(markdownContent, variables); const interpolatedMarkdown = interpolateMarkdown(markdownContent, variables);
@ -96,7 +120,12 @@ function MarkdownRenderer({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore // @ts-ignore
a: Link, a: Link,
pre: Pre, pre: ({ children }) =>
Pre({
children,
elementDetails: elementDetails ?? {},
trackCopyAction: !!trackCopyAction,
}),
code: Code, code: Code,
customtag: CustomTag, customtag: CustomTag,
}} }}
@ -106,4 +135,9 @@ function MarkdownRenderer({
); );
} }
MarkdownRenderer.defaultProps = {
elementDetails: {},
trackCopyAction: false,
};
export { Code, Link, MarkdownRenderer, Pre }; export { Code, Link, MarkdownRenderer, Pre };

View File

@ -7,3 +7,10 @@
border-color: var(--bg-amber-300) !important; border-color: var(--bg-amber-300) !important;
} }
} }
/* Overlay class for the FacingIssueBtn tooltip: keep the hover text on one
   line and let the inner bubble grow to fit it instead of wrapping. */
.tooltip-overlay {
  /* NOTE(review): `text-wrap: nowrap` is a newer property — confirm target
     browser support; `white-space: nowrap` is the more established spelling. */
  text-wrap: nowrap;
  .ant-tooltip-inner {
    width: max-content;
  }
}

View File

@ -39,7 +39,12 @@ function FacingIssueBtn({
return isCloudUserVal && isChatSupportEnabled ? ( // Note: we would need to move this condition to license based in future return isCloudUserVal && isChatSupportEnabled ? ( // Note: we would need to move this condition to license based in future
<div className="facing-issue-button"> <div className="facing-issue-button">
<Tooltip title={onHoverText} autoAdjustOverflow> <Tooltip
title={onHoverText}
autoAdjustOverflow
style={{ padding: 8 }}
overlayClassName="tooltip-overlay"
>
<Button <Button
className={cx('periscope-btn', 'facing-issue-button', className)} className={cx('periscope-btn', 'facing-issue-button', className)}
onClick={handleFacingIssuesClick} onClick={handleFacingIssuesClick}

View File

@ -55,3 +55,20 @@ State: ${(alertDef as any)?.state || ''}
Alert Id: ${ruleId} Alert Id: ${ruleId}
Thanks`; Thanks`;
// Prefilled chat-support message used by FacingIssueBtn on the Integrations
// list page. The literal's exact line breaks are part of the message body.
export const integrationsListMessage = `Hi Team,
I need help with Integrations.
Thanks`;
// Builds the prefilled chat-support message for the integration detail page.
// `selectedIntegration` is interpolated as the Integration Id so support can
// identify which integration the user is configuring. Line breaks inside the
// template literal are intentional and part of the message body.
export const integrationDetailMessage = (
selectedIntegration: string,
): string => `
Hi Team,
I need help in configuring this integration.
Integration Id: ${selectedIntegration}
Thanks`;

View File

@ -1,7 +1,9 @@
import './Integrations.styles.scss'; import './Integrations.styles.scss';
import { Color } from '@signozhq/design-tokens'; import { Color } from '@signozhq/design-tokens';
import { Input, Typography } from 'antd'; import { Flex, Input, Typography } from 'antd';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { integrationsListMessage } from 'components/facingIssueBtn/util';
import { Search } from 'lucide-react'; import { Search } from 'lucide-react';
import { Dispatch, SetStateAction } from 'react'; import { Dispatch, SetStateAction } from 'react';
@ -19,9 +21,18 @@ function Header(props: HeaderProps): JSX.Element {
return ( return (
<div className="integrations-header"> <div className="integrations-header">
<Typography.Title className="title">Integrations</Typography.Title> <Typography.Title className="title">Integrations</Typography.Title>
<Typography.Text className="subtitle"> <Flex justify="space-between" align="center">
Manage Integrations for this workspace <Typography.Text className="subtitle">
</Typography.Text> Manage Integrations for this workspace
</Typography.Text>
<FacingIssueBtn
attributes={{ screen: 'Integrations list page' }}
eventName="Integrations: Facing issues in integrations"
buttonText="Facing issues with integrations"
message={integrationsListMessage}
onHoverText="Click here to get help with integrations"
/>
</Flex>
<Input <Input
placeholder="Search for an integration..." placeholder="Search for an integration..."

View File

@ -1,6 +1,7 @@
import './IntegrationDetailContentTabs.styles.scss'; import './IntegrationDetailContentTabs.styles.scss';
import { Button, Typography } from 'antd'; import { Button, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames'; import cx from 'classnames';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer'; import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import useAnalytics from 'hooks/analytics/useAnalytics'; import useAnalytics from 'hooks/analytics/useAnalytics';
@ -17,12 +18,16 @@ function Configure(props: ConfigurationProps): JSX.Element {
const { configuration, integrationId } = props; const { configuration, integrationId } = props;
const [selectedConfigStep, setSelectedConfigStep] = useState(0); const [selectedConfigStep, setSelectedConfigStep] = useState(0);
const handleMenuClick = (index: number): void => {
setSelectedConfigStep(index);
};
const { trackEvent } = useAnalytics(); const { trackEvent } = useAnalytics();
const handleMenuClick = (index: number, config: any): void => {
setSelectedConfigStep(index);
logEvent('Integrations Detail Page: Configure tab', {
sectionName: config?.title,
integrationId,
});
};
useEffect(() => { useEffect(() => {
trackEvent( trackEvent(
INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION, INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION,
@ -33,6 +38,12 @@ function Configure(props: ConfigurationProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);
const markdownDetailsForTracking = {
trackingTitle: `Integrations Detail Page: Copy button`,
sectionName: configuration[selectedConfigStep].title,
integrationId,
};
return ( return (
<div className="integration-detail-configure"> <div className="integration-detail-configure">
<div className="configure-menu"> <div className="configure-menu">
@ -43,7 +54,7 @@ function Configure(props: ConfigurationProps): JSX.Element {
className={cx('configure-menu-item', { className={cx('configure-menu-item', {
active: selectedConfigStep === index, active: selectedConfigStep === index,
})} })}
onClick={(): void => handleMenuClick(index)} onClick={(): void => handleMenuClick(index, config)}
> >
<Typography.Text className="configure-text"> <Typography.Text className="configure-text">
{config.title} {config.title}
@ -55,6 +66,8 @@ function Configure(props: ConfigurationProps): JSX.Element {
<MarkdownRenderer <MarkdownRenderer
variables={{}} variables={{}}
markdownContent={configuration[selectedConfigStep].instructions} markdownContent={configuration[selectedConfigStep].instructions}
elementDetails={markdownDetailsForTracking}
trackCopyAction
/> />
</div> </div>
</div> </div>

View File

@ -24,6 +24,7 @@ function Overview(props: OverviewProps): JSX.Element {
]; ];
const assetLabelMap = ['Pipelines', 'Dashboards', 'Alerts']; const assetLabelMap = ['Pipelines', 'Dashboards', 'Alerts'];
return ( return (
<div className="integration-detail-overview"> <div className="integration-detail-overview">
<div className="integration-detail-overview-left-container"> <div className="integration-detail-overview-left-container">

View File

@ -4,7 +4,9 @@
import './IntegrationDetailPage.styles.scss'; import './IntegrationDetailPage.styles.scss';
import { Color } from '@signozhq/design-tokens'; import { Color } from '@signozhq/design-tokens';
import { Button, Skeleton, Typography } from 'antd'; import { Button, Flex, Skeleton, Typography } from 'antd';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { integrationDetailMessage } from 'components/facingIssueBtn/util';
import { useGetIntegration } from 'hooks/Integrations/useGetIntegration'; import { useGetIntegration } from 'hooks/Integrations/useGetIntegration';
import { useGetIntegrationStatus } from 'hooks/Integrations/useGetIntegrationStatus'; import { useGetIntegrationStatus } from 'hooks/Integrations/useGetIntegrationStatus';
import { defaultTo } from 'lodash-es'; import { defaultTo } from 'lodash-es';
@ -64,16 +66,30 @@ function IntegrationDetailPage(props: IntegrationDetailPageProps): JSX.Element {
return ( return (
<div className="integration-detail-content"> <div className="integration-detail-content">
<Button <Flex justify="space-between" align="center">
type="text" <Button
icon={<ArrowLeft size={14} />} type="text"
className="all-integrations-btn" icon={<ArrowLeft size={14} />}
onClick={(): void => { className="all-integrations-btn"
setSelectedIntegration(null); onClick={(): void => {
}} setSelectedIntegration(null);
> }}
All Integrations >
</Button> All Integrations
</Button>
<FacingIssueBtn
attributes={{
screen: 'Integrations detail page',
activeTab: activeDetailTab,
integrationTitle: integrationData?.title || '',
integrationId: selectedIntegration,
}}
eventName="Integrations: Facing issues in integrations"
buttonText="Facing issues with integration"
message={integrationDetailMessage(selectedIntegration)}
onHoverText="Click here to get help with this integration"
/>
</Flex>
{loading ? ( {loading ? (
<div className="loading-integration-details"> <div className="loading-integration-details">

View File

@ -172,6 +172,20 @@
} }
} }
} }
/* Banner row at the bottom of the Integrations list that hosts the
   "request an integration" input + submit button. */
.request-entity-container {
  display: flex;
  flex-direction: row;
  justify-content: space-between;
  align-items: center;
  border-radius: 4px;
  border: 0.5px solid rgba(78, 116, 248, 0.2);
  background: rgba(69, 104, 220, 0.1);
  padding: 12px;
  /* 24px top, 0 sides, 80px bottom — the original declared `margin: 24px 0`
     and then overrode it with `margin-bottom: 80px`; one shorthand is clearer. */
  margin: 24px 0 80px;
}
} }
} }

View File

@ -8,6 +8,7 @@ import { useHistory, useLocation } from 'react-router-dom';
import Header from './Header'; import Header from './Header';
import IntegrationDetailPage from './IntegrationDetailPage/IntegrationDetailPage'; import IntegrationDetailPage from './IntegrationDetailPage/IntegrationDetailPage';
import IntegrationsList from './IntegrationsList'; import IntegrationsList from './IntegrationsList';
import { RequestIntegrationBtn } from './RequestIntegrationBtn';
import { INTEGRATION_TELEMETRY_EVENTS } from './utils'; import { INTEGRATION_TELEMETRY_EVENTS } from './utils';
function Integrations(): JSX.Element { function Integrations(): JSX.Element {
@ -65,6 +66,7 @@ function Integrations(): JSX.Element {
searchTerm={searchTerm} searchTerm={searchTerm}
setActiveDetailTab={setActiveDetailTab} setActiveDetailTab={setActiveDetailTab}
/> />
<RequestIntegrationBtn />
</> </>
)} )}
</div> </div>

View File

@ -0,0 +1,95 @@
import './Integrations.styles.scss';
import { LoadingOutlined } from '@ant-design/icons';
import { Button, Input, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useNotifications } from 'hooks/useNotifications';
import { Check } from 'lucide-react';
import { useState } from 'react';
import { useTranslation } from 'react-i18next';
/**
 * Inline call-to-action on the Integrations list page letting users request
 * an integration that is not yet available.
 *
 * Submitting fires an `Integration Requested` analytics event via `logEvent`
 * and reports the outcome through the notifications hook; the button is
 * disabled while a request is in flight or while the input is blank.
 */
export function RequestIntegrationBtn(): JSX.Element {
	const [
		isSubmittingRequestForIntegration,
		setIsSubmittingRequestForIntegration,
	] = useState(false);
	const [requestedIntegrationName, setRequestedIntegrationName] = useState('');

	const { notifications } = useNotifications();
	const { t } = useTranslation(['common']);

	const handleRequestIntegrationSubmit = async (): Promise<void> => {
		setIsSubmittingRequestForIntegration(true);
		try {
			const response = await logEvent('Integration Requested', {
				screen: 'Integration list page',
				integration: requestedIntegrationName,
			});

			if (response.statusCode === 200) {
				notifications.success({
					message: 'Integration Request Submitted',
				});
			} else {
				notifications.error({
					message:
						response.error ||
						t('something_went_wrong', {
							ns: 'common',
						}),
				});
			}
		} catch (error) {
			notifications.error({
				message: t('something_went_wrong', {
					ns: 'common',
				}),
			});
		} finally {
			// Single reset point — the original repeated this call in all
			// three branches (success / error response / thrown error).
			setIsSubmittingRequestForIntegration(false);
		}
	};

	return (
		<div className="request-entity-container">
			<Typography.Text>
				{/* typo fix: "youre" -> "you're" */}
				Cannot find what you&apos;re looking for? Request more integrations
			</Typography.Text>
			<div className="form-section">
				<Space.Compact style={{ width: '100%' }}>
					<Input
						placeholder="Enter integration name..."
						style={{ width: 300, marginBottom: 0 }}
						value={requestedIntegrationName}
						onChange={(e): void => setRequestedIntegrationName(e.target.value)}
					/>
					<Button
						className="periscope-btn primary"
						icon={
							isSubmittingRequestForIntegration ? (
								<LoadingOutlined />
							) : (
								<Check size={12} />
							)
						}
						type="primary"
						onClick={handleRequestIntegrationSubmit}
						disabled={
							// `requestedIntegrationName` is always a string (useState('')),
							// so trim() alone covers both the empty and whitespace-only cases.
							isSubmittingRequestForIntegration ||
							requestedIntegrationName.trim().length === 0
						}
					>
						Submit
					</Button>
				</Space.Compact>
			</div>
		</div>
	);
}

View File

@ -4433,7 +4433,7 @@ func (r *ClickHouseReader) GetLogAttributeValues(ctx context.Context, req *v3.Fi
} }
func readRow(vars []interface{}, columnNames []string) ([]string, map[string]string, []map[string]string, v3.Point) { func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([]string, map[string]string, []map[string]string, v3.Point) {
// Each row will have a value and a timestamp, and an optional list of label values // Each row will have a value and a timestamp, and an optional list of label values
// example: {Timestamp: ..., Value: ...} // example: {Timestamp: ..., Value: ...}
// The timestamp may also not present in some cases where the time series is reduced to single value // The timestamp may also not present in some cases where the time series is reduced to single value
@ -4477,7 +4477,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
case *time.Time: case *time.Time:
point.Timestamp = v.UnixMilli() point.Timestamp = v.UnixMilli()
case *float64, *float32: case *float64, *float32:
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok { if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
point.Value = float64(reflect.ValueOf(v).Elem().Float()) point.Value = float64(reflect.ValueOf(v).Elem().Float())
} else { } else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())) groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
@ -4486,8 +4486,8 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
} }
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()) groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
} }
case *uint8, *uint64, *uint16, *uint32: case *uint, *uint8, *uint64, *uint16, *uint32:
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok { if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
point.Value = float64(reflect.ValueOf(v).Elem().Uint()) point.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else { } else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())) groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
@ -4496,8 +4496,8 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
} }
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()) groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
} }
case *int8, *int16, *int32, *int64: case *int, *int8, *int16, *int32, *int64:
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok { if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
point.Value = float64(reflect.ValueOf(v).Elem().Int()) point.Value = float64(reflect.ValueOf(v).Elem().Int())
} else { } else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())) groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
@ -4520,7 +4520,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
return groupBy, groupAttributes, groupAttributesArray, point return groupBy, groupAttributes, groupAttributesArray, point
} }
func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNames []string) ([]*v3.Series, error) { func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNames []string, countOfNumberCols int) ([]*v3.Series, error) {
// when groupBy is applied, each combination of cartesian product // when groupBy is applied, each combination of cartesian product
// of attribute values is a separate series. Each item in seriesToPoints // of attribute values is a separate series. Each item in seriesToPoints
// represent a unique series where the key is sorted attribute values joined // represent a unique series where the key is sorted attribute values joined
@ -4555,7 +4555,7 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
if err := rows.Scan(vars...); err != nil { if err := rows.Scan(vars...); err != nil {
return nil, err return nil, err
} }
groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames) groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames, countOfNumberCols)
// skip the point if the value is NaN or Inf // skip the point if the value is NaN or Inf
// are they ever useful enough to be returned? // are they ever useful enough to be returned?
if math.IsNaN(metricPoint.Value) || math.IsInf(metricPoint.Value, 0) { if math.IsNaN(metricPoint.Value) || math.IsInf(metricPoint.Value, 0) {
@ -4574,20 +4574,7 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
var seriesList []*v3.Series var seriesList []*v3.Series
for _, key := range keys { for _, key := range keys {
points := seriesToPoints[key] points := seriesToPoints[key]
// find the grouping sets point for the series series := v3.Series{Labels: seriesToAttrs[key], Points: points, LabelsArray: labelsArray[key]}
// this is the point with the zero timestamp
// if there is no such point, then the series is not grouped
// and we can skip this step
var groupingSetsPoint *v3.Point
for idx, point := range points {
if point.Timestamp <= 0 {
groupingSetsPoint = &point
// remove the grouping sets point from the list of points
points = append(points[:idx], points[idx+1:]...)
break
}
}
series := v3.Series{Labels: seriesToAttrs[key], Points: points, GroupingSetsPoint: groupingSetsPoint, LabelsArray: labelsArray[key]}
seriesList = append(seriesList, &series) seriesList = append(seriesList, &series)
} }
return seriesList, getPersonalisedError(rows.Err()) return seriesList, getPersonalisedError(rows.Err())
@ -4627,11 +4614,28 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri
columnNames = rows.Columns() columnNames = rows.Columns()
vars = make([]interface{}, len(columnTypes)) vars = make([]interface{}, len(columnTypes))
) )
var countOfNumberCols int
for i := range columnTypes { for i := range columnTypes {
vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
switch columnTypes[i].ScanType().Kind() {
case reflect.Float32,
reflect.Float64,
reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Int,
reflect.Int8,
reflect.Int16,
reflect.Int32,
reflect.Int64:
countOfNumberCols++
}
} }
return readRowsForTimeSeriesResult(rows, vars, columnNames) return readRowsForTimeSeriesResult(rows, vars, columnNames, countOfNumberCols)
} }
// GetListResultV3 runs the query and returns list of rows // GetListResultV3 runs the query and returns list of rows

View File

@ -217,11 +217,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery) (string, erro
// `ts` is always added to the group by clause // `ts` is always added to the group by clause
func groupingSets(tags ...string) string { func groupingSets(tags ...string) string {
withTs := append(tags, "ts") withTs := append(tags, "ts")
if len(withTs) > 1 { return strings.Join(withTs, ", ")
return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
} else {
return strings.Join(withTs, ", ")
}
} }
// groupBy returns a string of comma separated tags for group by clause // groupBy returns a string of comma separated tags for group by clause

View File

@ -93,7 +93,7 @@ func TestPrepareTableQuery(t *testing.T) {
}, },
start: 1701794980000, start: 1701794980000,
end: 1701796780000, end: 1701796780000,
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as 
per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
} }

View File

@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
}, },
start: 1701794980000, start: 1701794980000,
end: 1701796780000, end: 1701796780000,
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as 
per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
} }

View File

@ -95,7 +95,7 @@ func TestPrepareTableQuery(t *testing.T) {
}, },
start: 1701794980000, start: 1701794980000,
end: 1701796780000, end: 1701796780000,
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
} }

View File

@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
}, },
start: 1701794980000, start: 1701794980000,
end: 1701796780000, end: 1701796780000,
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
{ {
name: "test time aggregation = rate, space aggregation percentile99, type = ExponentialHistogram", name: "test time aggregation = rate, space aggregation percentile99, type = ExponentialHistogram",
@ -244,7 +244,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
}, },
start: 1701794980000, start: 1701794980000,
end: 1701796780000, end: 1701796780000,
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
} }

View File

@ -11,11 +11,7 @@ import (
// `ts` is always added to the group by clause // `ts` is always added to the group by clause
func groupingSets(tags ...string) string { func groupingSets(tags ...string) string {
withTs := append(tags, "ts") withTs := append(tags, "ts")
if len(withTs) > 1 { return strings.Join(withTs, ", ")
return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
} else {
return strings.Join(withTs, ", ")
}
} }
// GroupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause // GroupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause

View File

@ -193,7 +193,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate, TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum, SpaceAggregation: v3.SpaceAggregationSum,
}, },
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) 
as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
{ {
name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by", name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
@ -226,7 +226,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate, TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum, SpaceAggregation: v3.SpaceAggregationSum,
}, },
expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC", expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, 
any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
}, },
} }
@ -292,7 +292,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate, TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum, SpaceAggregation: v3.SpaceAggregationSum,
}, },
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
} }
@ -344,7 +344,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
Disabled: false, Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99, SpaceAggregation: v3.SpaceAggregationPercentile99,
}, },
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER 
rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
}, },
{ {
name: "test temporality = cumulative, quantile = 0.99 without group by", name: "test temporality = cumulative, quantile = 0.99 without group by",
@ -374,7 +374,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
Disabled: false, Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99, SpaceAggregation: v3.SpaceAggregationPercentile99,
}, },
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value 
FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
}, },
} }
@ -426,7 +426,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
Disabled: false, Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99, SpaceAggregation: v3.SpaceAggregationPercentile99,
}, },
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY 
service_name ASC, ts ASC",
}, },
{ {
name: "test temporality = delta, quantile = 0.99 no group by", name: "test temporality = delta, quantile = 0.99 no group by",
@ -456,7 +456,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
Disabled: false, Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99, SpaceAggregation: v3.SpaceAggregationPercentile99,
}, },
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
}, },
} }
@ -520,7 +520,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) {
Expression: "A", Expression: "A",
Disabled: false, Disabled: false,
}, },
expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC", expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
}, },
} }

View File

@ -100,7 +100,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
points := make([]v3.Point, 0) points := make([]v3.Point, 0)
for pointIdx := range series.Points { for pointIdx := range series.Points {
point := series.Points[pointIdx] point := series.Points[pointIdx]
if point.Timestamp > 0 { if point.Timestamp >= 0 {
points = append(points, point) points = append(points, point)
} else { } else {
pointsWithNegativeTimestamps++ pointsWithNegativeTimestamps++

View File

@ -100,7 +100,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
points := make([]v3.Point, 0) points := make([]v3.Point, 0)
for pointIdx := range series.Points { for pointIdx := range series.Points {
point := series.Points[pointIdx] point := series.Points[pointIdx]
if point.Timestamp > 0 { if point.Timestamp >= 0 {
points = append(points, point) points = append(points, point)
} else { } else {
pointsWithNegativeTimestamps++ pointsWithNegativeTimestamps++

View File

@ -297,9 +297,11 @@ const (
// written clickhouse query. The column alias indcate which value is // written clickhouse query. The column alias indcate which value is
// to be considered as final result (or target) // to be considered as final result (or target)
var ReservedColumnTargetAliases = map[string]struct{}{ var ReservedColumnTargetAliases = map[string]struct{}{
"result": {}, "__result": {},
"res": {}, "__value": {},
"value": {}, "result": {},
"res": {},
"value": {},
} }
// logsPPLPfx is a short constant for logsPipelinePrefix // logsPPLPfx is a short constant for logsPipelinePrefix

View File

@ -989,10 +989,9 @@ type LogsLiveTailClient struct {
} }
type Series struct { type Series struct {
Labels map[string]string `json:"labels"` Labels map[string]string `json:"labels"`
LabelsArray []map[string]string `json:"labelsArray"` LabelsArray []map[string]string `json:"labelsArray"`
Points []Point `json:"values"` Points []Point `json:"values"`
GroupingSetsPoint *Point `json:"-"`
} }
func (s *Series) SortPoints() { func (s *Series) SortPoints() {

View File

@ -24,10 +24,13 @@ func isSubset(super, sub map[string]string) bool {
} }
// Function to find unique label sets // Function to find unique label sets
func findUniqueLabelSets(results []*v3.Result) []map[string]string { func findUniqueLabelSets(results []*v3.Result, queriesInExpression map[string]struct{}) []map[string]string {
allLabelSets := make([]map[string]string, 0) allLabelSets := make([]map[string]string, 0)
// The size of the `results` small, It is the number of queries in the request // The size of the `results` small, It is the number of queries in the request
for _, result := range results { for _, result := range results {
if _, ok := queriesInExpression[result.QueryName]; !ok {
continue
}
// The size of the `result.Series` slice is usually small, It is the number of series in the query result. // The size of the `result.Series` slice is usually small, It is the number of series in the query result.
// We will limit the number of series in the query result to order of 100-1000. // We will limit the number of series in the query result to order of 100-1000.
for _, series := range result.Series { for _, series := range result.Series {
@ -120,7 +123,15 @@ func joinAndCalculate(
} }
} }
if len(expression.Vars()) != len(values) { canEval := true
for _, v := range expression.Vars() {
if _, ok := values[v]; !ok {
canEval = false
}
}
if !canEval {
// not enough values for expression evaluation // not enough values for expression evaluation
continue continue
} }
@ -154,7 +165,12 @@ func processResults(
expression *govaluate.EvaluableExpression, expression *govaluate.EvaluableExpression,
canDefaultZero map[string]bool, canDefaultZero map[string]bool,
) (*v3.Result, error) { ) (*v3.Result, error) {
uniqueLabelSets := findUniqueLabelSets(results)
queriesInExpression := make(map[string]struct{})
for _, v := range expression.Vars() {
queriesInExpression[v] = struct{}{}
}
uniqueLabelSets := findUniqueLabelSets(results, queriesInExpression)
newSeries := make([]*v3.Series, 0) newSeries := make([]*v3.Series, 0)
for _, labelSet := range uniqueLabelSets { for _, labelSet := range uniqueLabelSets {

View File

@ -11,9 +11,10 @@ import (
func TestFindUniqueLabelSets(t *testing.T) { func TestFindUniqueLabelSets(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
result []*v3.Result result []*v3.Result
want []map[string]string want []map[string]string
queriesInExpression map[string]struct{}
}{ }{
{ {
name: "test1", name: "test1",
@ -40,6 +41,10 @@ func TestFindUniqueLabelSets(t *testing.T) {
}, },
}, },
}, },
queriesInExpression: map[string]struct{}{
"A": {},
"B": {},
},
want: []map[string]string{ want: []map[string]string{
{ {
"service_name": "frontend", "service_name": "frontend",
@ -96,6 +101,12 @@ func TestFindUniqueLabelSets(t *testing.T) {
}, },
}, },
}, },
queriesInExpression: map[string]struct{}{
"A": {},
"B": {},
"C": {},
"D": {},
},
want: []map[string]string{ want: []map[string]string{
{ {
"service_name": "frontend", "service_name": "frontend",
@ -122,6 +133,10 @@ func TestFindUniqueLabelSets(t *testing.T) {
Series: []*v3.Series{}, Series: []*v3.Series{},
}, },
}, },
queriesInExpression: map[string]struct{}{
"A": {},
"B": {},
},
want: []map[string]string{}, want: []map[string]string{},
}, },
{ {
@ -160,6 +175,10 @@ func TestFindUniqueLabelSets(t *testing.T) {
}, },
}, },
}, },
queriesInExpression: map[string]struct{}{
"A": {},
"B": {},
},
want: []map[string]string{ want: []map[string]string{
{ {
"service_name": "frontend", "service_name": "frontend",
@ -175,7 +194,7 @@ func TestFindUniqueLabelSets(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
got := findUniqueLabelSets(tt.result) got := findUniqueLabelSets(tt.result, tt.queriesInExpression)
if !reflect.DeepEqual(got, tt.want) { if !reflect.DeepEqual(got, tt.want) {
t.Errorf("findUniqueLabelSets() = %v, want %v\n", got, tt.want) t.Errorf("findUniqueLabelSets() = %v, want %v\n", got, tt.want)
} }
@ -1683,3 +1702,135 @@ func TestProcessResultsNoDefaultZero(t *testing.T) {
}) })
} }
} }
func TestProcessResultsMixedQueries(t *testing.T) {
tests := []struct {
name string
results []*v3.Result
want *v3.Result
}{
{
name: "test1",
results: []*v3.Result{
{
QueryName: "A",
Series: []*v3.Series{
{
Labels: map[string]string{
"service_name": "frontend",
"operation": "GET /api",
},
Points: []v3.Point{
{
Timestamp: 1,
Value: 10,
},
{
Timestamp: 2,
Value: 20,
},
},
},
},
},
{
QueryName: "B",
Series: []*v3.Series{
{
Labels: map[string]string{
"service_name": "frontend",
"operation": "GET /api",
},
Points: []v3.Point{
{
Timestamp: 1,
Value: 10,
},
{
Timestamp: 2,
Value: 20,
},
},
},
},
},
{
QueryName: "C",
Series: []*v3.Series{
{
Labels: map[string]string{
"service_name": "redis",
},
Points: []v3.Point{
{
Timestamp: 1,
Value: 30,
},
{
Timestamp: 2,
Value: 50,
},
{
Timestamp: 3,
Value: 45,
},
},
},
},
},
},
want: &v3.Result{
Series: []*v3.Series{
{
Labels: map[string]string{
"service_name": "frontend",
"operation": "GET /api",
},
Points: []v3.Point{
{
Timestamp: 1,
Value: 1,
},
{
Timestamp: 2,
Value: 1,
},
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
expression, err := govaluate.NewEvaluableExpression("A / B")
if err != nil {
t.Errorf("Error parsing expression: %v", err)
}
canDefaultZero := map[string]bool{
"A": true,
"B": true,
"C": true,
}
got, err := processResults(tt.results, expression, canDefaultZero)
if err != nil {
t.Errorf("Error processing results: %v", err)
}
if len(got.Series) != len(tt.want.Series) {
t.Errorf("processResults(): number of sereis - got = %v, want %v", len(got.Series), len(tt.want.Series))
}
for i := range got.Series {
if len(got.Series[i].Points) != len(tt.want.Series[i].Points) {
t.Errorf("processResults(): number of points - got = %v, want %v", got, tt.want)
}
for j := range got.Series[i].Points {
if got.Series[i].Points[j].Value != tt.want.Series[i].Points[j].Value {
t.Errorf("processResults(): got = %v, want %v", got.Series[i].Points[j].Value, tt.want.Series[i].Points[j].Value)
}
}
}
})
}
}

View File

@ -34,10 +34,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -53,10 +49,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -99,10 +91,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
}, },
}, },
@ -128,10 +116,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -147,10 +131,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -194,10 +174,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -223,10 +199,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 240, Value: 240,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 154.5,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -242,10 +214,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 260, Value: 260,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 340,
},
}, },
}, },
}, },
@ -289,10 +257,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 240, Value: 240,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 154.5,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -308,10 +272,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 260, Value: 260,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 340,
},
}, },
}, },
}, },
@ -339,10 +299,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -359,10 +315,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -407,10 +359,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -439,10 +387,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -461,10 +405,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -483,10 +423,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -505,10 +441,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },
@ -558,10 +490,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 19.5, Value: 19.5,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 19.3,
},
}, },
{ {
Labels: map[string]string{ Labels: map[string]string{
@ -580,10 +508,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
Value: 8.83, Value: 8.83,
}, },
}, },
GroupingSetsPoint: &v3.Point{
Timestamp: 0,
Value: 8.83,
},
}, },
}, },
}, },

View File

@ -434,12 +434,19 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) *v3.QueryRangeParamsV3 {
if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL { if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
params := &v3.QueryRangeParamsV3{ params := &v3.QueryRangeParamsV3{
Start: start, Start: start,
End: end, End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)), Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: r.ruleCondition.CompositeQuery, CompositeQuery: &v3.CompositeQuery{
Variables: make(map[string]interface{}, 0), QueryType: r.ruleCondition.CompositeQuery.QueryType,
NoCache: true, PanelType: r.ruleCondition.CompositeQuery.PanelType,
BuilderQueries: make(map[string]*v3.BuilderQuery),
ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
PromQueries: make(map[string]*v3.PromQuery),
Unit: r.ruleCondition.CompositeQuery.Unit,
},
Variables: make(map[string]interface{}, 0),
NoCache: true,
} }
querytemplate.AssignReservedVarsV3(params) querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries { for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
@ -460,8 +467,13 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) *v3.QueryRangeParamsV3 {
r.SetHealth(HealthBad) r.SetHealth(HealthBad)
return params return params
} }
r.ruleCondition.CompositeQuery.ClickHouseQueries[name].Query = query.String() params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
Query: query.String(),
Disabled: chQuery.Disabled,
Legend: chQuery.Legend,
}
} }
return params
} }
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil { if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
@ -1011,7 +1023,7 @@ func (r *ThresholdRule) String() string {
func removeGroupinSetPoints(series v3.Series) []v3.Point { func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point var result []v3.Point
for _, s := range series.Points { for _, s := range series.Points {
if s.Timestamp > 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) { if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s) result = append(result, s)
} }
} }

View File

@ -876,5 +876,9 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
params := rule.prepareQueryRange(ts) params := rule.prepareQueryRange(ts)
assert.Equal(t, c.expectedQuery, params.CompositeQuery.ClickHouseQueries["A"].Query, "Test case %d", idx) assert.Equal(t, c.expectedQuery, params.CompositeQuery.ClickHouseQueries["A"].Query, "Test case %d", idx)
secondTimeParams := rule.prepareQueryRange(ts)
assert.Equal(t, c.expectedQuery, secondTimeParams.CompositeQuery.ClickHouseQueries["A"].Query, "Test case %d", idx)
} }
} }