fix: remove the need for unnecessary dummy time column (#5108)

* fix: remove the need for unnecessary dummy time column

* chore: allow no timestamp

* chore: remove grouping sets
Srikanth Chekuri 2024-06-05 19:33:45 +05:30 committed by GitHub
parent d1c075983f
commit 6a829489a8
14 changed files with 53 additions and 132 deletions

View File

@@ -4433,7 +4433,7 @@ func (r *ClickHouseReader) GetLogAttributeValues(ctx context.Context, req *v3.Fi
 }
-func readRow(vars []interface{}, columnNames []string) ([]string, map[string]string, []map[string]string, v3.Point) {
+func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([]string, map[string]string, []map[string]string, v3.Point) {
 	// Each row will have a value and a timestamp, and an optional list of label values
 	// example: {Timestamp: ..., Value: ...}
 	// The timestamp may also not present in some cases where the time series is reduced to single value
@@ -4477,7 +4477,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
 	case *time.Time:
 		point.Timestamp = v.UnixMilli()
 	case *float64, *float32:
-		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok {
+		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
 			point.Value = float64(reflect.ValueOf(v).Elem().Float())
 		} else {
 			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
@@ -4486,8 +4486,8 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
 			}
 			groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
 		}
-	case *uint8, *uint64, *uint16, *uint32:
-		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok {
+	case *uint, *uint8, *uint64, *uint16, *uint32:
+		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
 			point.Value = float64(reflect.ValueOf(v).Elem().Uint())
 		} else {
 			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
@@ -4496,8 +4496,8 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
 			}
 			groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
 		}
-	case *int8, *int16, *int32, *int64:
-		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok {
+	case *int, *int8, *int16, *int32, *int64:
+		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
 			point.Value = float64(reflect.ValueOf(v).Elem().Int())
 		} else {
 			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
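Note: the float, uint, and int branches above all apply the same rule. A minimal standalone sketch of it (isValueColumn is a hypothetical name, not part of this commit):

    package main

    import "fmt"

    // isValueColumn captures the condition added above: a numeric column is
    // read as the point value if its alias is reserved OR it is the only
    // numeric column in the result set; otherwise it becomes a group-by label.
    func isValueColumn(colName string, reserved map[string]struct{}, countOfNumberCols int) bool {
        _, ok := reserved[colName]
        return ok || countOfNumberCols == 1
    }

    func main() {
        reserved := map[string]struct{}{"value": {}}
        fmt.Println(isValueColumn("value", reserved, 3))         // true: reserved alias
        fmt.Println(isValueColumn("request_count", reserved, 1)) // true: lone numeric column, no dummy alias needed
        fmt.Println(isValueColumn("request_count", reserved, 2)) // false: ambiguous, treated as a group-by label
    }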
@@ -4520,7 +4520,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str
 	return groupBy, groupAttributes, groupAttributesArray, point
 }
-func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNames []string) ([]*v3.Series, error) {
+func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNames []string, countOfNumberCols int) ([]*v3.Series, error) {
 	// when groupBy is applied, each combination of cartesian product
 	// of attribute values is a separate series. Each item in seriesToPoints
 	// represent a unique series where the key is sorted attribute values joined
@@ -4555,7 +4555,7 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
 		if err := rows.Scan(vars...); err != nil {
 			return nil, err
 		}
-		groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames)
+		groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames, countOfNumberCols)
 		// skip the point if the value is NaN or Inf
 		// are they ever useful enough to be returned?
 		if math.IsNaN(metricPoint.Value) || math.IsInf(metricPoint.Value, 0) {
@@ -4574,20 +4574,7 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
 	var seriesList []*v3.Series
 	for _, key := range keys {
 		points := seriesToPoints[key]
-		// find the grouping sets point for the series
-		// this is the point with the zero timestamp
-		// if there is no such point, then the series is not grouped
-		// and we can skip this step
-		var groupingSetsPoint *v3.Point
-		for idx, point := range points {
-			if point.Timestamp <= 0 {
-				groupingSetsPoint = &point
-				// remove the grouping sets point from the list of points
-				points = append(points[:idx], points[idx+1:]...)
-				break
-			}
-		}
-		series := v3.Series{Labels: seriesToAttrs[key], Points: points, GroupingSetsPoint: groupingSetsPoint, LabelsArray: labelsArray[key]}
+		series := v3.Series{Labels: seriesToAttrs[key], Points: points, LabelsArray: labelsArray[key]}
 		seriesList = append(seriesList, &series)
 	}
 	return seriesList, getPersonalisedError(rows.Err())
@@ -4627,11 +4614,28 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri
 		columnNames = rows.Columns()
 		vars        = make([]interface{}, len(columnTypes))
 	)
+	var countOfNumberCols int
 	for i := range columnTypes {
 		vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
+		switch columnTypes[i].ScanType().Kind() {
+		case reflect.Float32,
+			reflect.Float64,
+			reflect.Uint,
+			reflect.Uint8,
+			reflect.Uint16,
+			reflect.Uint32,
+			reflect.Uint64,
+			reflect.Int,
+			reflect.Int8,
+			reflect.Int16,
+			reflect.Int32,
+			reflect.Int64:
+			countOfNumberCols++
+		}
 	}
-	return readRowsForTimeSeriesResult(rows, vars, columnNames)
+	return readRowsForTimeSeriesResult(rows, vars, columnNames, countOfNumberCols)
 }
 // GetListResultV3 runs the query and returns list of rows
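Note: the switch added above counts numeric result columns so that readRow can tell a lone value column apart from numeric group-by labels. A runnable sketch of the same counting step (plain reflect.Type inputs are an assumption; the real code derives them from ClickHouse column scan types):

    package main

    import (
        "fmt"
        "reflect"
    )

    // countNumberCols mirrors the switch added in GetTimeSeriesResultV3.
    func countNumberCols(scanTypes []reflect.Type) int {
        n := 0
        for _, t := range scanTypes {
            switch t.Kind() {
            case reflect.Float32, reflect.Float64,
                reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
                reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
                n++
            }
        }
        return n
    }

    func main() {
        // Columns like (service_name String, ts DateTime, value Float64):
        // time.Time has Kind Struct, so only value counts as numeric.
        types := []reflect.Type{
            reflect.TypeOf(""),
            reflect.TypeOf(struct{}{}), // stand-in for time.Time (Kind Struct)
            reflect.TypeOf(float64(0)),
        }
        fmt.Println(countNumberCols(types)) // 1 -> the lone numeric column becomes the point value
    }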

View File

@@ -217,11 +217,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery) (string, erro
 // `ts` is always added to the group by clause
 func groupingSets(tags ...string) string {
 	withTs := append(tags, "ts")
-	if len(withTs) > 1 {
-		return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
-	} else {
-		return strings.Join(withTs, ", ")
-	}
+	return strings.Join(withTs, ", ")
 }
 // groupBy returns a string of comma separated tags for group by clause
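Note: with at least one tag, the old version emitted a GROUPING SETS clause whose extra per-series aggregate row surfaced as the zero-timestamp "dummy" point; with no tags the two versions already behaved identically. A self-contained before/after comparison (the Old/New names are for illustration only):

    package main

    import (
        "fmt"
        "strings"
    )

    // groupingSetsOld reproduces the removed behavior.
    func groupingSetsOld(tags ...string) string {
        withTs := append(tags, "ts")
        if len(withTs) > 1 {
            return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
        }
        return strings.Join(withTs, ", ")
    }

    // groupingSetsNew is the simplified version from this commit.
    func groupingSetsNew(tags ...string) string {
        return strings.Join(append(tags, "ts"), ", ")
    }

    func main() {
        fmt.Println(groupingSetsOld("service_name")) // GROUPING SETS ( (service_name, ts), (service_name) )
        fmt.Println(groupingSetsNew("service_name")) // service_name, ts
    }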

View File

@@ -93,7 +93,7 @@ func TestPrepareTableQuery(t *testing.T) {
 			},
 			start: 1701794980000,
 			end:   1701796780000,
-			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 	}

View File

@@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
 			},
 			start: 1701794980000,
 			end:   1701796780000,
-			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 	}

View File

@@ -95,7 +95,7 @@ func TestPrepareTableQuery(t *testing.T) {
 			},
 			start: 1701794980000,
 			end:   1701796780000,
-			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 	}

View File

@@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
 			},
 			start: 1701794980000,
 			end:   1701796780000,
-			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 		{
 			name: "test time aggregation = rate, space aggregation percentile99, type = ExponentialHistogram",
@@ -244,7 +244,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
 			},
 			start: 1701794980000,
 			end:   1701796780000,
-			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 	}

View File

@@ -11,11 +11,7 @@ import (
 // `ts` is always added to the group by clause
 func groupingSets(tags ...string) string {
 	withTs := append(tags, "ts")
-	if len(withTs) > 1 {
-		return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
-	} else {
-		return strings.Join(withTs, ", ")
-	}
+	return strings.Join(withTs, ", ")
 }
 // GroupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause

View File

@@ -193,7 +193,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
 				TimeAggregation:  v3.TimeAggregationRate,
 				SpaceAggregation: v3.SpaceAggregationSum,
 			},
-			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 		{
 			name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
@@ -226,7 +226,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
 				TimeAggregation:  v3.TimeAggregationRate,
 				SpaceAggregation: v3.SpaceAggregationSum,
 			},
-			expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
 		},
 	}
@@ -292,7 +292,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) {
 				TimeAggregation:  v3.TimeAggregationRate,
 				SpaceAggregation: v3.SpaceAggregationSum,
 			},
-			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 	}
@@ -344,7 +344,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
 				Disabled:         false,
 				SpaceAggregation: v3.SpaceAggregationPercentile99,
 			},
-			expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 		{
 			name: "test temporality = cumulative, quantile = 0.99 without group by",
@@ -374,7 +374,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
 				Disabled:         false,
 				SpaceAggregation: v3.SpaceAggregationPercentile99,
 			},
-			expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+			expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
 		},
 	}
@@ -426,7 +426,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
 				Disabled:         false,
 				SpaceAggregation: v3.SpaceAggregationPercentile99,
 			},
-			expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+			expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
 		},
 		{
 			name: "test temporality = delta, quantile = 0.99 no group by",
@@ -456,7 +456,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
 				Disabled:         false,
 				SpaceAggregation: v3.SpaceAggregationPercentile99,
 			},
-			expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+			expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
 		},
 	}
@@ -520,7 +520,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) {
 				Expression: "A",
 				Disabled:   false,
 			},
-			expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC",
+			expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
 		},
 	}

View File

@@ -100,7 +100,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
 		points := make([]v3.Point, 0)
 		for pointIdx := range series.Points {
 			point := series.Points[pointIdx]
-			if point.Timestamp > 0 {
+			if point.Timestamp >= 0 {
 				points = append(points, point)
 			} else {
 				pointsWithNegativeTimestamps++
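Note: the relaxation from "> 0" to ">= 0" follows from removing grouping sets (an inference, not stated in the code): a zero timestamp used to mark the synthetic grouping-sets aggregate and had to be dropped, whereas now a timestamp of exactly 0 (the Unix epoch, in milliseconds) is an ordinary point and only negative timestamps are discarded. A tiny sketch of the new predicate (hypothetical helper):

    package main

    import "fmt"

    // keepPoint mirrors the updated filter: epoch (0) is valid, negatives are not.
    func keepPoint(timestampMs int64) bool {
        return timestampMs >= 0
    }

    func main() {
        fmt.Println(keepPoint(0))  // true (was false under the old "> 0" check)
        fmt.Println(keepPoint(-1)) // false: still counted by pointsWithNegativeTimestamps
    }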

View File

@@ -100,7 +100,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
 		points := make([]v3.Point, 0)
 		for pointIdx := range series.Points {
 			point := series.Points[pointIdx]
-			if point.Timestamp > 0 {
+			if point.Timestamp >= 0 {
 				points = append(points, point)
 			} else {
 				pointsWithNegativeTimestamps++

View File

@@ -297,9 +297,11 @@ const (
 // written clickhouse query. The column alias indcate which value is
 // to be considered as final result (or target)
 var ReservedColumnTargetAliases = map[string]struct{}{
+	"__result": {},
+	"__value": {},
 	"result": {},
 	"res": {},
 	"value": {},
 }
 // logsPPLPfx is a short constant for logsPipelinePrefix
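Note: the new double-underscore aliases give generated queries a target-column name that is unlikely to collide with a user's own column or label literally called result or value. A minimal lookup sketch (local copy of the map, for illustration only):

    package main

    import "fmt"

    // Local copy of the updated alias set.
    var reservedColumnTargetAliases = map[string]struct{}{
        "__result": {}, "__value": {}, "result": {}, "res": {}, "value": {},
    }

    func main() {
        for _, alias := range []string{"__result", "value", "service_name"} {
            _, ok := reservedColumnTargetAliases[alias]
            fmt.Printf("%s reserved=%v\n", alias, ok) // service_name is not reserved
        }
    }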

View File

@@ -989,10 +989,9 @@ type LogsLiveTailClient struct {
 }
 type Series struct {
 	Labels      map[string]string   `json:"labels"`
 	LabelsArray []map[string]string `json:"labelsArray"`
 	Points      []Point             `json:"values"`
-	GroupingSetsPoint *Point `json:"-"`
 }
 func (s *Series) SortPoints() {

View File

@@ -34,10 +34,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -53,10 +49,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -99,10 +91,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 	},
 },
@@ -128,10 +116,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -147,10 +131,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -194,10 +174,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -223,10 +199,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 240,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 154.5,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -242,10 +214,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 260,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 340,
-			},
 		},
 	},
 },
@@ -289,10 +257,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 240,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 154.5,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -308,10 +272,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 260,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 340,
-			},
 		},
 	},
 },
@@ -339,10 +299,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -359,10 +315,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -407,10 +359,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -439,10 +387,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -461,10 +405,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -483,10 +423,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -505,10 +441,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },
@@ -558,10 +490,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 19.5,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 19.3,
-			},
 		},
 		{
 			Labels: map[string]string{
@@ -580,10 +508,6 @@ func TestApplyLimitOnMetricResult(t *testing.T) {
 					Value: 8.83,
 				},
 			},
-			GroupingSetsPoint: &v3.Point{
-				Timestamp: 0,
-				Value: 8.83,
-			},
 		},
 	},
 },

View File

@@ -1011,7 +1011,7 @@ func (r *ThresholdRule) String() string {
 func removeGroupinSetPoints(series v3.Series) []v3.Point {
 	var result []v3.Point
 	for _, s := range series.Points {
-		if s.Timestamp > 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
+		if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
 			result = append(result, s)
 		}
 	}