fix: several issues (#5001)

commit 7e31b4ca01
parent 3efd9801a1
@@ -59,8 +59,8 @@ function CreateAlertChannels({
 *Summary:* {{ .Annotations.summary }}
 *Description:* {{ .Annotations.description }}
-*RelatedLogs:* {{ .Annotations.related_logs }}
-*RelatedTraces:* {{ .Annotations.related_traces }}
+*RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}
+*RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}

 *Details:*
 {{ range .Labels.SortedPairs }} • *{{ .Name }}:* {{ .Value }}
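Note (not part of the diff): a minimal, self-contained sketch of how the new Go-template conditional behaves — the Slack link is rendered only when the related_logs annotation is non-empty. The alert payloads below are made up for illustration.

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// Same conditional shape as the updated Slack template:
	// render a <url|label> link only when the annotation is non-empty.
	const tmpl = `*RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}
`
	t := template.Must(template.New("slack").Parse(tmpl))

	// Hypothetical alert payloads: one with a related_logs URL, one without.
	withLogs := map[string]map[string]string{"Annotations": {"related_logs": "https://example.com/logs"}}
	withoutLogs := map[string]map[string]string{"Annotations": {"related_logs": ""}}

	t.Execute(os.Stdout, withLogs)    // *RelatedLogs:* View in <https://example.com/logs|logs explorer>
	t.Execute(os.Stdout, withoutLogs) // *RelatedLogs:*
}
```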
@@ -51,6 +51,7 @@ import (
 	"go.signoz.io/signoz/pkg/query-service/common"
 	"go.signoz.io/signoz/pkg/query-service/constants"
 	"go.signoz.io/signoz/pkg/query-service/dao"
+	chErrors "go.signoz.io/signoz/pkg/query-service/errors"
 	am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
 	"go.signoz.io/signoz/pkg/query-service/interfaces"
 	"go.signoz.io/signoz/pkg/query-service/model"
@@ -4570,6 +4571,11 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
 			return nil, err
 		}
 		groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames)
+		// skip the point if the value is NaN or Inf
+		// are they ever useful enough to be returned?
+		if math.IsNaN(metricPoint.Value) || math.IsInf(metricPoint.Value, 0) {
+			continue
+		}
 		sort.Strings(groupBy)
 		key := strings.Join(groupBy, "")
 		if _, exists := seriesToAttrs[key]; !exists {
@@ -4700,11 +4706,11 @@ func getPersonalisedError(err error) error {
 	}
 	zap.L().Error("error while reading result", zap.Error(err))
 	if strings.Contains(err.Error(), "code: 307") {
-		return errors.New("query is consuming too much resources, please reach out to the team")
+		return chErrors.ErrResourceBytesLimitExceeded
 	}

 	if strings.Contains(err.Error(), "code: 159") {
-		return errors.New("Query is taking too long to run, please reach out to the team")
+		return chErrors.ErrResourceTimeLimitExceeded
 	}
 	return err
 }
@@ -2907,7 +2907,7 @@ func (aH *APIHandler) autoCompleteAttributeValues(w http.ResponseWriter, r *http
 	aH.Respond(w, response)
 }

-func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]string) {
+func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]error) {
 	type channelResult struct {
 		Series []*v3.Series
 		Err    error
@@ -2937,13 +2937,13 @@ func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries ma
 	close(ch)

 	var errs []error
-	errQuriesByName := make(map[string]string)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQuriesByName[r.Name] = r.Query
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -2957,7 +2957,7 @@ func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries ma
 	return res, nil, nil
 }

-func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]string) {
+func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]error) {
 	type channelResult struct {
 		List []*v3.Row
 		Err  error
@@ -2986,13 +2986,13 @@ func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map
 	close(ch)

 	var errs []error
-	errQuriesByName := make(map[string]string)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQuriesByName[r.Name] = r.Query
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -3006,7 +3006,7 @@ func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map
 	return res, nil, nil
 }

-func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangeParams *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangeParams *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) {
 	type channelResult struct {
 		Series []*v3.Series
 		Err    error
@@ -3066,13 +3066,13 @@ func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangePara
 	close(ch)

 	var errs []error
-	errQuriesByName := make(map[string]string)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQuriesByName[r.Name] = r.Query
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -3170,7 +3170,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que

 	var result []*v3.Result
 	var err error
-	var errQuriesByName map[string]string
+	var errQuriesByName map[string]error
 	var spanKeys map[string]v3.AttributeKey
 	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
 		// check if any enrichment is required for logs if yes then enrich them
@@ -3427,7 +3427,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que

 	var result []*v3.Result
 	var err error
-	var errQuriesByName map[string]string
+	var errQuriesByName map[string]error
 	var spanKeys map[string]v3.AttributeKey
 	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
 		// check if any enrichment is required for logs if yes then enrich them
@@ -36,33 +36,6 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam

 	metricQueryGroupBy := mq.GroupBy

-	// if the aggregate operator is a histogram quantile, and user has not forgotten
-	// the le tag in the group by then add the le tag to the group by
-	if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant75 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant90 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant95 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant99 {
-		found := false
-		for _, tag := range mq.GroupBy {
-			if tag.Key == "le" {
-				found = true
-				break
-			}
-		}
-		if !found {
-			metricQueryGroupBy = append(
-				metricQueryGroupBy,
-				v3.AttributeKey{
-					Key:      "le",
-					DataType: v3.AttributeKeyDataTypeString,
-					Type:     v3.AttributeKeyTypeTag,
-					IsColumn: false,
-				},
-			)
-		}
-	}
-
 	filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq)
 	if err != nil {
 		return "", err
@@ -60,6 +60,11 @@ func TestPanelTableForCumulative(t *testing.T) {
 					},
 				},
 				Expression: "A",
+				GroupBy: []v3.AttributeKey{
+					{
+						Key: "le",
+					},
+				},
 			},
 			expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts",
 		},
@@ -77,6 +82,9 @@ func TestPanelTableForCumulative(t *testing.T) {
 					{
 						Key: "service_name",
 					},
+					{
+						Key: "le",
+					},
 				},
 				Expression: "A",
 			},
@@ -12,39 +12,22 @@ func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableNam

 	metricQueryGroupBy := mq.GroupBy

-	// if the aggregate operator is a histogram quantile, and user has not forgotten
-	// the le tag in the group by then add the le tag to the group by
-	if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant75 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant90 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant95 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant99 {
-		found := false
-		for _, tag := range mq.GroupBy {
-			if tag.Key == "le" {
-				found = true
+	if mq.Filters != nil {
+		temporalityFound := false
+		for _, filter := range mq.Filters.Items {
+			if filter.Key.Key == "__temporality__" {
+				temporalityFound = true
 				break
 			}
 		}
-		if !found {
-			metricQueryGroupBy = append(
-				metricQueryGroupBy,
-				v3.AttributeKey{
-					Key:      "le",
-					DataType: v3.AttributeKeyDataTypeString,
-					Type:     v3.AttributeKeyTypeTag,
-					IsColumn: false,
-				},
-			)
-		}
-	}

-	if mq.Filters != nil {
-		mq.Filters.Items = append(mq.Filters.Items, v3.FilterItem{
-			Key:      v3.AttributeKey{Key: "__temporality__"},
-			Operator: v3.FilterOperatorEqual,
-			Value:    "Delta",
-		})
+		if !temporalityFound {
+			mq.Filters.Items = append(mq.Filters.Items, v3.FilterItem{
+				Key:      v3.AttributeKey{Key: "__temporality__"},
+				Operator: v3.FilterOperatorEqual,
+				Value:    "Delta",
+			})
+		}
 	}

 	filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq)
@@ -141,33 +141,6 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str

 	metricQueryGroupBy := mq.GroupBy

-	// if the aggregate operator is a histogram quantile, and user has not forgotten
-	// the le tag in the group by then add the le tag to the group by
-	if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant75 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant90 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant95 ||
-		mq.AggregateOperator == v3.AggregateOperatorHistQuant99 {
-		found := false
-		for _, tag := range mq.GroupBy {
-			if tag.Key == "le" {
-				found = true
-				break
-			}
-		}
-		if !found {
-			metricQueryGroupBy = append(
-				metricQueryGroupBy,
-				v3.AttributeKey{
-					Key:      "le",
-					DataType: v3.AttributeKeyDataTypeString,
-					Type:     v3.AttributeKeyTypeTag,
-					IsColumn: false,
-				},
-			)
-		}
-	}
-
 	filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq)
 	if err != nil {
 		return "", err
@@ -23,6 +23,8 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P

 	var quantile float64

+	percentileOperator := mq.SpaceAggregation
+
 	if v3.IsPercentileOperator(mq.SpaceAggregation) &&
 		mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) {
 		quantile = v3.GetPercentileFromOperator(mq.SpaceAggregation)
@@ -80,6 +82,7 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
 	// fixed-bucket histogram quantiles are calculated with UDF
 	if quantile != 0 && mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) {
 		query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, quantile, query, groupBy, orderBy)
+		mq.SpaceAggregation = percentileOperator
 	}

 	return query, nil
@@ -1005,6 +1005,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
 		for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
+			// Formula query
 			// Check if the queries used in the expression can be joined
 			if query.QueryName != query.Expression {
 				expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, evalFuncs())
 				if err != nil {
@@ -1039,10 +1040,12 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 				}
 			}

+			// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
 			if minStep := common.MinAllowedStepInterval(queryRangeParams.Start, queryRangeParams.End); query.StepInterval < minStep {
 				query.StepInterval = minStep
 			}
+
 			// Remove the time shift function from the list of functions and set the shift by value
 			var timeShiftBy int64
 			if len(query.Functions) > 0 {
 				for idx := range query.Functions {
@@ -1065,13 +1068,14 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 			if query.Filters == nil || len(query.Filters.Items) == 0 {
 				continue
 			}
+
 			for idx := range query.Filters.Items {
 				item := &query.Filters.Items[idx]
 				value := item.Value
 				if value != nil {
 					switch x := value.(type) {
 					case string:
-						variableName := strings.Trim(x, "{{ . }}")
+						variableName := strings.Trim(x, "{[.$]}")
 						if _, ok := queryRangeParams.Variables[variableName]; ok {
 							item.Value = queryRangeParams.Variables[variableName]
 						}
@@ -1079,7 +1083,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 					if len(x) > 0 {
 						switch x[0].(type) {
 						case string:
-							variableName := strings.Trim(x[0].(string), "{{ . }}")
+							variableName := strings.Trim(x[0].(string), "{[.$]}")
 							if _, ok := queryRangeParams.Variables[variableName]; ok {
 								item.Value = queryRangeParams.Variables[variableName]
 							}
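Note (not part of the diff): the new cutset "{[.$]}" lets a single strings.Trim call normalize every dashboard-variable spelling exercised by the tests added below, whereas the old cutset "{{ . }}" only handled the {{.var}} form. A quick sketch of the trimming behavior:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// strings.Trim removes any leading/trailing characters found in the cutset,
	// so "{[.$]}" strips {{.var}}, {{var}}, [[var]] and $var down to the bare name.
	for _, raw := range []string{"{{.service_name}}", "{{service_name}}", "[[service_name]]", "$service_name"} {
		fmt.Println(strings.Trim(raw, "{[.$]}")) // prints "service_name" for each form
	}
}
```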
@@ -1087,6 +1091,67 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 						}
 					}
 				}
+
+				if item.Operator != v3.FilterOperatorIn && item.Operator != v3.FilterOperatorNotIn {
+					// the value type should not be multiple values
+					if _, ok := item.Value.([]interface{}); ok {
+						return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("multiple values %s are not allowed for operator `%s` for key `%s`", item.Value, item.Operator, item.Key.Key)}
+					}
+				}
+			}
+
+			// for metrics v3
+
+			// if the aggregate operator is a histogram quantile, and user has not forgotten
+			// the le tag in the group by then add the le tag to the group by
+			if query.AggregateOperator == v3.AggregateOperatorHistQuant50 ||
+				query.AggregateOperator == v3.AggregateOperatorHistQuant75 ||
+				query.AggregateOperator == v3.AggregateOperatorHistQuant90 ||
+				query.AggregateOperator == v3.AggregateOperatorHistQuant95 ||
+				query.AggregateOperator == v3.AggregateOperatorHistQuant99 {
+				found := false
+				for _, tag := range query.GroupBy {
+					if tag.Key == "le" {
+						found = true
+						break
+					}
+				}
+				if !found {
+					query.GroupBy = append(
+						query.GroupBy,
+						v3.AttributeKey{
+							Key:      "le",
+							DataType: v3.AttributeKeyDataTypeString,
+							Type:     v3.AttributeKeyTypeTag,
+							IsColumn: false,
+						},
+					)
+				}
+			}
+
+			// for metrics v4
+			if v3.IsPercentileOperator(query.SpaceAggregation) &&
+				query.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) {
+				// If quantile is set, we need to group by le
+				// and set the space aggregation to sum
+				// and time aggregation to rate
+				query.TimeAggregation = v3.TimeAggregationRate
+				query.SpaceAggregation = v3.SpaceAggregationSum
+				// If le is not present in group by for quantile, add it
+				leFound := false
+				for _, groupBy := range query.GroupBy {
+					if groupBy.Key == "le" {
+						leFound = true
+						break
+					}
+				}
+				if !leFound {
+					query.GroupBy = append(query.GroupBy, v3.AttributeKey{
+						Key:      "le",
+						Type:     v3.AttributeKeyTypeTag,
+						DataType: v3.AttributeKeyDataTypeString,
+					})
+				}
+			}
 		}
 	}
@@ -1104,6 +1169,13 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 		if chQuery.Disabled {
 			continue
 		}
+
+		for name, value := range queryRangeParams.Variables {
+			chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
+			chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)
+			chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("$%s", name), fmt.Sprint(value), -1)
+		}
+
 		tmpl := template.New("clickhouse-query")
 		tmpl, err := tmpl.Parse(chQuery.Query)
 		if err != nil {
@@ -1128,6 +1200,13 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
 		if promQuery.Disabled {
 			continue
 		}
+
+		for name, value := range queryRangeParams.Variables {
+			promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
+			promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)
+			promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("$%s", name), fmt.Sprint(value), -1)
+		}
+
 		tmpl := template.New("prometheus-query")
 		tmpl, err := tmpl.Parse(promQuery.Query)
 		if err != nil {
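Note (not part of the diff): a small sketch of what the replacement loops added above do for raw ClickHouse and PromQL queries — each dashboard variable is substituted in its {{name}}, [[name]] and $name forms. The query text and variable values below are made up for illustration.

```go
package main

import (
	"fmt"
	"strings"
)

// substitute mirrors the replacement loop added for ClickHouse and PromQL queries:
// every dashboard variable is replaced in its {{name}}, [[name]] and $name spellings.
func substitute(query string, variables map[string]interface{}) string {
	for name, value := range variables {
		query = strings.Replace(query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
		query = strings.Replace(query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)
		query = strings.Replace(query, fmt.Sprintf("$%s", name), fmt.Sprint(value), -1)
	}
	return query
}

func main() {
	// Hypothetical raw query using all three placeholder styles.
	q := `SELECT count() FROM logs WHERE service = '{{service_name}}' AND env = '[[env]]' AND region = '$region'`
	vars := map[string]interface{}{"service_name": "route", "env": "prod", "region": "us-east-1"}
	fmt.Println(substitute(q, vars))
	// SELECT count() FROM logs WHERE service = 'route' AND env = 'prod' AND region = 'us-east-1'
}
```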
@@ -652,12 +652,12 @@ func TestParseQueryRangeParamsDashboardVarsSubstitution(t *testing.T) {
 						Items: []v3.FilterItem{
 							{
 								Key:      v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
-								Operator: "EQ",
+								Operator: v3.FilterOperatorEqual,
 								Value:    "{{.service_name}}",
 							},
 							{
 								Key:      v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
-								Operator: "IN",
+								Operator: v3.FilterOperatorIn,
 								Value:    "{{.operation_name}}",
 							},
 						},
@@ -675,6 +675,161 @@ func TestParseQueryRangeParamsDashboardVarsSubstitution(t *testing.T) {
 			expectErr:     false,
 			expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}},
 		},
+		{
+			desc: "valid builder query with dashboard variables {{service_name}} and {{operation_name}}",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypeBuilder,
+				BuilderQueries: map[string]*v3.BuilderQuery{
+					"A": {
+						QueryName:          "A",
+						DataSource:         v3.DataSourceMetrics,
+						AggregateOperator:  v3.AggregateOperatorSum,
+						AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"},
+						Expression:         "A",
+						Filters: &v3.FilterSet{
+							Operator: "AND",
+							Items: []v3.FilterItem{
+								{
+									Key:      v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorEqual,
+									Value:    "{{service_name}}",
+								},
+								{
+									Key:      v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorIn,
+									Value:    "{{operation_name}}",
+								},
+							},
+						},
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"operation_name": []interface{}{
+					"GET /route",
+					"POST /route",
+				},
+			},
+			expectErr:     false,
+			expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}},
+		},
+		{
+			desc: "valid builder query with dashboard variables [[service_name]] and [[operation_name]]",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypeBuilder,
+				BuilderQueries: map[string]*v3.BuilderQuery{
+					"A": {
+						QueryName:          "A",
+						DataSource:         v3.DataSourceMetrics,
+						AggregateOperator:  v3.AggregateOperatorSum,
+						AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"},
+						Expression:         "A",
+						Filters: &v3.FilterSet{
+							Operator: "AND",
+							Items: []v3.FilterItem{
+								{
+									Key:      v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorEqual,
+									Value:    "[[service_name]]",
+								},
+								{
+									Key:      v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorIn,
+									Value:    "[[operation_name]]",
+								},
+							},
+						},
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"operation_name": []interface{}{
+					"GET /route",
+					"POST /route",
+				},
+			},
+			expectErr:     false,
+			expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}},
+		},
+		{
+			desc: "valid builder query with dashboard variables $service_name and $operation_name",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypeBuilder,
+				BuilderQueries: map[string]*v3.BuilderQuery{
+					"A": {
+						QueryName:          "A",
+						DataSource:         v3.DataSourceMetrics,
+						AggregateOperator:  v3.AggregateOperatorSum,
+						AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"},
+						Expression:         "A",
+						Filters: &v3.FilterSet{
+							Operator: "AND",
+							Items: []v3.FilterItem{
+								{
+									Key:      v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorEqual,
+									Value:    "$service_name",
+								},
+								{
+									Key:      v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorIn,
+									Value:    "$operation_name",
+								},
+							},
+						},
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"operation_name": []interface{}{
+					"GET /route",
+					"POST /route",
+				},
+			},
+			expectErr:     false,
+			expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}},
+		},
+		{
+			desc: "multiple values for single select operator",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypeBuilder,
+				BuilderQueries: map[string]*v3.BuilderQuery{
+					"A": {
+						QueryName:          "A",
+						DataSource:         v3.DataSourceMetrics,
+						AggregateOperator:  v3.AggregateOperatorSum,
+						AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"},
+						Expression:         "A",
+						Filters: &v3.FilterSet{
+							Operator: "AND",
+							Items: []v3.FilterItem{
+								{
+									Key:      v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
+									Operator: v3.FilterOperatorEqual,
+									Value:    "{{.operation_name}}",
+								},
+							},
+						},
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"operation_name": []interface{}{
+					"GET /route",
+					"POST /route",
+				},
+			},
+			expectErr: true,
+			errMsg:    "multiple values [GET /route POST /route] are not allowed for operator `=` for key `operation_name`",
+		},
 	}

 	for _, tc := range reqCases {
@@ -759,6 +914,72 @@ func TestParseQueryRangeParamsPromQLVars(t *testing.T) {
 			expectErr:     false,
 			expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}",
 		},
+		{
+			desc: "valid prom query with dashboard variables {{service_name}} and {{status_code}}",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypePromQL,
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query:    "http_calls_total{service_name=\"{{service_name}}\", status_code=~\"{{status_code}}\"}",
+						Disabled: false,
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"status_code": []interface{}{
+					200,
+					505,
+				},
+			},
+			expectErr:     false,
+			expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}",
+		},
+		{
+			desc: "valid prom query with dashboard variables [[service_name]] and [[status_code]]",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypePromQL,
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query:    "http_calls_total{service_name=\"[[service_name]]\", status_code=~\"[[status_code]]\"}",
+						Disabled: false,
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"status_code": []interface{}{
+					200,
+					505,
+				},
+			},
+			expectErr:     false,
+			expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}",
+		},
+		{
+			desc: "valid prom query with dashboard variables $service_name and $status_code",
+			compositeQuery: v3.CompositeQuery{
+				PanelType: v3.PanelTypeGraph,
+				QueryType: v3.QueryTypePromQL,
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query:    "http_calls_total{service_name=\"$service_name\", status_code=~\"$status_code\"}",
+						Disabled: false,
+					},
+				},
+			},
+			variables: map[string]interface{}{
+				"service_name": "route",
+				"status_code": []interface{}{
+					200,
+					505,
+				},
+			},
+			expectErr:     false,
+			expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}",
+		},
 		{
 			desc: "valid prom query with dashboard variables",
 			compositeQuery: v3.CompositeQuery{
@@ -14,6 +14,7 @@ import (
 	metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
 	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+	chErrors "go.signoz.io/signoz/pkg/query-service/errors"

 	"go.signoz.io/signoz/pkg/query-service/cache"
 	"go.signoz.io/signoz/pkg/query-service/interfaces"
@@ -283,7 +284,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
 	return mergedSeries
 }

-func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {

 	cacheKeys := q.keyGenerator.GenerateKeys(params)

@@ -306,13 +307,13 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa
 	close(ch)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range ch {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -329,7 +330,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa
 	return results, err, errQueriesByName
 }

-func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) {
 	channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries))
 	var wg sync.WaitGroup
 	cacheKeys := q.keyGenerator.GenerateKeys(params)
@@ -390,13 +391,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
 	close(channelResults)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range channelResults {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -413,7 +414,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
 	return results, err, errQueriesByName
 }

-func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) {
 	channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries))
 	var wg sync.WaitGroup
 	for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries {
@@ -431,13 +432,13 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	close(channelResults)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range channelResults {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -453,7 +454,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	return results, err, errQueriesByName
 }

-func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {

 	queries, err := q.builder.PrepareQueries(params, keys)

@@ -482,13 +483,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	close(ch)

 	var errs []error
-	errQuriesByName := make(map[string]string)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQuriesByName[r.Name] = r.Query
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -502,10 +503,10 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	return res, nil, nil
 }

-func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {
 	var results []*v3.Result
 	var err error
-	var errQueriesByName map[string]string
+	var errQueriesByName map[string]error
 	if params.CompositeQuery != nil {
 		switch params.CompositeQuery.QueryType {
 		case v3.QueryTypeBuilder:
@@ -514,6 +515,13 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
 			} else {
 				results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys)
 			}
+			// in builder query, the only errors we expose are the ones that exceed the resource limits
+			// everything else is internal error as they are not actionable by the user
+			for name, err := range errQueriesByName {
+				if !chErrors.IsResourceLimitError(err) {
+					delete(errQueriesByName, name)
+				}
+			}
 		case v3.QueryTypePromQL:
 			results, err, errQueriesByName = q.runPromQueries(ctx, params)
 		case v3.QueryTypeClickHouseSQL:
@@ -14,6 +14,7 @@ import (
 	metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
 	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+	chErrors "go.signoz.io/signoz/pkg/query-service/errors"

 	"go.signoz.io/signoz/pkg/query-service/cache"
 	"go.signoz.io/signoz/pkg/query-service/interfaces"
@@ -281,7 +282,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
 	return mergedSeries
 }

-func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {

 	cacheKeys := q.keyGenerator.GenerateKeys(params)

@@ -299,13 +300,13 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa
 	close(ch)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range ch {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -322,7 +323,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa
 	return results, err, errQueriesByName
 }

-func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) {
 	channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries))
 	var wg sync.WaitGroup
 	cacheKeys := q.keyGenerator.GenerateKeys(params)
@@ -383,13 +384,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
 	close(channelResults)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range channelResults {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -406,7 +407,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
 	return results, err, errQueriesByName
 }

-func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) {
 	channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries))
 	var wg sync.WaitGroup
 	for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries {
@@ -424,13 +425,13 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	close(channelResults)

 	results := make([]*v3.Result, 0)
-	errQueriesByName := make(map[string]string)
+	errQueriesByName := make(map[string]error)
 	var errs []error

 	for result := range channelResults {
 		if result.Err != nil {
 			errs = append(errs, result.Err)
-			errQueriesByName[result.Name] = result.Err.Error()
+			errQueriesByName[result.Name] = result.Err
 			continue
 		}
 		results = append(results, &v3.Result{
@@ -446,7 +447,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	return results, err, errQueriesByName
 }

-func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {

 	queries, err := q.builder.PrepareQueries(params, keys)

@@ -475,13 +476,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	close(ch)

 	var errs []error
-	errQuriesByName := make(map[string]string)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQuriesByName[r.Name] = r.Query
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -495,10 +496,10 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	return res, nil, nil
 }

-func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) {
 	var results []*v3.Result
 	var err error
-	var errQueriesByName map[string]string
+	var errQueriesByName map[string]error
 	if params.CompositeQuery != nil {
 		switch params.CompositeQuery.QueryType {
 		case v3.QueryTypeBuilder:
@@ -507,6 +508,13 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
 			} else {
 				results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys)
 			}
+			// in builder query, the only errors we expose are the ones that exceed the resource limits
+			// everything else is internal error as they are not actionable by the user
+			for name, err := range errQueriesByName {
+				if !chErrors.IsResourceLimitError(err) {
+					delete(errQueriesByName, name)
+				}
+			}
 		case v3.QueryTypePromQL:
 			results, err, errQueriesByName = q.runPromQueries(ctx, params)
 		case v3.QueryTypeClickHouseSQL:
@@ -348,6 +348,7 @@ func TestDeltaQueryBuilder(t *testing.T) {
 				Temporality: v3.Delta,
 				GroupBy: []v3.AttributeKey{
 					{Key: "service_name"},
+					{Key: "le"},
 				},
 			},
 		},
pkg/query-service/errors/clickhouse.go (new file, 42 lines)
@@ -0,0 +1,42 @@
+package errors
+
+import "errors"
+
+var (
+	// ErrResourceBytesLimitExceeded is returned when the resource bytes limit is exceeded
+	ErrResourceBytesLimitExceeded = NewResourceLimitError(errors.New("resource bytes limit exceeded, try applying filters such as service.name, etc. to reduce the data size"))
+	// ErrResourceTimeLimitExceeded is returned when the resource time limit is exceeded
+	ErrResourceTimeLimitExceeded = NewResourceLimitError(errors.New("resource time limit exceeded, try applying filters such as service.name, etc. to reduce the data size"))
+)
+
+type ResourceLimitError struct {
+	err error
+}
+
+func NewResourceLimitError(err error) error {
+	return &ResourceLimitError{err: err}
+}
+
+func (e *ResourceLimitError) Error() string {
+	return e.err.Error()
+}
+
+func (e *ResourceLimitError) Unwrap() error {
+	return e.err
+}
+
+func IsResourceLimitError(err error) bool {
+	if err == nil {
+		return false
+	}
+	var target *ResourceLimitError
+	return errors.As(err, &target)
+}
+
+func (e *ResourceLimitError) MarshalJSON() ([]byte, error) {
+	return []byte(`"` + e.Error() + `"`), nil
+}
+
+func (e *ResourceLimitError) UnmarshalJSON([]byte) error {
+	return nil
+}
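Note (not part of the diff): a usage sketch of the new sentinel errors, assuming the package import path shown in the diff. Because ResourceLimitError implements Unwrap, both errors.Is against the sentinels and IsResourceLimitError (which relies on errors.As) keep working when the error is wrapped further up the stack; the wrapping in main is purely illustrative.

```go
package main

import (
	"errors"
	"fmt"

	chErrors "go.signoz.io/signoz/pkg/query-service/errors"
)

func main() {
	// A query error wrapped around the new sentinel, as a caller might see it.
	err := fmt.Errorf("query A failed: %w", chErrors.ErrResourceBytesLimitExceeded)

	// IsResourceLimitError uses errors.As under the hood, so wrapping is fine.
	fmt.Println(chErrors.IsResourceLimitError(err))                          // true
	fmt.Println(errors.Is(err, chErrors.ErrResourceBytesLimitExceeded))      // true
	fmt.Println(chErrors.IsResourceLimitError(errors.New("some other err"))) // false
}
```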
@@ -107,7 +107,7 @@ type Reader interface {
 }

 type Querier interface {
-	QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string)
+	QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error)

 	// test helpers
 	QueriesExecuted() []string
@@ -433,24 +433,30 @@ func (c *CompositeQuery) Validate() error {
 	}

 	if c.BuilderQueries == nil && c.ClickHouseQueries == nil && c.PromQueries == nil {
-		return fmt.Errorf("composite query must contain at least one query")
+		return fmt.Errorf("composite query must contain at least one query type")
 	}

-	for name, query := range c.BuilderQueries {
-		if err := query.Validate(); err != nil {
-			return fmt.Errorf("builder query %s is invalid: %w", name, err)
+	if c.QueryType == QueryTypeBuilder {
+		for name, query := range c.BuilderQueries {
+			if err := query.Validate(c.PanelType); err != nil {
+				return fmt.Errorf("builder query %s is invalid: %w", name, err)
+			}
 		}
 	}

-	for name, query := range c.ClickHouseQueries {
-		if err := query.Validate(); err != nil {
-			return fmt.Errorf("clickhouse query %s is invalid: %w", name, err)
+	if c.QueryType == QueryTypeClickHouseSQL {
+		for name, query := range c.ClickHouseQueries {
+			if err := query.Validate(); err != nil {
+				return fmt.Errorf("clickhouse query %s is invalid: %w", name, err)
+			}
 		}
 	}

-	for name, query := range c.PromQueries {
-		if err := query.Validate(); err != nil {
-			return fmt.Errorf("prom query %s is invalid: %w", name, err)
+	if c.QueryType == QueryTypePromQL {
+		for name, query := range c.PromQueries {
+			if err := query.Validate(); err != nil {
+				return fmt.Errorf("prom query %s is invalid: %w", name, err)
+			}
 		}
 	}

@@ -663,10 +669,11 @@ type BuilderQuery struct {
 	ShiftBy int64
 }

-func (b *BuilderQuery) Validate() error {
+func (b *BuilderQuery) Validate(panelType PanelType) error {
 	if b == nil {
 		return nil
 	}

 	if b.QueryName == "" {
 		return fmt.Errorf("query name is required")
 	}
@@ -711,6 +718,10 @@ func (b *BuilderQuery) Validate(panelType PanelType) error {
 		}
 	}
 	if b.GroupBy != nil {
+		if len(b.GroupBy) > 0 && panelType == PanelTypeList {
+			return fmt.Errorf("group by is not supported for list panel type")
+		}
+
 		for _, groupBy := range b.GroupBy {
 			if err := groupBy.Validate(); err != nil {
 				return fmt.Errorf("group by is invalid %w", err)
@@ -167,7 +167,7 @@ func ClickHouseFormattedValue(v interface{}) string {
 	case []interface{}:
 		if len(x) == 0 {
-			return ""
+			return "[]"
 		}
 		switch x[0].(type) {
 		case string:
@@ -184,7 +184,7 @@ func ClickHouseFormattedValue(v interface{}) string {
 			return strings.Join(strings.Fields(fmt.Sprint(x)), ",")
 		default:
 			zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0])))
-			return ""
+			return "[]"
 		}
 	default:
 		zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x)))
@@ -8,17 +8,17 @@ import (

 // AssignReservedVars assigns values for go template vars. assumes that
 // model.QueryRangeParamsV3.Start and End are Unix Nano timestamps
-func AssignReservedVarsV3(metricsQueryRangeParams *v3.QueryRangeParamsV3) {
-	metricsQueryRangeParams.Variables["start_timestamp"] = metricsQueryRangeParams.Start / 1000
-	metricsQueryRangeParams.Variables["end_timestamp"] = metricsQueryRangeParams.End / 1000
+func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) {
+	queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000
+	queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000

-	metricsQueryRangeParams.Variables["start_timestamp_ms"] = metricsQueryRangeParams.Start
-	metricsQueryRangeParams.Variables["end_timestamp_ms"] = metricsQueryRangeParams.End
+	queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start
+	queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End

-	metricsQueryRangeParams.Variables["start_timestamp_nano"] = metricsQueryRangeParams.Start * 1e6
-	metricsQueryRangeParams.Variables["end_timestamp_nano"] = metricsQueryRangeParams.End * 1e6
+	queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6
+	queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6

-	metricsQueryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.Start/1000)
-	metricsQueryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.End/1000)
+	queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000)
+	queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000)

 }
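Note (not part of the diff): a simplified, standalone rendition of the reserved variables AssignReservedVarsV3 derives from Start and End (the real function mutates queryRangeParams.Variables in place). The function name and the example timestamps below are illustrative only.

```go
package main

import "fmt"

// reservedVars mirrors the assignments in AssignReservedVarsV3 as a pure function:
// seconds, milliseconds, nanoseconds and a toDateTime() literal derived from Start/End.
func reservedVars(start, end int64) map[string]interface{} {
	return map[string]interface{}{
		"start_timestamp":      start / 1000,
		"end_timestamp":        end / 1000,
		"start_timestamp_ms":   start,
		"end_timestamp_ms":     end,
		"start_timestamp_nano": start * 1e6,
		"end_timestamp_nano":   end * 1e6,
		"start_datetime":       fmt.Sprintf("toDateTime(%d)", start/1000),
		"end_datetime":         fmt.Sprintf("toDateTime(%d)", end/1000),
	}
}

func main() {
	for name, value := range reservedVars(1689255866000, 1689257640000) {
		fmt.Printf("%s = %v\n", name, value)
	}
}
```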