From 014d4a2e7cef756c4b8476114b6a7833a216cf8b Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Fri, 9 Jun 2023 17:07:45 +0530 Subject: [PATCH] Fixes for pagination in listview (#2791) --- .../app/clickhouseReader/reader.go | 8 +- pkg/query-service/app/http_handler.go | 20 +- pkg/query-service/app/logs/v3/enrich_query.go | 145 +++++++++ .../app/logs/v3/enrich_query_test.go | 273 ++++++++++++++++ .../app/logs/v3/query_builder.go | 191 ++++++----- .../app/logs/v3/query_builder_test.go | 297 +++++++++--------- pkg/query-service/app/querier/querier.go | 8 +- pkg/query-service/app/querier/querier_test.go | 2 +- .../app/queryBuilder/query_builder.go | 12 +- pkg/query-service/constants/constants.go | 2 + pkg/query-service/interfaces/interface.go | 2 +- pkg/query-service/model/v3/v3.go | 10 +- 12 files changed, 698 insertions(+), 272 deletions(-) create mode 100644 pkg/query-service/app/logs/v3/enrich_query.go create mode 100644 pkg/query-service/app/logs/v3/enrich_query_test.go diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 50e54f40d7..fb59650982 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -4203,15 +4203,15 @@ func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([ var ( columnTypes = rows.ColumnTypes() columnNames = rows.Columns() - vars = make([]interface{}, len(columnTypes)) ) - for i := range columnTypes { - vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() - } var rowList []*v3.Row for rows.Next() { + var vars = make([]interface{}, len(columnTypes)) + for i := range columnTypes { + vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() + } if err := rows.Scan(vars...); err != nil { return nil, err } diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index b1ecc25aad..4eb48d1064 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -2695,13 +2695,17 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que var queries map[string]string switch queryRangeParams.CompositeQuery.QueryType { case v3.QueryTypeBuilder: - // get the fields if any logs query is present - var fields map[string]v3.AttributeKey - fields, err = aH.getLogFieldsV3(ctx, queryRangeParams) - if err != nil { - apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err} - RespondError(w, apiErrObj, errQuriesByName) - return + // check if any enrichment is required for logs if yes then enrich them + if logsv3.EnrichmentRequired(queryRangeParams) { + // get the fields if any logs query is present + var fields map[string]v3.AttributeKey + fields, err = aH.getLogFieldsV3(ctx, queryRangeParams) + if err != nil { + apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err} + RespondError(w, apiErrObj, errQuriesByName) + return + } + logsv3.Enrich(queryRangeParams, fields) } var spanKeys map[string]v3.AttributeKey @@ -2712,7 +2716,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que return } - queries, err = aH.queryBuilder.PrepareQueries(queryRangeParams, fields, spanKeys) + queries, err = aH.queryBuilder.PrepareQueries(queryRangeParams, spanKeys) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return diff --git a/pkg/query-service/app/logs/v3/enrich_query.go b/pkg/query-service/app/logs/v3/enrich_query.go new file mode 100644 index 
0000000000..88f9097814 --- /dev/null +++ b/pkg/query-service/app/logs/v3/enrich_query.go @@ -0,0 +1,145 @@ +package v3 + +import ( + "go.signoz.io/signoz/pkg/query-service/constants" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" +) + +func EnrichmentRequired(params *v3.QueryRangeParamsV3) bool { + compositeQuery := params.CompositeQuery + if compositeQuery == nil { + return false + } + + // Build queries for each builder query + for queryName, query := range compositeQuery.BuilderQueries { + if query.Expression != queryName && query.DataSource != v3.DataSourceLogs { + continue + } + + // check aggregation attribute + if query.AggregateAttribute.Key != "" { + if !isEnriched(query.AggregateAttribute) { + return true + } + } + + // check filter attribute + if query.Filters != nil && len(query.Filters.Items) != 0 { + for _, item := range query.Filters.Items { + if !isEnriched(item.Key) { + return true + } + } + } + + groupByLookup := map[string]struct{}{} + // check groupby + for _, groupBy := range query.GroupBy { + if !isEnriched(groupBy) { + return true + } + groupByLookup[groupBy.Key] = struct{}{} + } + + // check orderby + for _, orderBy := range query.OrderBy { + if _, ok := groupByLookup[orderBy.ColumnName]; !ok { + key := v3.AttributeKey{Key: orderBy.ColumnName} + if !isEnriched(key) { + return true + } + } + } + + } + + return false +} + +func isEnriched(field v3.AttributeKey) bool { + // if it is timestamp/id dont check + if field.Key == "timestamp" || field.Key == "id" || field.Key == constants.SigNozOrderByValue { + return true + } + + if field.IsColumn { + return true + } + + if field.Type == v3.AttributeKeyTypeUnspecified || field.DataType == v3.AttributeKeyDataTypeUnspecified { + return false + } + return true +} + +func Enrich(params *v3.QueryRangeParamsV3, fields map[string]v3.AttributeKey) { + compositeQuery := params.CompositeQuery + if compositeQuery == nil { + return + } + + // Build queries for each builder query + for queryName, query := range compositeQuery.BuilderQueries { + if query.Expression != queryName && query.DataSource != v3.DataSourceLogs { + continue + } + enrichLogsQuery(query, fields) + } +} + +func enrichLogsQuery(query *v3.BuilderQuery, fields map[string]v3.AttributeKey) error { + // enrich aggregation attribute + if query.AggregateAttribute.Key != "" { + query.AggregateAttribute = enrichFieldWithMetadata(query.AggregateAttribute, fields) + } + + // enrich filter attribute + if query.Filters != nil && len(query.Filters.Items) != 0 { + for i := 0; i < len(query.Filters.Items); i++ { + query.Filters.Items[i].Key = enrichFieldWithMetadata(query.Filters.Items[i].Key, fields) + } + } + + // enrich groupby + for i := 0; i < len(query.GroupBy); i++ { + query.GroupBy[i] = enrichFieldWithMetadata(query.GroupBy[i], fields) + } + + // enrich orderby + for i := 0; i < len(query.OrderBy); i++ { + key := v3.AttributeKey{Key: query.OrderBy[i].ColumnName} + key = enrichFieldWithMetadata(key, fields) + query.OrderBy[i].Key = key.Key + query.OrderBy[i].Type = key.Type + query.OrderBy[i].DataType = key.DataType + query.OrderBy[i].IsColumn = key.IsColumn + } + return nil +} + +func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.AttributeKey) v3.AttributeKey { + if isEnriched(field) { + return field + } + + // if type is unknown check if it is a top level key + if v, ok := constants.StaticFieldsLogsV3[field.Key]; ok { + return v + } + + // check if the field is present in the fields map + if existingField, ok := fields[field.Key]; ok { + if 
existingField.IsColumn { + return field + } + field.Type = existingField.Type + field.DataType = existingField.DataType + return field + } + + // enrich with default values if metadata is not found + field.Type = v3.AttributeKeyTypeTag + field.DataType = v3.AttributeKeyDataTypeString + return field +} diff --git a/pkg/query-service/app/logs/v3/enrich_query_test.go b/pkg/query-service/app/logs/v3/enrich_query_test.go new file mode 100644 index 0000000000..16d5e74404 --- /dev/null +++ b/pkg/query-service/app/logs/v3/enrich_query_test.go @@ -0,0 +1,273 @@ +package v3 + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" +) + +var testEnrichmentRequiredData = []struct { + Name string + Params v3.QueryRangeParamsV3 + EnrichmentRequired bool +}{ + { + Name: "attribute enrichment not required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + AggregateAttribute: v3.AttributeKey{ + Key: "test", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeInt64, + }, + }, + }, + }, + }, + EnrichmentRequired: false, + }, + { + Name: "attribute enrichment required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + AggregateAttribute: v3.AttributeKey{ + Key: "test", + }, + }, + }, + }, + }, + EnrichmentRequired: true, + }, + { + Name: "filter enrichment not required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "user_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "john", Operator: "="}, + }}, + }, + }, + }, + }, + EnrichmentRequired: false, + }, + { + Name: "filter enrichment required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "user_name"}, Value: "john", Operator: "="}, + }}, + }, + }, + }, + }, + EnrichmentRequired: true, + }, + { + Name: "groupBy enrichment not required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + GroupBy: []v3.AttributeKey{{Key: "userid", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + }, + }, + }, + }, + EnrichmentRequired: false, + }, + { + Name: "groupBy enrichment required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + GroupBy: []v3.AttributeKey{{Key: "userid"}}, + }, + }, + }, + }, + EnrichmentRequired: true, + }, + { + Name: "orderBy enrichment not required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + 
QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + GroupBy: []v3.AttributeKey{{Key: "userid", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + OrderBy: []v3.OrderBy{{ColumnName: "userid"}}, + }, + }, + }, + }, + EnrichmentRequired: false, + }, + { + Name: "orderBy enrichment required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + OrderBy: []v3.OrderBy{{ColumnName: "userid"}}, + }, + }, + }, + }, + EnrichmentRequired: true, + }, + { + Name: "top level key", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + GroupBy: []v3.AttributeKey{{Key: "trace_id", Type: v3.AttributeKeyTypeUnspecified, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}}, + }, + }, + }, + }, + EnrichmentRequired: false, + }, + { + Name: "orderBy enrichment required", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + GroupBy: []v3.AttributeKey{{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "ASC"}}, + }, + }, + }, + }, + EnrichmentRequired: false, + }, +} + +func TestEnrichmentRquired(t *testing.T) { + for _, tt := range testEnrichmentRequiredData { + Convey("testEnrichmentRequiredData", t, func() { + res := EnrichmentRequired(&tt.Params) + So(res, ShouldEqual, tt.EnrichmentRequired) + }) + } +} + +var testEnrichParamsData = []struct { + Name string + Params v3.QueryRangeParamsV3 + Fields map[string]v3.AttributeKey + Result v3.QueryRangeParamsV3 +}{ + { + Name: "Enriching query range v3 params", + Params: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + AggregateAttribute: v3.AttributeKey{ + Key: "test", + }, + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "user_name"}, Value: "john", Operator: "="}, + }}, + GroupBy: []v3.AttributeKey{{Key: "trace_id"}}, + OrderBy: []v3.OrderBy{{ColumnName: "response_time"}}, + }, + }, + }, + }, + Fields: map[string]v3.AttributeKey{ + "test": { + Key: "test", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeInt64, + }, + "user_name": { + Key: "user_name", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + }, + "response_time": { + Key: "response_time", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeInt64, + }, + }, + Result: v3.QueryRangeParamsV3{ + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "test": { + QueryName: "test", + Expression: "test", + DataSource: v3.DataSourceLogs, + AggregateAttribute: v3.AttributeKey{ + Key: "test", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeInt64, + }, + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "user_name", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeString}, Value: "john", Operator: "="}, + }}, + GroupBy: []v3.AttributeKey{{Key: "trace_id", 
Type: v3.AttributeKeyTypeUnspecified, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}}, + OrderBy: []v3.OrderBy{{ColumnName: "response_time", Key: "response_time", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64}}, + }, + }, + }, + }, + }, +} + +func TestEnrichParams(t *testing.T) { + for _, tt := range testEnrichParamsData { + Convey("testEnrichmentRequiredData", t, func() { + Enrich(&tt.Params, tt.Fields) + So(tt.Params, ShouldResemble, tt.Result) + }) + } +} diff --git a/pkg/query-service/app/logs/v3/query_builder.go b/pkg/query-service/app/logs/v3/query_builder.go index ba7b3438b0..6ff58a2497 100644 --- a/pkg/query-service/app/logs/v3/query_builder.go +++ b/pkg/query-service/app/logs/v3/query_builder.go @@ -53,33 +53,6 @@ var logOperators = map[v3.FilterOperator]string{ // (todo) check contains/not contains/ } -func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.AttributeKey) v3.AttributeKey { - if field.Type == "" || field.DataType == "" { - // if type is unknown check if it is a top level key - if v, ok := constants.StaticFieldsLogsV3[field.Key]; ok { - if (v3.AttributeKey{} != v) { - return v - } - } - - // check if the field is present in the fields map - if existingField, ok := fields[field.Key]; ok { - if existingField.IsColumn { - return field - } - field.Type = existingField.Type - field.DataType = existingField.DataType - return field - } - - // enrich with default values if metadata is not found - field.Type = v3.AttributeKeyTypeTag - field.DataType = v3.AttributeKeyDataTypeString - - } - return field -} - func getClickhouseLogsColumnType(columnType v3.AttributeKeyType) string { if columnType == v3.AttributeKeyTypeTag { return "attributes" @@ -99,8 +72,12 @@ func getClickhouseLogsColumnDataType(columnDataType v3.AttributeKeyDataType) str } // getClickhouseColumnName returns the corresponding clickhouse column name for the given attribute/resource key -func getClickhouseColumnName(key v3.AttributeKey, fields map[string]v3.AttributeKey) (string, error) { +func getClickhouseColumnName(key v3.AttributeKey) string { clickhouseColumn := key.Key + if key.Key == constants.TIMESTAMP || key.Key == "id" { + return key.Key + } + //if the key is present in the topLevelColumn then it will be only searched in those columns, //regardless if it is indexed/present again in resource or column attribute if !key.IsColumn { @@ -108,56 +85,44 @@ func getClickhouseColumnName(key v3.AttributeKey, fields map[string]v3.Attribute columnDataType := getClickhouseLogsColumnDataType(key.DataType) clickhouseColumn = fmt.Sprintf("%s_%s_value[indexOf(%s_%s_key, '%s')]", columnType, columnDataType, columnType, columnDataType, key.Key) } - return clickhouseColumn, nil + return clickhouseColumn } // getSelectLabels returns the select labels for the query based on groupBy and aggregateOperator -func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey, fields map[string]v3.AttributeKey) (string, error) { +func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey) (string, error) { var selectLabels string if aggregatorOperator == v3.AggregateOperatorNoOp { selectLabels = "" } else { for _, tag := range groupBy { - enrichedTag := enrichFieldWithMetadata(tag, fields) - columnName, err := getClickhouseColumnName(enrichedTag, fields) - if err != nil { - return "", err - } + columnName := getClickhouseColumnName(tag) selectLabels += fmt.Sprintf(", %s as %s", columnName, tag.Key) } } return selectLabels, nil } 
-func buildLogsTimeSeriesFilterQuery(fs *v3.FilterSet, groupBy []v3.AttributeKey, fields map[string]v3.AttributeKey) (string, error) { +func buildLogsTimeSeriesFilterQuery(fs *v3.FilterSet, groupBy []v3.AttributeKey) (string, error) { var conditions []string if fs != nil && len(fs.Items) != 0 { for _, item := range fs.Items { op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator)))) - key := enrichFieldWithMetadata(item.Key, fields) - value, err := utils.ValidateAndCastValue(item.Value, key.DataType) + value, err := utils.ValidateAndCastValue(item.Value, item.Key.DataType) if err != nil { return "", fmt.Errorf("failed to validate and cast value for %s: %v", item.Key.Key, err) } if logsOp, ok := logOperators[op]; ok { switch op { case v3.FilterOperatorExists, v3.FilterOperatorNotExists: - columnType := getClickhouseLogsColumnType(key.Type) - columnDataType := getClickhouseLogsColumnDataType(key.DataType) - conditions = append(conditions, fmt.Sprintf(logsOp, columnType, columnDataType, key.Key)) + columnType := getClickhouseLogsColumnType(item.Key.Type) + columnDataType := getClickhouseLogsColumnDataType(item.Key.DataType) + conditions = append(conditions, fmt.Sprintf(logsOp, columnType, columnDataType, item.Key.Key)) case v3.FilterOperatorContains, v3.FilterOperatorNotContains: - columnName, err := getClickhouseColumnName(key, fields) - if err != nil { - return "", err - } + columnName := getClickhouseColumnName(item.Key) conditions = append(conditions, fmt.Sprintf("%s %s '%%%s%%'", columnName, logsOp, item.Value)) default: - columnName, err := getClickhouseColumnName(key, fields) - if err != nil { - return "", err - } - + columnName := getClickhouseColumnName(item.Key) fmtVal := utils.ClickHouseFormattedValue(value) conditions = append(conditions, fmt.Sprintf("%s %s %s", columnName, logsOp, fmtVal)) } @@ -169,11 +134,10 @@ func buildLogsTimeSeriesFilterQuery(fs *v3.FilterSet, groupBy []v3.AttributeKey, // add group by conditions to filter out log lines which doesn't have the key for _, attr := range groupBy { - enrichedAttr := enrichFieldWithMetadata(attr, fields) - if !enrichedAttr.IsColumn { - columnType := getClickhouseLogsColumnType(enrichedAttr.Type) - columnDataType := getClickhouseLogsColumnDataType(enrichedAttr.DataType) - conditions = append(conditions, fmt.Sprintf("indexOf(%s_%s_key, '%s') > 0", columnType, columnDataType, enrichedAttr.Key)) + if !attr.IsColumn { + columnType := getClickhouseLogsColumnType(attr.Type) + columnDataType := getClickhouseLogsColumnDataType(attr.DataType) + conditions = append(conditions, fmt.Sprintf("indexOf(%s_%s_key, '%s') > 0", columnType, columnDataType, attr.Key)) } } @@ -199,9 +163,9 @@ func getZerosForEpochNano(epoch int64) int64 { return int64(math.Pow(10, float64(19-count))) } -func buildLogsQuery(start, end, step int64, mq *v3.BuilderQuery, fields map[string]v3.AttributeKey) (string, error) { +func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.BuilderQuery) (string, error) { - filterSubQuery, err := buildLogsTimeSeriesFilterQuery(mq.Filters, mq.GroupBy, fields) + filterSubQuery, err := buildLogsTimeSeriesFilterQuery(mq.Filters, mq.GroupBy) if err != nil { return "", err } @@ -209,7 +173,7 @@ func buildLogsQuery(start, end, step int64, mq *v3.BuilderQuery, fields map[stri // timerange will be sent in epoch millisecond timeFilter := fmt.Sprintf("(timestamp >= %d AND timestamp <= %d)", start*getZerosForEpochNano(start), end*getZerosForEpochNano(end)) - selectLabels, err := 
getSelectLabels(mq.AggregateOperator, mq.GroupBy, fields) + selectLabels, err := getSelectLabels(mq.AggregateOperator, mq.GroupBy) if err != nil { return "", err } @@ -225,18 +189,14 @@ func buildLogsQuery(start, end, step int64, mq *v3.BuilderQuery, fields map[stri "from signoz_logs.distributed_logs " + "where " + timeFilter + "%s " + "group by %s%s " + - "order by %sts" + "order by %s" groupBy := groupByAttributeKeyTags(mq.GroupBy...) - orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy) + orderBy := orderByAttributeKeyTags(panelType, mq.AggregateOperator, mq.OrderBy, mq.GroupBy) aggregationKey := "" if mq.AggregateAttribute.Key != "" { - enrichedAttribute := enrichFieldWithMetadata(mq.AggregateAttribute, fields) - aggregationKey, err = getClickhouseColumnName(enrichedAttribute, fields) - if err != nil { - return "", err - } + aggregationKey = getClickhouseColumnName(mq.AggregateAttribute) } switch mq.AggregateOperator { @@ -271,9 +231,8 @@ func buildLogsQuery(start, end, step int64, mq *v3.BuilderQuery, fields map[stri return query, nil case v3.AggregateOperatorCount: if mq.AggregateAttribute.Key != "" { - field := enrichFieldWithMetadata(mq.AggregateAttribute, fields) - columnType := getClickhouseLogsColumnType(field.Type) - columnDataType := getClickhouseLogsColumnDataType(field.DataType) + columnType := getClickhouseLogsColumnType(mq.AggregateAttribute.Type) + columnDataType := getClickhouseLogsColumnDataType(mq.AggregateAttribute.DataType) filterSubQuery = fmt.Sprintf("%s AND has(%s_%s_key, '%s')", filterSubQuery, columnType, columnDataType, mq.AggregateAttribute.Key) } @@ -285,8 +244,8 @@ func buildLogsQuery(start, end, step int64, mq *v3.BuilderQuery, fields map[stri query := fmt.Sprintf(queryTmpl, step, op, filterSubQuery, groupBy, having, orderBy) return query, nil case v3.AggregateOperatorNoOp: - queryTmpl := constants.LogsSQLSelect + "from signoz_logs.distributed_logs where %s %s" - query := fmt.Sprintf(queryTmpl, timeFilter, filterSubQuery) + queryTmpl := constants.LogsSQLSelect + "from signoz_logs.distributed_logs where %s %sorder by %s" + query := fmt.Sprintf(queryTmpl, timeFilter, filterSubQuery, orderBy) return query, nil default: return "", fmt.Errorf("unsupported aggregate operator") @@ -309,19 +268,25 @@ func groupByAttributeKeyTags(tags ...v3.AttributeKey) string { } // orderBy returns a string of comma separated tags for order by clause +// if there are remaining items which are not present in tags they are also added // if the order is not specified, it defaults to ASC -func orderBy(items []v3.OrderBy, tags []string) string { +func orderBy(panelType v3.PanelType, items []v3.OrderBy, tags []string) []string { var orderBy []string + + // create a lookup + addedToOrderBy := map[string]bool{} + itemsLookup := map[string]v3.OrderBy{} + + for i := 0; i < len(items); i++ { + addedToOrderBy[items[i].ColumnName] = false + itemsLookup[items[i].ColumnName] = items[i] + } + for _, tag := range tags { - found := false - for _, item := range items { - if item.ColumnName == tag { - found = true - orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order)) - break - } - } - if !found { + if item, ok := itemsLookup[tag]; ok { + orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order)) + addedToOrderBy[item.ColumnName] = true + } else { orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag)) } } @@ -330,20 +295,48 @@ func orderBy(items []v3.OrderBy, tags []string) string { for _, item := range items { if item.ColumnName == 
constants.SigNozOrderByValue { orderBy = append(orderBy, fmt.Sprintf("value %s", item.Order)) + addedToOrderBy[item.ColumnName] = true } } - return strings.Join(orderBy, ",") + + // add the remaining items + if panelType == v3.PanelTypeList { + for _, item := range items { + // since these are not present in tags we will have to select them correctly + // for list view there is no need to check if it was added since they wont be added yet but this is just for safety + if !addedToOrderBy[item.ColumnName] { + attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn} + name := getClickhouseColumnName(attr) + orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order)) + } + } + } + return orderBy } -func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string { +func orderByAttributeKeyTags(panelType v3.PanelType, aggregatorOperator v3.AggregateOperator, items []v3.OrderBy, tags []v3.AttributeKey) string { var groupTags []string for _, tag := range tags { groupTags = append(groupTags, tag.Key) } - str := orderBy(items, groupTags) - if len(str) > 0 { - str = str + "," + orderByArray := orderBy(panelType, items, groupTags) + + found := false + for i := 0; i < len(orderByArray); i++ { + if strings.Compare(orderByArray[i], constants.TIMESTAMP) == 0 { + orderByArray[i] = "ts" + break + } } + if !found { + if aggregatorOperator == v3.AggregateOperatorNoOp { + orderByArray = append(orderByArray, constants.TIMESTAMP) + } else { + orderByArray = append(orderByArray, "ts") + } + } + + str := strings.Join(orderByArray, ",") return str } @@ -376,22 +369,16 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v return query, nil } -func addLimitToQuery(query string, limit uint64, panelType v3.PanelType) string { - if limit == 0 { - limit = 100 - } - if panelType == v3.PanelTypeList { - return fmt.Sprintf("%s LIMIT %d", query, limit) - } - return query +func addLimitToQuery(query string, limit uint64) string { + return fmt.Sprintf("%s LIMIT %d", query, limit) } func addOffsetToQuery(query string, offset uint64) string { return fmt.Sprintf("%s OFFSET %d", query, offset) } -func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, fields map[string]v3.AttributeKey) (string, error) { - query, err := buildLogsQuery(start, end, mq.StepInterval, mq, fields) +func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery) (string, error) { + query, err := buildLogsQuery(panelType, start, end, mq.StepInterval, mq) if err != nil { return "", err } @@ -399,10 +386,16 @@ func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.Pan query, err = reduceQuery(query, mq.ReduceTo, mq.AggregateOperator) } - query = addLimitToQuery(query, mq.Limit, panelType) - - if mq.Offset != 0 { - query = addOffsetToQuery(query, mq.Offset) + if panelType == v3.PanelTypeList { + if mq.PageSize > 0 { + if mq.Limit > 0 && mq.Offset > mq.Limit { + return "", fmt.Errorf("max limit exceeded") + } + query = addLimitToQuery(query, mq.PageSize) + query = addOffsetToQuery(query, mq.Offset) + } else { + query = addLimitToQuery(query, mq.Limit) + } } return query, err diff --git a/pkg/query-service/app/logs/v3/query_builder_test.go b/pkg/query-service/app/logs/v3/query_builder_test.go index 11b2c20770..5103c7a177 100644 --- a/pkg/query-service/app/logs/v3/query_builder_test.go +++ 
b/pkg/query-service/app/logs/v3/query_builder_test.go @@ -43,8 +43,7 @@ var testGetClickhouseColumnNameData = []struct { func TestGetClickhouseColumnName(t *testing.T) { for _, tt := range testGetClickhouseColumnNameData { Convey("testGetClickhouseColumnNameData", t, func() { - columnName, err := getClickhouseColumnName(tt.AttributeKey, map[string]v3.AttributeKey{}) - So(err, ShouldBeNil) + columnName := getClickhouseColumnName(tt.AttributeKey) So(columnName, ShouldEqual, tt.ExpectedColumnName) }) } @@ -83,12 +82,6 @@ var testGetSelectLabelsData = []struct { GroupByTags: []v3.AttributeKey{{Key: "host", IsColumn: true}}, SelectLabels: ", host as host", }, - { - Name: "trace_id field with missing meta", - AggregateOperator: v3.AggregateOperatorCount, - GroupByTags: []v3.AttributeKey{{Key: "trace_id"}}, - SelectLabels: ", trace_id as trace_id", - }, { Name: "trace_id field as an attribute", AggregateOperator: v3.AggregateOperatorCount, @@ -100,7 +93,7 @@ var testGetSelectLabelsData = []struct { func TestGetSelectLabels(t *testing.T) { for _, tt := range testGetSelectLabelsData { Convey("testGetSelectLabelsData", t, func() { - selectLabels, err := getSelectLabels(tt.AggregateOperator, tt.GroupByTags, map[string]v3.AttributeKey{}) + selectLabels, err := getSelectLabels(tt.AggregateOperator, tt.GroupByTags) So(err, ShouldBeNil) So(selectLabels, ShouldEqual, tt.SelectLabels) }) @@ -187,20 +180,6 @@ var timeSeriesFilterQueryData = []struct { }}, ExpectedFilter: " AND attributes_string_value[indexOf(attributes_string_key, 'host')] NOT ILIKE '%102.%'", }, - { - Name: "Test no metadata", - FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "host"}, Value: "102.", Operator: "ncontains"}, - }}, - ExpectedFilter: " AND attributes_string_value[indexOf(attributes_string_key, 'host')] NOT ILIKE '%102.%'", - }, - { - Name: "Test no metadata number", - FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "bytes"}, Value: 102, Operator: "="}, - }}, - ExpectedFilter: " AND attributes_string_value[indexOf(attributes_string_key, 'bytes')] = '102'", - }, { Name: "Test groupBy", FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ @@ -220,32 +199,15 @@ var timeSeriesFilterQueryData = []struct { { Name: "Wrong data", FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "bytes"}, Value: true, Operator: "="}, + {Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeFloat64}, Value: true, Operator: "="}, }}, - Fields: map[string]v3.AttributeKey{"bytes": {Key: "bytes", DataType: v3.AttributeKeyDataTypeFloat64, Type: v3.AttributeKeyTypeTag}}, - Error: "failed to validate and cast value for bytes: invalid data type, expected float, got bool", - }, - { - Name: "Cast data", - FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "bytes"}, Value: 102, Operator: "="}, - }}, - Fields: map[string]v3.AttributeKey{"bytes": {Key: "bytes", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}}, - ExpectedFilter: " AND attributes_int64_value[indexOf(attributes_int64_key, 'bytes')] = 102", - }, - { - Name: "Test top level field w/o metadata", - FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "body"}, Value: "%test%", Operator: "like"}, - }}, - ExpectedFilter: " AND body ILIKE '%test%'", + Error: "failed to validate and cast value for bytes: invalid 
data type, expected float, got bool", }, { Name: "Test top level field with metadata", FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ {Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "%test%", Operator: "like"}, }}, - Fields: map[string]v3.AttributeKey{"body": {Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, ExpectedFilter: " AND attributes_string_value[indexOf(attributes_string_key, 'body')] ILIKE '%test%'", }, } @@ -253,7 +215,7 @@ var timeSeriesFilterQueryData = []struct { func TestBuildLogsTimeSeriesFilterQuery(t *testing.T) { for _, tt := range timeSeriesFilterQueryData { Convey("TestBuildLogsTimeSeriesFilterQuery", t, func() { - query, err := buildLogsTimeSeriesFilterQuery(tt.FilterSet, tt.GroupBy, tt.Fields) + query, err := buildLogsTimeSeriesFilterQuery(tt.FilterSet, tt.GroupBy) if tt.Error != "" { So(err.Error(), ShouldEqual, tt.Error) } else { @@ -267,6 +229,7 @@ func TestBuildLogsTimeSeriesFilterQuery(t *testing.T) { var testBuildLogsQueryData = []struct { Name string + PanelType v3.PanelType Start int64 End int64 Step int64 @@ -277,10 +240,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery string }{ { - Name: "Test aggregate count on select field", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count on select field", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateOperator: v3.AggregateOperatorCount, @@ -290,10 +254,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) group by ts order by ts", }, { - Name: "Test aggregate count on a attribute", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count on a attribute", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "user_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -304,10 +269,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND has(attributes_string_key, 'user_name') group by ts order by ts", }, { - Name: "Test aggregate count on a with filter", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count on a with filter", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "user_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -321,10 +287,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND 
attributes_float64_value[indexOf(attributes_float64_key, 'bytes')] > 100.000000 AND has(attributes_string_key, 'user_name') group by ts order by ts", }, { - Name: "Test aggregate count distinct and order by value", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count distinct and order by value", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true}, @@ -336,10 +303,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(name))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) group by ts order by value ASC,ts", }, { - Name: "Test aggregate count distinct on non selected field", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count distinct on non selected field", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -350,10 +318,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) group by ts order by ts", }, { - Name: "Test aggregate count distinct with filter and groupBy", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count distinct with filter and groupBy", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true}, @@ -365,7 +334,7 @@ var testBuildLogsQueryData = []struct { }, }, GroupBy: []v3.AttributeKey{{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, - OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}, {ColumnName: "ts", Order: "ASC"}}, + OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}, {ColumnName: "ts", Order: "ASC", Key: "ts", IsColumn: true}}, }, TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + @@ -378,10 +347,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate count with multiple filter,groupBy and orderBy", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate count with multiple filter,groupBy and orderBy", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true}, @@ -408,10 +378,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,x ASC,ts", }, { - Name: "Test aggregate avg", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate avg", + PanelType: v3.PanelTypeGraph, + Start: 
1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", DataType: v3.AttributeKeyDataTypeFloat64, Type: v3.AttributeKeyTypeTag}, @@ -422,7 +393,7 @@ var testBuildLogsQueryData = []struct { }, }, GroupBy: []v3.AttributeKey{{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, - OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}, {ColumnName: "x", Order: "ASC"}}, + OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}, {ColumnName: "x", Order: "ASC", Key: "x", IsColumn: true}}, }, TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + @@ -436,10 +407,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate sum", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate sum", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", IsColumn: true}, @@ -464,10 +436,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate min", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate min", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", IsColumn: true}, @@ -492,10 +465,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate max", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate max", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", IsColumn: true}, @@ -520,10 +494,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate PXX", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate PXX", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", IsColumn: true}, @@ -544,10 +519,11 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test aggregate RateSum", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate RateSum", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", IsColumn: true}, @@ -565,10 +541,11 @@ var testBuildLogsQueryData = []struct { "group by method,ts order by method ASC,ts", }, { - Name: "Test aggregate rate", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate rate", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeFloat64}, @@ -587,10 +564,11 @@ var testBuildLogsQueryData = []struct { "order 
by method ASC,ts", }, { - Name: "Test aggregate RateSum without materialized column", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate RateSum without materialized column", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeFloat64}, @@ -610,29 +588,29 @@ var testBuildLogsQueryData = []struct { "order by method ASC,ts", }, { - Name: "Test Noop", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test Noop", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ SelectColumns: []v3.AttributeKey{}, QueryName: "A", AggregateOperator: v3.AggregateOperatorNoOp, Expression: "A", Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, - // GroupBy: []v3.AttributeKey{{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, - // OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}}, }, ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," + "CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," + "CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " + - "from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) ", + "from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by timestamp", }, { - Name: "Test aggregate with having clause", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate with having clause", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -650,10 +628,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) group by ts having value > 10 order by ts", }, { - Name: "Test aggregate with having clause and filters", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test aggregate with having clause and filters", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -675,10 +654,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value 
from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' group by ts having value > 10 order by ts", }, { - Name: "Test top level key", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test top level key", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -700,10 +680,11 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(attributes_string_value[indexOf(attributes_string_key, 'name')]))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND body ILIKE '%test%' group by ts having value > 10 order by ts", }, { - Name: "Test attribute with same name as top level key", - Start: 1680066360726210000, - End: 1680066458000000000, - Step: 60, + Name: "Test attribute with same name as top level key", + PanelType: v3.PanelTypeGraph, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, BuilderQuery: &v3.BuilderQuery{ QueryName: "A", AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, @@ -729,7 +710,7 @@ var testBuildLogsQueryData = []struct { func TestBuildLogsQuery(t *testing.T) { for _, tt := range testBuildLogsQueryData { Convey("TestBuildLogsQuery", t, func() { - query, err := buildLogsQuery(tt.Start, tt.End, tt.Step, tt.BuilderQuery, map[string]v3.AttributeKey{}) + query, err := buildLogsQuery(tt.PanelType, tt.Start, tt.End, tt.Step, tt.BuilderQuery) So(err, ShouldBeNil) So(query, ShouldEqual, tt.ExpectedQuery) @@ -768,13 +749,15 @@ func TestGetZerosForEpochNano(t *testing.T) { } var testOrderBy = []struct { - Name string - Items []v3.OrderBy - Tags []string - Result string + Name string + PanelType v3.PanelType + Items []v3.OrderBy + Tags []string + Result []string }{ { - Name: "Test 1", + Name: "Test 1", + PanelType: v3.PanelTypeGraph, Items: []v3.OrderBy{ { ColumnName: "name", @@ -786,10 +769,11 @@ var testOrderBy = []struct { }, }, Tags: []string{"name"}, - Result: "name asc,value desc", + Result: []string{"name asc", "value desc"}, }, { - Name: "Test 2", + Name: "Test 2", + PanelType: v3.PanelTypeGraph, Items: []v3.OrderBy{ { ColumnName: "name", @@ -801,10 +785,11 @@ var testOrderBy = []struct { }, }, Tags: []string{"name", "bytes"}, - Result: "name asc,bytes asc", + Result: []string{"name asc", "bytes asc"}, }, { - Name: "Test 3", + Name: "Test 3", + PanelType: v3.PanelTypeList, Items: []v3.OrderBy{ { ColumnName: "name", @@ -820,18 +805,42 @@ var testOrderBy = []struct { }, }, Tags: []string{"name", "bytes"}, - Result: "name asc,bytes asc,value asc", + Result: []string{"name asc", "bytes asc", "value asc"}, + }, + { + Name: "Test 4", + PanelType: v3.PanelTypeList, + Items: []v3.OrderBy{ + { + ColumnName: "name", + Order: "asc", + }, + { + ColumnName: constants.SigNozOrderByValue, + Order: "asc", + }, + { + ColumnName: "bytes", + Order: "asc", + }, + { + ColumnName: "response_time", + Order: "desc", + Key: "response_time", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + }, + }, + Tags: []string{"name", "bytes"}, 
+ Result: []string{"name asc", "bytes asc", "value asc", "attributes_string_value[indexOf(attributes_string_key, 'response_time')] desc"}, }, } func TestOrderBy(t *testing.T) { for _, tt := range testOrderBy { Convey("testOrderBy", t, func() { - res := orderBy(tt.Items, tt.Tags) - So(res, ShouldEqual, tt.Result) - - // So(multiplier, ShouldEqual, tt.Multiplier) - // So(tt.Epoch*multiplier, ShouldEqual, tt.Result) + res := orderBy(tt.PanelType, tt.Items, tt.Tags) + So(res, ShouldResemble, tt.Result) }) } } diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go index 3a47ccf3a8..9603b00ecd 100644 --- a/pkg/query-service/app/querier/querier.go +++ b/pkg/query-service/app/querier/querier.go @@ -223,7 +223,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series { return mergedSeries } -func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, fields map[string]v3.AttributeKey, keys map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) { +func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) { cacheKeys := q.keyGenerator.GenerateKeys(params) @@ -235,7 +235,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa // TODO: add support for logs and traces if builderQuery.DataSource == v3.DataSourceLogs { - query, err := logsV3.PrepareLogsQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, fields) + query, err := logsV3.PrepareLogsQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery) if err != nil { errQueriesByName[queryName] = err.Error() continue @@ -402,14 +402,14 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang return seriesList, err, errQueriesByName } -func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, fields map[string]v3.AttributeKey, keys map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) { +func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) { var seriesList []*v3.Series var err error var errQueriesByName map[string]string if params.CompositeQuery != nil { switch params.CompositeQuery.QueryType { case v3.QueryTypeBuilder: - seriesList, err, errQueriesByName = q.runBuilderQueries(ctx, params, fields, keys) + seriesList, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys) case v3.QueryTypePromQL: seriesList, err, errQueriesByName = q.runPromQueries(ctx, params) case v3.QueryTypeClickHouseSQL: diff --git a/pkg/query-service/app/querier/querier_test.go b/pkg/query-service/app/querier/querier_test.go index a9f7d3d8ae..51293ad493 100644 --- a/pkg/query-service/app/querier/querier_test.go +++ b/pkg/query-service/app/querier/querier_test.go @@ -492,7 +492,7 @@ func TestQueryRange(t *testing.T) { } for i, param := range params { - _, err, errByName := q.QueryRange(context.Background(), param, nil, nil) + _, err, errByName := q.QueryRange(context.Background(), param, nil) if err != nil { t.Errorf("expected no error, got %s", err) } diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index ee7d7c8ef0..038b06f312 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ 
b/pkg/query-service/app/queryBuilder/query_builder.go @@ -37,7 +37,7 @@ var SupportedFunctions = []string{ var EvalFuncs = map[string]govaluate.ExpressionFunction{} type prepareTracesQueryFunc func(start, end int64, queryType v3.QueryType, panelType v3.PanelType, bq *v3.BuilderQuery, keys map[string]v3.AttributeKey) (string, error) -type prepareLogsQueryFunc func(start, end int64, queryType v3.QueryType, panelType v3.PanelType, bq *v3.BuilderQuery, fields map[string]v3.AttributeKey) (string, error) +type prepareLogsQueryFunc func(start, end int64, queryType v3.QueryType, panelType v3.PanelType, bq *v3.BuilderQuery) (string, error) type prepareMetricQueryFunc func(start, end int64, queryType v3.QueryType, panelType v3.PanelType, bq *v3.BuilderQuery) (string, error) type QueryBuilder struct { @@ -141,8 +141,8 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in switch query.DataSource { case v3.DataSourceTraces: keys := map[string]v3.AttributeKey{} - if len(args) == 2 { - keys = args[1].(map[string]v3.AttributeKey) + if len(args) > 0 { + keys = args[0].(map[string]v3.AttributeKey) } queryString, err := qb.options.BuildTraceQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, keys) if err != nil { @@ -150,11 +150,7 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in } queries[queryName] = queryString case v3.DataSourceLogs: - fields := map[string]v3.AttributeKey{} - if len(args) == 1 { - fields = args[0].(map[string]v3.AttributeKey) - } - queryString, err := qb.options.BuildLogQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, fields) + queryString, err := qb.options.BuildLogQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query) if err != nil { return nil, err } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index ee7f858dbc..853a1b5c09 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -291,3 +291,5 @@ var StaticFieldsLogsV3 = map[string]v3.AttributeKey{ } const SigNozOrderByValue = "#SIGNOZ_VALUE" + +const TIMESTAMP = "timestamp" diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go index c6e0d7e25f..c4d9bbdbb6 100644 --- a/pkg/query-service/interfaces/interface.go +++ b/pkg/query-service/interfaces/interface.go @@ -96,7 +96,7 @@ type Reader interface { } type Querier interface { - QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey, map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) + QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey) ([]*v3.Series, error, map[string]string) // test helpers QueriesExecuted() []string diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go index a29a6cd93f..7d3028b3d8 100644 --- a/pkg/query-service/model/v3/v3.go +++ b/pkg/query-service/model/v3/v3.go @@ -298,7 +298,7 @@ func (a AttributeKey) Validate() error { if a.IsColumn { switch a.Type { - case AttributeKeyTypeResource, AttributeKeyTypeTag: + case AttributeKeyTypeResource, AttributeKeyTypeTag, AttributeKeyTypeUnspecified: break default: return fmt.Errorf("invalid attribute type: %s", a.Type) @@ -545,8 +545,12 @@ func (f *FilterItem) CacheKey() string { } type OrderBy struct { - ColumnName string `json:"columnName"` - Order string `json:"order"` + ColumnName string `json:"columnName"` + 
Order string `json:"order"` + Key string `json:"-"` + DataType AttributeKeyDataType `json:"-"` + Type AttributeKeyType `json:"-"` + IsColumn bool `json:"-"` } type Having struct {
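// --- Editor's note (illustrative sketch, not part of the patch above) ---
// A minimal, self-contained example of how the list-view pagination branch added to
// PrepareLogsQuery is intended to behave. The helper below is hypothetical: it inlines
// addLimitToQuery/addOffsetToQuery and takes plain uint64 arguments in place of the
// v3.BuilderQuery fields (Limit, Offset, PageSize); the sample query string and values
// are made up for illustration.
package main

import "fmt"

// paginateListQuery mirrors the PanelTypeList branch of the patch: when PageSize is set,
// the query is paginated with LIMIT <pageSize> OFFSET <offset>, and a request whose offset
// lies beyond the configured overall limit is rejected; otherwise only LIMIT <limit> applies.
func paginateListQuery(query string, limit, offset, pageSize uint64) (string, error) {
	if pageSize > 0 {
		if limit > 0 && offset > limit {
			return "", fmt.Errorf("max limit exceeded")
		}
		return fmt.Sprintf("%s LIMIT %d OFFSET %d", query, pageSize, offset), nil
	}
	return fmt.Sprintf("%s LIMIT %d", query, limit), nil
}

func main() {
	base := "SELECT ... from signoz_logs.distributed_logs where ... order by timestamp"

	// Third page of a list view with 100 rows per page, capped at 1000 rows overall.
	q, _ := paginateListQuery(base, 1000, 200, 100)
	fmt.Println(q) // ... LIMIT 100 OFFSET 200

	// Paging past the overall limit returns an error instead of a query.
	_, err := paginateListQuery(base, 1000, 1500, 100)
	fmt.Println(err) // max limit exceeded
}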