diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 88380538d7..504bb70581 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -40,7 +40,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.11.3 + image: signoz/query-service:0.11.4 command: ["-config=/root/config/prometheus.yml"] # ports: # - "6060:6060" # pprof port @@ -70,7 +70,7 @@ services: - clickhouse frontend: - image: signoz/frontend:0.11.3 + image: signoz/frontend:0.11.4 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 1f877ffc78..7357c9df5e 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -39,7 +39,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:0.11.3 + image: signoz/query-service:0.11.4 container_name: query-service command: ["-config=/root/config/prometheus.yml"] # ports: @@ -69,7 +69,7 @@ services: condition: service_healthy frontend: - image: signoz/frontend:0.11.3 + image: signoz/frontend:0.11.4 container_name: frontend restart: on-failure depends_on: diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index a6497b615e..85bec52122 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -114,6 +114,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router) { router.HandleFunc("/api/v1/invite/{token}", baseapp.OpenAccess(ah.getInvite)).Methods(http.MethodGet) router.HandleFunc("/api/v1/register", baseapp.OpenAccess(ah.registerUser)).Methods(http.MethodPost) router.HandleFunc("/api/v1/login", baseapp.OpenAccess(ah.loginUser)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/traces/{traceId}", baseapp.ViewAccess(ah.searchTraces)).Methods(http.MethodGet) + router.HandleFunc("/api/v2/metrics/query_range", baseapp.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost) + ah.APIHandler.RegisterRoutes(router) } diff --git a/ee/query-service/app/api/metrics.go b/ee/query-service/app/api/metrics.go new file mode 100644 index 0000000000..4b1a8e49dd --- /dev/null +++ b/ee/query-service/app/api/metrics.go @@ -0,0 +1,236 @@ +package api + +import ( + "bytes" + "fmt" + "net/http" + "sync" + "text/template" + "time" + + "go.signoz.io/signoz/pkg/query-service/app/metrics" + "go.signoz.io/signoz/pkg/query-service/app/parser" + "go.signoz.io/signoz/pkg/query-service/constants" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate" + "go.uber.org/zap" +) + +func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) { + if !ah.CheckFeature(basemodel.CustomMetricsFunction) { + zap.S().Info("CustomMetricsFunction feature is not enabled in this plan") + ah.APIHandler.QueryRangeMetricsV2(w, r) + return + } + metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r) + + if apiErrorObj != nil { + zap.S().Errorf(apiErrorObj.Err.Error()) + RespondError(w, apiErrorObj, nil) + return + } + + // prometheus instant query needs same timestamp + if 
metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE && + metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM { + metricsQueryRangeParams.Start = metricsQueryRangeParams.End + } + + // round up the end to nearest multiple + if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER { + end := (metricsQueryRangeParams.End) / 1000 + step := metricsQueryRangeParams.Step + metricsQueryRangeParams.End = (end / step * step) * 1000 + } + + type channelResult struct { + Series []*basemodel.Series + TableName string + Err error + Name string + Query string + } + + execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) { + var seriesList []*basemodel.Series + var tableName []string + ch := make(chan channelResult, len(queries)) + var wg sync.WaitGroup + + for name, query := range queries { + wg.Add(1) + go func(name, query string) { + defer wg.Done() + seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query) + for _, series := range seriesList { + series.QueryName = name + } + + if err != nil { + ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query} + return + } + ch <- channelResult{Series: seriesList, TableName: tableName} + }(name, query) + } + + wg.Wait() + close(ch) + + var errs []error + errQuriesByName := make(map[string]string) + // read values from the channel + for r := range ch { + if r.Err != nil { + errs = append(errs, r.Err) + errQuriesByName[r.Name] = r.Query + continue + } + seriesList = append(seriesList, r.Series...) + tableName = append(tableName, r.TableName) + } + if len(errs) != 0 { + return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName + } + return seriesList, tableName, nil, nil + } + + execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) { + var seriesList []*basemodel.Series + ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries)) + var wg sync.WaitGroup + + for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries { + if query.Disabled { + continue + } + wg.Add(1) + go func(name string, query *basemodel.PromQuery) { + var seriesList []*basemodel.Series + defer wg.Done() + tmpl := template.New("promql-query") + tmpl, tmplErr := tmpl.Parse(query.Query) + if tmplErr != nil { + ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query} + return + } + var queryBuf bytes.Buffer + tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables) + if tmplErr != nil { + ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query} + return + } + query.Query = queryBuf.String() + queryModel := basemodel.QueryRangeParams{ + Start: time.UnixMilli(metricsQueryRangeParams.Start), + End: time.UnixMilli(metricsQueryRangeParams.End), + Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)), + Query: query.Query, + } + promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel) + if err != nil { + ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query} + return + } + matrix, _ := promResult.Matrix() + for _, v := range matrix { + var s basemodel.Series + s.QueryName = name + 
s.Labels = v.Metric.Copy().Map() + for _, p := range v.Points { + s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.V}) + } + seriesList = append(seriesList, &s) + } + ch <- channelResult{Series: seriesList} + }(name, query) + } + + wg.Wait() + close(ch) + + var errs []error + errQuriesByName := make(map[string]string) + // read values from the channel + for r := range ch { + if r.Err != nil { + errs = append(errs, r.Err) + errQuriesByName[r.Name] = r.Query + continue + } + seriesList = append(seriesList, r.Series...) + } + if len(errs) != 0 { + return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName + } + return seriesList, nil, nil + } + + var seriesList []*basemodel.Series + var tableName []string + var err error + var errQuriesByName map[string]string + switch metricsQueryRangeParams.CompositeMetricQuery.QueryType { + case basemodel.QUERY_BUILDER: + runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME) + if runQueries.Err != nil { + RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil) + return + } + seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries) + + case basemodel.CLICKHOUSE: + queries := make(map[string]string) + + for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries { + if chQuery.Disabled { + continue + } + tmpl := template.New("clickhouse-query") + tmpl, err := tmpl.Parse(chQuery.Query) + if err != nil { + RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil) + return + } + var query bytes.Buffer + + // replace go template variables + querytemplate.AssignReservedVars(metricsQueryRangeParams) + + err = tmpl.Execute(&query, metricsQueryRangeParams.Variables) + if err != nil { + RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil) + return + } + queries[name] = query.String() + } + seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries) + case basemodel.PROM: + seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams) + default: + err = fmt.Errorf("invalid query type") + RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName) + return + } + + if err != nil { + apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err} + RespondError(w, apiErrObj, errQuriesByName) + return + } + if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE && + len(seriesList) > 1 && + (metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER || + metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) { + RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil) + return + } + + type ResponseFormat struct { + ResultType string `json:"resultType"` + Result []*basemodel.Series `json:"result"` + TableName []string `json:"tableName"` + } + resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName} + ah.Respond(w, resp) +} diff --git a/ee/query-service/app/api/traces.go b/ee/query-service/app/api/traces.go new file mode 100644 index 0000000000..22d66f7a82 --- /dev/null +++ b/ee/query-service/app/api/traces.go @@ -0,0 +1,39 @@ +package api + +import ( + "net/http" + "strconv" + + "go.signoz.io/signoz/ee/query-service/app/db" + 
"go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/model" + baseapp "go.signoz.io/signoz/pkg/query-service/app" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) { + + if !ah.CheckFeature(basemodel.SmartTraceDetail) { + zap.S().Info("SmartTraceDetail feature is not enabled in this plan") + ah.APIHandler.SearchTraces(w, r) + return + } + traceId, spanId, levelUpInt, levelDownInt, err := baseapp.ParseSearchTracesParams(r) + if err != nil { + RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params") + return + } + spanLimit, err := strconv.Atoi(constants.SpanLimitStr) + if err != nil { + zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err) + return + } + result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm) + if ah.HandleError(w, err, http.StatusBadRequest) { + return + } + + ah.WriteJSON(w, r, result) + +} diff --git a/ee/query-service/app/db/metrics.go b/ee/query-service/app/db/metrics.go new file mode 100644 index 0000000000..3bafc6a638 --- /dev/null +++ b/ee/query-service/app/db/metrics.go @@ -0,0 +1,401 @@ +package db + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + "reflect" + "regexp" + "sort" + "strings" + "time" + + "go.signoz.io/signoz/ee/query-service/model" + baseconst "go.signoz.io/signoz/pkg/query-service/constants" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.signoz.io/signoz/pkg/query-service/utils" + "go.uber.org/zap" +) + +// GetMetricResultEE runs the query and returns list of time series +func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) { + + defer utils.Elapsed("GetMetricResult")() + zap.S().Infof("Executing metric result query: %s", query) + + var hash string + // If getSubTreeSpans function is used in the clickhouse query + if strings.Index(query, "getSubTreeSpans(") != -1 { + var err error + query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash) + if err == fmt.Errorf("No spans found for the given query") { + return nil, "", nil + } + if err != nil { + return nil, "", err + } + } + + rows, err := r.conn.Query(ctx, query) + zap.S().Debug(query) + if err != nil { + zap.S().Debug("Error in processing query: ", err) + return nil, "", fmt.Errorf("error in processing query") + } + + var ( + columnTypes = rows.ColumnTypes() + columnNames = rows.Columns() + vars = make([]interface{}, len(columnTypes)) + ) + for i := range columnTypes { + vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() + } + // when group by is applied, each combination of cartesian product + // of attributes is separate series. each item in metricPointsMap + // represent a unique series. + metricPointsMap := make(map[string][]basemodel.MetricPoint) + // attribute key-value pairs for each group selection + attributesMap := make(map[string]map[string]string) + + defer rows.Close() + for rows.Next() { + if err := rows.Scan(vars...); err != nil { + return nil, "", err + } + var groupBy []string + var metricPoint basemodel.MetricPoint + groupAttributes := make(map[string]string) + // Assuming that the end result row contains a timestamp, value and option labels + // Label key and value are both strings. 
+ for idx, v := range vars { + colName := columnNames[idx] + switch v := v.(type) { + case *string: + // special case for returning all labels + if colName == "fullLabels" { + var metric map[string]string + err := json.Unmarshal([]byte(*v), &metric) + if err != nil { + return nil, "", err + } + for key, val := range metric { + groupBy = append(groupBy, val) + groupAttributes[key] = val + } + } else { + groupBy = append(groupBy, *v) + groupAttributes[colName] = *v + } + case *time.Time: + metricPoint.Timestamp = v.UnixMilli() + case *float64: + metricPoint.Value = *v + case **float64: + // ch seems to return this type when column is derived from + // SELECT count(*)/ SELECT count(*) + floatVal := *v + if floatVal != nil { + metricPoint.Value = *floatVal + } + case *float32: + float32Val := float32(*v) + metricPoint.Value = float64(float32Val) + case *uint8, *uint64, *uint16, *uint32: + if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok { + metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint()) + } else { + groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())) + groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()) + } + case *int8, *int16, *int32, *int64: + if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok { + metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int()) + } else { + groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())) + groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()) + } + default: + zap.S().Errorf("invalid var found in metric builder query result", v, colName) + } + } + sort.Strings(groupBy) + key := strings.Join(groupBy, "") + attributesMap[key] = groupAttributes + metricPointsMap[key] = append(metricPointsMap[key], metricPoint) + } + + var seriesList []*basemodel.Series + for key := range metricPointsMap { + points := metricPointsMap[key] + // first point in each series could be invalid since the + // aggregations are applied with point from prev series + if len(points) != 0 && len(points) > 1 { + points = points[1:] + } + attributes := attributesMap[key] + series := basemodel.Series{Labels: attributes, Points: points} + seriesList = append(seriesList, &series) + } + // err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash) + // if err != nil { + // zap.S().Error("Error in dropping temporary table: ", err) + // return nil, err + // } + if hash == "" { + return seriesList, hash, nil + } else { + return seriesList, "getSubTreeSpans" + hash, nil + } +} + +func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) { + + zap.S().Debugf("Executing getSubTreeSpans function") + + // str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;` + + // process the query to fetch subTree query + var subtreeInput string + query, subtreeInput, hash = processQuery(query, hash) + + err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash) + if err != nil { + zap.S().Error("Error in dropping temporary table: ", err) + return query, hash, err + } + + // Create 
temporary table to store the getSubTreeSpans() results + zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash) + err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)") + if err != nil { + zap.S().Error("Error in creating temporary table: ", err) + return query, hash, err + } + + var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse + getSpansSubQuery := subtreeInput + // Execute the subTree query + zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery) + err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery) + + // zap.S().Info(getSpansSubQuery) + + if err != nil { + zap.S().Debug("Error in processing sql query: ", err) + return query, hash, fmt.Errorf("Error in processing sql query") + } + + var searchScanResponses []basemodel.SearchSpanDBResponseItem + + // TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later. + // Fetch all the spans from of same TraceID so that we can build subtree + modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable) + + if len(getSpansSubQueryDBResponses) == 0 { + return query, hash, fmt.Errorf("No spans found for the given query") + } + zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery) + err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID) + + if err != nil { + zap.S().Debug("Error in processing sql query: ", err) + return query, hash, fmt.Errorf("Error in processing sql query") + } + + // Process model to fetch the spans + zap.S().Debugf("Processing model to fetch the spans") + searchSpanResponses := []basemodel.SearchSpanResponseItem{} + for _, item := range searchScanResponses { + var jsonItem basemodel.SearchSpanResponseItem + json.Unmarshal([]byte(item.Model), &jsonItem) + jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano()) + if jsonItem.Events == nil { + jsonItem.Events = []string{} + } + searchSpanResponses = append(searchSpanResponses, jsonItem) + } + // Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash + // Use map to store pointer to the spans to avoid duplicates and save memory + zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash) + + treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses) + if err != nil { + zap.S().Error("Error in getSubTreeAlgorithm function: ", err) + return query, hash, err + } + zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash) + statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash)) + if err != nil { + zap.S().Error("Error in preparing batch statement: ", err) + return query, hash, err + } + for _, span := range treeSearchResponse { + 
var parentID string + if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" { + parentID = span.References[0].SpanId + } + err = statement.Append( + time.Unix(0, int64(span.TimeUnixNano)), + span.TraceID, + span.SpanID, + parentID, + span.RootSpanID, + span.ServiceName, + span.Name, + span.RootName, + uint64(span.DurationNano), + int8(span.Kind), + span.TagMap, + span.Events, + ) + if err != nil { + zap.S().Debug("Error in processing sql query: ", err) + return query, hash, err + } + } + zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash) + err = statement.Send() + if err != nil { + zap.S().Error("Error in sending statement: ", err) + return query, hash, err + } + return query, hash, nil +} + +func processQuery(query string, hash string) (string, string, string) { + re3 := regexp.MustCompile(`getSubTreeSpans`) + + submatchall3 := re3.FindAllStringIndex(query, -1) + getSubtreeSpansMatchIndex := submatchall3[0][1] + + query2countParenthesis := query[getSubtreeSpansMatchIndex:] + + sqlCompleteIndex := 0 + countParenthesisImbalance := 0 + for i, char := range query2countParenthesis { + + if string(char) == "(" { + countParenthesisImbalance += 1 + } + if string(char) == ")" { + countParenthesisImbalance -= 1 + } + if countParenthesisImbalance == 0 { + sqlCompleteIndex = i + break + } + } + subtreeInput := query2countParenthesis[1:sqlCompleteIndex] + + // hash the subtreeInput + hmd5 := md5.Sum([]byte(subtreeInput)) + hash = fmt.Sprintf("%x", hmd5) + + // Reformat the query to use the getSubTreeSpans function + query = query[:getSubtreeSpansMatchIndex] + hash + " " + query2countParenthesis[sqlCompleteIndex+1:] + return query, subtreeInput, hash +} + +// getSubTreeAlgorithm is an algorithm to build the subtrees of the spans and return the list of spans +func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) { + + var spans []*model.SpanForTraceDetails + for _, spanItem := range payload { + var parentID string + if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" { + parentID = spanItem.References[0].SpanId + } + span := &model.SpanForTraceDetails{ + TimeUnixNano: spanItem.TimeUnixNano, + SpanID: spanItem.SpanID, + TraceID: spanItem.TraceID, + ServiceName: spanItem.ServiceName, + Name: spanItem.Name, + Kind: spanItem.Kind, + DurationNano: spanItem.DurationNano, + TagMap: spanItem.TagMap, + ParentID: parentID, + Events: spanItem.Events, + HasError: spanItem.HasError, + } + spans = append(spans, span) + } + + zap.S().Debug("Building Tree") + roots, err := buildSpanTrees(&spans) + if err != nil { + return nil, err + } + searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem) + // Every span which was fetched from getSubTree Input SQL query is considered root + // For each root, get the subtree spans + for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses { + targetSpan := &model.SpanForTraceDetails{} + // zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) + // Search target span object in the tree + for _, root := range roots { + targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID) + if targetSpan != nil { + break + } + if err != nil { + zap.S().Error("Error during BreadthFirstSearch(): ", err) + return nil, err + } + } 
+ if targetSpan == nil { + return nil, nil + } + // Build subtree for the target span + // Mark the target span as root by setting parent ID as empty string + targetSpan.ParentID = "" + preParents := []*model.SpanForTraceDetails{targetSpan} + children := []*model.SpanForTraceDetails{} + + // Get the subtree child spans + for i := 0; len(preParents) != 0; i++ { + parents := []*model.SpanForTraceDetails{} + for _, parent := range preParents { + children = append(children, parent.Children...) + parents = append(parents, parent.Children...) + } + preParents = parents + } + + resultSpans := children + // Add the target span to the result spans + resultSpans = append(resultSpans, targetSpan) + + for _, item := range resultSpans { + references := []basemodel.OtelSpanRef{ + { + TraceId: item.TraceID, + SpanId: item.ParentID, + RefType: "CHILD_OF", + }, + } + + if item.Events == nil { + item.Events = []string{} + } + searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{ + TimeUnixNano: item.TimeUnixNano, + SpanID: item.SpanID, + TraceID: item.TraceID, + ServiceName: item.ServiceName, + Name: item.Name, + Kind: item.Kind, + References: references, + DurationNano: item.DurationNano, + TagMap: item.TagMap, + Events: item.Events, + HasError: item.HasError, + RootSpanID: getSpansSubQueryDBResponse.SpanID, + RootName: targetSpan.Name, + } + } + } + return searchSpansResult, nil +} diff --git a/ee/query-service/app/db/reader.go b/ee/query-service/app/db/reader.go index e948ee430b..fc26ec3ce2 100644 --- a/ee/query-service/app/db/reader.go +++ b/ee/query-service/app/db/reader.go @@ -6,6 +6,7 @@ import ( "github.com/jmoiron/sqlx" basechr "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader" + "go.signoz.io/signoz/pkg/query-service/interfaces" ) type ClickhouseReader struct { @@ -14,8 +15,8 @@ type ClickhouseReader struct { *basechr.ClickHouseReader } -func NewDataConnector(localDB *sqlx.DB, promConfigPath string) *ClickhouseReader { - ch := basechr.NewReader(localDB, promConfigPath) +func NewDataConnector(localDB *sqlx.DB, promConfigPath string, lm interfaces.FeatureLookup) *ClickhouseReader { + ch := basechr.NewReader(localDB, promConfigPath, lm) return &ClickhouseReader{ conn: ch.GetConn(), appdb: localDB, diff --git a/ee/query-service/app/db/trace.go b/ee/query-service/app/db/trace.go new file mode 100644 index 0000000000..529a9a93fd --- /dev/null +++ b/ee/query-service/app/db/trace.go @@ -0,0 +1,222 @@ +package db + +import ( + "errors" + "strconv" + + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit +func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) { + var spans []*model.SpanForTraceDetails + + // Build a slice of spans from the payload + for _, spanItem := range payload { + var parentID string + if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" { + parentID = spanItem.References[0].SpanId + } + span := &model.SpanForTraceDetails{ + TimeUnixNano: spanItem.TimeUnixNano, + SpanID: spanItem.SpanID, + TraceID: spanItem.TraceID, + ServiceName: spanItem.ServiceName, + Name: spanItem.Name, + Kind: spanItem.Kind, + DurationNano: spanItem.DurationNano, + TagMap: spanItem.TagMap, + ParentID: 
parentID, + Events: spanItem.Events, + HasError: spanItem.HasError, + } + spans = append(spans, span) + } + + // Build span trees from the spans + roots, err := buildSpanTrees(&spans) + if err != nil { + return nil, err + } + targetSpan := &model.SpanForTraceDetails{} + + // Find the target span in the span trees + for _, root := range roots { + targetSpan, err = breadthFirstSearch(root, targetSpanId) + if targetSpan != nil { + break + } + if err != nil { + zap.S().Error("Error during BreadthFirstSearch(): ", err) + return nil, err + } + } + + // If the target span is not found, return span not found error + if targetSpan == nil { + return nil, errors.New("Span not found") + } + + // Build the final result + parents := []*model.SpanForTraceDetails{} + + // Get the parent spans of the target span up to the given levelUp parameter and spanLimit + preParent := targetSpan + for i := 0; i < levelUp+1; i++ { + if i == levelUp { + preParent.ParentID = "" + } + if spanLimit-len(preParent.Children) <= 0 { + parents = append(parents, preParent) + parents = append(parents, preParent.Children[:spanLimit]...) + spanLimit -= (len(preParent.Children[:spanLimit]) + 1) + preParent.ParentID = "" + break + } + parents = append(parents, preParent) + parents = append(parents, preParent.Children...) + spanLimit -= (len(preParent.Children) + 1) + preParent = preParent.ParentSpan + if preParent == nil { + break + } + } + + // Get the child spans of the target span until the given levelDown and spanLimit + preParents := []*model.SpanForTraceDetails{targetSpan} + children := []*model.SpanForTraceDetails{} + + for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ { + parents := []*model.SpanForTraceDetails{} + for _, parent := range preParents { + if spanLimit-len(parent.Children) <= 0 { + children = append(children, parent.Children[:spanLimit]...) + spanLimit -= len(parent.Children[:spanLimit]) + break + } + children = append(children, parent.Children...) + parents = append(parents, parent.Children...) 
+ } + preParents = parents + } + + // Store the final list of spans in the resultSpanSet map to avoid duplicates + resultSpansSet := make(map[*model.SpanForTraceDetails]struct{}) + resultSpansSet[targetSpan] = struct{}{} + for _, parent := range parents { + resultSpansSet[parent] = struct{}{} + } + for _, child := range children { + resultSpansSet[child] = struct{}{} + } + + searchSpansResult := []basemodel.SearchSpansResult{{ + Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"}, + Events: make([][]interface{}, len(resultSpansSet)), + }, + } + + // Convert the resultSpansSet map to searchSpansResult + i := 0 // index for spans + for item := range resultSpansSet { + references := []basemodel.OtelSpanRef{ + { + TraceId: item.TraceID, + SpanId: item.ParentID, + RefType: "CHILD_OF", + }, + } + + referencesStringArray := []string{} + for _, item := range references { + referencesStringArray = append(referencesStringArray, item.ToString()) + } + keys := make([]string, 0, len(item.TagMap)) + values := make([]string, 0, len(item.TagMap)) + + for k, v := range item.TagMap { + keys = append(keys, k) + values = append(values, v) + } + if item.Events == nil { + item.Events = []string{} + } + searchSpansResult[0].Events[i] = []interface{}{ + item.TimeUnixNano, + item.SpanID, + item.TraceID, + item.ServiceName, + item.Name, + strconv.Itoa(int(item.Kind)), + strconv.FormatInt(item.DurationNano, 10), + keys, + values, + referencesStringArray, + item.Events, + item.HasError, + } + i++ // increment index + } + return searchSpansResult, nil +} + +// buildSpanTrees builds trees of spans from a list of spans. +func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) { + + // Build a map of spanID to span for fast lookup + var roots []*model.SpanForTraceDetails + spans := *spansPtr + mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans)) + + for _, span := range spans { + if span.ParentID == "" { + roots = append(roots, span) + } + mapOfSpans[span.SpanID] = span + } + + // Build the span tree by adding children to the parent spans + for _, span := range spans { + if span.ParentID == "" { + continue + } + parent := mapOfSpans[span.ParentID] + + // If the parent span is not found, add current span to list of roots + if parent == nil { + // zap.S().Debug("Parent Span not found parent_id: ", span.ParentID) + roots = append(roots, span) + span.ParentID = "" + continue + } + + span.ParentSpan = parent + parent.Children = append(parent.Children, span) + } + + return roots, nil +} + +// breadthFirstSearch performs a breadth-first search on the span tree to find the target span. 
+func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) { + queue := []*model.SpanForTraceDetails{spansPtr} + visited := make(map[string]bool) + + for len(queue) > 0 { + current := queue[0] + visited[current.SpanID] = true + queue = queue[1:] + if current.SpanID == targetId { + return current, nil + } + + for _, child := range current.Children { + if ok, _ := visited[child.SpanID]; !ok { + queue = append(queue, child) + } + } + } + return nil, nil +} diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 7002af3f41..501ad96aa9 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -98,7 +98,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { storage := os.Getenv("STORAGE") if storage == "clickhouse" { zap.S().Info("Using ClickHouse as datastore ...") - qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath) + qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath, lm) go qb.Start(readerReady) reader = qb } else { diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go index ba9bb141a5..45fad74da6 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -10,6 +10,8 @@ const ( var LicenseSignozIo = "https://license.signoz.io/api/v1" +var SpanLimitStr = GetOrDefaultEnv("SPAN_LIMIT", "5000") + func GetOrDefaultEnv(key string, fallback string) string { v := os.Getenv(key) if len(v) == 0 { diff --git a/ee/query-service/model/plans.go b/ee/query-service/model/plans.go index e68217730a..c42712f693 100644 --- a/ee/query-service/model/plans.go +++ b/ee/query-service/model/plans.go @@ -17,11 +17,15 @@ var BasicPlan = basemodel.FeatureSet{ } var ProPlan = basemodel.FeatureSet{ - Pro: true, - SSO: true, + Pro: true, + SSO: true, + basemodel.SmartTraceDetail: true, + basemodel.CustomMetricsFunction: true, } var EnterprisePlan = basemodel.FeatureSet{ - Enterprise: true, - SSO: true, + Enterprise: true, + SSO: true, + basemodel.SmartTraceDetail: true, + basemodel.CustomMetricsFunction: true, } diff --git a/ee/query-service/model/trace.go b/ee/query-service/model/trace.go new file mode 100644 index 0000000000..708d6d1c5c --- /dev/null +++ b/ee/query-service/model/trace.go @@ -0,0 +1,22 @@ +package model + +type SpanForTraceDetails struct { + TimeUnixNano uint64 `json:"timestamp"` + SpanID string `json:"spanID"` + TraceID string `json:"traceID"` + ParentID string `json:"parentID"` + ParentSpan *SpanForTraceDetails `json:"parentSpan"` + ServiceName string `json:"serviceName"` + Name string `json:"name"` + Kind int32 `json:"kind"` + DurationNano int64 `json:"durationNano"` + TagMap map[string]string `json:"tagMap"` + Events []string `json:"event"` + HasError bool `json:"hasError"` + Children []*SpanForTraceDetails `json:"children"` +} + +type GetSpansSubQueryDBResponse struct { + SpanID string `ch:"spanID"` + TraceID string `ch:"traceID"` +} diff --git a/frontend/i18-generate-hash.js b/frontend/i18-generate-hash.js index 97476e40af..cbc03f9025 100644 --- a/frontend/i18-generate-hash.js +++ b/frontend/i18-generate-hash.js @@ -1,24 +1,20 @@ -/* eslint-disable */ -// @ts-ignore -// @ts-nocheck - const crypto = require('crypto'); const fs = require('fs'); const glob = require('glob'); function generateChecksum(str, algorithm, encoding) { - return crypto - .createHash(algorithm || 'md5') - .update(str, 'utf8') - .digest(encoding || 'hex'); + return crypto + .createHash(algorithm || 
'md5') + .update(str, 'utf8') + .digest(encoding || 'hex'); } const result = {}; -glob.sync(`public/locales/**/*.json`).forEach(path => { - const [_, lang] = path.split('public/locales'); - const content = fs.readFileSync(path, { encoding: 'utf-8' }); - result[lang.replace('.json', '')] = generateChecksum(content); +glob.sync(`public/locales/**/*.json`).forEach((path) => { + const [_, lang] = path.split('public/locales'); + const content = fs.readFileSync(path, { encoding: 'utf-8' }); + result[lang.replace('.json', '')] = generateChecksum(content); }); fs.writeFileSync('./i18n-translations-hash.json', JSON.stringify(result)); diff --git a/frontend/public/locales/en-GB/alerts.json b/frontend/public/locales/en-GB/alerts.json index cae309fd45..b5c769a021 100644 --- a/frontend/public/locales/en-GB/alerts.json +++ b/frontend/public/locales/en-GB/alerts.json @@ -28,6 +28,7 @@ "condition_required": "at least one metric condition is required", "alertname_required": "alert name is required", "promql_required": "promql expression is required when query format is set to PromQL", + "chquery_required": "query is required when query format is set to ClickHouse", "button_savechanges": "Save Rule", "button_createrule": "Create Rule", "button_returntorules": "Return to rules", @@ -55,6 +56,7 @@ "button_formula": "Formula", "tab_qb": "Query Builder", "tab_promql": "PromQL", + "tab_chquery": "ClickHouse Query", "title_confirm": "Confirm", "button_ok": "Yes", "button_cancel": "No", @@ -88,5 +90,23 @@ "user_guide_pql_step3": "Step 3 -Alert Configuration", "user_guide_pql_step3a": "Set alert severity, name and descriptions", "user_guide_pql_step3b": "Add tags to the alert in the Label field if needed", - "user_tooltip_more_help": "More details on how to create alerts" + "user_guide_ch_step1": "Step 1 - Define the metric", + "user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial to learn about query format and supported vars.", + "user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart", + "user_guide_ch_step2": "Step 2 - Define Alert Conditions", + "user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_ch_step2b": "Enter the Alert threshold", + "user_guide_ch_step3": "Step 3 -Alert Configuration", + "user_guide_ch_step3a": "Set alert severity, name and descriptions", + "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts", + "choose_alert_type": "Choose a type for the alert:", + "metric_based_alert": "Metric based Alert", + "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data", + "log_based_alert": "Log-based Alert", + "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.", + "traces_based_alert": "Trace-based Alert", + "traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.", + "exceptions_based_alert": "Exceptions-based Alert", + "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data." 
} \ No newline at end of file diff --git a/frontend/public/locales/en-GB/licenses.json b/frontend/public/locales/en-GB/licenses.json index 5d46685f9d..ed7c3eea47 100644 --- a/frontend/public/locales/en-GB/licenses.json +++ b/frontend/public/locales/en-GB/licenses.json @@ -9,5 +9,5 @@ "tab_license_history": "History", "loading_licenses": "Loading licenses...", "enter_license_key": "Please enter a license key", - "license_applied": "License applied successfully, please refresh the page to see changes." -} \ No newline at end of file + "license_applied": "License applied successfully" +} diff --git a/frontend/public/locales/en/alerts.json b/frontend/public/locales/en/alerts.json index cae309fd45..b5c769a021 100644 --- a/frontend/public/locales/en/alerts.json +++ b/frontend/public/locales/en/alerts.json @@ -28,6 +28,7 @@ "condition_required": "at least one metric condition is required", "alertname_required": "alert name is required", "promql_required": "promql expression is required when query format is set to PromQL", + "chquery_required": "query is required when query format is set to ClickHouse", "button_savechanges": "Save Rule", "button_createrule": "Create Rule", "button_returntorules": "Return to rules", @@ -55,6 +56,7 @@ "button_formula": "Formula", "tab_qb": "Query Builder", "tab_promql": "PromQL", + "tab_chquery": "ClickHouse Query", "title_confirm": "Confirm", "button_ok": "Yes", "button_cancel": "No", @@ -88,5 +90,23 @@ "user_guide_pql_step3": "Step 3 -Alert Configuration", "user_guide_pql_step3a": "Set alert severity, name and descriptions", "user_guide_pql_step3b": "Add tags to the alert in the Label field if needed", - "user_tooltip_more_help": "More details on how to create alerts" + "user_guide_ch_step1": "Step 1 - Define the metric", + "user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial to learn about query format and supported vars.", + "user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart", + "user_guide_ch_step2": "Step 2 - Define Alert Conditions", + "user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_ch_step2b": "Enter the Alert threshold", + "user_guide_ch_step3": "Step 3 -Alert Configuration", + "user_guide_ch_step3a": "Set alert severity, name and descriptions", + "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts", + "choose_alert_type": "Choose a type for the alert:", + "metric_based_alert": "Metric based Alert", + "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data", + "log_based_alert": "Log-based Alert", + "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.", + "traces_based_alert": "Trace-based Alert", + "traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.", + "exceptions_based_alert": "Exceptions-based Alert", + "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data." 
} \ No newline at end of file diff --git a/frontend/public/locales/en/licenses.json b/frontend/public/locales/en/licenses.json index 5d46685f9d..ed7c3eea47 100644 --- a/frontend/public/locales/en/licenses.json +++ b/frontend/public/locales/en/licenses.json @@ -9,5 +9,5 @@ "tab_license_history": "History", "loading_licenses": "Loading licenses...", "enter_license_key": "Please enter a license key", - "license_applied": "License applied successfully, please refresh the page to see changes." -} \ No newline at end of file + "license_applied": "License applied successfully" +} diff --git a/frontend/src/api/trace/getTraceItem.ts b/frontend/src/api/trace/getTraceItem.ts index bf93269669..054c809b33 100644 --- a/frontend/src/api/trace/getTraceItem.ts +++ b/frontend/src/api/trace/getTraceItem.ts @@ -1,15 +1,20 @@ import axios from 'api'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { AxiosError } from 'axios'; +import { formUrlParams } from 'container/TraceDetail/utils'; import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/trace/getTraceItem'; +import { GetTraceItemProps, PayloadProps } from 'types/api/trace/getTraceItem'; const getTraceItem = async ( - props: Props, + props: GetTraceItemProps, ): Promise | ErrorResponse> => { try { const response = await axios.request({ - url: `/traces/${props.id}`, + url: `/traces/${props.id}${formUrlParams({ + spanId: props.spanId, + levelUp: props.levelUp, + levelDown: props.levelDown, + })}`, method: 'get', }); diff --git a/frontend/src/components/Graph/xAxisConfig.ts b/frontend/src/components/Graph/xAxisConfig.ts index d14d9bba09..ee794b6678 100644 --- a/frontend/src/components/Graph/xAxisConfig.ts +++ b/frontend/src/components/Graph/xAxisConfig.ts @@ -4,9 +4,6 @@ import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; import { GlobalReducer } from 'types/reducer/globalTime'; -interface ITimeUnit { - [key: string]: TimeUnit; -} interface IAxisTimeUintConfig { unitName: TimeUnit; multiplier: number; @@ -22,7 +19,7 @@ export interface ITimeRange { maxTime: number | null; } -export const TIME_UNITS: ITimeUnit = { +export const TIME_UNITS: Record = { millisecond: 'millisecond', second: 'second', minute: 'minute', @@ -31,6 +28,7 @@ export const TIME_UNITS: ITimeUnit = { week: 'week', month: 'month', year: 'year', + quarter: 'quarter', }; const TIME_UNITS_CONFIG: IAxisTimeUintConfig[] = [ @@ -93,6 +91,7 @@ export const convertTimeRange = ( } catch (error) { console.error(error); } + return { unitName: relevantTimeUnit.unitName, stepSize: Math.floor(stepSize) || 1, diff --git a/frontend/src/components/ValueGraph/styles.ts b/frontend/src/components/ValueGraph/styles.ts index af9f9c7ad0..74a412fec3 100644 --- a/frontend/src/components/ValueGraph/styles.ts +++ b/frontend/src/components/ValueGraph/styles.ts @@ -2,5 +2,6 @@ import { Typography } from 'antd'; import styled from 'styled-components'; export const Value = styled(Typography)` - font-size: 3rem; + font-size: 2.5vw; + text-align: center; `; diff --git a/frontend/src/container/AllError/index.tsx b/frontend/src/container/AllError/index.tsx index 253af7dfe1..6db945b9bc 100644 --- a/frontend/src/container/AllError/index.tsx +++ b/frontend/src/container/AllError/index.tsx @@ -1,12 +1,25 @@ -import { notification, Table, TableProps, Tooltip, Typography } from 'antd'; +import { SearchOutlined } from '@ant-design/icons'; +import { + Button, + Card, + Input, + notification, + Space, + Table, + TableProps, + 
Tooltip, + Typography, +} from 'antd'; +import { ColumnType } from 'antd/es/table'; import { ColumnsType } from 'antd/lib/table'; +import { FilterConfirmProps } from 'antd/lib/table/interface'; import getAll from 'api/errors/getAll'; import getErrorCounts from 'api/errors/getErrorCounts'; import ROUTES from 'constants/routes'; import dayjs from 'dayjs'; import createQueryParams from 'lib/createQueryParams'; import history from 'lib/history'; -import React, { useEffect, useMemo } from 'react'; +import React, { useCallback, useEffect, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import { useQueries } from 'react-query'; import { useSelector } from 'react-redux'; @@ -93,11 +106,87 @@ function AllErrors(): JSX.Element { {dayjs(value).format('DD/MM/YYYY HH:mm:ss A')} ); + const filterIcon = useCallback(() => , []); + + const handleSearch = ( + confirm: (param?: FilterConfirmProps) => void, + ): VoidFunction => (): void => { + confirm(); + }; + + const filterDropdownWrapper = useCallback( + ({ setSelectedKeys, selectedKeys, confirm, placeholder }) => { + return ( + + + + setSelectedKeys(e.target.value ? [e.target.value] : []) + } + allowClear + onPressEnter={handleSearch(confirm)} + /> + + + + ); + }, + [], + ); + + const onExceptionTypeFilter = useCallback( + (value, record: Exception): boolean => { + if (record.exceptionType && typeof value === 'string') { + return record.exceptionType.toLowerCase().includes(value.toLowerCase()); + } + return false; + }, + [], + ); + + const onApplicationTypeFilter = useCallback( + (value, record: Exception): boolean => { + if (record.serviceName && typeof value === 'string') { + return record.serviceName.toLowerCase().includes(value.toLowerCase()); + } + return false; + }, + [], + ); + + const getFilter = useCallback( + ( + onFilter: ColumnType['onFilter'], + placeholder: string, + ): ColumnType => ({ + onFilter, + filterIcon, + filterDropdown: ({ confirm, selectedKeys, setSelectedKeys }): JSX.Element => + filterDropdownWrapper({ + setSelectedKeys, + selectedKeys, + confirm, + placeholder, + }), + }), + [filterIcon, filterDropdownWrapper], + ); + const columns: ColumnsType = [ { title: 'Exception Type', dataIndex: 'exceptionType', key: 'exceptionType', + ...getFilter(onExceptionTypeFilter, 'Search By Exception'), render: (value, record): JSX.Element => ( value}> { return ( Math.floor(new Date(date).getTime() / 1e3).toString() + - Timestamp.fromString(date).getNano().toString() + String(Timestamp.fromString(date).getNano().toString()).padStart(9, '0') ); }; diff --git a/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx b/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx new file mode 100644 index 0000000000..cc2da48727 --- /dev/null +++ b/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx @@ -0,0 +1,68 @@ +import { Row } from 'antd'; +import React from 'react'; +import { useTranslation } from 'react-i18next'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +import { AlertTypeCard, SelectTypeContainer } from './styles'; + +interface OptionType { + title: string; + selection: AlertTypes; + description: string; +} + +function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element { + const { t } = useTranslation(['alerts']); + + const renderOptions = (): JSX.Element => { + const optionList: OptionType[] = [ + { + title: t('metric_based_alert'), + selection: AlertTypes.METRICS_BASED_ALERT, + description: t('metric_based_alert_desc'), + }, + { + title: 
t('log_based_alert'), + selection: AlertTypes.LOGS_BASED_ALERT, + description: t('log_based_alert_desc'), + }, + { + title: t('traces_based_alert'), + selection: AlertTypes.TRACES_BASED_ALERT, + description: t('traces_based_alert_desc'), + }, + { + title: t('exceptions_based_alert'), + selection: AlertTypes.EXCEPTIONS_BASED_ALERT, + description: t('exceptions_based_alert_desc'), + }, + ]; + return ( + <> + {optionList.map((o: OptionType) => ( + { + onSelect(o.selection); + }} + > + {o.description} + + ))} + + ); + }; + return ( + +

+		<SelectTypeContainer>
+			{t('choose_alert_type')}
+			<Row>
+				{renderOptions()}
+			</Row>
+		</SelectTypeContainer>
+ ); +} + +interface SelectAlertTypeProps { + onSelect: (typ: AlertTypes) => void; +} + +export default SelectAlertType; diff --git a/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts b/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts new file mode 100644 index 0000000000..9c3323aea3 --- /dev/null +++ b/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts @@ -0,0 +1,16 @@ +import { Card } from 'antd'; +import styled from 'styled-components'; + +export const SelectTypeContainer = styled.div` + &&& { + padding: 1rem; + } +`; + +export const AlertTypeCard = styled(Card)` + &&& { + margin: 5px; + width: 21rem; + cursor: pointer; + } +`; diff --git a/frontend/src/container/CreateAlertRule/defaults.ts b/frontend/src/container/CreateAlertRule/defaults.ts new file mode 100644 index 0000000000..645d1ff454 --- /dev/null +++ b/frontend/src/container/CreateAlertRule/defaults.ts @@ -0,0 +1,186 @@ +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { + AlertDef, + defaultCompareOp, + defaultEvalWindow, + defaultMatchType, +} from 'types/api/alerts/def'; + +export const alertDefaults: AlertDef = { + alertType: AlertTypes.METRICS_BASED_ALERT, + condition: { + compositeMetricQuery: { + builderQueries: { + A: { + queryName: 'A', + name: 'A', + formulaOnly: false, + metricName: '', + tagFilters: { + op: 'AND', + items: [], + }, + groupBy: [], + aggregateOperator: 1, + expression: 'A', + disabled: false, + toggleDisable: false, + toggleDelete: false, + }, + }, + promQueries: {}, + chQueries: {}, + queryType: 1, + }, + op: defaultCompareOp, + matchType: defaultMatchType, + }, + labels: { + severity: 'warning', + }, + annotations: { + description: 'A new alert', + }, + evalWindow: defaultEvalWindow, +}; + +export const logAlertDefaults: AlertDef = { + alertType: AlertTypes.LOGS_BASED_ALERT, + condition: { + compositeMetricQuery: { + builderQueries: { + A: { + queryName: 'A', + name: 'A', + formulaOnly: false, + metricName: '', + tagFilters: { + op: 'AND', + items: [], + }, + groupBy: [], + aggregateOperator: 1, + expression: 'A', + disabled: false, + toggleDisable: false, + toggleDelete: false, + }, + }, + promQueries: {}, + chQueries: { + A: { + name: 'A', + query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`, + rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: 2, + }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + details: `${window.location.protocol}//${window.location.host}/logs`, + }, + annotations: { + description: 'A new log-based alert', + }, + evalWindow: defaultEvalWindow, +}; + +export const traceAlertDefaults: AlertDef = { + alertType: AlertTypes.TRACES_BASED_ALERT, + condition: { + compositeMetricQuery: { + builderQueries: { + A: { + 
queryName: 'A', + name: 'A', + formulaOnly: false, + metricName: '', + tagFilters: { + op: 'AND', + items: [], + }, + groupBy: [], + aggregateOperator: 1, + expression: 'A', + disabled: false, + toggleDisable: false, + toggleDelete: false, + }, + }, + promQueries: {}, + chQueries: { + A: { + name: 'A', + rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: 2, + }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + details: `${window.location.protocol}//${window.location.host}/traces`, + }, + annotations: { + description: 'A new trace-based alert', + }, + evalWindow: defaultEvalWindow, +}; + +export const exceptionAlertDefaults: AlertDef = { + alertType: AlertTypes.EXCEPTIONS_BASED_ALERT, + condition: { + compositeMetricQuery: { + builderQueries: { + A: { + queryName: 'A', + name: 'A', + formulaOnly: false, + metricName: '', + tagFilters: { + op: 'AND', + items: [], + }, + groupBy: [], + aggregateOperator: 1, + expression: 'A', + disabled: false, + toggleDisable: false, + toggleDelete: false, + }, + }, + promQueries: {}, + chQueries: { + A: { + name: 'A', + rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: 2, + }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + details: `${window.location.protocol}//${window.location.host}/exceptions`, + }, + annotations: { + description: 'A new exceptions-based alert', + }, + evalWindow: defaultEvalWindow, +}; diff --git a/frontend/src/container/CreateAlertRule/index.tsx b/frontend/src/container/CreateAlertRule/index.tsx index f527fbbdf1..ae3d21897e 100644 --- a/frontend/src/container/CreateAlertRule/index.tsx +++ b/frontend/src/container/CreateAlertRule/index.tsx @@ -1,22 +1,57 @@ -import { Form } from 'antd'; +import { 
Form, Row } from 'antd'; import FormAlertRules from 'container/FormAlertRules'; -import React from 'react'; -import { AlertDef } from 'types/api/alerts/def'; +import React, { useState } from 'react'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; -function CreateRules({ initialValue }: CreateRulesProps): JSX.Element { +import { + alertDefaults, + exceptionAlertDefaults, + logAlertDefaults, + traceAlertDefaults, +} from './defaults'; +import SelectAlertType from './SelectAlertType'; + +function CreateRules(): JSX.Element { + const [initValues, setInitValues] = useState(alertDefaults); + const [step, setStep] = useState(0); + const [alertType, setAlertType] = useState( + AlertTypes.METRICS_BASED_ALERT, + ); const [formInstance] = Form.useForm(); + const onSelectType = (typ: AlertTypes): void => { + setAlertType(typ); + switch (typ) { + case AlertTypes.LOGS_BASED_ALERT: + setInitValues(logAlertDefaults); + break; + case AlertTypes.TRACES_BASED_ALERT: + setInitValues(traceAlertDefaults); + break; + case AlertTypes.EXCEPTIONS_BASED_ALERT: + setInitValues(exceptionAlertDefaults); + break; + default: + setInitValues(alertDefaults); + } + setStep(1); + }; + + if (step === 0) { + return ( + + + + ); + } return ( ); } -interface CreateRulesProps { - initialValue: AlertDef; -} - export default CreateRules; diff --git a/frontend/src/container/EditRules/index.tsx b/frontend/src/container/EditRules/index.tsx index cf4a02e717..89c9e66410 100644 --- a/frontend/src/container/EditRules/index.tsx +++ b/frontend/src/container/EditRules/index.tsx @@ -1,6 +1,7 @@ import { Form } from 'antd'; import FormAlertRules from 'container/FormAlertRules'; import React from 'react'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; import { AlertDef } from 'types/api/alerts/def'; function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element { @@ -8,6 +9,11 @@ function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element { return ( { + const chQuery = rawQueryToIChQuery( + chQueries.A, + rawQuery, + legend, + toggleDelete, + ); + + setChQueries({ + A: { + ...chQuery, + }, + }); + }; + + return ( + + ); +} + +interface ChQuerySectionProps { + chQueries: IChQueries; + setChQueries: (q: IChQueries) => void; +} + +export default ChQuerySection; diff --git a/frontend/src/container/FormAlertRules/ChQuerySection/index.ts b/frontend/src/container/FormAlertRules/ChQuerySection/index.ts new file mode 100644 index 0000000000..a28e8594db --- /dev/null +++ b/frontend/src/container/FormAlertRules/ChQuerySection/index.ts @@ -0,0 +1,3 @@ +import ChQuerySection from './ChQuerySection'; + +export default ChQuerySection; diff --git a/frontend/src/container/FormAlertRules/ChQuerySection/transform.ts b/frontend/src/container/FormAlertRules/ChQuerySection/transform.ts new file mode 100644 index 0000000000..5f5198ec81 --- /dev/null +++ b/frontend/src/container/FormAlertRules/ChQuerySection/transform.ts @@ -0,0 +1,37 @@ +import { IChQuery } from 'types/api/alerts/compositeQuery'; +import { IClickHouseQuery } from 'types/api/dashboard/getAll'; + +// @description rawQueryToIChQuery transforms raw query (from ClickHouseQueryBuilder) +// to alert specific IChQuery format +export const rawQueryToIChQuery = ( + src: IChQuery, + rawQuery: string | undefined, + legend: string | undefined, + toggleDelete: boolean | undefined, +): IChQuery => { + if (toggleDelete) { + return { + rawQuery: '', + legend: '', + name: 'A', + disabled: false, + query: '', + }; + } + + return { + rawQuery: rawQuery !== undefined ? 
rawQuery : src.rawQuery, + query: rawQuery !== undefined ? rawQuery : src.rawQuery, + legend: legend !== undefined ? legend : src.legend, + name: 'A', + disabled: false, + }; +}; + +// @description toIClickHouseQuery transforms IChQuery (alert specific) to +// ClickHouseQueryBuilder format. The main difference is +// use of rawQuery (in ClickHouseQueryBuilder) +// and query (in alert builder) +export const toIClickHouseQuery = (src: IChQuery): IClickHouseQuery => { + return { ...src, name: 'A', rawQuery: src.query }; +}; diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index 6243c1d4d4..e7ee323dce 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -1,11 +1,12 @@ import { InfoCircleOutlined } from '@ant-design/icons'; import { StaticLineProps } from 'components/Graph'; +import Spinner from 'components/Spinner'; import GridGraphComponent from 'container/GridGraphComponent'; import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; import getChartData from 'lib/getChartData'; -import React from 'react'; +import React, { useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; import { GetMetricQueryRange } from 'store/actions/dashboard/getQueryResults'; @@ -22,6 +23,10 @@ export interface ChartPreviewProps { selectedInterval?: Time; headline?: JSX.Element; threshold?: number | undefined; + userQueryKey?: string; +} +interface QueryResponseError { + message?: string; } function ChartPreview({ @@ -32,6 +37,7 @@ function ChartPreview({ selectedInterval = '5min', headline, threshold, + userQueryKey, }: ChartPreviewProps): JSX.Element | null { const { t } = useTranslation('alerts'); const staticLine: StaticLineProps | undefined = @@ -46,9 +52,34 @@ function ChartPreview({ } : undefined; - const queryKey = JSON.stringify(query); + const canQuery = useMemo((): boolean => { + if (!query || query == null) { + return false; + } + + switch (query?.queryType) { + case EQueryType.PROM: + return query.promQL?.length > 0 && query.promQL[0].query !== ''; + case EQueryType.CLICKHOUSE: + return ( + query.clickHouse?.length > 0 && query.clickHouse[0].rawQuery?.length > 0 + ); + case EQueryType.QUERY_BUILDER: + return ( + query.metricsBuilder?.queryBuilder?.length > 0 && + query.metricsBuilder?.queryBuilder[0].metricName !== '' + ); + default: + return false; + } + }, [query]); + const queryResponse = useQuery({ - queryKey: ['chartPreview', queryKey, selectedInterval], + queryKey: [ + 'chartPreview', + userQueryKey || JSON.stringify(query), + selectedInterval, + ], queryFn: () => GetMetricQueryRange({ query: query || { @@ -64,14 +95,8 @@ function ChartPreview({ graphType, selectedTime, }), - enabled: - query != null && - ((query.queryType === EQueryType.PROM && - query.promQL?.length > 0 && - query.promQL[0].query !== '') || - (query.queryType === EQueryType.QUERY_BUILDER && - query.metricsBuilder?.queryBuilder?.length > 0 && - query.metricsBuilder?.queryBuilder[0].metricName !== '')), + retry: false, + enabled: canQuery, }); const chartDataSet = queryResponse.isError @@ -89,15 +114,14 @@ function ChartPreview({ return ( {headline} - {(queryResponse?.data?.error || queryResponse?.isError) && ( + {(queryResponse?.isError 
|| queryResponse?.error) && ( {' '} - {queryResponse?.data?.error || - queryResponse?.error || + {(queryResponse?.error as QueryResponseError).message || t('preview_chart_unexpected_error')} )} - + {queryResponse.isLoading && } {chartDataSet && !queryResponse.isError && ( { + return ; + }; + const renderFormulaButton = (): JSX.Element => { return ( }> @@ -258,23 +283,85 @@ function QuerySection({ ); }; - return ( - <> - {t('alert_form_step1')} - -
+ + const handleRunQuery = (): void => { + runQuery(); + }; + + const renderTabs = (typ: AlertTypes): JSX.Element | null => { + switch (typ) { + case AlertTypes.TRACES_BASED_ALERT: + case AlertTypes.LOGS_BASED_ALERT: + case AlertTypes.EXCEPTIONS_BASED_ALERT: + return ( + + {queryCategory === EQueryType.CLICKHOUSE && ( + + )} + + } + > + + + + ); + case AlertTypes.METRICS_BASED_ALERT: + default: + return ( + {queryCategory === EQueryType.CLICKHOUSE && ( + + )} + + } > + -
- {queryCategory === EQueryType.PROM ? renderPromqlUI() : renderMetricUI()} + ); + } + }; + const renderQuerySection = (c: EQueryType): JSX.Element | null => { + switch (c) { + case EQueryType.PROM: + return renderPromqlUI(); + case EQueryType.CLICKHOUSE: + return renderChQueryUI(); + case EQueryType.QUERY_BUILDER: + return renderMetricUI(); + default: + return null; + } + }; + return ( + <> + {t('alert_form_step1')} + +
{renderTabs(alertType)}
+ {renderQuerySection(queryCategory)}
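For reviewers, a minimal TypeScript sketch of the manual-run pattern the ClickHouse tab relies on (wired up in FormAlertRules further down in this patch): clicking Run Query copies the currently staged query into a manually staged copy and regenerates a random id that is mixed into the react-query key, so each run bypasses the cached preview. The hook name useManualRun is illustrative and not part of the patch.

import { useState } from 'react';

// Illustrative hook; mirrors the runQueryId / manualStagedQuery state in
// FormAlertRules below rather than any exported SigNoz API.
function useManualRun<T>(stagedQuery: T | undefined): {
	runQueryId: string | undefined;
	manualStagedQuery: T | undefined;
	onRunQuery: () => void;
} {
	const [runQueryId, setRunQueryId] = useState<string | undefined>();
	const [manualStagedQuery, setManualStagedQuery] = useState<T | undefined>();

	const onRunQuery = (): void => {
		// same scheme as the patch: a short random string per execution,
		// used as part of the chart preview's query key
		setRunQueryId(Math.random().toString(36).substring(2, 15));
		setManualStagedQuery(stagedQuery);
	};

	return { runQueryId, manualStagedQuery, onRunQuery };
}

A consumer would pass runQueryId as userQueryKey to ChartPreview, which is what renderChQueryChartPreview does later in this diff.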
); @@ -289,6 +376,10 @@ interface QuerySectionProps { setFormulaQueries: (b: IFormulaQueries) => void; promQueries: IPromQueries; setPromQueries: (p: IPromQueries) => void; + chQueries: IChQueries; + setChQueries: (q: IChQueries) => void; + alertType: AlertTypes; + runQuery: () => void; } export default QuerySection; diff --git a/frontend/src/container/FormAlertRules/UserGuide/index.tsx b/frontend/src/container/FormAlertRules/UserGuide/index.tsx index 1cf5dac163..d24ac82cb2 100644 --- a/frontend/src/container/FormAlertRules/UserGuide/index.tsx +++ b/frontend/src/container/FormAlertRules/UserGuide/index.tsx @@ -1,7 +1,7 @@ import { Col, Row, Typography } from 'antd'; import TextToolTip from 'components/TextToolTip'; import React from 'react'; -import { useTranslation } from 'react-i18next'; +import { Trans, useTranslation } from 'react-i18next'; import { EQueryType } from 'types/common/dashboard'; import { @@ -106,6 +106,63 @@ function UserGuide({ queryType }: UserGuideProps): JSX.Element { ); }; + const renderStep1CH = (): JSX.Element => { + return ( + <> + {t('user_guide_ch_step1')} + + + , + ]} + /> + + {t('user_guide_ch_step1b')} + + + ); + }; + const renderStep2CH = (): JSX.Element => { + return ( + <> + {t('user_guide_ch_step2')} + + {t('user_guide_ch_step2a')} + {t('user_guide_ch_step2b')} + + + ); + }; + + const renderStep3CH = (): JSX.Element => { + return ( + <> + {t('user_guide_ch_step3')} + + {t('user_guide_ch_step3a')} + {t('user_guide_ch_step3b')} + + + ); + }; + + const renderGuideForCH = (): JSX.Element => { + return ( + <> + {renderStep1CH()} + {renderStep2CH()} + {renderStep3CH()} + + ); + }; return ( @@ -121,6 +178,7 @@ function UserGuide({ queryType }: UserGuideProps): JSX.Element { {queryType === EQueryType.QUERY_BUILDER && renderGuideForQB()} {queryType === EQueryType.PROM && renderGuideForPQL()} + {queryType === EQueryType.CLICKHOUSE && renderGuideForCH()} ); } diff --git a/frontend/src/container/FormAlertRules/index.tsx b/frontend/src/container/FormAlertRules/index.tsx index 022e913f8e..9f91aed088 100644 --- a/frontend/src/container/FormAlertRules/index.tsx +++ b/frontend/src/container/FormAlertRules/index.tsx @@ -9,7 +9,9 @@ import history from 'lib/history'; import React, { useCallback, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useQueryClient } from 'react-query'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; import { + IChQueries, IFormulaQueries, IMetricQueries, IPromQueries, @@ -45,6 +47,7 @@ import { } from './utils'; function FormAlertRules({ + alertType, formInstance, initialValue, ruleId, @@ -57,6 +60,10 @@ function FormAlertRules({ const [loading, setLoading] = useState(false); + // queryRunId helps to override of query caching for clickhouse query + // tab. A random string will be assigned for each execution + const [runQueryId, setRunQueryId] = useState(); + // alertDef holds the form values to be posted const [alertDef, setAlertDef] = useState(initialValue); @@ -82,9 +89,31 @@ function FormAlertRules({ ...initQuery?.promQueries, }); - // staged query is used to display chart preview + // local state to handle promql queries + const [chQueries, setChQueries] = useState({ + ...initQuery?.chQueries, + }); + + // staged query is used to display chart preview. the query gets + // auto refreshed when any of the params in query section change. 
+ // though this is the source of chart data, the final query used + // by chart will be either debouncedStagedQuery or manualStagedQuery + // depending on the run option (auto-run or use of run query button) const [stagedQuery, setStagedQuery] = useState(); - const debouncedStagedQuery = useDebounce(stagedQuery, 1000); + + // manualStagedQuery requires manual staging of query + // when user clicks run query button. Useful for clickhouse tab where + // run query button is provided. + const [manualStagedQuery, setManualStagedQuery] = useState(); + + // delay to reduce load on backend api with auto-run query. only for clickhouse + // queries we have manual run, hence both debounce and debounceStagedQuery are not required + const debounceDelay = queryCategory !== EQueryType.CLICKHOUSE ? 1000 : 0; + + // debounce query to delay backend api call and chart update. + // used in query builder and promql tabs to enable auto-refresh + // of chart on user edit + const debouncedStagedQuery = useDebounce(stagedQuery, debounceDelay); // this use effect initiates staged query and // other queries based on server data. @@ -101,14 +130,26 @@ function FormAlertRules({ const fq = toFormulaQueries(initQuery?.builderQueries); // prepare staged query - const sq = prepareStagedQuery(typ, mq, fq, initQuery?.promQueries); + const sq = prepareStagedQuery( + typ, + mq, + fq, + initQuery?.promQueries, + initQuery?.chQueries, + ); const pq = initQuery?.promQueries; + const chq = initQuery?.chQueries; setQueryCategory(typ); setMetricQueries(mq); setFormulaQueries(fq); setPromQueries(pq); setStagedQuery(sq); + + // also set manually staged query + setManualStagedQuery(sq); + + setChQueries(chq); setAlertDef(initialValue); }, [initialValue]); @@ -121,9 +162,15 @@ function FormAlertRules({ metricQueries, formulaQueries, promQueries, + chQueries, ); setStagedQuery(sq); - }, [queryCategory, metricQueries, formulaQueries, promQueries]); + }, [queryCategory, chQueries, metricQueries, formulaQueries, promQueries]); + + const onRunQuery = (): void => { + setRunQueryId(Math.random().toString(36).substring(2, 15)); + setManualStagedQuery(stagedQuery); + }; const onCancelHandler = useCallback(() => { history.replace(ROUTES.LIST_ALL_ALERT); @@ -169,6 +216,31 @@ function FormAlertRules({ return retval; }, [t, promQueries, queryCategory]); + const validateChQueryParams = useCallback((): boolean => { + let retval = true; + if (queryCategory !== EQueryType.CLICKHOUSE) return retval; + + if (!chQueries || Object.keys(chQueries).length === 0) { + notification.error({ + message: 'Error', + description: t('chquery_required'), + }); + return false; + } + + Object.keys(chQueries).forEach((key) => { + if (chQueries[key].rawQuery === '') { + notification.error({ + message: 'Error', + description: t('chquery_required'), + }); + retval = false; + } + }); + + return retval; + }, [t, chQueries, queryCategory]); + const validateQBParams = useCallback((): boolean => { let retval = true; if (queryCategory !== EQueryType.QUERY_BUILDER) return true; @@ -224,12 +296,17 @@ function FormAlertRules({ return false; } + if (!validateChQueryParams()) { + return false; + } + return validateQBParams(); - }, [t, validateQBParams, alertDef, validatePromParams]); + }, [t, validateQBParams, validateChQueryParams, alertDef, validatePromParams]); const preparePostData = (): AlertDef => { const postableAlert: AlertDef = { ...alertDef, + alertType, source: window?.location.toString(), ruleType: queryCategory === EQueryType.PROM ? 
'promql_rule' : 'threshold_rule', @@ -238,6 +315,7 @@ function FormAlertRules({ compositeMetricQuery: { builderQueries: prepareBuilderQueries(metricQueries, formulaQueries), promQueries, + chQueries, queryType: queryCategory, }, }, @@ -251,6 +329,8 @@ function FormAlertRules({ metricQueries, formulaQueries, promQueries, + chQueries, + alertType, ]); const saveRule = useCallback(async () => { @@ -380,6 +460,18 @@ function FormAlertRules({ ); }; + const renderChQueryChartPreview = (): JSX.Element => { + return ( + } + name="Chart Preview" + threshold={alertDef.condition?.target} + query={manualStagedQuery} + userQueryKey={runQueryId} + selectedInterval={toChartInterval(alertDef.evalWindow)} + /> + ); + }; return ( <> {Element} @@ -392,6 +484,7 @@ function FormAlertRules({ > {queryCategory === EQueryType.QUERY_BUILDER && renderQBChartPreview()} {queryCategory === EQueryType.PROM && renderPromChartPreview()} + {queryCategory === EQueryType.CLICKHOUSE && renderChQueryChartPreview()} { const qbList: IMetricQuery[] = []; const formulaList: IFormulaQuery[] = []; const promList: IPromQuery[] = []; + const chQueryList: IChQuery[] = []; // convert map[string]IMetricQuery to IMetricQuery[] if (m) { @@ -101,6 +105,12 @@ export const prepareStagedQuery = ( promList.push({ ...p[key], name: key }); }); } + // convert map[string]IChQuery to IChQuery[] + if (c) { + Object.keys(c).forEach((key) => { + chQueryList.push({ ...c[key], name: key, rawQuery: c[key].query }); + }); + } return { queryType: t, @@ -109,7 +119,7 @@ export const prepareStagedQuery = ( formulas: formulaList, queryBuilder: qbList, }, - clickHouse: [], + clickHouse: chQueryList, }; }; @@ -125,7 +135,7 @@ export const toChartInterval = (evalWindow: string | undefined): Time => { case '30m0s': return '30min'; case '60m0s': - return '30min'; + return '1hr'; case '4h0m0s': return '4hr'; case '24h0m0s': diff --git a/frontend/src/container/Licenses/ApplyLicenseForm.tsx b/frontend/src/container/Licenses/ApplyLicenseForm.tsx index 898ae6d78c..858bf38d54 100644 --- a/frontend/src/container/Licenses/ApplyLicenseForm.tsx +++ b/frontend/src/container/Licenses/ApplyLicenseForm.tsx @@ -1,15 +1,30 @@ import { Button, Input, notification } from 'antd'; import FormItem from 'antd/lib/form/FormItem'; +import getFeaturesFlags from 'api/features/getFeatureFlags'; import apply from 'api/licenses/apply'; import React, { useState } from 'react'; import { useTranslation } from 'react-i18next'; +import { QueryObserverResult, RefetchOptions, useQuery } from 'react-query'; +import { useDispatch } from 'react-redux'; +import { Dispatch } from 'redux'; +import { AppAction, UPDATE_FEATURE_FLAGS } from 'types/actions/app'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/licenses/getAll'; -import { ApplyForm, ApplyFormContainer, LicenseInput } from './applyFormStyles'; +import { ApplyForm, ApplyFormContainer, LicenseInput } from './styles'; -function ApplyLicenseForm(): JSX.Element { +function ApplyLicenseForm({ + licenseRefetch, +}: ApplyLicenseFormProps): JSX.Element { const { t } = useTranslation(['licenses']); const [key, setKey] = useState(''); const [loading, setLoading] = useState(false); + const dispatch = useDispatch>(); + const { refetch } = useQuery({ + queryFn: getFeaturesFlags, + queryKey: 'getFeatureFlags', + enabled: false, + }); const onFinish = async (values: unknown | { key: string }): Promise => { const params = values as { key: string }; @@ -28,6 +43,16 @@ function ApplyLicenseForm(): JSX.Element { 
}); if (response.statusCode === 200) { + const [featureFlagsResponse] = await Promise.all([ + refetch(), + licenseRefetch(), + ]); + if (featureFlagsResponse.data?.payload) { + dispatch({ + type: UPDATE_FEATURE_FLAGS, + payload: featureFlagsResponse.data.payload, + }); + } notification.success({ message: 'Success', description: t('license_applied'), @@ -74,4 +99,12 @@ function ApplyLicenseForm(): JSX.Element { ); } +interface ApplyLicenseFormProps { + licenseRefetch: ( + options?: RefetchOptions, + ) => Promise< + QueryObserverResult | ErrorResponse, unknown> + >; +} + export default ApplyLicenseForm; diff --git a/frontend/src/container/Licenses/ListLicenses.tsx b/frontend/src/container/Licenses/ListLicenses.tsx index ba19fe9179..d9a994cfbf 100644 --- a/frontend/src/container/Licenses/ListLicenses.tsx +++ b/frontend/src/container/Licenses/ListLicenses.tsx @@ -1,4 +1,3 @@ -/* eslint-disable react/display-name */ import { Table } from 'antd'; import { ColumnsType } from 'antd/lib/table'; import React from 'react'; diff --git a/frontend/src/container/Licenses/index.tsx b/frontend/src/container/Licenses/index.tsx index 04b20b9927..b326a5b0e7 100644 --- a/frontend/src/container/Licenses/index.tsx +++ b/frontend/src/container/Licenses/index.tsx @@ -1,9 +1,9 @@ import { Tabs, Typography } from 'antd'; import getAll from 'api/licenses/getAll'; import Spinner from 'components/Spinner'; -import useFetch from 'hooks/useFetch'; import React from 'react'; import { useTranslation } from 'react-i18next'; +import { useQuery } from 'react-query'; import ApplyLicenseForm from './ApplyLicenseForm'; import ListLicenses from './ListLicenses'; @@ -12,29 +12,31 @@ const { TabPane } = Tabs; function Licenses(): JSX.Element { const { t } = useTranslation(['licenses']); - const { loading, payload, error, errorMessage } = useFetch(getAll); + const { data, isError, isLoading, refetch } = useQuery({ + queryFn: getAll, + queryKey: 'getAllLicenses', + }); - if (error) { - return {errorMessage}; + if (isError || data?.error) { + return {data?.error}; } - if (loading || payload === undefined) { + if (isLoading || data?.payload === undefined) { return ; } + const allValidLicense = + data?.payload?.filter((license) => license.isCurrent) || []; + return ( - - l.isCurrent === true) : []} - /> + + - l.isCurrent === false) : []} - /> + ); diff --git a/frontend/src/container/Licenses/applyFormStyles.ts b/frontend/src/container/Licenses/styles.ts similarity index 100% rename from frontend/src/container/Licenses/applyFormStyles.ts rename to frontend/src/container/Licenses/styles.ts diff --git a/frontend/src/container/LogControls/styles.ts b/frontend/src/container/LogControls/styles.ts index 304b443c54..f91bc43363 100644 --- a/frontend/src/container/LogControls/styles.ts +++ b/frontend/src/container/LogControls/styles.ts @@ -5,5 +5,4 @@ export const Container = styled.div` align-items: center; justify-content: flex-end; gap: 0.5rem; - margin-bottom: 0.5rem; `; diff --git a/frontend/src/container/LogLiveTail/OptionIcon.tsx b/frontend/src/container/LogLiveTail/OptionIcon.tsx deleted file mode 100644 index 9a6f935ed9..0000000000 --- a/frontend/src/container/LogLiveTail/OptionIcon.tsx +++ /dev/null @@ -1,26 +0,0 @@ -import React from 'react'; - -interface OptionIconProps { - isDarkMode: boolean; -} -function OptionIcon({ isDarkMode }: OptionIconProps): JSX.Element { - return ( - - - - ); -} - -export default OptionIcon; diff --git a/frontend/src/container/LogLiveTail/config.ts b/frontend/src/container/LogLiveTail/config.ts new file 
mode 100644 index 0000000000..27ee168205 --- /dev/null +++ b/frontend/src/container/LogLiveTail/config.ts @@ -0,0 +1,26 @@ +export const TIME_PICKER_OPTIONS = [ + { + value: 5, + label: '5m', + }, + { + value: 15, + label: '15m', + }, + { + value: 30, + label: '30m', + }, + { + value: 60, + label: '1hr', + }, + { + value: 360, + label: '6hrs', + }, + { + value: 720, + label: '12hrs', + }, +]; diff --git a/frontend/src/container/LogLiveTail/index.tsx b/frontend/src/container/LogLiveTail/index.tsx index 315c629828..1a4a38343d 100644 --- a/frontend/src/container/LogLiveTail/index.tsx +++ b/frontend/src/container/LogLiveTail/index.tsx @@ -1,13 +1,17 @@ -/* eslint-disable react-hooks/exhaustive-deps */ import { green } from '@ant-design/colors'; -import { PauseOutlined, PlayCircleOutlined } from '@ant-design/icons'; -import { Button, Popover, Row, Select } from 'antd'; +import { + MoreOutlined, + PauseOutlined, + PlayCircleOutlined, +} from '@ant-design/icons'; +import { Button, Popover, Select, Space } from 'antd'; import { LiveTail } from 'api/logs/livetail'; import dayjs from 'dayjs'; import { throttle } from 'lodash-es'; import React, { useCallback, useEffect, useMemo, useRef } from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; +import { UPDATE_AUTO_REFRESH_DISABLED } from 'types/actions/globalTime'; import { FLUSH_LOGS, PUSH_LIVE_TAIL_EVENT, @@ -16,40 +20,14 @@ import { } from 'types/actions/logs'; import { TLogsLiveTailState } from 'types/api/logs/liveTail'; import AppReducer from 'types/reducer/app'; +import { GlobalReducer } from 'types/reducer/globalTime'; import { ILogsReducer } from 'types/reducer/logs'; -import OptionIcon from './OptionIcon'; -import { TimePickerCard, TimePickerSelect } from './styles'; +import { TIME_PICKER_OPTIONS } from './config'; +import { StopContainer, TimePickerCard, TimePickerSelect } from './styles'; const { Option } = Select; -const TIME_PICKER_OPTIONS = [ - { - value: 5, - label: '5m', - }, - { - value: 15, - label: '15m', - }, - { - value: 30, - label: '30m', - }, - { - value: 60, - label: '1hr', - }, - { - value: 360, - label: '6hrs', - }, - { - value: 720, - label: '12hrs', - }, -]; - function LogLiveTail(): JSX.Element { const { liveTail, @@ -58,12 +36,20 @@ function LogLiveTail(): JSX.Element { logs, } = useSelector((state) => state.logs); const { isDarkMode } = useSelector((state) => state.app); + const { selectedAutoRefreshInterval } = useSelector( + (state) => state.globalTime, + ); + const dispatch = useDispatch(); const handleLiveTail = (toggleState: TLogsLiveTailState): void => { dispatch({ type: TOGGLE_LIVE_TAIL, payload: toggleState, }); + dispatch({ + type: UPDATE_AUTO_REFRESH_DISABLED, + payload: toggleState === 'PLAYING', + }); }; const batchedEventsRef = useRef[]>([]); @@ -75,14 +61,12 @@ function LogLiveTail(): JSX.Element { type: PUSH_LIVE_TAIL_EVENT, payload: batchedEventsRef.current.reverse(), }); - // console.log('DISPATCH', batchedEventsRef.current.length); batchedEventsRef.current = []; }, 1500), [], ); const batchLiveLog = (e: { data: string }): void => { - // console.log('EVENT BATCHED'); batchedEventsRef.current.push(JSON.parse(e.data as string) as never); pushLiveLog(); }; @@ -123,6 +107,7 @@ function LogLiveTail(): JSX.Element { if (liveTail === 'STOPPED') { liveTailSourceRef.current = null; } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [liveTail]); const handleLiveTailStart = (): void => { @@ -155,47 +140,44 @@ function LogLiveTail(): JSX.Element 
{ ), [dispatch, liveTail, liveTailStartRange], ); + + const isDisabled = useMemo(() => selectedAutoRefreshInterval?.length > 0, [ + selectedAutoRefreshInterval, + ]); + return ( - -
- {liveTail === 'PLAYING' ? ( - - ) : ( - - )} - {liveTail !== 'STOPPED' && ( - - )} -
+ + {liveTail === 'PLAYING' ? ( + + ) : ( + + )} + + {liveTail !== 'STOPPED' && ( + + )} - - - + -
+
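A condensed sketch of the mutual exclusion added above between live tail and the global auto-refresh picker: starting the tail disables auto-refresh, and an already-selected auto-refresh interval disables the live-tail controls. The string literals stand in for the action-type constants the patch imports; the helper names are illustrative.

import { Dispatch } from 'redux';

type TLogsLiveTailState = 'PLAYING' | 'PAUSED' | 'STOPPED';

// Dispatch the tail state and flag the global auto-refresh dropdown as
// disabled while the tail is playing (mirrors handleLiveTail above).
function toggleLiveTail(
	dispatch: Dispatch,
	toggleState: TLogsLiveTailState,
): void {
	dispatch({ type: 'TOGGLE_LIVE_TAIL', payload: toggleState });
	dispatch({
		type: 'UPDATE_AUTO_REFRESH_DISABLED',
		payload: toggleState === 'PLAYING',
	});
}

// The reverse direction: live-tail buttons are disabled whenever an
// auto-refresh interval is already selected (mirrors the isDisabled memo).
const isLiveTailDisabled = (selectedAutoRefreshInterval?: string): boolean =>
	(selectedAutoRefreshInterval?.length ?? 0) > 0;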
); } diff --git a/frontend/src/container/LogLiveTail/styles.ts b/frontend/src/container/LogLiveTail/styles.ts index 4e8691dbd0..66081585b3 100644 --- a/frontend/src/container/LogLiveTail/styles.ts +++ b/frontend/src/container/LogLiveTail/styles.ts @@ -3,6 +3,7 @@ import styled from 'styled-components'; export const TimePickerCard = styled(Card)` .ant-card-body { + display: flex; padding: 0; } `; @@ -10,3 +11,15 @@ export const TimePickerCard = styled(Card)` export const TimePickerSelect = styled(Select)` min-width: 100px; `; + +interface Props { + isDarkMode: boolean; +} + +export const StopContainer = styled.div` + height: 0.8rem; + width: 0.8rem; + border-radius: 0.1rem; + background-color: ${({ isDarkMode }): string => + isDarkMode ? '#fff' : '#000'}; +`; diff --git a/frontend/src/container/Logs/index.tsx b/frontend/src/container/Logs/index.tsx deleted file mode 100644 index 9139d7c9e2..0000000000 --- a/frontend/src/container/Logs/index.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import { Divider, Row } from 'antd'; -import LogControls from 'container/LogControls'; -import LogDetailedView from 'container/LogDetailedView'; -import LogLiveTail from 'container/LogLiveTail'; -import LogsAggregate from 'container/LogsAggregate'; -import LogsFilters from 'container/LogsFilters'; -import SearchFilter from 'container/LogsSearchFilter'; -import LogsTable from 'container/LogsTable'; -import useUrlQuery from 'hooks/useUrlQuery'; -import React, { memo, useEffect } from 'react'; -import { connect, useDispatch } from 'react-redux'; -import { bindActionCreators, Dispatch } from 'redux'; -import { ThunkDispatch } from 'redux-thunk'; -import { GetLogsFields } from 'store/actions/logs/getFields'; -import AppActions from 'types/actions'; -import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs'; - -function Logs({ getLogsFields }: LogsProps): JSX.Element { - const urlQuery = useUrlQuery(); - - const dispatch = useDispatch(); - - useEffect(() => { - dispatch({ - type: SET_SEARCH_QUERY_STRING, - payload: urlQuery.get('q'), - }); - }, [dispatch, urlQuery]); - - useEffect(() => { - getLogsFields(); - }, [getLogsFields]); - - return ( -
-	[deleted JSX markup of the old Logs page layout, composing the imported Row, Divider, SearchFilter, LogLiveTail, LogControls, LogsAggregate, LogsFilters, LogsTable and LogDetailedView components; the element tags were not recoverable from the extracted diff]
- ); -} - -type LogsProps = DispatchProps; - -interface DispatchProps { - getLogsFields: () => (dispatch: Dispatch) => void; -} - -const mapDispatchToProps = ( - dispatch: ThunkDispatch, -): DispatchProps => ({ - getLogsFields: bindActionCreators(GetLogsFields, dispatch), -}); - -export default connect(null, mapDispatchToProps)(memo(Logs)); diff --git a/frontend/src/container/LogsAggregate/index.tsx b/frontend/src/container/LogsAggregate/index.tsx index 5b7c330135..34200049c4 100644 --- a/frontend/src/container/LogsAggregate/index.tsx +++ b/frontend/src/container/LogsAggregate/index.tsx @@ -1,4 +1,3 @@ -/* eslint-disable react-hooks/exhaustive-deps */ import { blue } from '@ant-design/colors'; import Graph from 'components/Graph'; import Spinner from 'components/Spinner'; @@ -16,9 +15,6 @@ import { ILogsReducer } from 'types/reducer/logs'; import { Container } from './styles'; -interface LogsAggregateProps { - getLogsAggregate: (arg0: Parameters[0]) => void; -} function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { const { searchFilter: { queryString }, @@ -35,6 +31,7 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { ); const reFetchIntervalRef = useRef | null>(null); + useEffect(() => { switch (liveTail) { case 'STOPPED': { @@ -42,18 +39,6 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { clearInterval(reFetchIntervalRef.current); } reFetchIntervalRef.current = null; - getLogsAggregate({ - timestampStart: minTime, - timestampEnd: maxTime, - step: getStep({ - start: minTime, - end: maxTime, - inputFormat: 'ns', - }), - q: queryString, - ...(idStart ? { idGt: idStart } : {}), - ...(idEnd ? { idLt: idEnd } : {}), - }); break; } @@ -89,18 +74,9 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { break; } } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getLogsAggregate, maxTime, minTime, liveTail]); - const data = { - labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)), - datasets: [ - { - data: logsAggregate.map((s) => s.value), - backgroundColor: blue[4], - }, - ], - }; - return ( {isLoadingAggregate ? 
( @@ -108,16 +84,28 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { ) : ( new Date(s.timestamp / 1000000)), + datasets: [ + { + data: logsAggregate.map((s) => s.value), + backgroundColor: blue[4], + }, + ], + }} type="bar" containerHeight="100%" - animate={false} + animate /> )} ); } +interface LogsAggregateProps { + getLogsAggregate: (arg0: Parameters[0]) => void; +} + interface DispatchProps { getLogsAggregate: ( props: Parameters[0], diff --git a/frontend/src/container/LogsFilters/index.tsx b/frontend/src/container/LogsFilters/index.tsx index 28480edccc..42d8bc500e 100644 --- a/frontend/src/container/LogsFilters/index.tsx +++ b/frontend/src/container/LogsFilters/index.tsx @@ -21,9 +21,6 @@ import { CategoryContainer, Container, FieldContainer } from './styles'; const RESTRICTED_SELECTED_FIELDS = ['timestamp', 'id']; -interface LogsFiltersProps { - getLogsFields: () => void; -} function LogsFilters({ getLogsFields }: LogsFiltersProps): JSX.Element { const { fields: { interesting, selected }, @@ -150,4 +147,6 @@ const mapDispatchToProps = ( getLogsFields: bindActionCreators(GetLogsFields, dispatch), }); +type LogsFiltersProps = DispatchProps; + export default connect(null, mapDispatchToProps)(memo(LogsFilters)); diff --git a/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/QueryBuilder.tsx b/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/QueryBuilder.tsx index 0723d2378e..1aab2e8e75 100644 --- a/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/QueryBuilder.tsx +++ b/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/QueryBuilder.tsx @@ -4,7 +4,7 @@ /* eslint-disable no-param-reassign */ /* eslint-disable react/no-array-index-key */ /* eslint-disable react-hooks/exhaustive-deps */ -import { CloseOutlined } from '@ant-design/icons'; +import { CloseOutlined, CloseSquareOutlined } from '@ant-design/icons'; import { Button, Input, Select } from 'antd'; import CategoryHeading from 'components/Logs/CategoryHeading'; import { @@ -19,12 +19,40 @@ import { AppState } from 'store/reducers'; import { ILogsReducer } from 'types/reducer/logs'; import { v4 } from 'uuid'; +import { SearchFieldsProps } from '..'; import FieldKey from '../FieldKey'; -import { QueryConditionContainer, QueryFieldContainer } from '../styles'; +import { QueryFieldContainer } from '../styles'; import { createParsedQueryStructure } from '../utils'; +import { Container, QueryWrapper } from './styles'; import { hashCode, parseQuery } from './utils'; const { Option } = Select; + +function QueryConditionField({ + query, + queryIndex, + onUpdate, +}: QueryConditionFieldProps): JSX.Element { + const allOptions = Object.values(ConditionalOperators); + return ( + + ); +} + interface QueryFieldProps { query: Query; queryIndex: number; @@ -141,40 +169,13 @@ interface QueryConditionFieldProps { queryIndex: number; onUpdate: (arg0: unknown, arg1: number) => void; } -function QueryConditionField({ - query, - queryIndex, - onUpdate, -}: QueryConditionFieldProps): JSX.Element { - return ( - - - - ); -} export type Query = { value: string | string[]; type: string }[]; function QueryBuilder({ updateParsedQuery, -}: { - updateParsedQuery: (arg0: unknown) => void; -}): JSX.Element { + onDropDownToggleHandler, +}: SearchFieldsProps): JSX.Element { const { searchFilter: { parsedQuery }, } = useSelector((store) => store.logs); @@ -225,27 +226,25 @@ function QueryBuilder({ ); return ( - +
+ +
); }); + return ( -
- LOG QUERY BUILDER -
- {QueryUI()} -
-
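The rewritten builder below receives its open/close behaviour from the parent instead of owning it. A small self-contained sketch of that curried toggle, as it appears in SearchFieldsProps and in LogsSearchFilter later in the patch; the hook name useDropDownToggle is hypothetical.

import { useCallback, useState } from 'react';

// Calling onDropDownToggleHandler(false) returns a zero-argument callback,
// so it can be handed straight to onClick (e.g. the close icon in the
// query builder) without creating a new arrow function per render.
function useDropDownToggle(): {
	showDropDown: boolean;
	onDropDownToggleHandler: (value: boolean) => VoidFunction;
} {
	const [showDropDown, setShowDropDown] = useState(false);

	const onDropDownToggleHandler = useCallback(
		(value: boolean) => (): void => {
			setShowDropDown(value);
		},
		[],
	);

	return { showDropDown, onDropDownToggleHandler };
}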
+ <> + + LOG QUERY BUILDER + + + + {QueryUI()} + ); } diff --git a/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/styles.ts b/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/styles.ts new file mode 100644 index 0000000000..211a5b6407 --- /dev/null +++ b/frontend/src/container/LogsSearchFilter/SearchFields/QueryBuilder/styles.ts @@ -0,0 +1,17 @@ +import styled from 'styled-components'; + +interface Props { + isMargin: boolean; +} +export const Container = styled.div` + display: flex; + justify-content: space-between; + width: 100%; + margin-bottom: ${(props): string => (props.isMargin ? '2rem' : '0')}; +`; + +export const QueryWrapper = styled.div` + display: grid; + grid-template-columns: 80px 1fr; + margin: 0.5rem 0px; +`; diff --git a/frontend/src/container/LogsSearchFilter/SearchFields/Suggestions.tsx b/frontend/src/container/LogsSearchFilter/SearchFields/Suggestions.tsx index c80b996e48..838d790954 100644 --- a/frontend/src/container/LogsSearchFilter/SearchFields/Suggestions.tsx +++ b/frontend/src/container/LogsSearchFilter/SearchFields/Suggestions.tsx @@ -1,6 +1,6 @@ import { Button } from 'antd'; import CategoryHeading from 'components/Logs/CategoryHeading'; -import { map } from 'lodash-es'; +import map from 'lodash-es/map'; import React from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; diff --git a/frontend/src/container/LogsSearchFilter/SearchFields/index.tsx b/frontend/src/container/LogsSearchFilter/SearchFields/index.tsx index 888257279f..9080c454ab 100644 --- a/frontend/src/container/LogsSearchFilter/SearchFields/index.tsx +++ b/frontend/src/container/LogsSearchFilter/SearchFields/index.tsx @@ -2,14 +2,23 @@ import React from 'react'; import QueryBuilder from './QueryBuilder/QueryBuilder'; import Suggestions from './Suggestions'; +import { QueryFields } from './utils'; -interface SearchFieldsProps { - updateParsedQuery: () => void; +export interface SearchFieldsProps { + updateParsedQuery: (query: QueryFields[]) => void; + onDropDownToggleHandler: (value: boolean) => VoidFunction; } -function SearchFields({ updateParsedQuery }: SearchFieldsProps): JSX.Element { + +function SearchFields({ + updateParsedQuery, + onDropDownToggleHandler, +}: SearchFieldsProps): JSX.Element { return ( <> - + ); diff --git a/frontend/src/container/LogsSearchFilter/SearchFields/styles.tsx b/frontend/src/container/LogsSearchFilter/SearchFields/styles.tsx index 3ec67d2fd7..a826da07d4 100644 --- a/frontend/src/container/LogsSearchFilter/SearchFields/styles.tsx +++ b/frontend/src/container/LogsSearchFilter/SearchFields/styles.tsx @@ -9,12 +9,8 @@ export const QueryFieldContainer = styled.div` align-items: center; border-radius: 0.25rem; gap: 1rem; + width: 100%; &:hover { background: ${blue[6]}; } `; - -export const QueryConditionContainer = styled.div` - padding: 0.25rem 0rem; - margin: 0.1rem 0; -`; diff --git a/frontend/src/container/LogsSearchFilter/index.tsx b/frontend/src/container/LogsSearchFilter/index.tsx index 6c3d43d874..cf71f3d3bd 100644 --- a/frontend/src/container/LogsSearchFilter/index.tsx +++ b/frontend/src/container/LogsSearchFilter/index.tsx @@ -1,11 +1,8 @@ -/* eslint-disable react-hooks/exhaustive-deps */ -import { CloseSquareOutlined } from '@ant-design/icons'; -import { Button, Input } from 'antd'; -import useClickOutside from 'hooks/useClickOutside'; +import { Input, InputRef, Popover } from 'antd'; +import useUrlQuery from 'hooks/useUrlQuery'; import getStep from 'lib/getStep'; 
-import React, { memo, useEffect, useMemo, useRef, useState } from 'react'; +import React, { useCallback, useEffect, useRef, useState } from 'react'; import { connect, useDispatch, useSelector } from 'react-redux'; -import { useLocation } from 'react-use'; import { bindActionCreators, Dispatch } from 'redux'; import { ThunkDispatch } from 'redux-thunk'; import { getLogs } from 'store/actions/logs/getLogs'; @@ -17,17 +14,9 @@ import { GlobalReducer } from 'types/reducer/globalTime'; import { ILogsReducer } from 'types/reducer/logs'; import SearchFields from './SearchFields'; -import { DropDownContainer } from './styles'; +import { Container, DropDownContainer } from './styles'; import { useSearchParser } from './useSearchParser'; -const { Search } = Input; - -interface SearchFilterProps { - getLogs: (props: Parameters[0]) => ReturnType; - getLogsAggregate: ( - props: Parameters[0], - ) => ReturnType; -} function SearchFilter({ getLogs, getLogsAggregate, @@ -38,6 +27,14 @@ function SearchFilter({ updateQueryString, } = useSearchParser(); const [showDropDown, setShowDropDown] = useState(false); + const searchRef = useRef(null); + + const onDropDownToggleHandler = useCallback( + (value: boolean) => (): void => { + setShowDropDown(value); + }, + [], + ); const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector< AppState, @@ -48,117 +45,105 @@ function SearchFilter({ (state) => state.globalTime, ); - const searchComponentRef = useRef(null); + const dispatch = useDispatch>(); - useClickOutside(searchComponentRef, (e: HTMLElement) => { - // using this hack as overlay span is voilating this condition - if ( - e.nodeName === 'svg' || - e.nodeName === 'path' || - e.nodeName === 'span' || - e.nodeName === 'button' - ) { - return; - } + const handleSearch = useCallback( + (customQuery) => { + if (liveTail === 'PLAYING') { + dispatch({ + type: TOGGLE_LIVE_TAIL, + payload: 'PAUSED', + }); + setTimeout( + () => + dispatch({ + type: TOGGLE_LIVE_TAIL, + payload: liveTail, + }), + 0, + ); + } else { + getLogs({ + q: customQuery, + limit: logLinesPerPage, + orderBy: 'timestamp', + order: 'desc', + timestampStart: minTime, + timestampEnd: maxTime, + ...(idStart ? { idGt: idStart } : {}), + ...(idEnd ? { idLt: idEnd } : {}), + }); - if ( - e.nodeName === 'DIV' && - ![ - 'ant-empty-image', - 'ant-select-item', - 'ant-col', - 'ant-select-item-option-content', - 'ant-select-item-option-active', - ].find((p) => p.indexOf(e.className) !== -1) && - !(e.ariaSelected === 'true') && - showDropDown - ) { - setShowDropDown(false); - } - }); - const { search } = useLocation(); - const dispatch = useDispatch(); - const handleSearch = (customQuery = ''): void => { - if (liveTail === 'PLAYING') { - dispatch({ - type: TOGGLE_LIVE_TAIL, - payload: 'PAUSED', - }); - setTimeout( - () => - dispatch({ - type: TOGGLE_LIVE_TAIL, - payload: liveTail, + getLogsAggregate({ + timestampStart: minTime, + timestampEnd: maxTime, + step: getStep({ + start: minTime, + end: maxTime, + inputFormat: 'ns', }), - 0, - ); - } else { - getLogs({ - q: customQuery || queryString, - limit: logLinesPerPage, - orderBy: 'timestamp', - order: 'desc', - timestampStart: minTime, - timestampEnd: maxTime, - ...(idStart ? { idGt: idStart } : {}), - ...(idEnd ? 
{ idLt: idEnd } : {}), - }); + q: customQuery, + }); + } + }, + [ + dispatch, + getLogs, + getLogsAggregate, + idEnd, + idStart, + liveTail, + logLinesPerPage, + maxTime, + minTime, + ], + ); - getLogsAggregate({ - timestampStart: minTime, - timestampEnd: maxTime, - step: getStep({ - start: minTime, - end: maxTime, - inputFormat: 'ns', - }), - q: customQuery || queryString, - }); - } - setShowDropDown(false); - }; - - const urlQuery = useMemo(() => { - return new URLSearchParams(search); - }, [search]); + const urlQuery = useUrlQuery(); + const urlQueryString = urlQuery.get('q'); useEffect(() => { - const urlQueryString = urlQuery.get('q'); - if (urlQueryString !== null) handleSearch(urlQueryString); - }, []); + handleSearch(urlQueryString || ''); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [urlQueryString, maxTime, minTime]); return ( -
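A simplified sketch of the search behaviour defined just above: while live tail is playing, a search only bounces the tail (pause, then re-arm on the next tick) instead of issuing a fetch; with the tail stopped, it fetches both the log lines and the aggregate histogram. Function and parameter names here are illustrative, not exports of the patch.

type TLogsLiveTailState = 'PLAYING' | 'PAUSED' | 'STOPPED';

function runSearch(
	liveTail: TLogsLiveTailState,
	setLiveTail: (state: TLogsLiveTailState) => void,
	fetchLogsAndAggregate: (q: string) => void,
	customQuery: string,
): void {
	if (liveTail === 'PLAYING') {
		// pause, then restore the previous state on the next tick, presumably
		// so the tail stream is re-established with the new query string
		setLiveTail('PAUSED');
		setTimeout(() => setLiveTail(liveTail), 0);
		return;
	}
	// tail not running: hit the logs and aggregate endpoints directly
	fetchLogsAndAggregate(customQuery);
}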
- setShowDropDown(true)} - value={queryString} - onChange={(e): void => { - updateQueryString(e.target.value); - }} - onSearch={handleSearch} - /> -
- {showDropDown && ( + + - - + - )} -
-
+ } + trigger="click" + overlayInnerStyle={{ + width: `${searchRef?.current?.input?.offsetWidth || 0}px`, + }} + visible={showDropDown} + destroyTooltipOnHide + onVisibleChange={(value): void => { + onDropDownToggleHandler(value)(); + }} + > + { + updateQueryString(e.target.value); + }} + allowClear + onSearch={handleSearch} + /> + + ); } + interface DispatchProps { getLogs: ( props: Parameters[0], @@ -168,6 +153,8 @@ interface DispatchProps { ) => (dispatch: Dispatch) => void; } +type SearchFilterProps = DispatchProps; + const mapDispatchToProps = ( dispatch: ThunkDispatch, ): DispatchProps => ({ @@ -175,4 +162,4 @@ const mapDispatchToProps = ( getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch), }); -export default connect(null, mapDispatchToProps)(memo(SearchFilter)); +export default connect(null, mapDispatchToProps)(SearchFilter); diff --git a/frontend/src/container/LogsSearchFilter/styles.ts b/frontend/src/container/LogsSearchFilter/styles.ts index 640881c3fa..7b379b08da 100644 --- a/frontend/src/container/LogsSearchFilter/styles.ts +++ b/frontend/src/container/LogsSearchFilter/styles.ts @@ -2,11 +2,13 @@ import { Card } from 'antd'; import styled from 'styled-components'; export const DropDownContainer = styled(Card)` - top: 0.5rem; - position: absolute; - width: 100%; - z-index: 1; .ant-card-body { - padding: 0.8rem; + width: 100%; } `; + +export const Container = styled.div` + width: 100%; + flex: 1; + position: relative; +`; diff --git a/frontend/src/container/LogsSearchFilter/useSearchParser.ts b/frontend/src/container/LogsSearchFilter/useSearchParser.ts index 50c4cc1e35..7bb43eeea0 100644 --- a/frontend/src/container/LogsSearchFilter/useSearchParser.ts +++ b/frontend/src/container/LogsSearchFilter/useSearchParser.ts @@ -23,10 +23,12 @@ export function useSearchParser(): { const updateQueryString = useCallback( (updatedQueryString) => { - history.push({ - pathname: history.location.pathname, - search: updatedQueryString ? `?q=${updatedQueryString}` : '', - }); + if (updatedQueryString) { + history.push({ + pathname: history.location.pathname, + search: updatedQueryString ? 
`?q=${updatedQueryString}` : '', + }); + } dispatch({ type: SET_SEARCH_QUERY_STRING, diff --git a/frontend/src/container/LogsTable/index.tsx b/frontend/src/container/LogsTable/index.tsx index 7997fac91f..68758c59fe 100644 --- a/frontend/src/container/LogsTable/index.tsx +++ b/frontend/src/container/LogsTable/index.tsx @@ -3,48 +3,18 @@ import { Typography } from 'antd'; import LogItem from 'components/Logs/LogItem'; import Spinner from 'components/Spinner'; import { map } from 'lodash-es'; -import React, { memo, useEffect } from 'react'; -import { connect, useSelector } from 'react-redux'; -import { bindActionCreators, Dispatch } from 'redux'; -import { ThunkDispatch } from 'redux-thunk'; -import { getLogs } from 'store/actions/logs/getLogs'; +import React, { memo } from 'react'; +import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; -import AppActions from 'types/actions'; -import { GlobalReducer } from 'types/reducer/globalTime'; import { ILogsReducer } from 'types/reducer/logs'; import { Container, Heading } from './styles'; -function LogsTable({ getLogs }: LogsTableProps): JSX.Element { - const { - searchFilter: { queryString }, - logs, - logLinesPerPage, - idEnd, - idStart, - isLoading, - liveTail, - } = useSelector((state) => state.logs); - - const { maxTime, minTime } = useSelector( - (state) => state.globalTime, +function LogsTable(): JSX.Element { + const { logs, isLoading, liveTail } = useSelector( + (state) => state.logs, ); - useEffect(() => { - if (liveTail === 'STOPPED') - getLogs({ - q: queryString, - limit: logLinesPerPage, - orderBy: 'timestamp', - order: 'desc', - timestampStart: minTime, - timestampEnd: maxTime, - ...(idStart ? { idGt: idStart } : {}), - ...(idEnd ? { idLt: idEnd } : {}), - }); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getLogs, idEnd, idStart, logLinesPerPage, maxTime, minTime, liveTail]); - if (isLoading) { return ; } @@ -72,20 +42,4 @@ function LogsTable({ getLogs }: LogsTableProps): JSX.Element { ); } -interface DispatchProps { - getLogs: ( - props: Parameters[0], - ) => (dispatch: Dispatch) => void; -} - -const mapDispatchToProps = ( - dispatch: ThunkDispatch, -): DispatchProps => ({ - getLogs: bindActionCreators(getLogs, dispatch), -}); - -interface LogsTableProps { - getLogs: (props: Parameters[0]) => ReturnType; -} - -export default connect(null, mapDispatchToProps)(memo(LogsTable)); +export default memo(LogsTable); diff --git a/frontend/src/container/MetricsTable/index.tsx b/frontend/src/container/MetricsTable/index.tsx index bfc97ba9e2..3042e94d1a 100644 --- a/frontend/src/container/MetricsTable/index.tsx +++ b/frontend/src/container/MetricsTable/index.tsx @@ -1,6 +1,6 @@ import { blue } from '@ant-design/colors'; import { SearchOutlined } from '@ant-design/icons'; -import { Button, Input, Space, Table } from 'antd'; +import { Button, Card, Input, Space, Table } from 'antd'; import type { ColumnsType, ColumnType } from 'antd/es/table'; import type { FilterConfirmProps } from 'antd/es/table/interface'; import localStorageGet from 'api/browser/localstorage/get'; @@ -48,37 +48,27 @@ function Metrics(): JSX.Element { const filterDropdown = useCallback( ({ setSelectedKeys, selectedKeys, confirm }) => ( -
- - setSelectedKeys(e.target.value ? [e.target.value] : []) - } - allowClear - onPressEnter={(): void => handleSearch(confirm)} - style={{ - marginBottom: 8, - }} - /> - + + + + setSelectedKeys(e.target.value ? [e.target.value] : []) + } + allowClear + onPressEnter={(): void => handleSearch(confirm)} + /> -
+ ), [], ); diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/index.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/index.tsx index b2712c86ae..95b0fa8bfd 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/index.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/index.tsx @@ -27,24 +27,34 @@ function ClickHouseQueryContainer({ toggleDisable, toggleDelete, }: IClickHouseQueryHandleChange): void => { - const allQueries = queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME]; - const currentIndexQuery = allQueries[queryIndex]; + // we must check if queryIndex is number type. because - + // ClickHouseQueryBuilder.handleQueryChange has a queryIndex + // parameter which supports both number and string formats. + // it is because, the dashboard side of query builder has queryIndex as number + // while the alert builder uses string format for query index (similar to backend) + // hence, this method is only applies when queryIndex is in number format. - if (rawQuery !== undefined) { - currentIndexQuery.rawQuery = rawQuery; - } + if (typeof queryIndex === 'number') { + const allQueries = queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME]; - if (legend !== undefined) { - currentIndexQuery.legend = legend; - } + const currentIndexQuery = allQueries[queryIndex]; - if (toggleDisable) { - currentIndexQuery.disabled = !currentIndexQuery.disabled; + if (rawQuery !== undefined) { + currentIndexQuery.rawQuery = rawQuery; + } + + if (legend !== undefined) { + currentIndexQuery.legend = legend; + } + + if (toggleDisable) { + currentIndexQuery.disabled = !currentIndexQuery.disabled; + } + if (toggleDelete) { + allQueries.splice(queryIndex, 1); + } + updateQueryData({ updatedQuery: { ...queryData } }); } - if (toggleDelete) { - allQueries.splice(queryIndex, 1); - } - updateQueryData({ updatedQuery: { ...queryData } }); }; const addQueryHandler = (): void => { queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME].push({ diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx index 7e9a3df0c2..389b7e15c4 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx @@ -8,7 +8,7 @@ import { IClickHouseQueryHandleChange } from './types'; interface IClickHouseQueryBuilderProps { queryData: IClickHouseQuery; - queryIndex: number; + queryIndex: number | string; handleQueryChange: (args: IClickHouseQueryHandleChange) => void; } @@ -43,6 +43,9 @@ function ClickHouseQueryBuilder({ scrollbar: { alwaysConsumeMouseWheel: false, }, + minimap: { + enabled: false, + }, }} /> diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/types.ts b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/types.ts index 6e0d21b206..5e1b95b3b3 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/types.ts +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/types.ts @@ -1,7 +1,7 @@ import { IClickHouseQuery } from 'types/api/dashboard/getAll'; export interface IClickHouseQueryHandleChange { - queryIndex: number; + queryIndex: number | string; rawQuery?: 
IClickHouseQuery['rawQuery']; legend?: IClickHouseQuery['legend']; toggleDisable?: IClickHouseQuery['disabled']; diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx index b305517429..fc24014830 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomains/Switch/index.tsx @@ -25,7 +25,7 @@ function SwitchComponent({ setIsLoading(false); }; - const isInValidCertificate = useMemo( + const isInValidVerificate = useMemo( () => !getIsValidCertificate(record?.samlConfig), [record], ); @@ -33,7 +33,7 @@ function SwitchComponent({ return ( diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx index 221ba963dc..3ae10eb859 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx @@ -1,5 +1,5 @@ import { LockTwoTone } from '@ant-design/icons'; -import { Button, Modal, notification, Space, Table, Typography } from 'antd'; +import { Button, Modal, notification, Space, Table } from 'antd'; import { ColumnsType } from 'antd/lib/table'; import deleteDomain from 'api/SAML/deleteDomain'; import listAllDomain from 'api/SAML/listAllDomain'; @@ -20,7 +20,6 @@ import AddDomain from './AddDomain'; import Create from './Create'; import EditSaml from './Edit'; import SwitchComponent from './Switch'; -import { getIsValidCertificate } from './utils'; function AuthDomains(): JSX.Element { const { t } = useTranslation(['common', 'organizationsettings']); @@ -196,12 +195,6 @@ function AuthDomains(): JSX.Element { ); } - const isValidCertificate = getIsValidCertificate(record.samlConfig); - - if (!isValidCertificate) { - return Configure SSO  ; - } - return (
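The queryIndex convention in the ClickHouse query-builder change above: the dashboard side addresses queries by array position (number), the alert builder by name (string), and the shared change handler narrows the union with a typeof guard. A self-contained sketch, with local types standing in for the real IClickHouseQuery:

interface IClickHouseQueryChange {
	queryIndex: number | string;
	rawQuery?: string;
	legend?: string;
	toggleDisable?: boolean;
	toggleDelete?: boolean;
}

interface ChQuery {
	rawQuery: string;
	legend: string;
	disabled: boolean;
}

// Dashboard-side handler: only numeric indices are acted upon; string
// indices ("A", "B", ...) are left to the alert builder's own handler.
function handleDashboardQueryChange(
	queries: ChQuery[],
	change: IClickHouseQueryChange,
): void {
	if (typeof change.queryIndex !== 'number') {
		return;
	}
	const current = queries[change.queryIndex];
	if (change.rawQuery !== undefined) {
		current.rawQuery = change.rawQuery;
	}
	if (change.legend !== undefined) {
		current.legend = change.legend;
	}
	if (change.toggleDisable) {
		current.disabled = !current.disabled;
	}
	if (change.toggleDelete) {
		queries.splice(change.queryIndex, 1);
	}
}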