Merge pull request #1776 from SigNoz/release/v0.11.4

Release/v0.11.4
Ankit Nayan, 2022-11-29 17:36:31 +05:30, committed by GitHub
commit 18d80d47e5
108 changed files with 3343 additions and 885 deletions

View File

@@ -40,7 +40,7 @@ services:
        condition: on-failure
  query-service:
-    image: signoz/query-service:0.11.3
+    image: signoz/query-service:0.11.4
    command: ["-config=/root/config/prometheus.yml"]
    # ports:
    #   - "6060:6060" # pprof port
@@ -70,7 +70,7 @@ services:
      - clickhouse
  frontend:
-    image: signoz/frontend:0.11.3
+    image: signoz/frontend:0.11.4
    deploy:
      restart_policy:
        condition: on-failure

View File

@@ -39,7 +39,7 @@ services:
  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
  query-service:
-    image: signoz/query-service:0.11.3
+    image: signoz/query-service:0.11.4
    container_name: query-service
    command: ["-config=/root/config/prometheus.yml"]
    # ports:
@@ -69,7 +69,7 @@ services:
      condition: service_healthy
  frontend:
-    image: signoz/frontend:0.11.3
+    image: signoz/frontend:0.11.4
    container_name: frontend
    restart: on-failure
    depends_on:

View File

@@ -114,6 +114,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router) {
router.HandleFunc("/api/v1/invite/{token}", baseapp.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/register", baseapp.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", baseapp.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", baseapp.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/query_range", baseapp.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)
ah.APIHandler.RegisterRoutes(router)
}

View File

@@ -0,0 +1,236 @@
package api
import (
"bytes"
"fmt"
"net/http"
"sync"
"text/template"
"time"
"go.signoz.io/signoz/pkg/query-service/app/metrics"
"go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
"go.uber.org/zap"
)
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
ah.APIHandler.QueryRangeMetricsV2(w, r)
return
}
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
if apiErrorObj != nil {
zap.S().Error(apiErrorObj.Err.Error())
RespondError(w, apiErrorObj, nil)
return
}
// prometheus instant query needs same timestamp
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM {
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
}
// round the end down to the nearest multiple of the step (integer division floors)
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER {
end := (metricsQueryRangeParams.End) / 1000
step := metricsQueryRangeParams.Step
metricsQueryRangeParams.End = (end / step * step) * 1000
}
type channelResult struct {
Series []*basemodel.Series
TableName string
Err error
Name string
Query string
}
execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) {
var seriesList []*basemodel.Series
var tableName []string
ch := make(chan channelResult, len(queries))
var wg sync.WaitGroup
for name, query := range queries {
wg.Add(1)
go func(name, query string) {
defer wg.Done()
seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query)
for _, series := range seriesList {
series.QueryName = name
}
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
return
}
ch <- channelResult{Series: seriesList, TableName: tableName}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
tableName = append(tableName, r.TableName)
}
if len(errs) != 0 {
return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, tableName, nil, nil
}
execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) {
var seriesList []*basemodel.Series
ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
var wg sync.WaitGroup
for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
if query.Disabled {
continue
}
wg.Add(1)
go func(name string, query *basemodel.PromQuery) {
var seriesList []*basemodel.Series
defer wg.Done()
tmpl := template.New("promql-query")
tmpl, tmplErr := tmpl.Parse(query.Query)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
var queryBuf bytes.Buffer
tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
query.Query = queryBuf.String()
queryModel := basemodel.QueryRangeParams{
Start: time.UnixMilli(metricsQueryRangeParams.Start),
End: time.UnixMilli(metricsQueryRangeParams.End),
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
Query: query.Query,
}
promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel)
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
return
}
matrix, _ := promResult.Matrix()
for _, v := range matrix {
var s basemodel.Series
s.QueryName = name
s.Labels = v.Metric.Copy().Map()
for _, p := range v.Points {
s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.V})
}
seriesList = append(seriesList, &s)
}
ch <- channelResult{Series: seriesList}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
}
if len(errs) != 0 {
return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, nil, nil
}
var seriesList []*basemodel.Series
var tableName []string
var err error
var errQuriesByName map[string]string
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
case basemodel.QUERY_BUILDER:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
if runQueries.Err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil)
return
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
case basemodel.CLICKHOUSE:
queries := make(map[string]string)
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
var query bytes.Buffer
// replace go template variables
querytemplate.AssignReservedVars(metricsQueryRangeParams)
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
queries[name] = query.String()
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries)
case basemodel.PROM:
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
default:
err = fmt.Errorf("invalid query type")
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName)
return
}
if err != nil {
apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
return
}
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
len(seriesList) > 1 &&
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
return
}
type ResponseFormat struct {
ResultType string `json:"resultType"`
Result []*basemodel.Series `json:"result"`
TableName []string `json:"tableName"`
}
resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName}
ah.Respond(w, resp)
}
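Both execClickHouseQueries and execPromQueries above follow the same fan-out pattern: one goroutine per named query, results and errors collected over a channel buffered to len(queries) so no sender blocks, then a single drain loop after wg.Wait() and close(ch). A minimal, self-contained sketch of that pattern (the query map and the simulated failure are stand-ins, not SigNoz APIs):

package main

import (
	"fmt"
	"sync"
)

func main() {
	queries := map[string]string{"A": "q1", "B": "q2", "C": "q3"}

	type result struct {
		Name string
		Err  error
	}
	// Buffered to len(queries) so every goroutine can send without blocking.
	ch := make(chan result, len(queries))
	var wg sync.WaitGroup

	for name, q := range queries {
		wg.Add(1)
		go func(name, q string) {
			defer wg.Done()
			// Stand-in for DataConnector.GetMetricResultEE.
			if q == "q2" {
				ch <- result{Name: name, Err: fmt.Errorf("error in query-%s", name)}
				return
			}
			ch <- result{Name: name}
		}(name, q)
	}
	wg.Wait()
	close(ch)

	// Drain the closed channel, separating failures from successes.
	for r := range ch {
		if r.Err != nil {
			fmt.Println("failed:", r.Name, r.Err)
			continue
		}
		fmt.Println("ok:", r.Name)
	}
}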

View File

@@ -0,0 +1,39 @@
package api
import (
"net/http"
"strconv"
"go.signoz.io/signoz/ee/query-service/app/db"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
traceId, spanId, levelUpInt, levelDownInt, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
if err != nil {
zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}
ah.WriteJSON(w, r, result)
}
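For reference, the route registered above takes the trace ID in the path and the smart-trace parameters as query strings; the parameter names (spanId, levelUp, levelDown) match what the frontend's getTraceItem sends later in this diff, while the host, port, and IDs below are placeholders:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical endpoint and IDs; only the path shape and the
	// parameter names are taken from this PR.
	url := "http://localhost:8080/api/v1/traces/000000000000000071dc9b0a338729b4" +
		"?spanId=4518dcfb32f5fa96&levelUp=2&levelDown=2"

	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, len(body), "bytes")
}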

View File

@@ -0,0 +1,401 @@
package db
import (
"context"
"crypto/md5"
"encoding/json"
"fmt"
"reflect"
"regexp"
"sort"
"strings"
"time"
"go.signoz.io/signoz/ee/query-service/model"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
// GetMetricResultEE runs the query and returns list of time series
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult")()
zap.S().Infof("Executing metric result query: %s", query)
var hash string
// If getSubTreeSpans function is used in the clickhouse query
if strings.Contains(query, "getSubTreeSpans(") {
var err error
query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash)
// errors built with fmt.Errorf cannot be compared with ==, so match on the message
if err != nil {
if err.Error() == "No spans found for the given query" {
return nil, "", nil
}
return nil, "", err
}
}
rows, err := r.conn.Query(ctx, query)
zap.S().Debug(query)
if err != nil {
zap.S().Debug("Error in processing query: ", err)
return nil, "", fmt.Errorf("error in processing query")
}
var (
columnTypes = rows.ColumnTypes()
columnNames = rows.Columns()
vars = make([]interface{}, len(columnTypes))
)
for i := range columnTypes {
vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
}
// when group by is applied, each combination of cartesian product
// of attributes is separate series. each item in metricPointsMap
// represent a unique series.
metricPointsMap := make(map[string][]basemodel.MetricPoint)
// attribute key-value pairs for each group selection
attributesMap := make(map[string]map[string]string)
defer rows.Close()
for rows.Next() {
if err := rows.Scan(vars...); err != nil {
return nil, "", err
}
var groupBy []string
var metricPoint basemodel.MetricPoint
groupAttributes := make(map[string]string)
// Assuming that the end result row contains a timestamp, value and option labels
// Label key and value are both strings.
for idx, v := range vars {
colName := columnNames[idx]
switch v := v.(type) {
case *string:
// special case for returning all labels
if colName == "fullLabels" {
var metric map[string]string
err := json.Unmarshal([]byte(*v), &metric)
if err != nil {
return nil, "", err
}
for key, val := range metric {
groupBy = append(groupBy, val)
groupAttributes[key] = val
}
} else {
groupBy = append(groupBy, *v)
groupAttributes[colName] = *v
}
case *time.Time:
metricPoint.Timestamp = v.UnixMilli()
case *float64:
metricPoint.Value = *v
case **float64:
// ch seems to return this type when column is derived from
// SELECT count(*)/ SELECT count(*)
floatVal := *v
if floatVal != nil {
metricPoint.Value = *floatVal
}
case *float32:
float32Val := float32(*v)
metricPoint.Value = float64(float32Val)
case *uint8, *uint64, *uint16, *uint32:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
}
case *int8, *int16, *int32, *int64:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.S().Errorf("invalid var found in metric builder query result", v, colName)
}
}
sort.Strings(groupBy)
key := strings.Join(groupBy, "")
attributesMap[key] = groupAttributes
metricPointsMap[key] = append(metricPointsMap[key], metricPoint)
}
var seriesList []*basemodel.Series
for key := range metricPointsMap {
points := metricPointsMap[key]
// first point in each series could be invalid since the
// aggregations are applied with point from prev series
if len(points) > 1 {
points = points[1:]
}
attributes := attributesMap[key]
series := basemodel.Series{Labels: attributes, Points: points}
seriesList = append(seriesList, &series)
}
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
// if err != nil {
// zap.S().Error("Error in dropping temporary table: ", err)
// return nil, err
// }
if hash == "" {
return seriesList, hash, nil
} else {
return seriesList, "getSubTreeSpans" + hash, nil
}
}
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
zap.S().Debugf("Executing getSubTreeSpans function")
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
// process the query to fetch subTree query
var subtreeInput string
query, subtreeInput, hash = processQuery(query, hash)
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
if err != nil {
zap.S().Error("Error in dropping temporary table: ", err)
return query, hash, err
}
// Create temporary table to store the getSubTreeSpans() results
zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash)
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
if err != nil {
zap.S().Error("Error in creating temporary table: ", err)
return query, hash, err
}
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
getSpansSubQuery := subtreeInput
// Execute the subTree query
zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery)
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
// zap.S().Info(getSpansSubQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return query, hash, fmt.Errorf("Error in processing sql query")
}
var searchScanResponses []basemodel.SearchSpanDBResponseItem
// TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later.
// Fetch all the spans from of same TraceID so that we can build subtree
modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("No spans found for the given query")
}
zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery)
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return query, hash, fmt.Errorf("Error in processing sql query")
}
// Process model to fetch the spans
zap.S().Debugf("Processing model to fetch the spans")
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
for _, item := range searchScanResponses {
var jsonItem basemodel.SearchSpanResponseItem
json.Unmarshal([]byte(item.Model), &jsonItem)
jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano())
if jsonItem.Events == nil {
jsonItem.Events = []string{}
}
searchSpanResponses = append(searchSpanResponses, jsonItem)
}
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
// Use map to store pointer to the spans to avoid duplicates and save memory
zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash)
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
if err != nil {
zap.S().Error("Error in getSubTreeAlgorithm function: ", err)
return query, hash, err
}
zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash)
statement, err := r.conn.PrepareBatch(context.Background(), "INSERT INTO getSubTreeSpans"+hash)
if err != nil {
zap.S().Error("Error in preparing batch statement: ", err)
return query, hash, err
}
for _, span := range treeSearchResponse {
var parentID string
if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" {
parentID = span.References[0].SpanId
}
err = statement.Append(
time.Unix(0, int64(span.TimeUnixNano)),
span.TraceID,
span.SpanID,
parentID,
span.RootSpanID,
span.ServiceName,
span.Name,
span.RootName,
uint64(span.DurationNano),
int8(span.Kind),
span.TagMap,
span.Events,
)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return query, hash, err
}
}
zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash)
err = statement.Send()
if err != nil {
zap.S().Error("Error in sending statement: ", err)
return query, hash, err
}
return query, hash, nil
}
func processQuery(query string, hash string) (string, string, string) {
re3 := regexp.MustCompile(`getSubTreeSpans`)
submatchall3 := re3.FindAllStringIndex(query, -1)
getSubtreeSpansMatchIndex := submatchall3[0][1]
query2countParenthesis := query[getSubtreeSpansMatchIndex:]
sqlCompleteIndex := 0
countParenthesisImbalance := 0
for i, char := range query2countParenthesis {
if string(char) == "(" {
countParenthesisImbalance += 1
}
if string(char) == ")" {
countParenthesisImbalance -= 1
}
if countParenthesisImbalance == 0 {
sqlCompleteIndex = i
break
}
}
subtreeInput := query2countParenthesis[1:sqlCompleteIndex]
// hash the subtreeInput
hmd5 := md5.Sum([]byte(subtreeInput))
hash = fmt.Sprintf("%x", hmd5)
// Reformat the query to use the getSubTreeSpans function
query = query[:getSubtreeSpansMatchIndex] + hash + " " + query2countParenthesis[sqlCompleteIndex+1:]
return query, subtreeInput, hash
}
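processQuery cuts the inner SELECT out of the getSubTreeSpans(...) call by balancing parentheses, hashes it with MD5, and splices the hash back in so the outer query reads from the temporary table getSubTreeSpans<hash>. A toy, self-contained version of that rewrite, assuming a single getSubTreeSpans call and an ASCII-only query:

package main

import (
	"crypto/md5"
	"fmt"
	"strings"
)

func main() {
	query := "select count() from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where name='FindDriverIDs') group by interval"

	// Index just past the function name, as the regexp match does above.
	start := strings.Index(query, "getSubTreeSpans") + len("getSubTreeSpans")
	rest := query[start:]

	// Walk until the parentheses balance out to find the end of the call.
	depth, end := 0, 0
	for i, ch := range rest {
		if ch == '(' {
			depth++
		}
		if ch == ')' {
			depth--
		}
		if depth == 0 {
			end = i
			break
		}
	}
	subtreeInput := rest[1:end]
	hash := fmt.Sprintf("%x", md5.Sum([]byte(subtreeInput)))

	// The call is replaced by a reference to the temporary table.
	rewritten := query[:start] + hash + " " + rest[end+1:]
	fmt.Println("temp table: getSubTreeSpans" + hash)
	fmt.Println(rewritten)
}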
// getSubTreeAlgorithm is an algorithm to build the subtrees of the spans and return the list of spans
func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) {
var spans []*model.SpanForTraceDetails
for _, spanItem := range payload {
var parentID string
if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
parentID = spanItem.References[0].SpanId
}
span := &model.SpanForTraceDetails{
TimeUnixNano: spanItem.TimeUnixNano,
SpanID: spanItem.SpanID,
TraceID: spanItem.TraceID,
ServiceName: spanItem.ServiceName,
Name: spanItem.Name,
Kind: spanItem.Kind,
DurationNano: spanItem.DurationNano,
TagMap: spanItem.TagMap,
ParentID: parentID,
Events: spanItem.Events,
HasError: spanItem.HasError,
}
spans = append(spans, span)
}
zap.S().Debug("Building Tree")
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
}
searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem)
// Every span which was fetched from getSubTree Input SQL query is considered root
// For each root, get the subtree spans
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
targetSpan := &model.SpanForTraceDetails{}
// zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// Search target span object in the tree
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
if targetSpan != nil {
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
return nil, err
}
}
if targetSpan == nil {
return nil, nil
}
// Build subtree for the target span
// Mark the target span as root by setting parent ID as empty string
targetSpan.ParentID = ""
preParents := []*model.SpanForTraceDetails{targetSpan}
children := []*model.SpanForTraceDetails{}
// Get the subtree child spans
for i := 0; len(preParents) != 0; i++ {
parents := []*model.SpanForTraceDetails{}
for _, parent := range preParents {
children = append(children, parent.Children...)
parents = append(parents, parent.Children...)
}
preParents = parents
}
resultSpans := children
// Add the target span to the result spans
resultSpans = append(resultSpans, targetSpan)
for _, item := range resultSpans {
references := []basemodel.OtelSpanRef{
{
TraceId: item.TraceID,
SpanId: item.ParentID,
RefType: "CHILD_OF",
},
}
if item.Events == nil {
item.Events = []string{}
}
searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{
TimeUnixNano: item.TimeUnixNano,
SpanID: item.SpanID,
TraceID: item.TraceID,
ServiceName: item.ServiceName,
Name: item.Name,
Kind: item.Kind,
References: references,
DurationNano: item.DurationNano,
TagMap: item.TagMap,
Events: item.Events,
HasError: item.HasError,
RootSpanID: getSpansSubQueryDBResponse.SpanID,
RootName: targetSpan.Name,
}
}
}
return searchSpansResult, nil
}

View File

@ -6,6 +6,7 @@ import (
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
basechr "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader" basechr "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
"go.signoz.io/signoz/pkg/query-service/interfaces"
) )
type ClickhouseReader struct { type ClickhouseReader struct {
@ -14,8 +15,8 @@ type ClickhouseReader struct {
*basechr.ClickHouseReader *basechr.ClickHouseReader
} }
func NewDataConnector(localDB *sqlx.DB, promConfigPath string) *ClickhouseReader { func NewDataConnector(localDB *sqlx.DB, promConfigPath string, lm interfaces.FeatureLookup) *ClickhouseReader {
ch := basechr.NewReader(localDB, promConfigPath) ch := basechr.NewReader(localDB, promConfigPath, lm)
return &ClickhouseReader{ return &ClickhouseReader{
conn: ch.GetConn(), conn: ch.GetConn(),
appdb: localDB, appdb: localDB,

View File

@@ -0,0 +1,222 @@
package db
import (
"errors"
"strconv"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
var spans []*model.SpanForTraceDetails
// Build a slice of spans from the payload
for _, spanItem := range payload {
var parentID string
if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
parentID = spanItem.References[0].SpanId
}
span := &model.SpanForTraceDetails{
TimeUnixNano: spanItem.TimeUnixNano,
SpanID: spanItem.SpanID,
TraceID: spanItem.TraceID,
ServiceName: spanItem.ServiceName,
Name: spanItem.Name,
Kind: spanItem.Kind,
DurationNano: spanItem.DurationNano,
TagMap: spanItem.TagMap,
ParentID: parentID,
Events: spanItem.Events,
HasError: spanItem.HasError,
}
spans = append(spans, span)
}
// Build span trees from the spans
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
}
targetSpan := &model.SpanForTraceDetails{}
// Find the target span in the span trees
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, targetSpanId)
if targetSpan != nil {
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
return nil, err
}
}
// If the target span is not found, return span not found error
if targetSpan == nil {
return nil, errors.New("Span not found")
}
// Build the final result
parents := []*model.SpanForTraceDetails{}
// Get the parent spans of the target span up to the given levelUp parameter and spanLimit
preParent := targetSpan
for i := 0; i < levelUp+1; i++ {
if i == levelUp {
preParent.ParentID = ""
}
if spanLimit-len(preParent.Children) <= 0 {
parents = append(parents, preParent)
parents = append(parents, preParent.Children[:spanLimit]...)
spanLimit -= (len(preParent.Children[:spanLimit]) + 1)
preParent.ParentID = ""
break
}
parents = append(parents, preParent)
parents = append(parents, preParent.Children...)
spanLimit -= (len(preParent.Children) + 1)
preParent = preParent.ParentSpan
if preParent == nil {
break
}
}
// Get the child spans of the target span until the given levelDown and spanLimit
preParents := []*model.SpanForTraceDetails{targetSpan}
children := []*model.SpanForTraceDetails{}
for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ {
parents := []*model.SpanForTraceDetails{}
for _, parent := range preParents {
if spanLimit-len(parent.Children) <= 0 {
children = append(children, parent.Children[:spanLimit]...)
spanLimit -= len(parent.Children[:spanLimit])
break
}
children = append(children, parent.Children...)
parents = append(parents, parent.Children...)
}
preParents = parents
}
// Store the final list of spans in the resultSpanSet map to avoid duplicates
resultSpansSet := make(map[*model.SpanForTraceDetails]struct{})
resultSpansSet[targetSpan] = struct{}{}
for _, parent := range parents {
resultSpansSet[parent] = struct{}{}
}
for _, child := range children {
resultSpansSet[child] = struct{}{}
}
searchSpansResult := []basemodel.SearchSpansResult{{
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
Events: make([][]interface{}, len(resultSpansSet)),
},
}
// Convert the resultSpansSet map to searchSpansResult
i := 0 // index for spans
for item := range resultSpansSet {
references := []basemodel.OtelSpanRef{
{
TraceId: item.TraceID,
SpanId: item.ParentID,
RefType: "CHILD_OF",
},
}
referencesStringArray := []string{}
for _, item := range references {
referencesStringArray = append(referencesStringArray, item.ToString())
}
keys := make([]string, 0, len(item.TagMap))
values := make([]string, 0, len(item.TagMap))
for k, v := range item.TagMap {
keys = append(keys, k)
values = append(values, v)
}
if item.Events == nil {
item.Events = []string{}
}
searchSpansResult[0].Events[i] = []interface{}{
item.TimeUnixNano,
item.SpanID,
item.TraceID,
item.ServiceName,
item.Name,
strconv.Itoa(int(item.Kind)),
strconv.FormatInt(item.DurationNano, 10),
keys,
values,
referencesStringArray,
item.Events,
item.HasError,
}
i++ // increment index
}
return searchSpansResult, nil
}
// buildSpanTrees builds trees of spans from a list of spans.
func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) {
// Build a map of spanID to span for fast lookup
var roots []*model.SpanForTraceDetails
spans := *spansPtr
mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans))
for _, span := range spans {
if span.ParentID == "" {
roots = append(roots, span)
}
mapOfSpans[span.SpanID] = span
}
// Build the span tree by adding children to the parent spans
for _, span := range spans {
if span.ParentID == "" {
continue
}
parent := mapOfSpans[span.ParentID]
// If the parent span is not found, add current span to list of roots
if parent == nil {
// zap.S().Debug("Parent Span not found parent_id: ", span.ParentID)
roots = append(roots, span)
span.ParentID = ""
continue
}
span.ParentSpan = parent
parent.Children = append(parent.Children, span)
}
return roots, nil
}
// breadthFirstSearch performs a breadth-first search on the span tree to find the target span.
func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) {
queue := []*model.SpanForTraceDetails{spansPtr}
visited := make(map[string]bool)
for len(queue) > 0 {
current := queue[0]
visited[current.SpanID] = true
queue = queue[1:]
if current.SpanID == targetId {
return current, nil
}
for _, child := range current.Children {
if !visited[child.SpanID] {
queue = append(queue, child)
}
}
}
return nil, nil
}
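buildSpanTrees and breadthFirstSearch are compact enough to demonstrate end to end. The sketch below mirrors their logic on a toy span type (a stand-in for model.SpanForTraceDetails, not the real struct): index spans by ID, attach children, promote orphaned spans to roots, then BFS for a target:

package main

import "fmt"

type span struct {
	SpanID   string
	ParentID string
	Children []*span
}

func main() {
	spans := []*span{
		{SpanID: "a"}, // root
		{SpanID: "b", ParentID: "a"},
		{SpanID: "c", ParentID: "a"},
		{SpanID: "d", ParentID: "b"},
	}

	// Index spans by ID and collect the explicit roots.
	byID := make(map[string]*span, len(spans))
	var roots []*span
	for _, s := range spans {
		byID[s.SpanID] = s
		if s.ParentID == "" {
			roots = append(roots, s)
		}
	}

	// Attach children; a span whose parent is missing becomes a root,
	// just as buildSpanTrees does.
	for _, s := range spans {
		if s.ParentID == "" {
			continue
		}
		parent, ok := byID[s.ParentID]
		if !ok {
			roots = append(roots, s)
			s.ParentID = ""
			continue
		}
		parent.Children = append(parent.Children, s)
	}

	// Breadth-first search for span "d".
	queue := []*span{roots[0]}
	for len(queue) > 0 {
		cur := queue[0]
		queue = queue[1:]
		if cur.SpanID == "d" {
			fmt.Println("found", cur.SpanID, "under", cur.ParentID)
			return
		}
		queue = append(queue, cur.Children...)
	}
}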

View File

@@ -98,7 +98,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
-qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath)
+qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath, lm)
go qb.Start(readerReady)
reader = qb
} else {

View File

@@ -10,6 +10,8 @@ const (
var LicenseSignozIo = "https://license.signoz.io/api/v1"
var SpanLimitStr = GetOrDefaultEnv("SPAN_LIMIT", "5000")
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {

View File

@@ -17,11 +17,15 @@ var BasicPlan = basemodel.FeatureSet{
}
var ProPlan = basemodel.FeatureSet{
Pro: true,
SSO: true,
basemodel.SmartTraceDetail: true,
basemodel.CustomMetricsFunction: true,
}
var EnterprisePlan = basemodel.FeatureSet{
Enterprise: true,
SSO: true,
basemodel.SmartTraceDetail: true,
basemodel.CustomMetricsFunction: true,
}
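Both EE handlers in this release gate on these plan features and fall back to the base implementation when the feature is off (see searchTraces and queryRangeMetricsV2 earlier in this diff). A toy sketch of that gate, with the feature set reduced to a plain map rather than the basemodel type:

package main

import "fmt"

type featureSet map[string]bool

// handle dispatches to the EE implementation only when the feature is
// enabled, mirroring the CheckFeature fallback pattern above.
func handle(features featureSet, feature string, ee, base func()) {
	if !features[feature] {
		fmt.Printf("%s is not enabled in this plan, falling back\n", feature)
		base()
		return
	}
	ee()
}

func main() {
	pro := featureSet{"SMART_TRACE_DETAIL": true}
	basic := featureSet{}

	eeImpl := func() { fmt.Println("smart trace detail") }
	baseImpl := func() { fmt.Println("plain trace detail") }

	handle(pro, "SMART_TRACE_DETAIL", eeImpl, baseImpl)
	handle(basic, "SMART_TRACE_DETAIL", eeImpl, baseImpl)
}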

View File

@@ -0,0 +1,22 @@
package model
type SpanForTraceDetails struct {
TimeUnixNano uint64 `json:"timestamp"`
SpanID string `json:"spanID"`
TraceID string `json:"traceID"`
ParentID string `json:"parentID"`
ParentSpan *SpanForTraceDetails `json:"parentSpan"`
ServiceName string `json:"serviceName"`
Name string `json:"name"`
Kind int32 `json:"kind"`
DurationNano int64 `json:"durationNano"`
TagMap map[string]string `json:"tagMap"`
Events []string `json:"event"`
HasError bool `json:"hasError"`
Children []*SpanForTraceDetails `json:"children"`
}
type GetSpansSubQueryDBResponse struct {
SpanID string `ch:"spanID"`
TraceID string `ch:"traceID"`
}

View File

@@ -1,24 +1,20 @@
/* eslint-disable */
// @ts-ignore
// @ts-nocheck
const crypto = require('crypto');
const fs = require('fs');
const glob = require('glob');
function generateChecksum(str, algorithm, encoding) {
return crypto
.createHash(algorithm || 'md5')
.update(str, 'utf8')
.digest(encoding || 'hex');
}
const result = {};
-glob.sync(`public/locales/**/*.json`).forEach(path => {
+glob.sync(`public/locales/**/*.json`).forEach((path) => {
const [_, lang] = path.split('public/locales');
const content = fs.readFileSync(path, { encoding: 'utf-8' });
result[lang.replace('.json', '')] = generateChecksum(content);
});
fs.writeFileSync('./i18n-translations-hash.json', JSON.stringify(result));

View File

@@ -28,6 +28,7 @@
"condition_required": "at least one metric condition is required",
"alertname_required": "alert name is required",
"promql_required": "promql expression is required when query format is set to PromQL",
"chquery_required": "query is required when query format is set to ClickHouse",
"button_savechanges": "Save Rule",
"button_createrule": "Create Rule",
"button_returntorules": "Return to rules",
@@ -55,6 +56,7 @@
"button_formula": "Formula",
"tab_qb": "Query Builder",
"tab_promql": "PromQL",
"tab_chquery": "ClickHouse Query",
"title_confirm": "Confirm",
"button_ok": "Yes",
"button_cancel": "No",
@@ -88,5 +90,23 @@
"user_guide_pql_step3": "Step 3 -Alert Configuration",
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
-"user_tooltip_more_help": "More details on how to create alerts"
"user_guide_ch_step1": "Step 1 - Define the metric",
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
"user_guide_ch_step2b": "Enter the Alert threshold",
"user_guide_ch_step3": "Step 3 -Alert Configuration",
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert:",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
"traces_based_alert": "Trace-based Alert",
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
"exceptions_based_alert": "Exceptions-based Alert",
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data."
}

View File

@@ -9,5 +9,5 @@
"tab_license_history": "History",
"loading_licenses": "Loading licenses...",
"enter_license_key": "Please enter a license key",
-"license_applied": "License applied successfully, please refresh the page to see changes."
+"license_applied": "License applied successfully"
}

View File

@@ -28,6 +28,7 @@
"condition_required": "at least one metric condition is required",
"alertname_required": "alert name is required",
"promql_required": "promql expression is required when query format is set to PromQL",
"chquery_required": "query is required when query format is set to ClickHouse",
"button_savechanges": "Save Rule",
"button_createrule": "Create Rule",
"button_returntorules": "Return to rules",
@@ -55,6 +56,7 @@
"button_formula": "Formula",
"tab_qb": "Query Builder",
"tab_promql": "PromQL",
"tab_chquery": "ClickHouse Query",
"title_confirm": "Confirm",
"button_ok": "Yes",
"button_cancel": "No",
@@ -88,5 +90,23 @@
"user_guide_pql_step3": "Step 3 -Alert Configuration",
"user_guide_pql_step3a": "Set alert severity, name and descriptions",
"user_guide_pql_step3b": "Add tags to the alert in the Label field if needed",
-"user_tooltip_more_help": "More details on how to create alerts"
"user_guide_ch_step1": "Step 1 - Define the metric",
"user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. Follow <0>this tutorial</0> to learn about query format and supported vars.",
"user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart",
"user_guide_ch_step2": "Step 2 - Define Alert Conditions",
"user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value",
"user_guide_ch_step2b": "Enter the Alert threshold",
"user_guide_ch_step3": "Step 3 -Alert Configuration",
"user_guide_ch_step3a": "Set alert severity, name and descriptions",
"user_guide_ch_step3b": "Add tags to the alert in the Label field if needed",
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert:",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
"traces_based_alert": "Trace-based Alert",
"traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.",
"exceptions_based_alert": "Exceptions-based Alert",
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data."
}

View File

@@ -9,5 +9,5 @@
"tab_license_history": "History",
"loading_licenses": "Loading licenses...",
"enter_license_key": "Please enter a license key",
-"license_applied": "License applied successfully, please refresh the page to see changes."
+"license_applied": "License applied successfully"
}

View File

@@ -1,15 +1,20 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { formUrlParams } from 'container/TraceDetail/utils';
import { ErrorResponse, SuccessResponse } from 'types/api';
-import { PayloadProps, Props } from 'types/api/trace/getTraceItem';
+import { GetTraceItemProps, PayloadProps } from 'types/api/trace/getTraceItem';
const getTraceItem = async (
-props: Props,
+props: GetTraceItemProps,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.request<PayloadProps>({
-url: `/traces/${props.id}`,
+url: `/traces/${props.id}${formUrlParams({
spanId: props.spanId,
levelUp: props.levelUp,
levelDown: props.levelDown,
})}`,
method: 'get',
});

View File

@@ -4,9 +4,6 @@ import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
-interface ITimeUnit {
-[key: string]: TimeUnit;
-}
interface IAxisTimeUintConfig {
unitName: TimeUnit;
multiplier: number;
@@ -22,7 +19,7 @@ export interface ITimeRange {
maxTime: number | null;
}
-export const TIME_UNITS: ITimeUnit = {
+export const TIME_UNITS: Record<TimeUnit, TimeUnit> = {
millisecond: 'millisecond',
second: 'second',
minute: 'minute',
@@ -31,6 +28,7 @@ export const TIME_UNITS: ITimeUnit = {
week: 'week',
month: 'month',
year: 'year',
quarter: 'quarter',
};
const TIME_UNITS_CONFIG: IAxisTimeUintConfig[] = [
@@ -93,6 +91,7 @@ export const convertTimeRange = (
} catch (error) {
console.error(error);
}
return {
unitName: relevantTimeUnit.unitName,
stepSize: Math.floor(stepSize) || 1,

View File

@@ -2,5 +2,6 @@ import { Typography } from 'antd';
import styled from 'styled-components';
export const Value = styled(Typography)`
-font-size: 3rem;
+font-size: 2.5vw;
text-align: center;
`;

View File

@@ -1,12 +1,25 @@
import { SearchOutlined } from '@ant-design/icons';
-import { notification, Table, TableProps, Tooltip, Typography } from 'antd';
import {
Button,
Card,
Input,
notification,
Space,
Table,
TableProps,
Tooltip,
Typography,
} from 'antd';
import { ColumnType } from 'antd/es/table';
import { ColumnsType } from 'antd/lib/table';
import { FilterConfirmProps } from 'antd/lib/table/interface';
import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
-import React, { useEffect, useMemo } from 'react';
+import React, { useCallback, useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
@@ -93,11 +106,87 @@ function AllErrors(): JSX.Element {
<Typography>{dayjs(value).format('DD/MM/YYYY HH:mm:ss A')}</Typography>
);
const filterIcon = useCallback(() => <SearchOutlined />, []);
const handleSearch = (
confirm: (param?: FilterConfirmProps) => void,
): VoidFunction => (): void => {
confirm();
};
const filterDropdownWrapper = useCallback(
({ setSelectedKeys, selectedKeys, confirm, placeholder }) => {
return (
<Card size="small">
<Space align="start" direction="vertical">
<Input
placeholder={placeholder}
value={selectedKeys[0]}
onChange={(e): void =>
setSelectedKeys(e.target.value ? [e.target.value] : [])
}
allowClear
onPressEnter={handleSearch(confirm)}
/>
<Button
type="primary"
onClick={handleSearch(confirm)}
icon={<SearchOutlined />}
size="small"
>
Search
</Button>
</Space>
</Card>
);
},
[],
);
const onExceptionTypeFilter = useCallback(
(value, record: Exception): boolean => {
if (record.exceptionType && typeof value === 'string') {
return record.exceptionType.toLowerCase().includes(value.toLowerCase());
}
return false;
},
[],
);
const onApplicationTypeFilter = useCallback(
(value, record: Exception): boolean => {
if (record.serviceName && typeof value === 'string') {
return record.serviceName.toLowerCase().includes(value.toLowerCase());
}
return false;
},
[],
);
const getFilter = useCallback(
(
onFilter: ColumnType<Exception>['onFilter'],
placeholder: string,
): ColumnType<Exception> => ({
onFilter,
filterIcon,
filterDropdown: ({ confirm, selectedKeys, setSelectedKeys }): JSX.Element =>
filterDropdownWrapper({
setSelectedKeys,
selectedKeys,
confirm,
placeholder,
}),
}),
[filterIcon, filterDropdownWrapper],
);
const columns: ColumnsType<Exception> = [
{
title: 'Exception Type',
dataIndex: 'exceptionType',
key: 'exceptionType',
...getFilter(onExceptionTypeFilter, 'Search By Exception'),
render: (value, record): JSX.Element => (
<Tooltip overlay={(): JSX.Element => value}>
<Link
@@ -177,6 +266,7 @@ function AllErrors(): JSX.Element {
updatedOrder,
'serviceName',
),
...getFilter(onApplicationTypeFilter, 'Search By Application'),
},
];

View File

@@ -77,7 +77,7 @@ export const getDefaultOrder = (
export const getNanoSeconds = (date: string): string => {
return (
Math.floor(new Date(date).getTime() / 1e3).toString() +
-Timestamp.fromString(date).getNano().toString()
+String(Timestamp.fromString(date).getNano().toString()).padStart(9, '0')
);
};
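The padStart(9, '0') above matters because the nanosecond remainder of a timestamp can carry leading zeros; concatenating it unpadded silently drops them and yields a different, shorter number. The same pitfall sketched in Go, with made-up values:

package main

import (
	"fmt"
	"time"
)

func main() {
	ts := time.Unix(1669711849, 42) // 42ns past the second

	// Unpadded concatenation corrupts the value: "166971184942".
	bad := fmt.Sprintf("%d%d", ts.Unix(), ts.Nanosecond())

	// Zero-padding the sub-second part to 9 digits keeps it exact.
	good := fmt.Sprintf("%d%09d", ts.Unix(), ts.Nanosecond())

	fmt.Println(bad, "vs", good)
}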

View File

@@ -0,0 +1,68 @@
import { Row } from 'antd';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertTypeCard, SelectTypeContainer } from './styles';
interface OptionType {
title: string;
selection: AlertTypes;
description: string;
}
function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
const { t } = useTranslation(['alerts']);
const renderOptions = (): JSX.Element => {
const optionList: OptionType[] = [
{
title: t('metric_based_alert'),
selection: AlertTypes.METRICS_BASED_ALERT,
description: t('metric_based_alert_desc'),
},
{
title: t('log_based_alert'),
selection: AlertTypes.LOGS_BASED_ALERT,
description: t('log_based_alert_desc'),
},
{
title: t('traces_based_alert'),
selection: AlertTypes.TRACES_BASED_ALERT,
description: t('traces_based_alert_desc'),
},
{
title: t('exceptions_based_alert'),
selection: AlertTypes.EXCEPTIONS_BASED_ALERT,
description: t('exceptions_based_alert_desc'),
},
];
return (
<>
{optionList.map((o: OptionType) => (
<AlertTypeCard
key={o.selection}
title={o.title}
onClick={(): void => {
onSelect(o.selection);
}}
>
{o.description}
</AlertTypeCard>
))}
</>
);
};
return (
<SelectTypeContainer>
<h3> {t('choose_alert_type')} </h3>
<Row>{renderOptions()}</Row>
</SelectTypeContainer>
);
}
interface SelectAlertTypeProps {
onSelect: (typ: AlertTypes) => void;
}
export default SelectAlertType;

View File

@@ -0,0 +1,16 @@
import { Card } from 'antd';
import styled from 'styled-components';
export const SelectTypeContainer = styled.div`
&&& {
padding: 1rem;
}
`;
export const AlertTypeCard = styled(Card)`
&&& {
margin: 5px;
width: 21rem;
cursor: pointer;
}
`;

View File

@@ -0,0 +1,186 @@
import { AlertTypes } from 'types/api/alerts/alertTypes';
import {
AlertDef,
defaultCompareOp,
defaultEvalWindow,
defaultMatchType,
} from 'types/api/alerts/def';
export const alertDefaults: AlertDef = {
alertType: AlertTypes.METRICS_BASED_ALERT,
condition: {
compositeMetricQuery: {
builderQueries: {
A: {
queryName: 'A',
name: 'A',
formulaOnly: false,
metricName: '',
tagFilters: {
op: 'AND',
items: [],
},
groupBy: [],
aggregateOperator: 1,
expression: 'A',
disabled: false,
toggleDisable: false,
toggleDelete: false,
},
},
promQueries: {},
chQueries: {},
queryType: 1,
},
op: defaultCompareOp,
matchType: defaultMatchType,
},
labels: {
severity: 'warning',
},
annotations: {
description: 'A new alert',
},
evalWindow: defaultEvalWindow,
};
export const logAlertDefaults: AlertDef = {
alertType: AlertTypes.LOGS_BASED_ALERT,
condition: {
compositeMetricQuery: {
builderQueries: {
A: {
queryName: 'A',
name: 'A',
formulaOnly: false,
metricName: '',
tagFilters: {
op: 'AND',
items: [],
},
groupBy: [],
aggregateOperator: 1,
expression: 'A',
disabled: false,
toggleDisable: false,
toggleDelete: false,
},
},
promQueries: {},
chQueries: {
A: {
name: 'A',
query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
},
queryType: 2,
},
op: defaultCompareOp,
matchType: '4',
},
labels: {
severity: 'warning',
details: `${window.location.protocol}//${window.location.host}/logs`,
},
annotations: {
description: 'A new log-based alert',
},
evalWindow: defaultEvalWindow,
};
export const traceAlertDefaults: AlertDef = {
alertType: AlertTypes.TRACES_BASED_ALERT,
condition: {
compositeMetricQuery: {
builderQueries: {
A: {
queryName: 'A',
name: 'A',
formulaOnly: false,
metricName: '',
tagFilters: {
op: 'AND',
items: [],
},
groupBy: [],
aggregateOperator: 1,
expression: 'A',
disabled: false,
toggleDisable: false,
toggleDelete: false,
},
},
promQueries: {},
chQueries: {
A: {
name: 'A',
rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
},
queryType: 2,
},
op: defaultCompareOp,
matchType: '4',
},
labels: {
severity: 'warning',
details: `${window.location.protocol}//${window.location.host}/traces`,
},
annotations: {
description: 'A new trace-based alert',
},
evalWindow: defaultEvalWindow,
};
export const exceptionAlertDefaults: AlertDef = {
alertType: AlertTypes.EXCEPTIONS_BASED_ALERT,
condition: {
compositeMetricQuery: {
builderQueries: {
A: {
queryName: 'A',
name: 'A',
formulaOnly: false,
metricName: '',
tagFilters: {
op: 'AND',
items: [],
},
groupBy: [],
aggregateOperator: 1,
expression: 'A',
disabled: false,
toggleDisable: false,
toggleDelete: false,
},
},
promQueries: {},
chQueries: {
A: {
name: 'A',
rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
legend: '',
disabled: false,
},
},
queryType: 2,
},
op: defaultCompareOp,
matchType: '4',
},
labels: {
severity: 'warning',
details: `${window.location.protocol}//${window.location.host}/exceptions`,
},
annotations: {
description: 'A new exceptions-based alert',
},
evalWindow: defaultEvalWindow,
};
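The {{.start_timestamp_nano}} and {{.start_datetime}} placeholders embedded in these default queries are expanded server-side with Go's text/template before the query runs (see the ClickHouse branch of queryRangeMetricsV2 earlier in this diff). A minimal sketch of that expansion, with a hypothetical variable map standing in for what AssignReservedVars populates:

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	raw := "SELECT count() FROM signoz_logs.logs " +
		"WHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}}"

	// Placeholder values; the real ones are derived from the alert window.
	vars := map[string]interface{}{
		"start_timestamp_nano": int64(1669700000000000000),
		"end_timestamp_nano":   int64(1669703600000000000),
	}

	tmpl, err := template.New("clickhouse-query").Parse(raw)
	if err != nil {
		panic(err)
	}
	var out bytes.Buffer
	if err := tmpl.Execute(&out, vars); err != nil {
		panic(err)
	}
	fmt.Println(out.String())
}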

View File

@@ -1,22 +1,57 @@
-import { Form } from 'antd';
+import { Form, Row } from 'antd';
import FormAlertRules from 'container/FormAlertRules';
-import React from 'react';
+import React, { useState } from 'react';
-import { AlertDef } from 'types/api/alerts/def';
+import { AlertTypes } from 'types/api/alerts/alertTypes';
-function CreateRules({ initialValue }: CreateRulesProps): JSX.Element {
+import {
alertDefaults,
exceptionAlertDefaults,
logAlertDefaults,
traceAlertDefaults,
} from './defaults';
import SelectAlertType from './SelectAlertType';
function CreateRules(): JSX.Element {
const [initValues, setInitValues] = useState(alertDefaults);
const [step, setStep] = useState(0);
const [alertType, setAlertType] = useState<AlertTypes>(
AlertTypes.METRICS_BASED_ALERT,
);
const [formInstance] = Form.useForm();
const onSelectType = (typ: AlertTypes): void => {
setAlertType(typ);
switch (typ) {
case AlertTypes.LOGS_BASED_ALERT:
setInitValues(logAlertDefaults);
break;
case AlertTypes.TRACES_BASED_ALERT:
setInitValues(traceAlertDefaults);
break;
case AlertTypes.EXCEPTIONS_BASED_ALERT:
setInitValues(exceptionAlertDefaults);
break;
default:
setInitValues(alertDefaults);
}
setStep(1);
};
if (step === 0) {
return (
<Row wrap={false}>
<SelectAlertType onSelect={onSelectType} />
</Row>
);
}
return ( return (
<FormAlertRules <FormAlertRules
alertType={alertType}
formInstance={formInstance} formInstance={formInstance}
initialValue={initialValue} initialValue={initValues}
ruleId={0} ruleId={0}
/> />
); );
} }
interface CreateRulesProps {
initialValue: AlertDef;
}
export default CreateRules; export default CreateRules;

View File

@ -1,6 +1,7 @@
import { Form } from 'antd'; import { Form } from 'antd';
import FormAlertRules from 'container/FormAlertRules'; import FormAlertRules from 'container/FormAlertRules';
import React from 'react'; import React from 'react';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertDef } from 'types/api/alerts/def'; import { AlertDef } from 'types/api/alerts/def';
function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element { function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element {
@ -8,6 +9,11 @@ function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element {
return ( return (
<FormAlertRules <FormAlertRules
alertType={
initialValue.alertType
? (initialValue.alertType as AlertTypes)
: AlertTypes.METRICS_BASED_ALERT
}
formInstance={formInstance} formInstance={formInstance}
initialValue={initialValue} initialValue={initialValue}
ruleId={ruleId} ruleId={ruleId}

View File

@ -0,0 +1,46 @@
import ClickHouseQueryBuilder from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query';
import { IClickHouseQueryHandleChange } from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/types';
import React from 'react';
import { IChQueries } from 'types/api/alerts/compositeQuery';
import { rawQueryToIChQuery, toIClickHouseQuery } from './transform';
function ChQuerySection({
chQueries,
setChQueries,
}: ChQuerySectionProps): JSX.Element {
const handleChQueryChange = ({
rawQuery,
legend,
toggleDelete,
}: IClickHouseQueryHandleChange): void => {
const chQuery = rawQueryToIChQuery(
chQueries.A,
rawQuery,
legend,
toggleDelete,
);
setChQueries({
A: {
...chQuery,
},
});
};
return (
<ClickHouseQueryBuilder
key="A"
queryIndex="A"
queryData={toIClickHouseQuery(chQueries?.A)}
handleQueryChange={handleChQueryChange}
/>
);
}
interface ChQuerySectionProps {
chQueries: IChQueries;
setChQueries: (q: IChQueries) => void;
}
export default ChQuerySection;

View File

@ -0,0 +1,3 @@
import ChQuerySection from './ChQuerySection';
export default ChQuerySection;

View File

@ -0,0 +1,37 @@
import { IChQuery } from 'types/api/alerts/compositeQuery';
import { IClickHouseQuery } from 'types/api/dashboard/getAll';
// @description rawQueryToIChQuery transforms raw query (from ClickHouseQueryBuilder)
// to alert specific IChQuery format
export const rawQueryToIChQuery = (
src: IChQuery,
rawQuery: string | undefined,
legend: string | undefined,
toggleDelete: boolean | undefined,
): IChQuery => {
if (toggleDelete) {
return {
rawQuery: '',
legend: '',
name: 'A',
disabled: false,
query: '',
};
}
return {
rawQuery: rawQuery !== undefined ? rawQuery : src.rawQuery,
query: rawQuery !== undefined ? rawQuery : src.rawQuery,
legend: legend !== undefined ? legend : src.legend,
name: 'A',
disabled: false,
};
};
// @description toIClickHouseQuery transforms IChQuery (alert specific) to
// ClickHouseQueryBuilder format. The main difference is
// use of rawQuery (in ClickHouseQueryBuilder)
// and query (in alert builder)
export const toIClickHouseQuery = (src: IChQuery): IClickHouseQuery => {
return { ...src, name: 'A', rawQuery: src.query };
};
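A short usage sketch of the two transforms above, showing the round trip between the alert-side IChQuery (which carries query) and the builder-side IClickHouseQuery (which carries rawQuery); the sample values are illustrative.

// Round trip: builder edit -> alert format -> back to builder format.
const edited = rawQueryToIChQuery(
	{ name: 'A', rawQuery: '', query: '', legend: '', disabled: false },
	'SELECT 1 AS value', // rawQuery coming from ClickHouseQueryBuilder
	'sample legend',
	false, // toggleDelete
);
// edited.rawQuery === edited.query === 'SELECT 1 AS value'

const builderView = toIClickHouseQuery(edited);
// builderView.rawQuery mirrors edited.query, so the builder shows the
// same text the alert definition will persist.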

View File

@ -1,11 +1,12 @@
import { InfoCircleOutlined } from '@ant-design/icons'; import { InfoCircleOutlined } from '@ant-design/icons';
import { StaticLineProps } from 'components/Graph'; import { StaticLineProps } from 'components/Graph';
import Spinner from 'components/Spinner';
import GridGraphComponent from 'container/GridGraphComponent'; import GridGraphComponent from 'container/GridGraphComponent';
import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { Time } from 'container/TopNav/DateTimeSelection/config'; import { Time } from 'container/TopNav/DateTimeSelection/config';
import getChartData from 'lib/getChartData'; import getChartData from 'lib/getChartData';
import React from 'react'; import React, { useMemo } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query'; import { useQuery } from 'react-query';
import { GetMetricQueryRange } from 'store/actions/dashboard/getQueryResults'; import { GetMetricQueryRange } from 'store/actions/dashboard/getQueryResults';
@ -22,6 +23,10 @@ export interface ChartPreviewProps {
selectedInterval?: Time; selectedInterval?: Time;
headline?: JSX.Element; headline?: JSX.Element;
threshold?: number | undefined; threshold?: number | undefined;
userQueryKey?: string;
}
interface QueryResponseError {
message?: string;
} }
function ChartPreview({ function ChartPreview({
@ -32,6 +37,7 @@ function ChartPreview({
selectedInterval = '5min', selectedInterval = '5min',
headline, headline,
threshold, threshold,
userQueryKey,
}: ChartPreviewProps): JSX.Element | null { }: ChartPreviewProps): JSX.Element | null {
const { t } = useTranslation('alerts'); const { t } = useTranslation('alerts');
const staticLine: StaticLineProps | undefined = const staticLine: StaticLineProps | undefined =
@ -46,9 +52,34 @@ function ChartPreview({
} }
: undefined; : undefined;
const queryKey = JSON.stringify(query); const canQuery = useMemo((): boolean => {
if (!query || query == null) {
return false;
}
switch (query?.queryType) {
case EQueryType.PROM:
return query.promQL?.length > 0 && query.promQL[0].query !== '';
case EQueryType.CLICKHOUSE:
return (
query.clickHouse?.length > 0 && query.clickHouse[0].rawQuery?.length > 0
);
case EQueryType.QUERY_BUILDER:
return (
query.metricsBuilder?.queryBuilder?.length > 0 &&
query.metricsBuilder?.queryBuilder[0].metricName !== ''
);
default:
return false;
}
}, [query]);
const queryResponse = useQuery({ const queryResponse = useQuery({
queryKey: ['chartPreview', queryKey, selectedInterval], queryKey: [
'chartPreview',
userQueryKey || JSON.stringify(query),
selectedInterval,
],
queryFn: () => queryFn: () =>
GetMetricQueryRange({ GetMetricQueryRange({
query: query || { query: query || {
@ -64,14 +95,8 @@ function ChartPreview({
graphType, graphType,
selectedTime, selectedTime,
}), }),
enabled: retry: false,
query != null && enabled: canQuery,
((query.queryType === EQueryType.PROM &&
query.promQL?.length > 0 &&
query.promQL[0].query !== '') ||
(query.queryType === EQueryType.QUERY_BUILDER &&
query.metricsBuilder?.queryBuilder?.length > 0 &&
query.metricsBuilder?.queryBuilder[0].metricName !== '')),
}); });
const chartDataSet = queryResponse.isError const chartDataSet = queryResponse.isError
@ -89,15 +114,14 @@ function ChartPreview({
return ( return (
<ChartContainer> <ChartContainer>
{headline} {headline}
{(queryResponse?.data?.error || queryResponse?.isError) && ( {(queryResponse?.isError || queryResponse?.error) && (
<FailedMessageContainer color="red" title="Failed to refresh the chart"> <FailedMessageContainer color="red" title="Failed to refresh the chart">
<InfoCircleOutlined />{' '} <InfoCircleOutlined />{' '}
{queryResponse?.data?.error || {(queryResponse?.error as QueryResponseError).message ||
queryResponse?.error ||
t('preview_chart_unexpected_error')} t('preview_chart_unexpected_error')}
</FailedMessageContainer> </FailedMessageContainer>
)} )}
{queryResponse.isLoading && <Spinner size="large" tip="Loading..." />}
{chartDataSet && !queryResponse.isError && ( {chartDataSet && !queryResponse.isError && (
<GridGraphComponent <GridGraphComponent
title={name} title={name}
@ -118,6 +142,7 @@ ChartPreview.defaultProps = {
selectedInterval: '5min', selectedInterval: '5min',
headline: undefined, headline: undefined,
threshold: undefined, threshold: undefined,
userQueryKey: '',
}; };
export default ChartPreview; export default ChartPreview;
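The canQuery memo above feeds react-query's enabled flag, so no preview request fires until the active tab actually has a non-empty query. A minimal standalone sketch of the same gating pattern; fetchPreview and usePreview are hypothetical stand-ins for GetMetricQueryRange and the component logic.

import { useQuery } from 'react-query';

// Hypothetical fetcher; ChartPreview uses GetMetricQueryRange here.
declare function fetchPreview(q: string): Promise<unknown>;

function usePreview(rawQuery: string | undefined) {
	const canQuery = Boolean(rawQuery && rawQuery.length > 0);
	return useQuery({
		queryKey: ['chartPreview', rawQuery],
		queryFn: () => fetchPreview(rawQuery as string),
		retry: false,
		enabled: canQuery, // request fires only once a query exists
	});
}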

View File

@ -1,3 +1,4 @@
import { red } from '@ant-design/colors';
import { Card, Tooltip } from 'antd'; import { Card, Tooltip } from 'antd';
import styled from 'styled-components'; import styled from 'styled-components';
@ -10,7 +11,8 @@ export const NotFoundContainer = styled.div`
export const FailedMessageContainer = styled(Tooltip)` export const FailedMessageContainer = styled(Tooltip)`
position: absolute; position: absolute;
top: 10px; color: ${red};
top: 4rem;
left: 10px; left: 10px;
`; `;

View File

@ -1,5 +1,5 @@
import { PlusOutlined } from '@ant-design/icons'; import { PlusOutlined } from '@ant-design/icons';
import { notification, Tabs } from 'antd'; import { Button, notification, Tabs } from 'antd';
import MetricsBuilderFormula from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/formula'; import MetricsBuilderFormula from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/formula';
import MetricsBuilder from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/query'; import MetricsBuilder from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/query';
import { import {
@ -8,13 +8,16 @@ import {
} from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/types'; } from 'container/NewWidget/LeftContainer/QuerySection/QueryBuilder/queryBuilder/types';
import React, { useCallback } from 'react'; import React, { useCallback } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { import {
IChQueries,
IFormulaQueries, IFormulaQueries,
IMetricQueries, IMetricQueries,
IPromQueries, IPromQueries,
} from 'types/api/alerts/compositeQuery'; } from 'types/api/alerts/compositeQuery';
import { EAggregateOperator, EQueryType } from 'types/common/dashboard'; import { EAggregateOperator, EQueryType } from 'types/common/dashboard';
import ChQuerySection from './ChQuerySection';
import PromqlSection from './PromqlSection'; import PromqlSection from './PromqlSection';
import { FormContainer, QueryButton, StepHeading } from './styles'; import { FormContainer, QueryButton, StepHeading } from './styles';
import { toIMetricsBuilderQuery } from './utils'; import { toIMetricsBuilderQuery } from './utils';
@ -29,6 +32,10 @@ function QuerySection({
setFormulaQueries, setFormulaQueries,
promQueries, promQueries,
setPromQueries, setPromQueries,
chQueries,
setChQueries,
alertType,
runQuery,
}: QuerySectionProps): JSX.Element { }: QuerySectionProps): JSX.Element {
// init namespace for translations // init namespace for translations
const { t } = useTranslation('alerts'); const { t } = useTranslation('alerts');
@ -49,6 +56,20 @@ function QuerySection({
}); });
} }
if (
parseInt(s, 10) === EQueryType.CLICKHOUSE &&
(!chQueries || Object.keys(chQueries).length === 0)
) {
setChQueries({
A: {
rawQuery: '',
name: 'A',
query: '',
legend: '',
disabled: false,
},
});
}
setQueryCategory(parseInt(s, 10)); setQueryCategory(parseInt(s, 10));
}; };
@ -196,6 +217,10 @@ function QuerySection({
); );
}; };
const renderChQueryUI = (): JSX.Element => {
return <ChQuerySection chQueries={chQueries} setChQueries={setChQueries} />;
};
const renderFormulaButton = (): JSX.Element => { const renderFormulaButton = (): JSX.Element => {
return ( return (
<QueryButton onClick={addFormula} icon={<PlusOutlined />}> <QueryButton onClick={addFormula} icon={<PlusOutlined />}>
@ -258,23 +283,85 @@ function QuerySection({
</div> </div>
); );
}; };
return (
<> const handleRunQuery = (): void => {
<StepHeading> {t('alert_form_step1')}</StepHeading> runQuery();
<FormContainer> };
<div style={{ display: 'flex' }}>
const renderTabs = (typ: AlertTypes): JSX.Element | null => {
switch (typ) {
case AlertTypes.TRACES_BASED_ALERT:
case AlertTypes.LOGS_BASED_ALERT:
case AlertTypes.EXCEPTIONS_BASED_ALERT:
return (
<Tabs
type="card"
style={{ width: '100%' }}
defaultActiveKey={EQueryType.CLICKHOUSE.toString()}
activeKey={queryCategory.toString()}
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
{queryCategory === EQueryType.CLICKHOUSE && (
<Button type="primary" onClick={handleRunQuery}>
Run Query
</Button>
)}
</span>
}
>
<TabPane
tab={t('tab_qb')}
key={EQueryType.QUERY_BUILDER.toString()}
disabled
/>
<TabPane tab={t('tab_chquery')} key={EQueryType.CLICKHOUSE.toString()} />
</Tabs>
);
case AlertTypes.METRICS_BASED_ALERT:
default:
return (
<Tabs <Tabs
type="card" type="card"
style={{ width: '100%' }} style={{ width: '100%' }}
defaultActiveKey={EQueryType.QUERY_BUILDER.toString()} defaultActiveKey={EQueryType.QUERY_BUILDER.toString()}
activeKey={queryCategory.toString()} activeKey={queryCategory.toString()}
onChange={handleQueryCategoryChange} onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
{queryCategory === EQueryType.CLICKHOUSE && (
<Button type="primary" onClick={handleRunQuery}>
Run Query
</Button>
)}
</span>
}
> >
<TabPane tab={t('tab_qb')} key={EQueryType.QUERY_BUILDER.toString()} /> <TabPane tab={t('tab_qb')} key={EQueryType.QUERY_BUILDER.toString()} />
<TabPane tab={t('tab_chquery')} key={EQueryType.CLICKHOUSE.toString()} />
<TabPane tab={t('tab_promql')} key={EQueryType.PROM.toString()} /> <TabPane tab={t('tab_promql')} key={EQueryType.PROM.toString()} />
</Tabs> </Tabs>
</div> );
{queryCategory === EQueryType.PROM ? renderPromqlUI() : renderMetricUI()} }
};
const renderQuerySection = (c: EQueryType): JSX.Element | null => {
switch (c) {
case EQueryType.PROM:
return renderPromqlUI();
case EQueryType.CLICKHOUSE:
return renderChQueryUI();
case EQueryType.QUERY_BUILDER:
return renderMetricUI();
default:
return null;
}
};
return (
<>
<StepHeading> {t('alert_form_step1')}</StepHeading>
<FormContainer>
<div style={{ display: 'flex' }}>{renderTabs(alertType)}</div>
{renderQuerySection(queryCategory)}
</FormContainer> </FormContainer>
</> </>
); );
@ -289,6 +376,10 @@ interface QuerySectionProps {
setFormulaQueries: (b: IFormulaQueries) => void; setFormulaQueries: (b: IFormulaQueries) => void;
promQueries: IPromQueries; promQueries: IPromQueries;
setPromQueries: (p: IPromQueries) => void; setPromQueries: (p: IPromQueries) => void;
chQueries: IChQueries;
setChQueries: (q: IChQueries) => void;
alertType: AlertTypes;
runQuery: () => void;
} }
export default QuerySection; export default QuerySection;

View File

@ -1,7 +1,7 @@
import { Col, Row, Typography } from 'antd'; import { Col, Row, Typography } from 'antd';
import TextToolTip from 'components/TextToolTip'; import TextToolTip from 'components/TextToolTip';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { Trans, useTranslation } from 'react-i18next';
import { EQueryType } from 'types/common/dashboard'; import { EQueryType } from 'types/common/dashboard';
import { import {
@ -106,6 +106,63 @@ function UserGuide({ queryType }: UserGuideProps): JSX.Element {
); );
}; };
const renderStep1CH = (): JSX.Element => {
return (
<>
<StyledTopic>{t('user_guide_ch_step1')}</StyledTopic>
<StyledList>
<StyledListItem>
<Trans
i18nKey="user_guide_ch_step1a"
t={t}
components={[
// eslint-disable-next-line jsx-a11y/control-has-associated-label, jsx-a11y/anchor-has-content
<a
key={1}
target="_blank"
href=" https://signoz.io/docs/tutorial/writing-clickhouse-queries-in-dashboard/?utm_source=frontend&utm_medium=product&utm_id=alerts</>"
/>,
]}
/>
</StyledListItem>
<StyledListItem>{t('user_guide_ch_step1b')}</StyledListItem>
</StyledList>
</>
);
};
const renderStep2CH = (): JSX.Element => {
return (
<>
<StyledTopic>{t('user_guide_ch_step2')}</StyledTopic>
<StyledList>
<StyledListItem>{t('user_guide_ch_step2a')}</StyledListItem>
<StyledListItem>{t('user_guide_ch_step2b')}</StyledListItem>
</StyledList>
</>
);
};
const renderStep3CH = (): JSX.Element => {
return (
<>
<StyledTopic>{t('user_guide_ch_step3')}</StyledTopic>
<StyledList>
<StyledListItem>{t('user_guide_ch_step3a')}</StyledListItem>
<StyledListItem>{t('user_guide_ch_step3b')}</StyledListItem>
</StyledList>
</>
);
};
const renderGuideForCH = (): JSX.Element => {
return (
<>
{renderStep1CH()}
{renderStep2CH()}
{renderStep3CH()}
</>
);
};
return ( return (
<StyledMainContainer> <StyledMainContainer>
<Row> <Row>
@ -121,6 +178,7 @@ function UserGuide({ queryType }: UserGuideProps): JSX.Element {
</Row> </Row>
{queryType === EQueryType.QUERY_BUILDER && renderGuideForQB()} {queryType === EQueryType.QUERY_BUILDER && renderGuideForQB()}
{queryType === EQueryType.PROM && renderGuideForPQL()} {queryType === EQueryType.PROM && renderGuideForPQL()}
{queryType === EQueryType.CLICKHOUSE && renderGuideForCH()}
</StyledMainContainer> </StyledMainContainer>
); );
} }

View File

@ -9,7 +9,9 @@ import history from 'lib/history';
import React, { useCallback, useEffect, useState } from 'react'; import React, { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useQueryClient } from 'react-query'; import { useQueryClient } from 'react-query';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { import {
IChQueries,
IFormulaQueries, IFormulaQueries,
IMetricQueries, IMetricQueries,
IPromQueries, IPromQueries,
@ -45,6 +47,7 @@ import {
} from './utils'; } from './utils';
function FormAlertRules({ function FormAlertRules({
alertType,
formInstance, formInstance,
initialValue, initialValue,
ruleId, ruleId,
@ -57,6 +60,10 @@ function FormAlertRules({
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
// runQueryId helps to override query caching for the clickhouse query
// tab. A random string is assigned for each execution
const [runQueryId, setRunQueryId] = useState<string>();
// alertDef holds the form values to be posted // alertDef holds the form values to be posted
const [alertDef, setAlertDef] = useState<AlertDef>(initialValue); const [alertDef, setAlertDef] = useState<AlertDef>(initialValue);
@ -82,9 +89,31 @@ function FormAlertRules({
...initQuery?.promQueries, ...initQuery?.promQueries,
}); });
// staged query is used to display chart preview // local state to handle clickhouse queries
const [chQueries, setChQueries] = useState<IChQueries>({
...initQuery?.chQueries,
});
// staged query is used to display chart preview. the query gets
// auto refreshed when any of the params in query section change.
// though this is the source of chart data, the final query used
// by chart will be either debouncedStagedQuery or manualStagedQuery
// depending on the run option (auto-run or use of run query button)
const [stagedQuery, setStagedQuery] = useState<StagedQuery>(); const [stagedQuery, setStagedQuery] = useState<StagedQuery>();
const debouncedStagedQuery = useDebounce(stagedQuery, 1000);
// manualStagedQuery is staged manually when the user clicks the run
// query button. Useful for the clickhouse tab where a run query
// button is provided.
const [manualStagedQuery, setManualStagedQuery] = useState<StagedQuery>();
// delay to reduce load on the backend api from auto-run queries. clickhouse
// queries use a manual run button, so no debounce delay is needed there
const debounceDelay = queryCategory !== EQueryType.CLICKHOUSE ? 1000 : 0;
// debounce query to delay backend api call and chart update.
// used in query builder and promql tabs to enable auto-refresh
// of chart on user edit
const debouncedStagedQuery = useDebounce(stagedQuery, debounceDelay);
// this use effect initiates staged query and // this use effect initiates staged query and
// other queries based on server data. // other queries based on server data.
@ -101,14 +130,26 @@ function FormAlertRules({
const fq = toFormulaQueries(initQuery?.builderQueries); const fq = toFormulaQueries(initQuery?.builderQueries);
// prepare staged query // prepare staged query
const sq = prepareStagedQuery(typ, mq, fq, initQuery?.promQueries); const sq = prepareStagedQuery(
typ,
mq,
fq,
initQuery?.promQueries,
initQuery?.chQueries,
);
const pq = initQuery?.promQueries; const pq = initQuery?.promQueries;
const chq = initQuery?.chQueries;
setQueryCategory(typ); setQueryCategory(typ);
setMetricQueries(mq); setMetricQueries(mq);
setFormulaQueries(fq); setFormulaQueries(fq);
setPromQueries(pq); setPromQueries(pq);
setStagedQuery(sq); setStagedQuery(sq);
// also set manually staged query
setManualStagedQuery(sq);
setChQueries(chq);
setAlertDef(initialValue); setAlertDef(initialValue);
}, [initialValue]); }, [initialValue]);
@ -121,9 +162,15 @@ function FormAlertRules({
metricQueries, metricQueries,
formulaQueries, formulaQueries,
promQueries, promQueries,
chQueries,
); );
setStagedQuery(sq); setStagedQuery(sq);
}, [queryCategory, metricQueries, formulaQueries, promQueries]); }, [queryCategory, chQueries, metricQueries, formulaQueries, promQueries]);
const onRunQuery = (): void => {
setRunQueryId(Math.random().toString(36).substring(2, 15));
setManualStagedQuery(stagedQuery);
};
const onCancelHandler = useCallback(() => { const onCancelHandler = useCallback(() => {
history.replace(ROUTES.LIST_ALL_ALERT); history.replace(ROUTES.LIST_ALL_ALERT);
@ -169,6 +216,31 @@ function FormAlertRules({
return retval; return retval;
}, [t, promQueries, queryCategory]); }, [t, promQueries, queryCategory]);
const validateChQueryParams = useCallback((): boolean => {
let retval = true;
if (queryCategory !== EQueryType.CLICKHOUSE) return retval;
if (!chQueries || Object.keys(chQueries).length === 0) {
notification.error({
message: 'Error',
description: t('chquery_required'),
});
return false;
}
Object.keys(chQueries).forEach((key) => {
if (chQueries[key].rawQuery === '') {
notification.error({
message: 'Error',
description: t('chquery_required'),
});
retval = false;
}
});
return retval;
}, [t, chQueries, queryCategory]);
const validateQBParams = useCallback((): boolean => { const validateQBParams = useCallback((): boolean => {
let retval = true; let retval = true;
if (queryCategory !== EQueryType.QUERY_BUILDER) return true; if (queryCategory !== EQueryType.QUERY_BUILDER) return true;
@ -224,12 +296,17 @@ function FormAlertRules({
return false; return false;
} }
if (!validateChQueryParams()) {
return false;
}
return validateQBParams(); return validateQBParams();
}, [t, validateQBParams, alertDef, validatePromParams]); }, [t, validateQBParams, validateChQueryParams, alertDef, validatePromParams]);
const preparePostData = (): AlertDef => { const preparePostData = (): AlertDef => {
const postableAlert: AlertDef = { const postableAlert: AlertDef = {
...alertDef, ...alertDef,
alertType,
source: window?.location.toString(), source: window?.location.toString(),
ruleType: ruleType:
queryCategory === EQueryType.PROM ? 'promql_rule' : 'threshold_rule', queryCategory === EQueryType.PROM ? 'promql_rule' : 'threshold_rule',
@ -238,6 +315,7 @@ function FormAlertRules({
compositeMetricQuery: { compositeMetricQuery: {
builderQueries: prepareBuilderQueries(metricQueries, formulaQueries), builderQueries: prepareBuilderQueries(metricQueries, formulaQueries),
promQueries, promQueries,
chQueries,
queryType: queryCategory, queryType: queryCategory,
}, },
}, },
@ -251,6 +329,8 @@ function FormAlertRules({
metricQueries, metricQueries,
formulaQueries, formulaQueries,
promQueries, promQueries,
chQueries,
alertType,
]); ]);
const saveRule = useCallback(async () => { const saveRule = useCallback(async () => {
@ -380,6 +460,18 @@ function FormAlertRules({
); );
}; };
const renderChQueryChartPreview = (): JSX.Element => {
return (
<ChartPreview
headline={<PlotTag queryType={queryCategory} />}
name="Chart Preview"
threshold={alertDef.condition?.target}
query={manualStagedQuery}
userQueryKey={runQueryId}
selectedInterval={toChartInterval(alertDef.evalWindow)}
/>
);
};
return ( return (
<> <>
{Element} {Element}
@ -392,6 +484,7 @@ function FormAlertRules({
> >
{queryCategory === EQueryType.QUERY_BUILDER && renderQBChartPreview()} {queryCategory === EQueryType.QUERY_BUILDER && renderQBChartPreview()}
{queryCategory === EQueryType.PROM && renderPromChartPreview()} {queryCategory === EQueryType.PROM && renderPromChartPreview()}
{queryCategory === EQueryType.CLICKHOUSE && renderChQueryChartPreview()}
<QuerySection <QuerySection
queryCategory={queryCategory} queryCategory={queryCategory}
setQueryCategory={onQueryCategoryChange} setQueryCategory={onQueryCategoryChange}
@ -401,6 +494,10 @@ function FormAlertRules({
setFormulaQueries={setFormulaQueries} setFormulaQueries={setFormulaQueries}
promQueries={promQueries} promQueries={promQueries}
setPromQueries={setPromQueries} setPromQueries={setPromQueries}
chQueries={chQueries}
setChQueries={setChQueries}
alertType={alertType || AlertTypes.METRICS_BASED_ALERT}
runQuery={onRunQuery}
/> />
<RuleOptions <RuleOptions
@ -446,7 +543,12 @@ function FormAlertRules({
); );
} }
FormAlertRules.defaultProps = {
alertType: AlertTypes.METRICS_BASED_ALERT,
};
interface FormAlertRuleProps { interface FormAlertRuleProps {
alertType?: AlertTypes;
formInstance: FormInstance; formInstance: FormInstance;
initialValue: AlertDef; initialValue: AlertDef;
ruleId: number; ruleId: number;
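FormAlertRules now drives two refresh paths: query-builder and PromQL tabs auto-run through a debounced staged query, while the ClickHouse tab runs only via the Run Query button and uses a random runQueryId to bypass react-query's cache. A condensed sketch of the pattern; useStagedQuery is hypothetical, and useDebounce is assumed to match the app's hook of the same name.

import { useState } from 'react';
import useDebounce from 'hooks/useDebounce'; // assumed app hook

function useStagedQuery(isClickHouse: boolean) {
	const [staged, setStaged] = useState<string>();
	const [manual, setManual] = useState<string>();
	const [runId, setRunId] = useState<string>();

	// auto-run tabs debounce edits; the clickhouse tab gets no delay
	// because it only runs on demand
	const debounced = useDebounce(staged, isClickHouse ? 0 : 1000);

	const onRunQuery = (): void => {
		// fresh random id busts the react-query cache for this run
		setRunId(Math.random().toString(36).substring(2, 15));
		setManual(staged);
	};

	return {
		query: isClickHouse ? manual : debounced,
		runId,
		setStaged,
		onRunQuery,
	};
}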

View File

@ -1,6 +1,8 @@
import { Time } from 'container/TopNav/DateTimeSelection/config'; import { Time } from 'container/TopNav/DateTimeSelection/config';
import { import {
IBuilderQueries, IBuilderQueries,
IChQueries,
IChQuery,
IFormulaQueries, IFormulaQueries,
IFormulaQuery, IFormulaQuery,
IMetricQueries, IMetricQueries,
@ -76,10 +78,12 @@ export const prepareStagedQuery = (
m: IMetricQueries, m: IMetricQueries,
f: IFormulaQueries, f: IFormulaQueries,
p: IPromQueries, p: IPromQueries,
c: IChQueries,
): IStagedQuery => { ): IStagedQuery => {
const qbList: IMetricQuery[] = []; const qbList: IMetricQuery[] = [];
const formulaList: IFormulaQuery[] = []; const formulaList: IFormulaQuery[] = [];
const promList: IPromQuery[] = []; const promList: IPromQuery[] = [];
const chQueryList: IChQuery[] = [];
// convert map[string]IMetricQuery to IMetricQuery[] // convert map[string]IMetricQuery to IMetricQuery[]
if (m) { if (m) {
@ -101,6 +105,12 @@ export const prepareStagedQuery = (
promList.push({ ...p[key], name: key }); promList.push({ ...p[key], name: key });
}); });
} }
// convert map[string]IChQuery to IChQuery[]
if (c) {
Object.keys(c).forEach((key) => {
chQueryList.push({ ...c[key], name: key, rawQuery: c[key].query });
});
}
return { return {
queryType: t, queryType: t,
@ -109,7 +119,7 @@ export const prepareStagedQuery = (
formulas: formulaList, formulas: formulaList,
queryBuilder: qbList, queryBuilder: qbList,
}, },
clickHouse: [], clickHouse: chQueryList,
}; };
}; };
@ -125,7 +135,7 @@ export const toChartInterval = (evalWindow: string | undefined): Time => {
case '30m0s': case '30m0s':
return '30min'; return '30min';
case '60m0s': case '60m0s':
return '30min'; return '1hr';
case '4h0m0s': case '4h0m0s':
return '4hr'; return '4hr';
case '24h0m0s': case '24h0m0s':

View File

@ -1,15 +1,30 @@
import { Button, Input, notification } from 'antd'; import { Button, Input, notification } from 'antd';
import FormItem from 'antd/lib/form/FormItem'; import FormItem from 'antd/lib/form/FormItem';
import getFeaturesFlags from 'api/features/getFeatureFlags';
import apply from 'api/licenses/apply'; import apply from 'api/licenses/apply';
import React, { useState } from 'react'; import React, { useState } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { QueryObserverResult, RefetchOptions, useQuery } from 'react-query';
import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux';
import { AppAction, UPDATE_FEATURE_FLAGS } from 'types/actions/app';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/licenses/getAll';
import { ApplyForm, ApplyFormContainer, LicenseInput } from './applyFormStyles'; import { ApplyForm, ApplyFormContainer, LicenseInput } from './styles';
function ApplyLicenseForm(): JSX.Element { function ApplyLicenseForm({
licenseRefetch,
}: ApplyLicenseFormProps): JSX.Element {
const { t } = useTranslation(['licenses']); const { t } = useTranslation(['licenses']);
const [key, setKey] = useState(''); const [key, setKey] = useState('');
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const dispatch = useDispatch<Dispatch<AppAction>>();
const { refetch } = useQuery({
queryFn: getFeaturesFlags,
queryKey: 'getFeatureFlags',
enabled: false,
});
const onFinish = async (values: unknown | { key: string }): Promise<void> => { const onFinish = async (values: unknown | { key: string }): Promise<void> => {
const params = values as { key: string }; const params = values as { key: string };
@ -28,6 +43,16 @@ function ApplyLicenseForm(): JSX.Element {
}); });
if (response.statusCode === 200) { if (response.statusCode === 200) {
const [featureFlagsResponse] = await Promise.all([
refetch(),
licenseRefetch(),
]);
if (featureFlagsResponse.data?.payload) {
dispatch({
type: UPDATE_FEATURE_FLAGS,
payload: featureFlagsResponse.data.payload,
});
}
notification.success({ notification.success({
message: 'Success', message: 'Success',
description: t('license_applied'), description: t('license_applied'),
@ -74,4 +99,12 @@ function ApplyLicenseForm(): JSX.Element {
); );
} }
interface ApplyLicenseFormProps {
licenseRefetch: (
options?: RefetchOptions,
) => Promise<
QueryObserverResult<SuccessResponse<PayloadProps> | ErrorResponse, unknown>
>;
}
export default ApplyLicenseForm; export default ApplyLicenseForm;

View File

@ -1,4 +1,3 @@
/* eslint-disable react/display-name */
import { Table } from 'antd'; import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table'; import { ColumnsType } from 'antd/lib/table';
import React from 'react'; import React from 'react';

View File

@ -1,9 +1,9 @@
import { Tabs, Typography } from 'antd'; import { Tabs, Typography } from 'antd';
import getAll from 'api/licenses/getAll'; import getAll from 'api/licenses/getAll';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
import useFetch from 'hooks/useFetch';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import ApplyLicenseForm from './ApplyLicenseForm'; import ApplyLicenseForm from './ApplyLicenseForm';
import ListLicenses from './ListLicenses'; import ListLicenses from './ListLicenses';
@ -12,29 +12,31 @@ const { TabPane } = Tabs;
function Licenses(): JSX.Element { function Licenses(): JSX.Element {
const { t } = useTranslation(['licenses']); const { t } = useTranslation(['licenses']);
const { loading, payload, error, errorMessage } = useFetch(getAll); const { data, isError, isLoading, refetch } = useQuery({
queryFn: getAll,
queryKey: 'getAllLicenses',
});
if (error) { if (isError || data?.error) {
return <Typography>{errorMessage}</Typography>; return <Typography>{data?.error}</Typography>;
} }
if (loading || payload === undefined) { if (isLoading || data?.payload === undefined) {
return <Spinner tip={t('loading_licenses')} height="90vh" />; return <Spinner tip={t('loading_licenses')} height="90vh" />;
} }
const allValidLicense =
data?.payload?.filter((license) => license.isCurrent) || [];
return ( return (
<Tabs destroyInactiveTabPane defaultActiveKey="licenses"> <Tabs destroyInactiveTabPane defaultActiveKey="licenses">
<TabPane tabKey="licenses" tab={t('tab_current_license')} key="licenses"> <TabPane tabKey="licenses" tab={t('tab_current_license')} key="licenses">
<ApplyLicenseForm /> <ApplyLicenseForm licenseRefetch={refetch} />
<ListLicenses <ListLicenses licenses={allValidLicense} />
licenses={payload ? payload.filter((l) => l.isCurrent === true) : []}
/>
</TabPane> </TabPane>
<TabPane tabKey="history" tab={t('tab_license_history')} key="history"> <TabPane tabKey="history" tab={t('tab_license_history')} key="history">
<ListLicenses <ListLicenses licenses={allValidLicense} />
licenses={payload ? payload.filter((l) => l.isCurrent === false) : []}
/>
</TabPane> </TabPane>
</Tabs> </Tabs>
); );

View File

@ -5,5 +5,4 @@ export const Container = styled.div`
align-items: center; align-items: center;
justify-content: flex-end; justify-content: flex-end;
gap: 0.5rem; gap: 0.5rem;
margin-bottom: 0.5rem;
`; `;

View File

@ -1,26 +0,0 @@
import React from 'react';
interface OptionIconProps {
isDarkMode: boolean;
}
function OptionIcon({ isDarkMode }: OptionIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
x="0px"
y="0px"
width="1rem"
height="1rem"
viewBox="0 0 52 52"
enableBackground="new 0 0 52 52"
fill={isDarkMode ? '#eee' : '#222'}
>
<path
d="M20,44c0-3.3,2.7-6,6-6s6,2.7,6,6s-2.7,6-6,6S20,47.3,20,44z M20,26c0-3.3,2.7-6,6-6s6,2.7,6,6s-2.7,6-6,6
S20,29.3,20,26z M20,8c0-3.3,2.7-6,6-6s6,2.7,6,6s-2.7,6-6,6S20,11.3,20,8z"
/>
</svg>
);
}
export default OptionIcon;

View File

@ -0,0 +1,26 @@
export const TIME_PICKER_OPTIONS = [
{
value: 5,
label: '5m',
},
{
value: 15,
label: '15m',
},
{
value: 30,
label: '30m',
},
{
value: 60,
label: '1hr',
},
{
value: 360,
label: '6hrs',
},
{
value: 720,
label: '12hrs',
},
];

View File

@ -1,13 +1,17 @@
/* eslint-disable react-hooks/exhaustive-deps */
import { green } from '@ant-design/colors'; import { green } from '@ant-design/colors';
import { PauseOutlined, PlayCircleOutlined } from '@ant-design/icons'; import {
import { Button, Popover, Row, Select } from 'antd'; MoreOutlined,
PauseOutlined,
PlayCircleOutlined,
} from '@ant-design/icons';
import { Button, Popover, Select, Space } from 'antd';
import { LiveTail } from 'api/logs/livetail'; import { LiveTail } from 'api/logs/livetail';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import { throttle } from 'lodash-es'; import { throttle } from 'lodash-es';
import React, { useCallback, useEffect, useMemo, useRef } from 'react'; import React, { useCallback, useEffect, useMemo, useRef } from 'react';
import { useDispatch, useSelector } from 'react-redux'; import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import { UPDATE_AUTO_REFRESH_DISABLED } from 'types/actions/globalTime';
import { import {
FLUSH_LOGS, FLUSH_LOGS,
PUSH_LIVE_TAIL_EVENT, PUSH_LIVE_TAIL_EVENT,
@ -16,40 +20,14 @@ import {
} from 'types/actions/logs'; } from 'types/actions/logs';
import { TLogsLiveTailState } from 'types/api/logs/liveTail'; import { TLogsLiveTailState } from 'types/api/logs/liveTail';
import AppReducer from 'types/reducer/app'; import AppReducer from 'types/reducer/app';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import OptionIcon from './OptionIcon'; import { TIME_PICKER_OPTIONS } from './config';
import { TimePickerCard, TimePickerSelect } from './styles'; import { StopContainer, TimePickerCard, TimePickerSelect } from './styles';
const { Option } = Select; const { Option } = Select;
const TIME_PICKER_OPTIONS = [
{
value: 5,
label: '5m',
},
{
value: 15,
label: '15m',
},
{
value: 30,
label: '30m',
},
{
value: 60,
label: '1hr',
},
{
value: 360,
label: '6hrs',
},
{
value: 720,
label: '12hrs',
},
];
function LogLiveTail(): JSX.Element { function LogLiveTail(): JSX.Element {
const { const {
liveTail, liveTail,
@ -58,12 +36,20 @@ function LogLiveTail(): JSX.Element {
logs, logs,
} = useSelector<AppState, ILogsReducer>((state) => state.logs); } = useSelector<AppState, ILogsReducer>((state) => state.logs);
const { isDarkMode } = useSelector<AppState, AppReducer>((state) => state.app); const { isDarkMode } = useSelector<AppState, AppReducer>((state) => state.app);
const { selectedAutoRefreshInterval } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const dispatch = useDispatch(); const dispatch = useDispatch();
const handleLiveTail = (toggleState: TLogsLiveTailState): void => { const handleLiveTail = (toggleState: TLogsLiveTailState): void => {
dispatch({ dispatch({
type: TOGGLE_LIVE_TAIL, type: TOGGLE_LIVE_TAIL,
payload: toggleState, payload: toggleState,
}); });
dispatch({
type: UPDATE_AUTO_REFRESH_DISABLED,
payload: toggleState === 'PLAYING',
});
}; };
const batchedEventsRef = useRef<Record<string, unknown>[]>([]); const batchedEventsRef = useRef<Record<string, unknown>[]>([]);
@ -75,14 +61,12 @@ function LogLiveTail(): JSX.Element {
type: PUSH_LIVE_TAIL_EVENT, type: PUSH_LIVE_TAIL_EVENT,
payload: batchedEventsRef.current.reverse(), payload: batchedEventsRef.current.reverse(),
}); });
// console.log('DISPATCH', batchedEventsRef.current.length);
batchedEventsRef.current = []; batchedEventsRef.current = [];
}, 1500), }, 1500),
[], [],
); );
const batchLiveLog = (e: { data: string }): void => { const batchLiveLog = (e: { data: string }): void => {
// console.log('EVENT BATCHED');
batchedEventsRef.current.push(JSON.parse(e.data as string) as never); batchedEventsRef.current.push(JSON.parse(e.data as string) as never);
pushLiveLog(); pushLiveLog();
}; };
@ -123,6 +107,7 @@ function LogLiveTail(): JSX.Element {
if (liveTail === 'STOPPED') { if (liveTail === 'STOPPED') {
liveTailSourceRef.current = null; liveTailSourceRef.current = null;
} }
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [liveTail]); }, [liveTail]);
const handleLiveTailStart = (): void => { const handleLiveTailStart = (): void => {
@ -155,47 +140,44 @@ function LogLiveTail(): JSX.Element {
), ),
[dispatch, liveTail, liveTailStartRange], [dispatch, liveTail, liveTailStartRange],
); );
const isDisabled = useMemo(() => selectedAutoRefreshInterval?.length > 0, [
selectedAutoRefreshInterval,
]);
return ( return (
<TimePickerCard> <TimePickerCard>
<Row <Space size={0} align="center">
style={{ gap: '0.5rem', alignItems: 'center', justifyContent: 'center' }} {liveTail === 'PLAYING' ? (
> <Button
<div> type="primary"
{liveTail === 'PLAYING' ? ( onClick={(): void => handleLiveTail('PAUSED')}
<Button title="Pause live tail"
type="primary" style={{ background: green[6] }}
onClick={(): void => handleLiveTail('PAUSED')} >
title="Pause live tail" <span>Pause</span>
style={{ background: green[6] }} <PauseOutlined />
> </Button>
Pause <PauseOutlined /> ) : (
</Button> <Button
) : ( type="primary"
<Button onClick={handleLiveTailStart}
type="primary" title="Start live tail"
onClick={handleLiveTailStart} disabled={isDisabled}
title="Start live tail" >
> Go Live <PlayCircleOutlined />
Go Live <PlayCircleOutlined /> </Button>
</Button> )}
)}
{liveTail !== 'STOPPED' && ( {liveTail !== 'STOPPED' && (
<Button <Button
type="dashed" type="dashed"
onClick={(): void => handleLiveTail('STOPPED')} onClick={(): void => handleLiveTail('STOPPED')}
title="Exit live tail" title="Exit live tail"
> >
<div <StopContainer isDarkMode={isDarkMode} />
style={{ </Button>
height: '0.8rem', )}
width: '0.8rem',
background: isDarkMode ? '#eee' : '#222',
borderRadius: '0.1rem',
}}
/>
</Button>
)}
</div>
<Popover <Popover
placement="bottomRight" placement="bottomRight"
@ -203,18 +185,9 @@ function LogLiveTail(): JSX.Element {
trigger="click" trigger="click"
content={OptionsPopOverContent} content={OptionsPopOverContent}
> >
<span <MoreOutlined style={{ fontSize: 24 }} />
style={{
padding: '0.3rem 0.4rem 0.3rem 0',
display: 'flex',
justifyContent: 'center',
alignContent: 'center',
}}
>
<OptionIcon isDarkMode={isDarkMode} />
</span>
</Popover> </Popover>
</Row> </Space>
</TimePickerCard> </TimePickerCard>
); );
} }
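The batchedEventsRef/pushLiveLog pair above buffers high-frequency live-tail events in a ref and flushes them with a throttled dispatch at most every 1.5s. The same pattern as a reusable sketch; useBatchedEvents is a hypothetical name, and the empty dependency list mirrors the component's throttle setup.

import { throttle } from 'lodash-es';
import { useCallback, useRef } from 'react';

function useBatchedEvents(onFlush: (events: unknown[]) => void) {
	const bufferRef = useRef<unknown[]>([]);

	// throttled flush: runs at most once per 1.5s, newest event first
	// eslint-disable-next-line react-hooks/exhaustive-deps
	const flush = useCallback(
		throttle(() => {
			onFlush(bufferRef.current.reverse());
			bufferRef.current = [];
		}, 1500),
		[],
	);

	return (event: unknown): void => {
		bufferRef.current.push(event);
		flush();
	};
}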

View File

@ -3,6 +3,7 @@ import styled from 'styled-components';
export const TimePickerCard = styled(Card)` export const TimePickerCard = styled(Card)`
.ant-card-body { .ant-card-body {
display: flex;
padding: 0; padding: 0;
} }
`; `;
@ -10,3 +11,15 @@ export const TimePickerCard = styled(Card)`
export const TimePickerSelect = styled(Select)` export const TimePickerSelect = styled(Select)`
min-width: 100px; min-width: 100px;
`; `;
interface Props {
isDarkMode: boolean;
}
export const StopContainer = styled.div<Props>`
height: 0.8rem;
width: 0.8rem;
border-radius: 0.1rem;
background-color: ${({ isDarkMode }): string =>
isDarkMode ? '#fff' : '#000'};
`;

View File

@ -1,66 +0,0 @@
import { Divider, Row } from 'antd';
import LogControls from 'container/LogControls';
import LogDetailedView from 'container/LogDetailedView';
import LogLiveTail from 'container/LogLiveTail';
import LogsAggregate from 'container/LogsAggregate';
import LogsFilters from 'container/LogsFilters';
import SearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import useUrlQuery from 'hooks/useUrlQuery';
import React, { memo, useEffect } from 'react';
import { connect, useDispatch } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { GetLogsFields } from 'store/actions/logs/getFields';
import AppActions from 'types/actions';
import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
function Logs({ getLogsFields }: LogsProps): JSX.Element {
const urlQuery = useUrlQuery();
const dispatch = useDispatch();
useEffect(() => {
dispatch({
type: SET_SEARCH_QUERY_STRING,
payload: urlQuery.get('q'),
});
}, [dispatch, urlQuery]);
useEffect(() => {
getLogsFields();
}, [getLogsFields]);
return (
<div style={{ position: 'relative' }}>
<Row style={{ justifyContent: 'center', alignItems: 'center' }}>
<SearchFilter />
<Divider type="vertical" style={{ height: '2rem' }} />
<LogLiveTail />
</Row>
<LogsAggregate />
<LogControls />
<Divider style={{ margin: 0 }} />
<Row gutter={20} style={{ flexWrap: 'nowrap' }}>
<LogsFilters />
<Divider type="vertical" style={{ height: '100%', margin: 0 }} />
<LogsTable />
</Row>
<LogDetailedView />
</div>
);
}
type LogsProps = DispatchProps;
interface DispatchProps {
getLogsFields: () => (dispatch: Dispatch<AppActions>) => void;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogsFields: bindActionCreators(GetLogsFields, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(Logs));

View File

@ -1,4 +1,3 @@
/* eslint-disable react-hooks/exhaustive-deps */
import { blue } from '@ant-design/colors'; import { blue } from '@ant-design/colors';
import Graph from 'components/Graph'; import Graph from 'components/Graph';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
@ -16,9 +15,6 @@ import { ILogsReducer } from 'types/reducer/logs';
import { Container } from './styles'; import { Container } from './styles';
interface LogsAggregateProps {
getLogsAggregate: (arg0: Parameters<typeof getLogsAggregate>[0]) => void;
}
function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element { function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
const { const {
searchFilter: { queryString }, searchFilter: { queryString },
@ -35,6 +31,7 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
); );
const reFetchIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null); const reFetchIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
useEffect(() => { useEffect(() => {
switch (liveTail) { switch (liveTail) {
case 'STOPPED': { case 'STOPPED': {
@ -42,18 +39,6 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
clearInterval(reFetchIntervalRef.current); clearInterval(reFetchIntervalRef.current);
} }
reFetchIntervalRef.current = null; reFetchIntervalRef.current = null;
getLogsAggregate({
timestampStart: minTime,
timestampEnd: maxTime,
step: getStep({
start: minTime,
end: maxTime,
inputFormat: 'ns',
}),
q: queryString,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
break; break;
} }
@ -89,18 +74,9 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
break; break;
} }
} }
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [getLogsAggregate, maxTime, minTime, liveTail]); }, [getLogsAggregate, maxTime, minTime, liveTail]);
const data = {
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
datasets: [
{
data: logsAggregate.map((s) => s.value),
backgroundColor: blue[4],
},
],
};
return ( return (
<Container> <Container>
{isLoadingAggregate ? ( {isLoadingAggregate ? (
@ -108,16 +84,28 @@ function LogsAggregate({ getLogsAggregate }: LogsAggregateProps): JSX.Element {
) : ( ) : (
<Graph <Graph
name="usage" name="usage"
data={data} data={{
labels: logsAggregate.map((s) => new Date(s.timestamp / 1000000)),
datasets: [
{
data: logsAggregate.map((s) => s.value),
backgroundColor: blue[4],
},
],
}}
type="bar" type="bar"
containerHeight="100%" containerHeight="100%"
animate={false} animate
/> />
)} )}
</Container> </Container>
); );
} }
interface LogsAggregateProps {
getLogsAggregate: (arg0: Parameters<typeof getLogsAggregate>[0]) => void;
}
interface DispatchProps { interface DispatchProps {
getLogsAggregate: ( getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0], props: Parameters<typeof getLogsAggregate>[0],

View File

@ -21,9 +21,6 @@ import { CategoryContainer, Container, FieldContainer } from './styles';
const RESTRICTED_SELECTED_FIELDS = ['timestamp', 'id']; const RESTRICTED_SELECTED_FIELDS = ['timestamp', 'id'];
interface LogsFiltersProps {
getLogsFields: () => void;
}
function LogsFilters({ getLogsFields }: LogsFiltersProps): JSX.Element { function LogsFilters({ getLogsFields }: LogsFiltersProps): JSX.Element {
const { const {
fields: { interesting, selected }, fields: { interesting, selected },
@ -150,4 +147,6 @@ const mapDispatchToProps = (
getLogsFields: bindActionCreators(GetLogsFields, dispatch), getLogsFields: bindActionCreators(GetLogsFields, dispatch),
}); });
type LogsFiltersProps = DispatchProps;
export default connect(null, mapDispatchToProps)(memo(LogsFilters)); export default connect(null, mapDispatchToProps)(memo(LogsFilters));

View File

@ -4,7 +4,7 @@
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
/* eslint-disable react/no-array-index-key */ /* eslint-disable react/no-array-index-key */
/* eslint-disable react-hooks/exhaustive-deps */ /* eslint-disable react-hooks/exhaustive-deps */
import { CloseOutlined } from '@ant-design/icons'; import { CloseOutlined, CloseSquareOutlined } from '@ant-design/icons';
import { Button, Input, Select } from 'antd'; import { Button, Input, Select } from 'antd';
import CategoryHeading from 'components/Logs/CategoryHeading'; import CategoryHeading from 'components/Logs/CategoryHeading';
import { import {
@ -19,12 +19,40 @@ import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import { v4 } from 'uuid'; import { v4 } from 'uuid';
import { SearchFieldsProps } from '..';
import FieldKey from '../FieldKey'; import FieldKey from '../FieldKey';
import { QueryConditionContainer, QueryFieldContainer } from '../styles'; import { QueryFieldContainer } from '../styles';
import { createParsedQueryStructure } from '../utils'; import { createParsedQueryStructure } from '../utils';
import { Container, QueryWrapper } from './styles';
import { hashCode, parseQuery } from './utils'; import { hashCode, parseQuery } from './utils';
const { Option } = Select; const { Option } = Select;
function QueryConditionField({
query,
queryIndex,
onUpdate,
}: QueryConditionFieldProps): JSX.Element {
const allOptions = Object.values(ConditionalOperators);
return (
<Select
defaultValue={
(query as any).value &&
(((query as any)?.value as any) as string).toUpperCase()
}
onChange={(e): void => {
onUpdate({ ...query, value: e }, queryIndex);
}}
>
{allOptions.map((cond) => (
<Option key={cond} value={cond} label={cond}>
{cond}
</Option>
))}
</Select>
);
}
interface QueryFieldProps { interface QueryFieldProps {
query: Query; query: Query;
queryIndex: number; queryIndex: number;
@ -141,40 +169,13 @@ interface QueryConditionFieldProps {
queryIndex: number; queryIndex: number;
onUpdate: (arg0: unknown, arg1: number) => void; onUpdate: (arg0: unknown, arg1: number) => void;
} }
function QueryConditionField({
query,
queryIndex,
onUpdate,
}: QueryConditionFieldProps): JSX.Element {
return (
<QueryConditionContainer>
<Select
defaultValue={
(query as any).value &&
(((query as any)?.value as any) as string).toUpperCase()
}
onChange={(e): void => {
onUpdate({ ...query, value: e }, queryIndex);
}}
style={{ width: '100%' }}
>
{Object.values(ConditionalOperators).map((cond) => (
<Option key={cond} value={cond} label={cond}>
{cond}
</Option>
))}
</Select>
</QueryConditionContainer>
);
}
export type Query = { value: string | string[]; type: string }[]; export type Query = { value: string | string[]; type: string }[];
function QueryBuilder({ function QueryBuilder({
updateParsedQuery, updateParsedQuery,
}: { onDropDownToggleHandler,
updateParsedQuery: (arg0: unknown) => void; }: SearchFieldsProps): JSX.Element {
}): JSX.Element {
const { const {
searchFilter: { parsedQuery }, searchFilter: { parsedQuery },
} = useSelector<AppState, ILogsReducer>((store) => store.logs); } = useSelector<AppState, ILogsReducer>((store) => store.logs);
@ -225,27 +226,25 @@ function QueryBuilder({
); );
return ( return (
<QueryConditionField <div key={keyPrefix + idx}>
key={keyPrefix + idx} <QueryConditionField
query={query} query={query}
queryIndex={idx} queryIndex={idx}
onUpdate={handleUpdate as never} onUpdate={handleUpdate as never}
/> />
</div>
); );
}); });
return ( return (
<div> <>
<CategoryHeading>LOG QUERY BUILDER</CategoryHeading> <Container isMargin={generatedQueryStructure.length === 0}>
<div <CategoryHeading>LOG QUERY BUILDER</CategoryHeading>
style={{ <CloseSquareOutlined onClick={onDropDownToggleHandler(false)} />
display: 'grid', </Container>
gridTemplateColumns: '80px 1fr',
margin: '0.5rem 0', <QueryWrapper>{QueryUI()}</QueryWrapper>
}} </>
>
{QueryUI()}
</div>
</div>
); );
} }

View File

@ -0,0 +1,17 @@
import styled from 'styled-components';
interface Props {
isMargin: boolean;
}
export const Container = styled.div<Props>`
display: flex;
justify-content: space-between;
width: 100%;
margin-bottom: ${(props): string => (props.isMargin ? '2rem' : '0')};
`;
export const QueryWrapper = styled.div`
display: grid;
grid-template-columns: 80px 1fr;
margin: 0.5rem 0px;
`;

View File

@ -1,6 +1,6 @@
import { Button } from 'antd'; import { Button } from 'antd';
import CategoryHeading from 'components/Logs/CategoryHeading'; import CategoryHeading from 'components/Logs/CategoryHeading';
import { map } from 'lodash-es'; import map from 'lodash-es/map';
import React from 'react'; import React from 'react';
import { useDispatch, useSelector } from 'react-redux'; import { useDispatch, useSelector } from 'react-redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';

View File

@ -2,14 +2,23 @@ import React from 'react';
import QueryBuilder from './QueryBuilder/QueryBuilder'; import QueryBuilder from './QueryBuilder/QueryBuilder';
import Suggestions from './Suggestions'; import Suggestions from './Suggestions';
import { QueryFields } from './utils';
interface SearchFieldsProps { export interface SearchFieldsProps {
updateParsedQuery: () => void; updateParsedQuery: (query: QueryFields[]) => void;
onDropDownToggleHandler: (value: boolean) => VoidFunction;
} }
function SearchFields({ updateParsedQuery }: SearchFieldsProps): JSX.Element {
function SearchFields({
updateParsedQuery,
onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element {
return ( return (
<> <>
<QueryBuilder updateParsedQuery={updateParsedQuery} /> <QueryBuilder
onDropDownToggleHandler={onDropDownToggleHandler}
updateParsedQuery={updateParsedQuery}
/>
<Suggestions /> <Suggestions />
</> </>
); );

View File

@ -9,12 +9,8 @@ export const QueryFieldContainer = styled.div`
align-items: center; align-items: center;
border-radius: 0.25rem; border-radius: 0.25rem;
gap: 1rem; gap: 1rem;
width: 100%;
&:hover { &:hover {
background: ${blue[6]}; background: ${blue[6]};
} }
`; `;
export const QueryConditionContainer = styled.div`
padding: 0.25rem 0rem;
margin: 0.1rem 0;
`;

View File

@ -1,11 +1,8 @@
/* eslint-disable react-hooks/exhaustive-deps */ import { Input, InputRef, Popover } from 'antd';
import { CloseSquareOutlined } from '@ant-design/icons'; import useUrlQuery from 'hooks/useUrlQuery';
import { Button, Input } from 'antd';
import useClickOutside from 'hooks/useClickOutside';
import getStep from 'lib/getStep'; import getStep from 'lib/getStep';
import React, { memo, useEffect, useMemo, useRef, useState } from 'react'; import React, { useCallback, useEffect, useRef, useState } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux'; import { connect, useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-use';
import { bindActionCreators, Dispatch } from 'redux'; import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk'; import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs'; import { getLogs } from 'store/actions/logs/getLogs';
@ -17,17 +14,9 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import SearchFields from './SearchFields'; import SearchFields from './SearchFields';
import { DropDownContainer } from './styles'; import { Container, DropDownContainer } from './styles';
import { useSearchParser } from './useSearchParser'; import { useSearchParser } from './useSearchParser';
const { Search } = Input;
interface SearchFilterProps {
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => ReturnType<typeof getLogsAggregate>;
}
function SearchFilter({ function SearchFilter({
getLogs, getLogs,
getLogsAggregate, getLogsAggregate,
@ -38,6 +27,14 @@ function SearchFilter({
updateQueryString, updateQueryString,
} = useSearchParser(); } = useSearchParser();
const [showDropDown, setShowDropDown] = useState(false); const [showDropDown, setShowDropDown] = useState(false);
const searchRef = useRef<InputRef>(null);
const onDropDownToggleHandler = useCallback(
(value: boolean) => (): void => {
setShowDropDown(value);
},
[],
);
const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector< const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
AppState, AppState,
@ -48,117 +45,105 @@ function SearchFilter({
(state) => state.globalTime, (state) => state.globalTime,
); );
const searchComponentRef = useRef<HTMLDivElement>(null); const dispatch = useDispatch<Dispatch<AppActions>>();
useClickOutside(searchComponentRef, (e: HTMLElement) => { const handleSearch = useCallback(
// using this hack as overlay span is violating this condition (customQuery) => {
if ( if (liveTail === 'PLAYING') {
e.nodeName === 'svg' || dispatch({
e.nodeName === 'path' || type: TOGGLE_LIVE_TAIL,
e.nodeName === 'span' || payload: 'PAUSED',
e.nodeName === 'button' });
) { setTimeout(
return; () =>
} dispatch({
type: TOGGLE_LIVE_TAIL,
payload: liveTail,
}),
0,
);
} else {
getLogs({
q: customQuery,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
if ( getLogsAggregate({
e.nodeName === 'DIV' && timestampStart: minTime,
![ timestampEnd: maxTime,
'ant-empty-image', step: getStep({
'ant-select-item', start: minTime,
'ant-col', end: maxTime,
'ant-select-item-option-content', inputFormat: 'ns',
'ant-select-item-option-active',
].find((p) => p.indexOf(e.className) !== -1) &&
!(e.ariaSelected === 'true') &&
showDropDown
) {
setShowDropDown(false);
}
});
const { search } = useLocation();
const dispatch = useDispatch();
const handleSearch = (customQuery = ''): void => {
if (liveTail === 'PLAYING') {
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED',
});
setTimeout(
() =>
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: liveTail,
}), }),
0, q: customQuery,
); });
} else { }
getLogs({ },
q: customQuery || queryString, [
limit: logLinesPerPage, dispatch,
orderBy: 'timestamp', getLogs,
order: 'desc', getLogsAggregate,
timestampStart: minTime, idEnd,
timestampEnd: maxTime, idStart,
...(idStart ? { idGt: idStart } : {}), liveTail,
...(idEnd ? { idLt: idEnd } : {}), logLinesPerPage,
}); maxTime,
minTime,
],
);
getLogsAggregate({ const urlQuery = useUrlQuery();
timestampStart: minTime, const urlQueryString = urlQuery.get('q');
timestampEnd: maxTime,
step: getStep({
start: minTime,
end: maxTime,
inputFormat: 'ns',
}),
q: customQuery || queryString,
});
}
setShowDropDown(false);
};
const urlQuery = useMemo(() => {
return new URLSearchParams(search);
}, [search]);
useEffect(() => { useEffect(() => {
const urlQueryString = urlQuery.get('q'); handleSearch(urlQueryString || '');
if (urlQueryString !== null) handleSearch(urlQueryString); // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, [urlQueryString, maxTime, minTime]);
return ( return (
<div ref={searchComponentRef} style={{ flex: 1 }}> <Container>
<Search <Popover
placeholder="Search Filter" placement="bottom"
onFocus={(): void => setShowDropDown(true)} content={
value={queryString}
onChange={(e): void => {
updateQueryString(e.target.value);
}}
onSearch={handleSearch}
/>
<div style={{ position: 'relative' }}>
{showDropDown && (
<DropDownContainer> <DropDownContainer>
<Button <SearchFields
type="text" onDropDownToggleHandler={onDropDownToggleHandler}
onClick={(): void => setShowDropDown(false)} updateParsedQuery={updateParsedQuery as never}
style={{ />
position: 'absolute',
top: 0,
right: 0,
}}
>
<CloseSquareOutlined />
</Button>
<SearchFields updateParsedQuery={updateParsedQuery as never} />
</DropDownContainer> </DropDownContainer>
)} }
</div> trigger="click"
</div> overlayInnerStyle={{
width: `${searchRef?.current?.input?.offsetWidth || 0}px`,
}}
visible={showDropDown}
destroyTooltipOnHide
onVisibleChange={(value): void => {
onDropDownToggleHandler(value)();
}}
>
<Input.Search
ref={searchRef}
placeholder="Search Filter"
value={queryString}
onChange={(e): void => {
updateQueryString(e.target.value);
}}
allowClear
onSearch={handleSearch}
/>
</Popover>
</Container>
); );
} }
interface DispatchProps { interface DispatchProps {
getLogs: ( getLogs: (
props: Parameters<typeof getLogs>[0], props: Parameters<typeof getLogs>[0],
@ -168,6 +153,8 @@ interface DispatchProps {
) => (dispatch: Dispatch<AppActions>) => void; ) => (dispatch: Dispatch<AppActions>) => void;
} }
type SearchFilterProps = DispatchProps;
const mapDispatchToProps = ( const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>, dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({ ): DispatchProps => ({
@ -175,4 +162,4 @@ const mapDispatchToProps = (
getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch), getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
}); });
export default connect(null, mapDispatchToProps)(memo(SearchFilter)); export default connect(null, mapDispatchToProps)(SearchFilter);

View File

@ -2,11 +2,13 @@ import { Card } from 'antd';
import styled from 'styled-components'; import styled from 'styled-components';
export const DropDownContainer = styled(Card)` export const DropDownContainer = styled(Card)`
top: 0.5rem;
position: absolute;
width: 100%;
z-index: 1;
.ant-card-body { .ant-card-body {
padding: 0.8rem; width: 100%;
} }
`; `;
export const Container = styled.div`
width: 100%;
flex: 1;
position: relative;
`;

View File

@ -23,10 +23,12 @@ export function useSearchParser(): {
const updateQueryString = useCallback( const updateQueryString = useCallback(
(updatedQueryString) => { (updatedQueryString) => {
history.push({ if (updatedQueryString) {
pathname: history.location.pathname, history.push({
search: updatedQueryString ? `?q=${updatedQueryString}` : '', pathname: history.location.pathname,
}); search: updatedQueryString ? `?q=${updatedQueryString}` : '',
});
}
dispatch({ dispatch({
type: SET_SEARCH_QUERY_STRING, type: SET_SEARCH_QUERY_STRING,

View File

@ -3,48 +3,18 @@ import { Typography } from 'antd';
import LogItem from 'components/Logs/LogItem'; import LogItem from 'components/Logs/LogItem';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
import { map } from 'lodash-es'; import { map } from 'lodash-es';
import React, { memo, useEffect } from 'react'; import React, { memo } from 'react';
import { connect, useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import { Container, Heading } from './styles'; import { Container, Heading } from './styles';
function LogsTable({ getLogs }: LogsTableProps): JSX.Element { function LogsTable(): JSX.Element {
const { const { logs, isLoading, liveTail } = useSelector<AppState, ILogsReducer>(
searchFilter: { queryString }, (state) => state.logs,
logs,
logLinesPerPage,
idEnd,
idStart,
isLoading,
liveTail,
} = useSelector<AppState, ILogsReducer>((state) => state.logs);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
); );
useEffect(() => {
if (liveTail === 'STOPPED')
getLogs({
q: queryString,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [getLogs, idEnd, idStart, logLinesPerPage, maxTime, minTime, liveTail]);
if (isLoading) { if (isLoading) {
return <Spinner height={20} tip="Getting Logs" />; return <Spinner height={20} tip="Getting Logs" />;
} }
@ -72,20 +42,4 @@ function LogsTable({ getLogs }: LogsTableProps): JSX.Element {
); );
} }
interface DispatchProps { export default memo(LogsTable);
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
});
interface LogsTableProps {
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
}
export default connect(null, mapDispatchToProps)(memo(LogsTable));

View File

@ -1,6 +1,6 @@
import { blue } from '@ant-design/colors'; import { blue } from '@ant-design/colors';
import { SearchOutlined } from '@ant-design/icons'; import { SearchOutlined } from '@ant-design/icons';
import { Button, Input, Space, Table } from 'antd'; import { Button, Card, Input, Space, Table } from 'antd';
import type { ColumnsType, ColumnType } from 'antd/es/table'; import type { ColumnsType, ColumnType } from 'antd/es/table';
import type { FilterConfirmProps } from 'antd/es/table/interface'; import type { FilterConfirmProps } from 'antd/es/table/interface';
import localStorageGet from 'api/browser/localstorage/get'; import localStorageGet from 'api/browser/localstorage/get';
@ -48,37 +48,27 @@ function Metrics(): JSX.Element {
const filterDropdown = useCallback( const filterDropdown = useCallback(
({ setSelectedKeys, selectedKeys, confirm }) => ( ({ setSelectedKeys, selectedKeys, confirm }) => (
<div <Card size="small">
style={{ <Space align="start" direction="vertical">
padding: 8, <Input
}} placeholder="Search by service"
> value={selectedKeys[0]}
<Input onChange={(e): void =>
placeholder="Search by service" setSelectedKeys(e.target.value ? [e.target.value] : [])
value={selectedKeys[0]} }
onChange={(e): void => allowClear
setSelectedKeys(e.target.value ? [e.target.value] : []) onPressEnter={(): void => handleSearch(confirm)}
} />
allowClear
onPressEnter={(): void => handleSearch(confirm)}
style={{
marginBottom: 8,
}}
/>
<Space>
<Button <Button
type="primary" type="primary"
onClick={(): void => handleSearch(confirm)} onClick={(): void => handleSearch(confirm)}
icon={<SearchOutlined />} icon={<SearchOutlined />}
size="small" size="small"
style={{
width: 90,
}}
> >
Search Search
</Button> </Button>
</Space> </Space>
</div> </Card>
), ),
[], [],
); );

View File

@ -27,24 +27,34 @@ function ClickHouseQueryContainer({
toggleDisable, toggleDisable,
toggleDelete, toggleDelete,
}: IClickHouseQueryHandleChange): void => { }: IClickHouseQueryHandleChange): void => {
const allQueries = queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME]; // we must check whether queryIndex is of number type because
const currentIndexQuery = allQueries[queryIndex]; // ClickHouseQueryBuilder.handleQueryChange has a queryIndex
// parameter that supports both number and string formats:
// the dashboard side of the query builder uses a numeric queryIndex,
// while the alert builder uses a string index (mirroring the backend).
// Hence, this method only applies when queryIndex is a number.
if (rawQuery !== undefined) { if (typeof queryIndex === 'number') {
currentIndexQuery.rawQuery = rawQuery; const allQueries = queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME];
}
if (legend !== undefined) { const currentIndexQuery = allQueries[queryIndex];
currentIndexQuery.legend = legend;
}
if (toggleDisable) { if (rawQuery !== undefined) {
currentIndexQuery.disabled = !currentIndexQuery.disabled; currentIndexQuery.rawQuery = rawQuery;
}
if (legend !== undefined) {
currentIndexQuery.legend = legend;
}
if (toggleDisable) {
currentIndexQuery.disabled = !currentIndexQuery.disabled;
}
if (toggleDelete) {
allQueries.splice(queryIndex, 1);
}
updateQueryData({ updatedQuery: { ...queryData } });
} }
if (toggleDelete) {
allQueries.splice(queryIndex, 1);
}
updateQueryData({ updatedQuery: { ...queryData } });
}; };
const addQueryHandler = (): void => { const addQueryHandler = (): void => {
queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME].push({ queryData[WIDGET_CLICKHOUSE_QUERY_KEY_NAME].push({
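To make the number-only path concrete, here is a minimal sketch of the two kinds of call sites (the query values are illustrative; the handler is the one defined above):

// dashboard query builder: numeric index, mutates the widget's query array
handleQueryChange({ queryIndex: 0, rawQuery: 'SELECT 1' });

// alert builder: string index (mirroring the backend), skipped by this container
handleQueryChange({ queryIndex: 'A', legend: 'errors' });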

View File

@ -8,7 +8,7 @@ import { IClickHouseQueryHandleChange } from './types';
interface IClickHouseQueryBuilderProps { interface IClickHouseQueryBuilderProps {
queryData: IClickHouseQuery; queryData: IClickHouseQuery;
queryIndex: number; queryIndex: number | string;
handleQueryChange: (args: IClickHouseQueryHandleChange) => void; handleQueryChange: (args: IClickHouseQueryHandleChange) => void;
} }
@ -43,6 +43,9 @@ function ClickHouseQueryBuilder({
scrollbar: { scrollbar: {
alwaysConsumeMouseWheel: false, alwaysConsumeMouseWheel: false,
}, },
minimap: {
enabled: false,
},
}} }}
/> />
<Input <Input
@ -51,6 +54,7 @@ function ClickHouseQueryBuilder({
} }
size="middle" size="middle"
defaultValue={queryData.legend} defaultValue={queryData.legend}
value={queryData.legend}
addonBefore="Legend Format" addonBefore="Legend Format"
/> />
</QueryHeader> </QueryHeader>

View File

@ -1,7 +1,7 @@
import { IClickHouseQuery } from 'types/api/dashboard/getAll'; import { IClickHouseQuery } from 'types/api/dashboard/getAll';
export interface IClickHouseQueryHandleChange { export interface IClickHouseQueryHandleChange {
queryIndex: number; queryIndex: number | string;
rawQuery?: IClickHouseQuery['rawQuery']; rawQuery?: IClickHouseQuery['rawQuery'];
legend?: IClickHouseQuery['legend']; legend?: IClickHouseQuery['legend'];
toggleDisable?: IClickHouseQuery['disabled']; toggleDisable?: IClickHouseQuery['disabled'];

View File

@ -25,7 +25,7 @@ function SwitchComponent({
setIsLoading(false); setIsLoading(false);
}; };
const isInValidCertificate = useMemo( const isInValidCertificate = useMemo(
() => !getIsValidCertificate(record?.samlConfig), () => !getIsValidCertificate(record?.samlConfig),
[record], [record],
); );
@ -33,7 +33,7 @@ function SwitchComponent({
return ( return (
<Switch <Switch
loading={isLoading} loading={isLoading}
disabled={isInValidCertificate} disabled={isInValidCertificate}
checked={isChecked} checked={isChecked}
onChange={onChangeHandler} onChange={onChangeHandler}
/> />

View File

@ -1,5 +1,5 @@
import { LockTwoTone } from '@ant-design/icons'; import { LockTwoTone } from '@ant-design/icons';
import { Button, Modal, notification, Space, Table, Typography } from 'antd'; import { Button, Modal, notification, Space, Table } from 'antd';
import { ColumnsType } from 'antd/lib/table'; import { ColumnsType } from 'antd/lib/table';
import deleteDomain from 'api/SAML/deleteDomain'; import deleteDomain from 'api/SAML/deleteDomain';
import listAllDomain from 'api/SAML/listAllDomain'; import listAllDomain from 'api/SAML/listAllDomain';
@ -20,7 +20,6 @@ import AddDomain from './AddDomain';
import Create from './Create'; import Create from './Create';
import EditSaml from './Edit'; import EditSaml from './Edit';
import SwitchComponent from './Switch'; import SwitchComponent from './Switch';
import { getIsValidCertificate } from './utils';
function AuthDomains(): JSX.Element { function AuthDomains(): JSX.Element {
const { t } = useTranslation(['common', 'organizationsettings']); const { t } = useTranslation(['common', 'organizationsettings']);
@ -196,12 +195,6 @@ function AuthDomains(): JSX.Element {
); );
} }
const isValidCertificate = getIsValidCertificate(record.samlConfig);
if (!isValidCertificate) {
return <Typography>Configure SSO &nbsp;</Typography>;
}
return ( return (
<Button type="link" onClick={onEditHandler(record)}> <Button type="link" onClick={onEditHandler(record)}>
Edit SSO Edit SSO

View File

@ -1,6 +1,13 @@
export const getIsValidCertificate = ( import { SAMLConfig } from 'types/api/SAML/listDomain';
config: Record<string, string>,
): boolean => export const getIsValidCertificate = (config: SAMLConfig): boolean => {
config?.samlCert.length !== 0 && if (config === null) {
config?.samlEntity.length !== 0 && return false;
config?.samlIdp.length !== 0; }
return (
config?.samlCert?.length !== 0 &&
config?.samlEntity?.length !== 0 &&
config?.samlIdp?.length !== 0
);
};
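A couple of illustrative calls, following the null guard and optional chaining above (the import path is assumed from the AuthDomains imports earlier in this diff; the null case relies on the guard, so the parameter type may be SAMLConfig | null in practice):

import { getIsValidCertificate } from 'container/OrganizationSettings/AuthDomains/utils';

getIsValidCertificate(null); // false: domain has no SAML config yet
getIsValidCertificate({ samlEntity: 'e', samlIdp: 'idp', samlCert: '' }); // false: empty certificate
getIsValidCertificate({ samlEntity: 'e', samlIdp: 'idp', samlCert: 'cert' }); // true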

View File

@ -1,3 +1,7 @@
import GetMinMax, { GetMinMaxPayload } from 'lib/getMinMax';
import { Time } from '../DateTimeSelection/config';
export const options: IOptions[] = [ export const options: IOptions[] = [
{ {
label: 'off', label: 'off',
@ -61,3 +65,12 @@ export interface IOptions {
key: string; key: string;
value: number; value: number;
} }
export const getMinMax = (
selectedTime: Time,
minTime: number,
maxTime: number,
): GetMinMaxPayload =>
selectedTime !== 'custom'
? GetMinMax(selectedTime)
: GetMinMax(selectedTime, [minTime, maxTime]);
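For example, with the previous window held in the reducer (times are nanosecond timestamps, matching GlobalReducer; '15min' stands in for one of the relative Time presets):

const prevMinTime = 1669700000000000000; // ns
const prevMaxTime = 1669700900000000000; // ns

// relative preset: the window is recomputed from the current wall clock
getMinMax('15min', prevMinTime, prevMaxTime);

// custom range: the explicit [minTime, maxTime] pair is forwarded to GetMinMax
getMinMax('custom', prevMinTime, prevMaxTime);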

View File

@ -12,7 +12,6 @@ import { CheckboxChangeEvent } from 'antd/lib/checkbox';
import get from 'api/browser/localstorage/get'; import get from 'api/browser/localstorage/get';
import set from 'api/browser/localstorage/set'; import set from 'api/browser/localstorage/set';
import { DASHBOARD_TIME_IN_DURATION } from 'constants/app'; import { DASHBOARD_TIME_IN_DURATION } from 'constants/app';
import dayjs from 'dayjs';
import useUrlQuery from 'hooks/useUrlQuery'; import useUrlQuery from 'hooks/useUrlQuery';
import _omit from 'lodash-es/omit'; import _omit from 'lodash-es/omit';
import React, { useCallback, useEffect, useMemo, useState } from 'react'; import React, { useCallback, useEffect, useMemo, useState } from 'react';
@ -22,19 +21,26 @@ import { useInterval } from 'react-use';
import { Dispatch } from 'redux'; import { Dispatch } from 'redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import AppActions from 'types/actions'; import AppActions from 'types/actions';
import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime'; import {
UPDATE_AUTO_REFRESH_INTERVAL,
UPDATE_TIME_INTERVAL,
} from 'types/actions/globalTime';
import { GlobalReducer } from 'types/reducer/globalTime'; import { GlobalReducer } from 'types/reducer/globalTime';
import { options } from './config'; import { getMinMax, options } from './config';
import { ButtonContainer, Container } from './styles'; import { ButtonContainer, Container } from './styles';
function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element { function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element {
const { minTime: initialMinTime, selectedTime } = useSelector< const globalTime = useSelector<AppState, GlobalReducer>(
AppState, (state) => state.globalTime,
GlobalReducer );
>((state) => state.globalTime);
const { pathname } = useLocation(); const { pathname } = useLocation();
const isDisabled = useMemo(
() => disabled || globalTime.isAutoRefreshDisabled,
[globalTime.isAutoRefreshDisabled, disabled],
);
const localStorageData = JSON.parse(get(DASHBOARD_TIME_IN_DURATION) || '{}'); const localStorageData = JSON.parse(get(DASHBOARD_TIME_IN_DURATION) || '{}');
const localStorageValue = useMemo(() => localStorageData[pathname], [ const localStorageValue = useMemo(() => localStorageData[pathname], [
@ -46,13 +52,19 @@ function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element {
Boolean(localStorageValue), Boolean(localStorageValue),
); );
const dispatch = useDispatch<Dispatch<AppActions>>();
useEffect(() => { useEffect(() => {
setIsAutoRefreshfreshEnabled(Boolean(localStorageValue)); const isAutoRefreshEnabled = Boolean(localStorageValue);
}, [localStorageValue]); dispatch({
type: UPDATE_AUTO_REFRESH_INTERVAL,
payload: localStorageValue,
});
setIsAutoRefreshfreshEnabled(isAutoRefreshEnabled);
}, [localStorageValue, dispatch]);
const params = useUrlQuery(); const params = useUrlQuery();
const dispatch = useDispatch<Dispatch<AppActions>>();
const [selectedOption, setSelectedOption] = useState<string>( const [selectedOption, setSelectedOption] = useState<string>(
localStorageValue || options[0].key, localStorageValue || options[0].key,
); );
@ -69,19 +81,23 @@ function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element {
useInterval(() => { useInterval(() => {
const selectedValue = getOption?.value; const selectedValue = getOption?.value;
if (disabled || !isAutoRefreshEnabled) { if (isDisabled || !isAutoRefreshEnabled) {
return; return;
} }
if (selectedOption !== 'off' && selectedValue) { if (selectedOption !== 'off' && selectedValue) {
const min = initialMinTime / 1000000; const { maxTime, minTime } = getMinMax(
globalTime.selectedTime,
globalTime.minTime,
globalTime.maxTime,
);
dispatch({ dispatch({
type: UPDATE_TIME_INTERVAL, type: UPDATE_TIME_INTERVAL,
payload: { payload: {
maxTime: dayjs().valueOf() * 1000000, maxTime,
minTime: dayjs(min).subtract(selectedValue, 'second').valueOf() * 1000000, minTime,
selectedTime, selectedTime: globalTime.selectedTime,
}, },
}); });
} }
@ -125,21 +141,23 @@ function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element {
<Checkbox <Checkbox
onChange={onChangeAutoRefreshHandler} onChange={onChangeAutoRefreshHandler}
checked={isAutoRefreshEnabled} checked={isAutoRefreshEnabled}
disabled={disabled} disabled={isDisabled}
> >
Auto Refresh Auto Refresh
</Checkbox> </Checkbox>
<Divider /> <Divider />
<Typography.Paragraph>Refresh Interval</Typography.Paragraph> <Typography.Paragraph disabled={isDisabled}>
Refresh Interval
</Typography.Paragraph>
<Radio.Group onChange={onChangeHandler} value={selectedOption}> <Radio.Group onChange={onChangeHandler} value={selectedOption}>
<Space direction="vertical"> <Space direction="vertical">
{options {options
.filter((e) => e.label !== 'off') .filter((e) => e.label !== 'off')
.map((option) => ( .map((option) => (
<Radio key={option.key} value={option.key}> <Radio disabled={isDisabled} key={option.key} value={option.key}>
{option.label} {option.label}
</Radio> </Radio>
))} ))}

View File

@ -15,7 +15,7 @@ const breadcrumbNameMap = {
[ROUTES.VERSION]: 'Status', [ROUTES.VERSION]: 'Status',
[ROUTES.ORG_SETTINGS]: 'Organization Settings', [ROUTES.ORG_SETTINGS]: 'Organization Settings',
[ROUTES.MY_SETTINGS]: 'My Settings', [ROUTES.MY_SETTINGS]: 'My Settings',
[ROUTES.ERROR_DETAIL]: 'Errors', [ROUTES.ERROR_DETAIL]: 'Exceptions',
[ROUTES.LIST_ALL_ALERT]: 'Alerts', [ROUTES.LIST_ALL_ALERT]: 'Alerts',
[ROUTES.ALL_DASHBOARD]: 'Dashboard', [ROUTES.ALL_DASHBOARD]: 'Dashboard',
[ROUTES.LOGS]: 'Logs', [ROUTES.LOGS]: 'Logs',

View File

@ -1,8 +1,14 @@
import { TableProps, Tag, Typography } from 'antd'; import { TableProps, Tag, Typography } from 'antd';
import Table, { ColumnsType } from 'antd/lib/table'; import Table, { ColumnsType } from 'antd/lib/table';
import ROUTES from 'constants/routes'; import ROUTES from 'constants/routes';
import {
getSpanOrder,
getSpanOrderParam,
} from 'container/Trace/TraceTable/util';
import { formUrlParams } from 'container/TraceDetail/utils';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration'; import duration from 'dayjs/plugin/duration';
import history from 'lib/history';
import omit from 'lodash-es/omit'; import omit from 'lodash-es/omit';
import React from 'react'; import React from 'react';
import { useDispatch, useSelector } from 'react-redux'; import { useDispatch, useSelector } from 'react-redux';
@ -43,7 +49,11 @@ function TraceTable(): JSX.Element {
type TableType = FlatArray<TraceReducer['spansAggregate']['data'], 1>; type TableType = FlatArray<TraceReducer['spansAggregate']['data'], 1>;
const getLink = (record: TableType): string => { const getLink = (record: TableType): string => {
return `${ROUTES.TRACE}/${record.traceID}?spanId=${record.spanID}`; return `${ROUTES.TRACE}/${record.traceID}${formUrlParams({
spanId: record.spanID,
levelUp: 0,
levelDown: 0,
})}`;
}; };
const getValue = (value: string): JSX.Element => { const getValue = (value: string): JSX.Element => {
@ -110,16 +120,6 @@ function TraceTable(): JSX.Element {
}, },
]; ];
const getSortKey = (key: string): string => {
if (key === 'durationNano') {
return 'duration';
}
if (key === 'timestamp') {
return 'timestamp';
}
return '';
};
const onChangeHandler: TableProps<TableType>['onChange'] = ( const onChangeHandler: TableProps<TableType>['onChange'] = (
props, props,
_, _,
@ -128,8 +128,8 @@ function TraceTable(): JSX.Element {
if (!Array.isArray(sort)) { if (!Array.isArray(sort)) {
const { order = spansAggregateOrder } = sort; const { order = spansAggregateOrder } = sort;
if (props.current && props.pageSize) { if (props.current && props.pageSize) {
const spanOrder = order === 'ascend' ? 'ascending' : 'descending'; const spanOrder = getSpanOrder(order || '');
const orderParam = getSortKey(sort.field as string); const orderParam = getSpanOrderParam(sort.field as string);
dispatch({ dispatch({
type: UPDATE_SPAN_ORDER, type: UPDATE_SPAN_ORDER,
@ -194,7 +194,11 @@ function TraceTable(): JSX.Element {
onClick: (event): void => { onClick: (event): void => {
event.preventDefault(); event.preventDefault();
event.stopPropagation(); event.stopPropagation();
window.open(getLink(record)); if (event.metaKey || event.ctrlKey) {
window.open(getLink(record), '_blank');
} else {
history.push(getLink(record));
}
}, },
})} })}
pagination={{ pagination={{

View File

@ -0,0 +1,19 @@
export const getSpanOrderParam = (key: string): string => {
if (key === 'durationNano') {
return 'duration';
}
if (key === 'timestamp') {
return 'timestamp';
}
return '';
};
export const getSpanOrder = (order: string): string => {
if (order === 'ascend') {
return 'ascending';
}
if (order === 'descend') {
return 'descending';
}
return '';
};
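Used from the table's onChange handler, these translate antd's sorter values into the span aggregate API's vocabulary, falling back to an empty string for anything unrecognised:

getSpanOrderParam('durationNano'); // 'duration'
getSpanOrderParam('serviceName');  // '' (not a sortable key)
getSpanOrder('ascend');            // 'ascending'
getSpanOrder('');                  // ''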

View File

@ -29,6 +29,7 @@ import SelectedSpanDetails from './SelectedSpanDetails';
import * as styles from './styles'; import * as styles from './styles';
import { FlameGraphMissingSpansContainer, GanttChartWrapper } from './styles'; import { FlameGraphMissingSpansContainer, GanttChartWrapper } from './styles';
import { import {
formUrlParams,
getSortedData, getSortedData,
getTreeLevelsCount, getTreeLevelsCount,
IIntervalUnit, IIntervalUnit,
@ -50,7 +51,13 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
// const [searchSpanString, setSearchSpanString] = useState(''); // const [searchSpanString, setSearchSpanString] = useState('');
const [activeHoverId, setActiveHoverId] = useState<string>(''); const [activeHoverId, setActiveHoverId] = useState<string>('');
const [activeSelectedId, setActiveSelectedId] = useState<string>(spanId || ''); const [activeSelectedId, setActiveSelectedId] = useState<string>(spanId || '');
const { levelDown, levelUp } = useMemo(
() => ({
levelDown: urlQuery.get('levelDown'),
levelUp: urlQuery.get('levelUp'),
}),
[urlQuery],
);
const [treesData, setTreesData] = useState<ITraceForest>( const [treesData, setTreesData] = useState<ITraceForest>(
spanToTreeUtil(response[0].events), spanToTreeUtil(response[0].events),
); );
@ -77,10 +84,14 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
if (activeSelectedId) { if (activeSelectedId) {
history.replace({ history.replace({
pathname: history.location.pathname, pathname: history.location.pathname,
search: `?spanId=${activeSelectedId}`, search: `${formUrlParams({
spanId: activeSelectedId,
levelUp,
levelDown,
})}`,
}); });
} }
}, [activeSelectedId]); }, [activeSelectedId, levelDown, levelUp]);
const getSelectedNode = useMemo(() => { const getSelectedNode = useMemo(() => {
return getNodeById(activeSelectedId, treesData); return getNodeById(activeSelectedId, treesData);

View File

@ -98,3 +98,28 @@ export const getTreeLevelsCount = (tree: ITraceTree): number => {
return levels; return levels;
}; };
export const formUrlParams = (params: Record<string, any>): string => {
let urlParams = '';
Object.entries(params).forEach(([key, value], index) => {
let encodedValue: string;
try {
encodedValue = decodeURIComponent(value);
encodedValue = encodeURIComponent(encodedValue);
} catch (error) {
encodedValue = '';
}
if (index === 0) {
if (encodedValue) {
urlParams = `?${key}=${encodedValue}`;
} else {
urlParams = `?${key}=`;
}
} else if (encodedValue) {
urlParams = `${urlParams}&${key}=${encodedValue}`;
} else {
urlParams = `${urlParams}&${key}=`;
}
});
return urlParams;
};
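A few illustrative calls; note that decoding before re-encoding keeps an already percent-encoded value from being double-encoded:

formUrlParams({ spanId: 'abc123', levelUp: 0, levelDown: 0 });
// => '?spanId=abc123&levelUp=0&levelDown=0'

formUrlParams({ q: 'service:web' });
// => '?q=service%3Aweb'

formUrlParams({ q: 'service%3Aweb' }); // encoded once already
// => '?q=service%3Aweb' (not 'service%253Aweb')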

View File

@ -20,22 +20,22 @@
<meta <meta
data-react-helmet="true" data-react-helmet="true"
name="description" name="description"
content="SigNoz is an opensource observability platform to help you find issues in your deployed applications &amp; solve them quickly. It provides an integrated UI for metrics and traces with deep filtering and aggregation to pin down specific issues very quickly. Built on Kafka and Druid, it is designed to handle enterprise scale." content="SigNoz is an open source observability platform to help you find issues in your deployed applications & solve them quickly. It provides a single pane of glass for metrics, traces and logs with deep filtering and aggregation to pin down specific issues very quickly."
/> />
<meta <meta
data-react-helmet="true" data-react-helmet="true"
property="og:description" property="og:description"
content="SigNoz is an opensource observability platform to help you find issues in your deployed applications &amp; solve them quickly. It provides an integrated UI for metrics and traces with deep filtering and aggregation to pin down specific issues very quickly. Built on Kafka and Druid, it is designed to handle enterprise scale." content="SigNoz is an open source observability platform to help you find issues in your deployed applications & solve them quickly. It provides a single pane of glass for metrics, traces and logs with deep filtering and aggregation to pin down specific issues very quickly."
/> />
<meta <meta
data-react-helmet="true" data-react-helmet="true"
property="og:image" property="og:image"
content="https://signoz.io/img/HeroShot-3.jpg" content="https://signoz.io/img/signoz-hero-image.webp"
/> />
<meta <meta
data-react-helmet="true" data-react-helmet="true"
name="twitter:image" name="twitter:image"
content="https://signoz.io/img/HeroShot-3.jpg" content="https://signoz.io/img/signoz-hero-image.webp"
/> />
<meta <meta
data-react-helmet="true" data-react-helmet="true"

View File

@ -53,7 +53,7 @@ const GetMinMax = (
}; };
}; };
interface GetMinMaxPayload { export interface GetMinMaxPayload {
minTime: GlobalReducer['minTime']; minTime: GlobalReducer['minTime'];
maxTime: GlobalReducer['maxTime']; maxTime: GlobalReducer['maxTime'];
} }

View File

@ -1,9 +1,8 @@
import CreateAlertRule from 'container/CreateAlertRule'; import CreateAlertRule from 'container/CreateAlertRule';
import React from 'react'; import React from 'react';
import { alertDefaults } from 'types/api/alerts/create';
function CreateAlertPage(): JSX.Element { function CreateAlertPage(): JSX.Element {
return <CreateAlertRule initialValue={alertDefaults} />; return <CreateAlertRule />;
} }
export default CreateAlertPage; export default CreateAlertPage;

View File

@ -48,7 +48,7 @@ function ErrorDetails(): JSX.Element {
}, },
); );
const { data, status } = useQuery([maxTime, minTime, groupId], { const { data, status } = useQuery([maxTime, minTime, groupId, errorId], {
queryFn: () => queryFn: () =>
getByErrorType({ getByErrorType({
groupID: groupId || '', groupID: groupId || '',

View File

@ -1,8 +1,72 @@
import Logs from 'container/Logs'; import { Divider, Row } from 'antd';
import React from 'react'; import LogControls from 'container/LogControls';
import LogDetailedView from 'container/LogDetailedView';
import LogLiveTail from 'container/LogLiveTail';
import LogsAggregate from 'container/LogsAggregate';
import LogsFilters from 'container/LogsFilters';
import LogsSearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import useUrlQuery from 'hooks/useUrlQuery';
import React, { memo, useEffect } from 'react';
import { connect, useDispatch } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { GetLogsFields } from 'store/actions/logs/getFields';
import AppActions from 'types/actions';
import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
function LogsHome(): JSX.Element { import SpaceContainer from './styles';
return <Logs />;
function Logs({ getLogsFields }: LogsProps): JSX.Element {
const urlQuery = useUrlQuery();
const dispatch = useDispatch();
useEffect(() => {
dispatch({
type: SET_SEARCH_QUERY_STRING,
payload: urlQuery.get('q'),
});
}, [dispatch, urlQuery]);
useEffect(() => {
getLogsFields();
}, [getLogsFields]);
return (
<>
<SpaceContainer
split={<Divider type="vertical" />}
align="center"
direction="horizontal"
>
<LogsSearchFilter />
<LogLiveTail />
</SpaceContainer>
<LogsAggregate />
<LogControls />
<Divider plain orientationMargin={1} />
<Row gutter={20} wrap={false}>
<LogsFilters />
<Divider type="vertical" />
<LogsTable />
</Row>
<LogDetailedView />
</>
);
} }
export default LogsHome; type LogsProps = DispatchProps;
interface DispatchProps {
getLogsFields: () => (dispatch: Dispatch<AppActions>) => void;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogsFields: bindActionCreators(GetLogsFields, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(Logs));

View File

@ -0,0 +1,10 @@
import { Space } from 'antd';
import styled from 'styled-components';
const SpaceContainer = styled(Space)`
.ant-space-item:nth-child(1) {
width: 100%;
}
`;
export default SpaceContainer;

View File

@ -2,16 +2,26 @@ import { Typography } from 'antd';
import getTraceItem from 'api/trace/getTraceItem'; import getTraceItem from 'api/trace/getTraceItem';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
import TraceDetailContainer from 'container/TraceDetail'; import TraceDetailContainer from 'container/TraceDetail';
import React from 'react'; import useUrlQuery from 'hooks/useUrlQuery';
import React, { useMemo } from 'react';
import { useQuery } from 'react-query'; import { useQuery } from 'react-query';
import { useParams } from 'react-router-dom'; import { useParams } from 'react-router-dom';
import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem'; import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem';
function TraceDetail(): JSX.Element { function TraceDetail(): JSX.Element {
const { id } = useParams<TraceDetailProps>(); const { id } = useParams<TraceDetailProps>();
const urlQuery = useUrlQuery();
const { spanId, levelUp, levelDown } = useMemo(
() => ({
spanId: urlQuery.get('spanId'),
levelUp: urlQuery.get('levelUp'),
levelDown: urlQuery.get('levelDown'),
}),
[urlQuery],
);
const { data: traceDetailResponse, error, isLoading, isError } = useQuery( const { data: traceDetailResponse, error, isLoading, isError } = useQuery(
`getTraceItem/${id}`, `getTraceItem/${id}`,
() => getTraceItem({ id }), () => getTraceItem({ id, spanId, levelUp, levelDown }),
{ {
cacheTime: 3000, cacheTime: 3000,
}, },

View File

@ -2,6 +2,8 @@ import { getDefaultOption } from 'container/TopNav/DateTimeSelection/config';
import { import {
GLOBAL_TIME_LOADING_START, GLOBAL_TIME_LOADING_START,
GlobalTimeAction, GlobalTimeAction,
UPDATE_AUTO_REFRESH_DISABLED,
UPDATE_AUTO_REFRESH_INTERVAL,
UPDATE_TIME_INTERVAL, UPDATE_TIME_INTERVAL,
} from 'types/actions/globalTime'; } from 'types/actions/globalTime';
import { GlobalReducer } from 'types/reducer/globalTime'; import { GlobalReducer } from 'types/reducer/globalTime';
@ -13,6 +15,8 @@ const intitalState: GlobalReducer = {
selectedTime: getDefaultOption( selectedTime: getDefaultOption(
typeof window !== 'undefined' ? window?.location?.pathname : '', typeof window !== 'undefined' ? window?.location?.pathname : '',
), ),
isAutoRefreshDisabled: false,
selectedAutoRefreshInterval: '',
}; };
const globalTimeReducer = ( const globalTimeReducer = (
@ -35,6 +39,20 @@ const globalTimeReducer = (
}; };
} }
case UPDATE_AUTO_REFRESH_DISABLED: {
return {
...state,
isAutoRefreshDisabled: action.payload,
};
}
case UPDATE_AUTO_REFRESH_INTERVAL: {
return {
...state,
selectedAutoRefreshInterval: action.payload,
};
}
default: default:
return state; return state;
} }

View File

@ -99,13 +99,11 @@ export const LogsReducer = (
} }
case ADD_SEARCH_FIELD_QUERY_STRING: { case ADD_SEARCH_FIELD_QUERY_STRING: {
const updatedQueryString = const updatedQueryString = `${state?.searchFilter?.queryString || ''}${
state.searchFilter.queryString || state.searchFilter.queryString && state.searchFilter.queryString.length > 0
`${ ? ' and '
state.searchFilter.queryString && state.searchFilter.queryString.length > 0 : ''
? ' and ' }${action.payload}`;
: ''
}${action.payload}`;
const updatedParsedQuery = parseQuery(updatedQueryString); const updatedParsedQuery = parseQuery(updatedQueryString);
return { return {

View File

@ -2,6 +2,8 @@ import { Time } from 'container/TopNav/DateTimeSelection/config';
export const UPDATE_TIME_INTERVAL = 'UPDATE_TIME_INTERVAL'; export const UPDATE_TIME_INTERVAL = 'UPDATE_TIME_INTERVAL';
export const GLOBAL_TIME_LOADING_START = 'GLOBAL_TIME_LOADING_START'; export const GLOBAL_TIME_LOADING_START = 'GLOBAL_TIME_LOADING_START';
export const UPDATE_AUTO_REFRESH_DISABLED = 'UPDATE_AUTO_REFRESH_DISABLED';
export const UPDATE_AUTO_REFRESH_INTERVAL = 'UPDATE_AUTO_REFRESH_INTERVAL';
export type GlobalTime = { export type GlobalTime = {
maxTime: number; maxTime: number;
@ -17,8 +19,22 @@ interface UpdateTimeInterval {
payload: UpdateTime; payload: UpdateTime;
} }
interface UpdateAutoRefreshDisabled {
type: typeof UPDATE_AUTO_REFRESH_DISABLED;
payload: boolean;
}
interface GlobalTimeLoading { interface GlobalTimeLoading {
type: typeof GLOBAL_TIME_LOADING_START; type: typeof GLOBAL_TIME_LOADING_START;
} }
export type GlobalTimeAction = UpdateTimeInterval | GlobalTimeLoading; interface UpdateAutoRefreshInterval {
type: typeof UPDATE_AUTO_REFRESH_INTERVAL;
payload: string;
}
export type GlobalTimeAction =
| UpdateTimeInterval
| GlobalTimeLoading
| UpdateAutoRefreshDisabled
| UpdateAutoRefreshInterval;
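A minimal dispatch sketch for the two new actions (the '5m' interval key is illustrative; real keys come from the AutoRefresh options list):

import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import {
  UPDATE_AUTO_REFRESH_DISABLED,
  UPDATE_AUTO_REFRESH_INTERVAL,
} from 'types/actions/globalTime';

// disable auto refresh, e.g. while a custom time range is being edited
const pauseAutoRefresh = (dispatch: Dispatch<AppActions>): void => {
  dispatch({ type: UPDATE_AUTO_REFRESH_DISABLED, payload: true });
};

// restore the interval persisted for the current page
const restoreInterval = (dispatch: Dispatch<AppActions>): void => {
  dispatch({ type: UPDATE_AUTO_REFRESH_INTERVAL, payload: '5m' });
};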

View File

@ -1,16 +1,18 @@
import { Organization } from '../user/getOrganization'; import { Organization } from '../user/getOrganization';
export interface SAMLConfig {
samlEntity: string;
samlIdp: string;
samlCert: string;
}
export interface SAMLDomain { export interface SAMLDomain {
id: string; id: string;
name: string; name: string;
orgId: Organization['id']; orgId: Organization['id'];
ssoEnabled: boolean; ssoEnabled: boolean;
ssoType: 'SAML'; ssoType: 'SAML';
samlConfig: { samlConfig: SAMLConfig;
samlEntity: string;
samlIdp: string;
samlCert: string;
};
} }
export interface Props { export interface Props {

View File

@ -0,0 +1,8 @@
// this list must exactly match the backend
export enum AlertTypes {
NONE = 'NONE',
METRICS_BASED_ALERT = 'METRIC_BASED_ALERT',
LOGS_BASED_ALERT = 'LOGS_BASED_ALERT',
TRACES_BASED_ALERT = 'TRACES_BASED_ALERT',
EXCEPTIONS_BASED_ALERT = 'EXCEPTIONS_BASED_ALERT',
}
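Since the values must stay in lockstep with the backend, a narrow type guard (a hypothetical helper, not part of this diff; the module path is assumed) can validate strings coming over the wire:

import { AlertTypes } from 'types/api/alerts/alertTypes';

const isAlertType = (value: string): value is AlertTypes =>
  (Object.values(AlertTypes) as string[]).includes(value);

isAlertType('LOGS_BASED_ALERT'); // true
isAlertType('UNKNOWN'); // false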

View File

@ -1,4 +1,5 @@
import { import {
IClickHouseQuery,
IMetricsBuilderFormula, IMetricsBuilderFormula,
IMetricsBuilderQuery, IMetricsBuilderQuery,
IPromQLQuery, IPromQLQuery,
@ -9,17 +10,25 @@ import { EAggregateOperator, EQueryType } from 'types/common/dashboard';
export interface ICompositeMetricQuery { export interface ICompositeMetricQuery {
builderQueries: IBuilderQueries; builderQueries: IBuilderQueries;
promQueries: IPromQueries; promQueries: IPromQueries;
chQueries: IChQueries;
queryType: EQueryType; queryType: EQueryType;
} }
export interface IPromQueries { export interface IChQueries {
[key: string]: IPromQuery; [key: string]: IChQuery;
}
export interface IChQuery extends IClickHouseQuery {
query: string;
} }
export interface IPromQuery extends IPromQLQuery { export interface IPromQuery extends IPromQLQuery {
stats?: ''; stats?: '';
} }
export interface IPromQueries {
[key: string]: IPromQuery;
}
export interface IBuilderQueries { export interface IBuilderQueries {
[key: string]: IBuilderQuery; [key: string]: IBuilderQuery;
} }
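An illustrative ClickHouse-only composite query under these interfaces; the EQueryType.CLICKHOUSE member name and the assumption that IClickHouseQuery requires no further fields are mine, not part of the diff:

const chOnlyQuery: ICompositeMetricQuery = {
  builderQueries: {},
  promQueries: {},
  chQueries: {
    A: {
      rawQuery: 'SELECT fingerprint, count() AS value FROM samples GROUP BY fingerprint',
      query: 'SELECT fingerprint, count() AS value FROM samples GROUP BY fingerprint', // resolved query sent to the backend
      legend: '{{fingerprint}}',
      disabled: false,
    },
  },
  queryType: EQueryType.CLICKHOUSE,
};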

View File

@ -1,7 +1,5 @@
import { AlertDef } from 'types/api/alerts/def'; import { AlertDef } from 'types/api/alerts/def';
import { defaultCompareOp, defaultEvalWindow, defaultMatchType } from './def';
export interface Props { export interface Props {
data: AlertDef; data: AlertDef;
} }
@ -10,39 +8,3 @@ export interface PayloadProps {
status: string; status: string;
data: string; data: string;
} }
export const alertDefaults: AlertDef = {
condition: {
compositeMetricQuery: {
builderQueries: {
A: {
queryName: 'A',
name: 'A',
formulaOnly: false,
metricName: '',
tagFilters: {
op: 'AND',
items: [],
},
groupBy: [],
aggregateOperator: 1,
expression: 'A',
disabled: false,
toggleDisable: false,
toggleDelete: false,
},
},
promQueries: {},
queryType: 1,
},
op: defaultCompareOp,
matchType: defaultMatchType,
},
labels: {
severity: 'warning',
},
annotations: {
description: 'A new alert',
},
evalWindow: defaultEvalWindow,
};

View File

@ -11,6 +11,7 @@ export const defaultCompareOp = '1';
export interface AlertDef { export interface AlertDef {
id?: number; id?: number;
alertType?: string;
alert?: string; alert?: string;
ruleType?: string; ruleType?: string;
condition: RuleCondition; condition: RuleCondition;

View File

@ -2,6 +2,13 @@ export interface Props {
id: string; id: string;
} }
export interface GetTraceItemProps {
id: string;
spanId: string | null;
levelUp: string | null;
levelDown: string | null;
}
export interface PayloadProps { export interface PayloadProps {
[id: string]: { [id: string]: {
events: Span[]; events: Span[];

View File

@ -6,4 +6,6 @@ export interface GlobalReducer {
minTime: GlobalTime['minTime']; minTime: GlobalTime['minTime'];
loading: boolean; loading: boolean;
selectedTime: Time; selectedTime: Time;
isAutoRefreshDisabled: boolean;
selectedAutoRefreshInterval: string;
} }

View File

@ -77,12 +77,7 @@ export const spanToTreeUtil = (inputSpanList: Span[]): ITraceForest => {
serviceName: span[3], serviceName: span[3],
hasError: !!span[11], hasError: !!span[11],
serviceColour: '', serviceColour: '',
event: span[10].map((e) => { event: span[10].map((e) => JSON.parse(e || '{}') || {}),
return (
JSON.parse(decodeURIComponent((e as never) || ('{}' as never))) ||
({} as Record<string, unknown>)
);
}),
references: spanReferences, references: spanReferences,
}; };
spanMap[span[1]] = spanObject; spanMap[span[1]] = spanObject;

go.mod (7 changed lines)
View File

@ -12,6 +12,7 @@ require (
github.com/gosimple/slug v1.10.0 github.com/gosimple/slug v1.10.0
github.com/jmoiron/sqlx v1.3.4 github.com/jmoiron/sqlx v1.3.4
github.com/json-iterator/go v1.1.12 github.com/json-iterator/go v1.1.12
github.com/mailru/easyjson v0.7.7
github.com/mattn/go-sqlite3 v1.14.8 github.com/mattn/go-sqlite3 v1.14.8
github.com/minio/minio-go/v6 v6.0.57 github.com/minio/minio-go/v6 v6.0.57
github.com/mitchellh/mapstructure v1.5.0 github.com/mitchellh/mapstructure v1.5.0
@ -34,7 +35,9 @@ require (
require ( require (
github.com/beevik/etree v1.1.0 // indirect github.com/beevik/etree v1.1.0 // indirect
github.com/form3tech-oss/jwt-go v3.2.2+incompatible // indirect github.com/form3tech-oss/jwt-go v3.2.2+incompatible // indirect
github.com/google/go-cmp v0.5.8 // indirect
github.com/jonboulle/clockwork v0.2.2 // indirect github.com/jonboulle/clockwork v0.2.2 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/klauspost/cpuid v1.2.3 // indirect github.com/klauspost/cpuid v1.2.3 // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/minio/md5-simd v1.1.0 // indirect github.com/minio/md5-simd v1.1.0 // indirect
@ -68,7 +71,7 @@ require (
github.com/gogo/protobuf v1.3.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt v3.2.2+incompatible github.com/golang-jwt/jwt v3.2.2+incompatible
github.com/golang/glog v0.0.0-20210429001901-424d2337a529 // indirect github.com/golang/glog v0.0.0-20210429001901-424d2337a529 // indirect
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect github.com/golang/protobuf v1.5.2 // indirect
github.com/golang/snappy v0.0.4 // indirect github.com/golang/snappy v0.0.4 // indirect
github.com/google/btree v1.0.0 // indirect github.com/google/btree v1.0.0 // indirect
@ -126,7 +129,7 @@ require (
go.uber.org/atomic v1.6.0 // indirect go.uber.org/atomic v1.6.0 // indirect
go.uber.org/multierr v1.5.0 // indirect go.uber.org/multierr v1.5.0 // indirect
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f
golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect

go.sum (14 changed lines)
View File

@ -151,8 +151,9 @@ github.com/golang/glog v0.0.0-20210429001901-424d2337a529 h1:2voWjNECnrZRbfwXxHB
github.com/golang/glog v0.0.0-20210429001901-424d2337a529/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20210429001901-424d2337a529/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@ -200,8 +201,9 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -296,6 +298,8 @@ github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w=
github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ= github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ=
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@ -323,6 +327,8 @@ github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E= github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattermost/xml-roundtrip-validator v0.1.0 h1:RXbVD2UAl7A7nOTR4u7E3ILa4IbtvKBHw64LDsmu9hU= github.com/mattermost/xml-roundtrip-validator v0.1.0 h1:RXbVD2UAl7A7nOTR4u7E3ILa4IbtvKBHw64LDsmu9hU=
github.com/mattermost/xml-roundtrip-validator v0.1.0/go.mod h1:qccnGMcpgwcNaBnxqpJpWWUiPNr5H3O8eDgGV9gT5To= github.com/mattermost/xml-roundtrip-validator v0.1.0/go.mod h1:qccnGMcpgwcNaBnxqpJpWWUiPNr5H3O8eDgGV9gT5To=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
@ -566,8 +572,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e h1:Xj+JO91noE97IN6F/7WZxzC5QE6yENAQPrwIYhW3bsA= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f h1:OfiFi4JbukWwe3lzw+xunroH1mnC1e2Gy5cxNJApiSY=
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=

View File

@ -21,6 +21,7 @@ import (
"github.com/go-kit/log" "github.com/go-kit/log"
"github.com/go-kit/log/level" "github.com/go-kit/log/level"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/mailru/easyjson"
"github.com/oklog/oklog/pkg/group" "github.com/oklog/oklog/pkg/group"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/prometheus/common/promlog" "github.com/prometheus/common/promlog"
@ -42,6 +43,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/logs" "go.signoz.io/signoz/pkg/query-service/app/logs"
"go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/constants"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model" "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/utils" "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap" "go.uber.org/zap"
@ -79,13 +81,13 @@ var (
type ClickHouseReader struct { type ClickHouseReader struct {
db clickhouse.Conn db clickhouse.Conn
localDB *sqlx.DB localDB *sqlx.DB
traceDB string TraceDB string
operationsTable string operationsTable string
durationTable string durationTable string
indexTable string indexTable string
errorTable string errorTable string
usageExplorerTable string usageExplorerTable string
spansTable string SpansTable string
dependencyGraphTable string dependencyGraphTable string
topLevelOperationsTable string topLevelOperationsTable string
logsDB string logsDB string
@ -99,12 +101,13 @@ type ClickHouseReader struct {
promConfigFile string promConfigFile string
promConfig *config.Config promConfig *config.Config
alertManager am.Manager alertManager am.Manager
featureFlags interfaces.FeatureLookup
liveTailRefreshSeconds int liveTailRefreshSeconds int
} }
// NewTraceReader returns a TraceReader for the database // NewTraceReader returns a TraceReader for the database
func NewReader(localDB *sqlx.DB, configFile string) *ClickHouseReader { func NewReader(localDB *sqlx.DB, configFile string, featureFlag interfaces.FeatureLookup) *ClickHouseReader {
datasource := os.Getenv("ClickHouseUrl") datasource := os.Getenv("ClickHouseUrl")
options := NewOptions(datasource, primaryNamespace, archiveNamespace) options := NewOptions(datasource, primaryNamespace, archiveNamespace)
@ -125,14 +128,14 @@ func NewReader(localDB *sqlx.DB, configFile string) *ClickHouseReader {
return &ClickHouseReader{ return &ClickHouseReader{
db: db, db: db,
localDB: localDB, localDB: localDB,
traceDB: options.primary.TraceDB, TraceDB: options.primary.TraceDB,
alertManager: alertManager, alertManager: alertManager,
operationsTable: options.primary.OperationsTable, operationsTable: options.primary.OperationsTable,
indexTable: options.primary.IndexTable, indexTable: options.primary.IndexTable,
errorTable: options.primary.ErrorTable, errorTable: options.primary.ErrorTable,
usageExplorerTable: options.primary.UsageExplorerTable, usageExplorerTable: options.primary.UsageExplorerTable,
durationTable: options.primary.DurationTable, durationTable: options.primary.DurationTable,
spansTable: options.primary.SpansTable, SpansTable: options.primary.SpansTable,
dependencyGraphTable: options.primary.DependencyGraphTable, dependencyGraphTable: options.primary.DependencyGraphTable,
topLevelOperationsTable: options.primary.TopLevelOperationsTable, topLevelOperationsTable: options.primary.TopLevelOperationsTable,
logsDB: options.primary.LogsDB, logsDB: options.primary.LogsDB,
@ -141,6 +144,7 @@ func NewReader(localDB *sqlx.DB, configFile string) *ClickHouseReader {
logsResourceKeys: options.primary.LogsResourceKeysTable, logsResourceKeys: options.primary.LogsResourceKeysTable,
liveTailRefreshSeconds: options.primary.LiveTailRefreshSeconds, liveTailRefreshSeconds: options.primary.LiveTailRefreshSeconds,
promConfigFile: configFile, promConfigFile: configFile,
featureFlags: featureFlag,
} }
} }
@ -665,7 +669,7 @@ func (r *ClickHouseReader) GetQueryRangeResult(ctx context.Context, query *model
func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, error) { func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, error) {
services := []string{} services := []string{}
query := fmt.Sprintf(`SELECT DISTINCT serviceName FROM %s.%s WHERE toDate(timestamp) > now() - INTERVAL 1 DAY`, r.traceDB, r.indexTable) query := fmt.Sprintf(`SELECT DISTINCT serviceName FROM %s.%s WHERE toDate(timestamp) > now() - INTERVAL 1 DAY`, r.TraceDB, r.indexTable)
rows, err := r.db.Query(ctx, query) rows, err := r.db.Query(ctx, query)
@ -690,7 +694,7 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro
func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError) { func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError) {
operations := map[string][]string{} operations := map[string][]string{}
query := fmt.Sprintf(`SELECT DISTINCT name, serviceName FROM %s.%s`, r.traceDB, r.topLevelOperationsTable) query := fmt.Sprintf(`SELECT DISTINCT name, serviceName FROM %s.%s`, r.TraceDB, r.topLevelOperationsTable)
rows, err := r.db.Query(ctx, query) rows, err := r.db.Query(ctx, query)
@ -745,14 +749,14 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
count(*) as numCalls count(*) as numCalls
FROM %s.%s FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`, WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable, r.TraceDB, r.indexTable,
) )
errorQuery := fmt.Sprintf( errorQuery := fmt.Sprintf(
`SELECT `SELECT
count(*) as numErrors count(*) as numErrors
FROM %s.%s FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`, WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.traceDB, r.indexTable, r.TraceDB, r.indexTable,
) )
args := []interface{}{} args := []interface{}{}
@ -835,7 +839,7 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
count(*) as numCalls count(*) as numCalls
FROM %s.%s FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`, WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable, r.TraceDB, r.indexTable,
) )
args := []interface{}{} args := []interface{}{}
args = append(args, namedArgs...) args = append(args, namedArgs...)
@ -861,7 +865,7 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
count(*) as numErrors count(*) as numErrors
FROM %s.%s FROM %s.%s
WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`, WHERE serviceName = @serviceName AND name In [@names] AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.traceDB, r.indexTable, r.TraceDB, r.indexTable,
) )
args = []interface{}{} args = []interface{}{}
args = append(args, namedArgs...) args = append(args, namedArgs...)
@ -1001,7 +1005,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
case constants.TraceID: case constants.TraceID:
continue continue
case constants.ServiceName: case constants.ServiceName:
finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY serviceName" finalQuery += " GROUP BY serviceName"
var dBResponse []model.DBResponseServiceName var dBResponse []model.DBResponseServiceName
@ -1018,7 +1022,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.HttpCode: case constants.HttpCode:
finalQuery := fmt.Sprintf("SELECT httpCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT httpCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY httpCode" finalQuery += " GROUP BY httpCode"
var dBResponse []model.DBResponseHttpCode var dBResponse []model.DBResponseHttpCode
@ -1035,7 +1039,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.HttpRoute: case constants.HttpRoute:
finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY httpRoute" finalQuery += " GROUP BY httpRoute"
var dBResponse []model.DBResponseHttpRoute var dBResponse []model.DBResponseHttpRoute
@ -1052,7 +1056,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.HttpUrl: case constants.HttpUrl:
finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY httpUrl" finalQuery += " GROUP BY httpUrl"
var dBResponse []model.DBResponseHttpUrl var dBResponse []model.DBResponseHttpUrl
@ -1069,7 +1073,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.HttpMethod: case constants.HttpMethod:
finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY httpMethod" finalQuery += " GROUP BY httpMethod"
var dBResponse []model.DBResponseHttpMethod var dBResponse []model.DBResponseHttpMethod
@ -1086,7 +1090,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.HttpHost: case constants.HttpHost:
finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY httpHost" finalQuery += " GROUP BY httpHost"
var dBResponse []model.DBResponseHttpHost var dBResponse []model.DBResponseHttpHost
@ -1103,7 +1107,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.OperationRequest: case constants.OperationRequest:
finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY name" finalQuery += " GROUP BY name"
var dBResponse []model.DBResponseOperation var dBResponse []model.DBResponseOperation
@ -1120,7 +1124,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.Component: case constants.Component:
finalQuery := fmt.Sprintf("SELECT component, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT component, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY component" finalQuery += " GROUP BY component"
var dBResponse []model.DBResponseComponent var dBResponse []model.DBResponseComponent
@ -1137,7 +1141,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
} }
case constants.Status: case constants.Status:
finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
var dBResponse []model.DBResponseTotal var dBResponse []model.DBResponseTotal
err := r.db.Select(ctx, &dBResponse, finalQuery, args...) err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
@ -1148,7 +1152,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
} }
finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.traceDB, r.indexTable) finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.TraceDB, r.indexTable)
finalQuery2 += query finalQuery2 += query
var dBResponse2 []model.DBResponseTotal var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...) err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...)
@ -1168,7 +1172,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0} traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
} }
case constants.Duration: case constants.Duration:
finalQuery := fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.durationTable) finalQuery := fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1" finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal var dBResponse []model.DBResponseTotal
@ -1179,7 +1183,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
zap.S().Debug("Error in processing sql query: ", err) zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
} }
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.durationTable) finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1" finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal var dBResponse2 []model.DBResponseTotal
@ -1197,7 +1201,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
} }
case constants.RPCMethod: case constants.RPCMethod:
finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY rpcMethod" finalQuery += " GROUP BY rpcMethod"
var dBResponse []model.DBResponseRPCMethod var dBResponse []model.DBResponseRPCMethod
@ -1215,7 +1219,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
} }
case constants.ResponseStatusCode: case constants.ResponseStatusCode:
finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY responseStatusCode" finalQuery += " GROUP BY responseStatusCode"
var dBResponse []model.DBResponseStatusCodeMethod var dBResponse []model.DBResponseStatusCodeMethod
@ -1263,7 +1267,7 @@ func getStatusFilters(query string, statusParams []string, excludeMap map[string
func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError) { func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError) {
queryTable := fmt.Sprintf("%s.%s", r.traceDB, r.indexTable) queryTable := fmt.Sprintf("%s.%s", r.TraceDB, r.indexTable)
excludeMap := make(map[string]struct{}) excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude { for _, e := range queryParams.Exclude {
@ -1333,7 +1337,7 @@ func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *mo
if len(queryParams.OrderParam) != 0 { if len(queryParams.OrderParam) != 0 {
if queryParams.OrderParam == constants.Duration { if queryParams.OrderParam == constants.Duration {
queryTable = fmt.Sprintf("%s.%s", r.traceDB, r.durationTable) queryTable = fmt.Sprintf("%s.%s", r.TraceDB, r.durationTable)
if queryParams.Order == constants.Descending { if queryParams.Order == constants.Descending {
query = query + " ORDER BY durationNano DESC" query = query + " ORDER BY durationNano DESC"
} }
@ -1515,7 +1519,7 @@ func (r *ClickHouseReader) GetTagFilters(ctx context.Context, queryParams *model
tagFilters := []model.TagFilters{} tagFilters := []model.TagFilters{}
finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.traceDB, r.indexTable) finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
// Alternative query: SELECT groupUniqArrayArray(mapKeys(tagMap)) as tagKeys FROM signoz_index_v2 // Alternative query: SELECT groupUniqArrayArray(mapKeys(tagMap)) as tagKeys FROM signoz_index_v2
finalQuery += query finalQuery += query
err := r.db.Select(ctx, &tagFilters, finalQuery, args...) err := r.db.Select(ctx, &tagFilters, finalQuery, args...)
@ -1608,7 +1612,7 @@ func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model.
tagValues := []model.TagValues{} tagValues := []model.TagValues{}
finalQuery := fmt.Sprintf(`SELECT tagMap[@key] as tagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.traceDB, r.indexTable) finalQuery := fmt.Sprintf(`SELECT tagMap[@key] as tagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
finalQuery += query finalQuery += query
finalQuery += " GROUP BY tagMap[@key]" finalQuery += " GROUP BY tagMap[@key]"
args = append(args, clickhouse.Named("key", queryParams.TagKey)) args = append(args, clickhouse.Named("key", queryParams.TagKey))
@ -1649,7 +1653,7 @@ func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *mo
name name
FROM %s.%s FROM %s.%s
WHERE serviceName = @serviceName AND timestamp>= @start AND timestamp<= @end`, WHERE serviceName = @serviceName AND timestamp>= @start AND timestamp<= @end`,
r.traceDB, r.indexTable, r.TraceDB, r.indexTable,
) )
args := []interface{}{} args := []interface{}{}
args = append(args, namedArgs...) args = append(args, namedArgs...)
@ -1685,9 +1689,9 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
var query string var query string
if len(queryParams.ServiceName) != 0 { if len(queryParams.ServiceName) != 0 {
namedArgs = append(namedArgs, clickhouse.Named("serviceName", queryParams.ServiceName)) namedArgs = append(namedArgs, clickhouse.Named("serviceName", queryParams.ServiceName))
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE service_name=@serviceName AND timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.traceDB, r.usageExplorerTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE service_name=@serviceName AND timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.TraceDB, r.usageExplorerTable)
} else { } else {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.traceDB, r.usageExplorerTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.TraceDB, r.usageExplorerTable)
} }
err := r.db.Select(ctx, &usageItems, query, namedArgs...) err := r.db.Select(ctx, &usageItems, query, namedArgs...)
@ -1710,13 +1714,15 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
return &usageItems, nil return &usageItems, nil
} }
func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string) (*[]model.SearchSpansResult, error) { func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string, spanId string, levelUp int, levelDown int, spanLimit int, smartTraceAlgorithm func(payload []model.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {
var searchScanReponses []model.SearchSpanDBReponseItem var searchScanResponses []model.SearchSpanDBResponseItem
query := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.traceDB, r.spansTable) query := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
err := r.db.Select(ctx, &searchScanReponses, query, traceId) start := time.Now()
err := r.db.Select(ctx, &searchScanResponses, query, traceId)
zap.S().Info(query) zap.S().Info(query)
@ -1724,30 +1730,43 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string) (*[
zap.S().Debug("Error in processing sql query: ", err) zap.S().Debug("Error in processing sql query: ", err)
return nil, fmt.Errorf("Error in processing sql query") return nil, fmt.Errorf("Error in processing sql query")
} }
end := time.Now()
zap.S().Debug("getTraceSQLQuery took: ", end.Sub(start))
searchSpansResult := []model.SearchSpansResult{{ searchSpansResult := []model.SearchSpansResult{{
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"}, Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
Events: make([][]interface{}, len(searchScanReponses)), Events: make([][]interface{}, len(searchScanResponses)),
}, },
} }
for i, item := range searchScanReponses { searchSpanResponses := []model.SearchSpanResponseItem{}
var jsonItem model.SearchSpanReponseItem start = time.Now()
json.Unmarshal([]byte(item.Model), &jsonItem) for _, item := range searchScanResponses {
var jsonItem model.SearchSpanResponseItem
easyjson.Unmarshal([]byte(item.Model), &jsonItem)
jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano() / 1000000) jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano() / 1000000)
spanEvents := jsonItem.GetValues() searchSpanResponses = append(searchSpanResponses, jsonItem)
searchSpansResult[0].Events[i] = spanEvents }
end = time.Now()
zap.S().Debug("getTraceSQLQuery unmarshal took: ", end.Sub(start))
err = r.featureFlags.CheckFeature(model.SmartTraceDetail)
smartAlgoEnabled := err == nil
if len(searchScanResponses) > spanLimit && spanId != "" && smartAlgoEnabled {
start = time.Now()
searchSpansResult, err = smartTraceAlgorithm(searchSpanResponses, spanId, levelUp, levelDown, spanLimit)
if err != nil {
return nil, err
}
end = time.Now()
zap.S().Debug("smartTraceAlgo took: ", end.Sub(start))
} else {
for i, item := range searchSpanResponses {
spanEvents := item.GetValues()
searchSpansResult[0].Events[i] = spanEvents
}
} }
return &searchSpansResult, nil return &searchSpansResult, nil
}
func interfaceArrayToStringArray(array []interface{}) []string {
var strArray []string
for _, item := range array {
strArray = append(strArray, item.(string))
}
return strArray
} }
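
Editor's note: the reworked SearchTraces first pulls every span for the trace, unmarshals each model payload with easyjson, and only then decides how to respond: if the span count exceeds spanLimit, a target spanId was supplied, and the SmartTraceDetail feature check passes, it delegates pruning to the injected smartTraceAlgorithm; otherwise it returns the full flat span list as before. A hedged caller-side sketch (the open-source handler passes 0 and nil for the last two arguments):

// Hypothetical EE-style call; the argument values are illustrative.
spans, err := reader.SearchTraces(ctx, traceID,
	"target-span-id", // spanId to center the pruned tree on
	2,                // levelUp: ancestor levels to keep
	2,                // levelDown: descendant levels to keep
	100,              // spanLimit: prune only above this many spans
	smartTraceAlgo,   // func([]model.SearchSpanResponseItem, string, int, int, int) ([]model.SearchSpansResult, error)
)
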
func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) { func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
@ -1781,7 +1800,7 @@ func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *
GROUP BY GROUP BY
src, src,
dest`, dest`,
r.traceDB, r.dependencyGraphTable, r.TraceDB, r.dependencyGraphTable,
) )
zap.S().Debug(query, args) zap.S().Debug(query, args)
@ -1841,41 +1860,41 @@ func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, query
if queryParams.GroupBy != "" { if queryParams.GroupBy != "" {
switch queryParams.GroupBy { switch queryParams.GroupBy {
case constants.ServiceName: case constants.ServiceName:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, serviceName as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, serviceName as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.HttpCode: case constants.HttpCode:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpCode as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpCode as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.HttpMethod: case constants.HttpMethod:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpMethod as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpMethod as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.HttpUrl: case constants.HttpUrl:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpUrl as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpUrl as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.HttpRoute: case constants.HttpRoute:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpRoute as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpRoute as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.HttpHost: case constants.HttpHost:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpHost as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, httpHost as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.DBName: case constants.DBName:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbName as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbName as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.DBOperation: case constants.DBOperation:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbOperation as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbOperation as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.OperationRequest: case constants.OperationRequest:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, name as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, name as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.MsgSystem: case constants.MsgSystem:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, msgSystem as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, msgSystem as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.MsgOperation: case constants.MsgOperation:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, msgOperation as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, msgOperation as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.DBSystem: case constants.DBSystem:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbSystem as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, dbSystem as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.Component: case constants.Component:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, component as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, component as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.RPCMethod: case constants.RPCMethod:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, rpcMethod as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, rpcMethod as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
case constants.ResponseStatusCode: case constants.ResponseStatusCode:
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, responseStatusCode as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, responseStatusCode as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
default: default:
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("groupBy type: %s not supported", queryParams.GroupBy)} return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("groupBy type: %s not supported", queryParams.GroupBy)}
} }
} else { } else {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.traceDB, r.indexTable) query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
} }
if len(queryParams.TraceID) > 0 { if len(queryParams.TraceID) > 0 {
@ -2458,7 +2477,7 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li
var getErrorResponses []model.Error var getErrorResponses []model.Error
query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))} args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.OrderParam) != 0 { if len(queryParams.OrderParam) != 0 {
if queryParams.Order == constants.Descending { if queryParams.Order == constants.Descending {
@ -2492,7 +2511,7 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C
var errorCount uint64 var errorCount uint64
query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))} args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount) err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount)
@ -2514,7 +2533,7 @@ func (r *ClickHouseReader) GetErrorFromErrorID(ctx context.Context, queryParams
} }
var getErrorWithSpanReponse []model.ErrorWithSpan var getErrorWithSpanReponse []model.ErrorWithSpan
query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID AND errorID = @errorID LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID AND errorID = @errorID LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...) err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...)
@ -2537,7 +2556,7 @@ func (r *ClickHouseReader) GetErrorFromGroupID(ctx context.Context, queryParams
var getErrorWithSpanReponse []model.ErrorWithSpan var getErrorWithSpanReponse []model.ErrorWithSpan
query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...) err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...)
@ -2585,7 +2604,7 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode
var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp >= @timestamp AND errorID != @errorID ORDER BY timestamp ASC LIMIT 2", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp >= @timestamp AND errorID != @errorID ORDER BY timestamp ASC LIMIT 2", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
@ -2606,7 +2625,7 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode
if getNextErrorIDReponse[0].Timestamp.UnixNano() == getNextErrorIDReponse[1].Timestamp.UnixNano() { if getNextErrorIDReponse[0].Timestamp.UnixNano() == getNextErrorIDReponse[1].Timestamp.UnixNano() {
var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID > @errorID ORDER BY errorID ASC LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID > @errorID ORDER BY errorID ASC LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
@ -2620,7 +2639,7 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode
if len(getNextErrorIDReponse) == 0 { if len(getNextErrorIDReponse) == 0 {
var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp > @timestamp ORDER BY timestamp ASC LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp > @timestamp ORDER BY timestamp ASC LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
@ -2654,7 +2673,7 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode
var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp <= @timestamp AND errorID != @errorID ORDER BY timestamp DESC LIMIT 2", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp <= @timestamp AND errorID != @errorID ORDER BY timestamp DESC LIMIT 2", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
@ -2675,7 +2694,7 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode
if getPrevErrorIDReponse[0].Timestamp.UnixNano() == getPrevErrorIDReponse[1].Timestamp.UnixNano() { if getPrevErrorIDReponse[0].Timestamp.UnixNano() == getPrevErrorIDReponse[1].Timestamp.UnixNano() {
var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID < @errorID ORDER BY errorID DESC LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID < @errorID ORDER BY errorID DESC LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
@ -2689,7 +2708,7 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode
if len(getPrevErrorIDReponse) == 0 { if len(getPrevErrorIDReponse) == 0 {
var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp < @timestamp ORDER BY timestamp DESC LIMIT 1", r.traceDB, r.errorTable) query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp < @timestamp ORDER BY timestamp DESC LIMIT 1", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
@ -2830,6 +2849,11 @@ func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context,
} }
func (r *ClickHouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*model.Series, string, error) {
zap.S().Error("GetMetricResultEE is not implemented for opensource version")
return nil, "", fmt.Errorf("GetMetricResultEE is not implemented for opensource version")
}
// GetMetricResult runs the query and returns list of time series // GetMetricResult runs the query and returns list of time series
func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([]*model.Series, error) { func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([]*model.Series, error) {
@ -2892,6 +2916,32 @@ func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([
metricPoint.Timestamp = v.UnixMilli() metricPoint.Timestamp = v.UnixMilli()
case *float64: case *float64:
metricPoint.Value = *v metricPoint.Value = *v
case **float64:
// ch seems to return this type when column is derived from
// SELECT count(*)/ SELECT count(*)
floatVal := *v
if floatVal != nil {
metricPoint.Value = *floatVal
}
case *float32:
float32Val := float32(*v)
metricPoint.Value = float64(float32Val)
case *uint8, *uint64, *uint16, *uint32:
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
}
case *int8, *int16, *int32, *int64:
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.S().Errorf("invalid var found in metric builder query result", v, colName)
} }
} }
sort.Strings(groupBy) sort.Strings(groupBy)
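
Editor's note: the widened type switch maps every numeric column type ClickHouse can hand back onto either a float64 metric value or a group-by label, using reflect to collapse the signed and unsigned integer families into one branch each. A standalone sketch of the same trick:

package main

import (
	"fmt"
	"reflect"
)

// widen collapses a pointer to any Go integer type into a float64,
// the same reflect trick the type switch above applies to the
// *uint8..*uint64 and *int8..*int64 families.
func widen(v interface{}) (float64, bool) {
	switch v.(type) {
	case *uint8, *uint16, *uint32, *uint64:
		return float64(reflect.ValueOf(v).Elem().Uint()), true
	case *int8, *int16, *int32, *int64:
		return float64(reflect.ValueOf(v).Elem().Int()), true
	}
	return 0, false
}

func main() {
	var n uint32 = 42
	fmt.Println(widen(&n)) // 42 true
}
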
@ -3001,7 +3051,7 @@ func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Contex
func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error) { func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error) {
queryStr := fmt.Sprintf("select tagMap['service.name'] as serviceName, tagMap['deployment.environment'] as env, tagMap['telemetry.sdk.language'] as language from %s.%s where timestamp > toUnixTimestamp(now()-toIntervalMinute(%d));", r.traceDB, r.indexTable, 1) queryStr := fmt.Sprintf("select tagMap['service.name'] as serviceName, tagMap['deployment.environment'] as env, tagMap['telemetry.sdk.language'] as language from %s.%s where timestamp > toUnixTimestamp(now()-toIntervalMinute(%d));", r.TraceDB, r.indexTable, 1)
tagTelemetryDataList := []model.TagTelemetryData{} tagTelemetryDataList := []model.TagTelemetryData{}
err := r.db.Select(ctx, &tagTelemetryDataList, queryStr) err := r.db.Select(ctx, &tagTelemetryDataList, queryStr)
@ -3250,16 +3300,16 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs
if params.GroupBy != "" { if params.GroupBy != "" {
query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+ query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+
"%s "+ "%s "+
"FROM %s.%s WHERE timestamp >= '%d' AND timestamp <= '%d' ", "FROM %s.%s WHERE (timestamp >= '%d' AND timestamp <= '%d' )",
params.StepSeconds/60, params.GroupBy, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd) params.StepSeconds/60, params.GroupBy, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd)
} else { } else {
query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, "+ query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, "+
"%s "+ "%s "+
"FROM %s.%s WHERE timestamp >= '%d' AND timestamp <= '%d' ", "FROM %s.%s WHERE (timestamp >= '%d' AND timestamp <= '%d' )",
params.StepSeconds/60, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd) params.StepSeconds/60, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd)
} }
if filterSql != "" { if filterSql != "" {
query = fmt.Sprintf("%s AND %s ", query, filterSql) query = fmt.Sprintf("%s AND ( %s ) ", query, filterSql)
} }
if params.GroupBy != "" { if params.GroupBy != "" {
query = fmt.Sprintf("%s GROUP BY ts_start_interval, toString(%s) as groupBy ORDER BY ts_start_interval", query, params.GroupBy) query = fmt.Sprintf("%s GROUP BY ts_start_interval, toString(%s) as groupBy ORDER BY ts_start_interval", query, params.GroupBy)

View File

@ -24,6 +24,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/parser" "go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/auth" "go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/constants"
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
"go.signoz.io/signoz/pkg/query-service/dao" "go.signoz.io/signoz/pkg/query-service/dao"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
@ -212,7 +213,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
func (aH *APIHandler) RegisterMetricsRoutes(router *mux.Router) { func (aH *APIHandler) RegisterMetricsRoutes(router *mux.Router) {
subRouter := router.PathPrefix("/api/v2/metrics").Subrouter() subRouter := router.PathPrefix("/api/v2/metrics").Subrouter()
subRouter.HandleFunc("/query_range", ViewAccess(aH.queryRangeMetricsV2)).Methods(http.MethodPost) subRouter.HandleFunc("/query_range", ViewAccess(aH.QueryRangeMetricsV2)).Methods(http.MethodPost)
subRouter.HandleFunc("/autocomplete/list", ViewAccess(aH.metricAutocompleteMetricName)).Methods(http.MethodGet) subRouter.HandleFunc("/autocomplete/list", ViewAccess(aH.metricAutocompleteMetricName)).Methods(http.MethodGet)
subRouter.HandleFunc("/autocomplete/tagKey", ViewAccess(aH.metricAutocompleteTagKey)).Methods(http.MethodGet) subRouter.HandleFunc("/autocomplete/tagKey", ViewAccess(aH.metricAutocompleteTagKey)).Methods(http.MethodGet)
subRouter.HandleFunc("/autocomplete/tagValue", ViewAccess(aH.metricAutocompleteTagValue)).Methods(http.MethodGet) subRouter.HandleFunc("/autocomplete/tagValue", ViewAccess(aH.metricAutocompleteTagValue)).Methods(http.MethodGet)
@ -353,7 +354,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
router.HandleFunc("/api/v1/service/overview", ViewAccess(aH.getServiceOverview)).Methods(http.MethodPost) router.HandleFunc("/api/v1/service/overview", ViewAccess(aH.getServiceOverview)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/service/top_operations", ViewAccess(aH.getTopOperations)).Methods(http.MethodPost) router.HandleFunc("/api/v1/service/top_operations", ViewAccess(aH.getTopOperations)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/service/top_level_operations", ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost) router.HandleFunc("/api/v1/service/top_level_operations", ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", ViewAccess(aH.searchTraces)).Methods(http.MethodGet) router.HandleFunc("/api/v1/traces/{traceId}", ViewAccess(aH.SearchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/usage", ViewAccess(aH.getUsage)).Methods(http.MethodGet) router.HandleFunc("/api/v1/usage", ViewAccess(aH.getUsage)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dependency_graph", ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost) router.HandleFunc("/api/v1/dependency_graph", ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/ttl", AdminAccess(aH.setTTL)).Methods(http.MethodPost) router.HandleFunc("/api/v1/settings/ttl", AdminAccess(aH.setTTL)).Methods(http.MethodPost)
@ -485,7 +486,7 @@ func (aH *APIHandler) metricAutocompleteTagValue(w http.ResponseWriter, r *http.
aH.Respond(w, tagValueList) aH.Respond(w, tagValueList)
} }
func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) QueryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r) metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
if apiErrorObj != nil { if apiErrorObj != nil {
@ -652,11 +653,16 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
return return
} }
var query bytes.Buffer var query bytes.Buffer
// replace go template variables
querytemplate.AssignReservedVars(metricsQueryRangeParams)
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables) err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
if err != nil { if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return return
} }
queries[name] = query.String() queries[name] = query.String()
} }
seriesList, err, errQuriesByName = execClickHouseQueries(queries) seriesList, err, errQuriesByName = execClickHouseQueries(queries)
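
Editor's note: each ClickHouse query string is treated as a Go text/template, and AssignReservedVars pre-populates reserved variables before the user-supplied variables are applied. A minimal runnable sketch of the mechanism; the variable names here are assumptions, the real reserved names come from the queryTemplate package:

package main

import (
	"bytes"
	"fmt"
	"log"
	"text/template"
)

func main() {
	tmpl := template.Must(template.New("q").Parse(
		"SELECT count() FROM signoz_traces.signoz_index_v2 " +
			"WHERE timestamp >= {{.start_timestamp}} AND timestamp <= {{.end_timestamp}}"))

	// Illustrative reserved variables, not the exact SigNoz names.
	vars := map[string]interface{}{
		"start_timestamp": int64(1657689292000),
		"end_timestamp":   int64(1657689294000),
	}

	var query bytes.Buffer
	if err := tmpl.Execute(&query, vars); err != nil {
		log.Fatal(err)
	}
	fmt.Println(query.String())
}
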
@ -1354,12 +1360,15 @@ func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
} }
func (aH *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r) traceId, spanId, levelUpInt, levelDownInt, err := ParseSearchTracesParams(r)
traceId := vars["traceId"] if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
result, err := aH.reader.SearchTraces(r.Context(), traceId) result, err := aH.reader.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, 0, nil)
if aH.HandleError(w, err, http.StatusBadRequest) { if aH.HandleError(w, err, http.StatusBadRequest) {
return return
} }

View File

@ -309,9 +309,16 @@ func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilt
filterTokens = append(filterTokens, filter) filterTokens = append(filterTokens, filter)
} }
if len(filterTokens) > 0 { lenFilterTokens := len(filterTokens)
if len(tokens) > 0 { if lenFilterTokens > 0 {
tokens[0] = fmt.Sprintf("and %s", tokens[0]) // add parenthesis
filterTokens[0] = fmt.Sprintf("( %s", filterTokens[0])
filterTokens[lenFilterTokens-1] = fmt.Sprintf("%s) ", filterTokens[lenFilterTokens-1])
lenTokens := len(tokens)
if lenTokens > 0 {
tokens[0] = fmt.Sprintf("and ( %s", tokens[0])
tokens[lenTokens-1] = fmt.Sprintf("%s) ", tokens[lenTokens-1])
} }
filterTokens = append(filterTokens, tokens...) filterTokens = append(filterTokens, tokens...)
tokens = filterTokens tokens = filterTokens
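
Editor's note: the added parentheses matter once user filters contain OR. In SQL, AND binds tighter than OR, so without grouping, "range and a or b" parses as "(range and a) or b" and an OR in the user query escapes the compulsory timestamp/ID range. The test cases in the following file encode the expected shape; a hedged sketch of the same grouping rule:

import "strings"

// buildWhere mirrors the grouping rule above: wrap each token group
// before joining so an OR inside the user filter cannot escape the
// compulsory range filter. Names are illustrative.
func buildWhere(rangeTokens, filterTokens []string) string {
	join := func(ts []string) string { return strings.Join(ts, " and ") }
	switch {
	case len(filterTokens) == 0:
		return join(rangeTokens)
	case len(rangeTokens) == 0:
		return "( " + join(filterTokens) + " )"
	default:
		return "( " + join(rangeTokens) + " ) and ( " + join(filterTokens) + " )"
	}
}
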

View File

@@ -271,32 +271,65 @@ func TestCheckIfPrevousPaginateAndModifyOrder(t *testing.T) {
 	}
 }

-func TestGenerateSQLQuery(t *testing.T) {
-	allFields := model.GetFieldsResponse{
-		Selected: []model.LogField{
-			{
-				Name:     "id",
-				DataType: "int64",
-				Type:     "attributes",
-			},
-		},
-		Interesting: []model.LogField{
-			{
-				Name:     "code",
-				DataType: "int64",
-				Type:     "attributes",
-			},
-		},
-	}
-
-	query := "id lt 100 and id gt 50 and code lte 500 and code gte 400"
-	tsStart := uint64(1657689292000)
-	tsEnd := uint64(1657689294000)
-	idStart := "2BsKLKv8cZrLCn6rkOcRGkdjBdM"
-	idEnd := "2BsKG6tRpFWjYMcWsAGKfSxoQdU"
-	sqlWhere := "timestamp >= '1657689292000' and timestamp <= '1657689294000' and id > '2BsKLKv8cZrLCn6rkOcRGkdjBdM' and id < '2BsKG6tRpFWjYMcWsAGKfSxoQdU' and id < 100 and id > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 "
-	Convey("testGenerateSQL", t, func() {
-		res, _ := GenerateSQLWhere(&allFields, &model.LogsFilterParams{Query: query, TimestampStart: tsStart, TimestampEnd: tsEnd, IdGt: idStart, IdLT: idEnd})
-		So(res, ShouldEqual, sqlWhere)
-	})
+var generateSQLQueryFields = model.GetFieldsResponse{
+	Selected: []model.LogField{
+		{
+			Name:     "field1",
+			DataType: "int64",
+			Type:     "attributes",
+		},
+		{
+			Name:     "field2",
+			DataType: "double64",
+			Type:     "attributes",
+		},
+		{
+			Name:     "field2",
+			DataType: "string",
+			Type:     "attributes",
+		},
+	},
+	Interesting: []model.LogField{
+		{
+			Name:     "code",
+			DataType: "int64",
+			Type:     "attributes",
+		},
+	},
+}
+
+var generateSQLQueryTestCases = []struct {
+	Name      string
+	Filter    model.LogsFilterParams
+	SqlFilter string
+}{
+	{
+		Name: "first query with more than 1 compulsory filters",
+		Filter: model.LogsFilterParams{
+			Query:          "field1 lt 100 and field1 gt 50 and code lte 500 and code gte 400",
+			TimestampStart: uint64(1657689292000),
+			TimestampEnd:   uint64(1657689294000),
+			IdGt:           "2BsKLKv8cZrLCn6rkOcRGkdjBdM",
+			IdLT:           "2BsKG6tRpFWjYMcWsAGKfSxoQdU",
+		},
+		SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' and id > '2BsKLKv8cZrLCn6rkOcRGkdjBdM' and id < '2BsKG6tRpFWjYMcWsAGKfSxoQdU' ) and ( field1 < 100 and field1 > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ",
+	},
+	{
+		Name: "second query with only timestamp range",
+		Filter: model.LogsFilterParams{
+			Query:          "field1 lt 100 and field1 gt 50 and code lte 500 and code gte 400",
+			TimestampStart: uint64(1657689292000),
+			TimestampEnd:   uint64(1657689294000),
+		},
+		SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( field1 < 100 and field1 > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ",
+	},
+}
+
+func TestGenerateSQLQuery(t *testing.T) {
+	for _, test := range generateSQLQueryTestCases {
+		Convey("testGenerateSQL", t, func() {
+			res, _ := GenerateSQLWhere(&generateSQLQueryFields, &test.Filter)
+			So(res, ShouldEqual, test.SqlFilter)
+		})
+	}
 }

View File

@@ -225,6 +225,30 @@ func parseGetServicesRequest(r *http.Request) (*model.GetServicesParams, error)
 	return postData, nil
 }

+func ParseSearchTracesParams(r *http.Request) (string, string, int, int, error) {
+	vars := mux.Vars(r)
+	traceId := vars["traceId"]
+	spanId := r.URL.Query().Get("spanId")
+	levelUp := r.URL.Query().Get("levelUp")
+	levelDown := r.URL.Query().Get("levelDown")
+	if levelUp == "" || levelUp == "null" {
+		levelUp = "0"
+	}
+	if levelDown == "" || levelDown == "null" {
+		levelDown = "0"
+	}
+	levelUpInt, err := strconv.Atoi(levelUp)
+	if err != nil {
+		return "", "", 0, 0, err
+	}
+	levelDownInt, err := strconv.Atoi(levelDown)
+	if err != nil {
+		return "", "", 0, 0, err
+	}
+	return traceId, spanId, levelUpInt, levelDownInt, nil
+}
+
 func DoesExistInSlice(item string, list []string) bool {
 	for _, element := range list {
 		if item == element {

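Because mux.Vars only resolves path variables on requests that went through a mux router, a quick way to see this parsing end to end is a recorded test request. The sketch below inlines the same default-to-zero logic rather than importing the package; the ids are invented:

	package main

	import (
		"fmt"
		"net/http"
		"net/http/httptest"
		"strconv"

		"github.com/gorilla/mux"
	)

	func main() {
		r := mux.NewRouter()
		// Mirrors ParseSearchTracesParams: the path carries the trace id, the
		// query carries span id and levels; "" or "null" levels default to 0.
		r.HandleFunc("/api/v1/traces/{traceId}", func(w http.ResponseWriter, req *http.Request) {
			traceId := mux.Vars(req)["traceId"]
			spanId := req.URL.Query().Get("spanId")
			levelUp := req.URL.Query().Get("levelUp")
			if levelUp == "" || levelUp == "null" {
				levelUp = "0"
			}
			levelUpInt, _ := strconv.Atoi(levelUp)
			fmt.Fprintf(w, "traceId=%s spanId=%s levelUp=%d", traceId, spanId, levelUpInt)
		})

		req := httptest.NewRequest(http.MethodGet, "/api/v1/traces/abc123?spanId=def456&levelUp=null", nil)
		rec := httptest.NewRecorder()
		r.ServeHTTP(rec, req)
		fmt.Println(rec.Body.String()) // traceId=abc123 spanId=def456 levelUp=0
	}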
View File

@@ -88,7 +88,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	storage := os.Getenv("STORAGE")
 	if storage == "clickhouse" {
 		zap.S().Info("Using ClickHouse as datastore ...")
-		clickhouseReader := clickhouseReader.NewReader(localDB, serverOptions.PromConfigPath)
+		clickhouseReader := clickhouseReader.NewReader(localDB, serverOptions.PromConfigPath, fm)
 		go clickhouseReader.Start(readerReady)
 		reader = clickhouseReader
 	} else {

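The extra fm argument suggests the ClickHouse reader now receives the feature manager constructed earlier in NewServer, so query paths can gate optional features per plan. A sketch of the dependency-threading pattern only; the interface and type names below are invented, since the diff does not show them:

	package reader

	// FeatureLookup is an invented name for illustration; only the pattern of
	// passing a feature checker into the constructor is taken from the diff.
	type FeatureLookup interface {
		CheckFeature(name string) error
	}

	type Reader struct {
		features FeatureLookup
	}

	// NewReader-style constructor: the feature lookup rides along with the
	// reader so later query handling can consult it.
	func NewReader(fm FeatureLookup) *Reader {
		return &Reader{features: fm}
	}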
View File

@@ -259,7 +259,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*model.User,
 		OrgId: org.Id,
 	}
-	return dao.DB().CreateUser(ctx, user)
+	return dao.DB().CreateUser(ctx, user, true)
 }

 // RegisterInvitedUser handles registering a invited user
@@ -338,7 +338,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b
 	}

 	// TODO(Ahsan): Ideally create user and delete invitation should happen in a txn.
-	user, apiErr = dao.DB().CreateUser(ctx, user)
+	user, apiErr = dao.DB().CreateUser(ctx, user, false)
 	if apiErr != nil {
 		zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
 		return nil, apiErr

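RegisterFirstUser passes true and RegisterInvitedUser passes false, so the new flag plausibly controls first-user-only setup such as elevated group assignment. A hypothetical sketch of that branch; the diff only shows the parameter, not what the DAO does with it, and the group names are invented:

	package main

	import "fmt"

	// groupForUser is a made-up helper: one plausible use of isFirstUser is
	// promoting the very first registered user to an admin-style group.
	func groupForUser(isFirstUser bool) string {
		if isFirstUser {
			return "ADMIN"
		}
		return "VIEWER"
	}

	func main() {
		fmt.Println(groupForUser(true))  // ADMIN  (RegisterFirstUser path)
		fmt.Println(groupForUser(false)) // VIEWER (RegisterInvitedUser path)
	}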
View File

@@ -190,3 +190,8 @@ const (
 	"CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
 	"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string "
 )
+
+// ReservedColumnTargetAliases identifies the result value from a user-written
+// clickhouse query. The column alias indicates which value is to be
+// considered as the final result (or target).
+var ReservedColumnTargetAliases = map[string]bool{"result": true, "res": true, "value": true}

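One way such an alias set gets used, sketched under the assumption that query results arrive as column-to-value maps; pickTarget is an invented helper, not code from the diff:

	package main

	import "fmt"

	var reservedColumnTargetAliases = map[string]bool{"result": true, "res": true, "value": true}

	// pickTarget returns the first column whose alias marks it as the target
	// value of a user-written ClickHouse query.
	func pickTarget(row map[string]float64) (string, float64, bool) {
		for col, v := range row {
			if reservedColumnTargetAliases[col] {
				return col, v, true
			}
		}
		return "", 0, false
	}

	func main() {
		col, v, ok := pickTarget(map[string]float64{"ts": 1669721191, "value": 42})
		fmt.Println(col, v, ok) // value 42 true
	}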
View File

@@ -37,7 +37,7 @@ type Mutations interface {
 	CreateInviteEntry(ctx context.Context, req *model.InvitationObject) *model.ApiError
 	DeleteInvitation(ctx context.Context, email string) *model.ApiError
-	CreateUser(ctx context.Context, user *model.User) (*model.User, *model.ApiError)
+	CreateUser(ctx context.Context, user *model.User, isFirstUser bool) (*model.User, *model.ApiError)
 	EditUser(ctx context.Context, update *model.User) (*model.User, *model.ApiError)
 	DeleteUser(ctx context.Context, id string) *model.ApiError

Some files were not shown because too many files have changed in this diff.