Mirror of https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz (synced 2025-08-13 04:19:01 +08:00)
perf: exception page optimization (#1287)
* feat: update ListErrors API
* feat: update error detail APIs and add a new API for fetching next prev error IDs
* feat: update GetNextPrevErrorIDs API to handle an edge case
* perf: use timestamp for fetching individual column
* feat: add countErrors API
This commit is contained in: parent a09a4c264e, commit a84754e8a8
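Note (added for illustration, not part of the commit): a minimal sketch of how a client might call the endpoints this change introduces or renames. The base URL and all parameter values are hypothetical; the parameter names (start, end, order, orderParam, limit, offset, groupID, errorID, timestamp) come from the parse functions in this diff, and the time values are assumed here to be epoch nanoseconds.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	base := "http://localhost:8080" // hypothetical query-service address
	urls := []string{
		// paginated, ordered list of error groups in a time range
		base + "/api/v1/listErrors?start=1656000000000000000&end=1656003600000000000&order=descending&orderParam=lastSeen&limit=10&offset=0",
		// count of distinct error groups in the same time range
		base + "/api/v1/countErrors?start=1656000000000000000&end=1656003600000000000",
		// next/prev error IDs relative to one occurrence inside a group (IDs are made up)
		base + "/api/v1/nextPrevErrorIDs?groupID=abc123&errorID=e1d2c3b4&timestamp=1656000000000000000",
	}
	for _, u := range urls {
		resp, err := http.Get(u)
		if err != nil {
			fmt.Println("request failed:", err)
			continue
		}
		fmt.Println(u, "->", resp.Status)
		resp.Body.Close()
	}
}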
@@ -22,7 +22,7 @@ const (
 	defaultTraceDB string = "signoz_traces"
 	defaultOperationsTable string = "signoz_operations"
 	defaultIndexTable string = "signoz_index_v2"
-	defaultErrorTable string = "signoz_error_index"
+	defaultErrorTable string = "signoz_error_index_v2"
 	defaulDurationTable string = "durationSortMV"
 	defaultSpansTable string = "signoz_spans"
 	defaultWriteBatchDelay time.Duration = 5 * time.Second
@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"context"
 	"crypto/md5"
-	"database/sql"
 	"encoding/json"
 	"flag"
 	"fmt"
@@ -60,7 +59,7 @@ const (
 	signozTraceDBName = "signoz_traces"
 	signozDurationMVTable = "durationSort"
 	signozSpansTable = "signoz_spans"
-	signozErrorIndexTable = "signoz_error_index"
+	signozErrorIndexTable = "signoz_error_index_v2"
 	signozTraceTableName = "signoz_index_v2"
 	signozMetricDBName = "signoz_metrics"
 	signozSampleTableName = "samples_v2"
@@ -2634,15 +2633,30 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa

 }

-func (r *ClickHouseReader) GetErrors(ctx context.Context, queryParams *model.GetErrorsParams) (*[]model.Error, *model.ApiError) {
+func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.ListErrorsParams) (*[]model.Error, *model.ApiError) {

-	var getErrorReponses []model.Error
+	var getErrorResponses []model.Error

-	query := fmt.Sprintf("SELECT exceptionType, exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, serviceName FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY serviceName, exceptionType, exceptionMessage", r.traceDB, r.errorTable)
+	query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.traceDB, r.errorTable)
 	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+	if len(queryParams.OrderParam) != 0 {
+		if queryParams.Order == constants.Descending {
+			query = query + " ORDER BY " + queryParams.OrderParam + " DESC"
+		} else if queryParams.Order == constants.Ascending {
+			query = query + " ORDER BY " + queryParams.OrderParam + " ASC"
+		}
+	}
+	if queryParams.Limit > 0 {
+		query = query + " LIMIT @limit"
+		args = append(args, clickhouse.Named("limit", queryParams.Limit))
+	}

-	err := r.db.Select(ctx, &getErrorReponses, query, args...)
+	if queryParams.Offset > 0 {
+		query = query + " OFFSET @offset"
+		args = append(args, clickhouse.Named("offset", queryParams.Offset))
+	}
+
+	err := r.db.Select(ctx, &getErrorResponses, query, args...)

 	zap.S().Info(query)

 	if err != nil {
@@ -2650,30 +2664,41 @@ func (r *ClickHouseReader) GetErrors(ctx context.Context, queryParams *model.Get
 		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
 	}

-	return &getErrorReponses, nil
+	return &getErrorResponses, nil

 }

-func (r *ClickHouseReader) GetErrorForId(ctx context.Context, queryParams *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError) {
+func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.CountErrorsParams) (uint64, *model.ApiError) {

+	var errorCount uint64
+
+	query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.traceDB, r.errorTable)
+	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+
+	err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount)
+	zap.S().Info(query)
+
+	if err != nil {
+		zap.S().Debug("Error in processing sql query: ", err)
+		return 0, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+	}
+
+	return errorCount, nil
+}
+
+func (r *ClickHouseReader) GetErrorFromErrorID(ctx context.Context, queryParams *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError) {

 	if queryParams.ErrorID == "" {
 		zap.S().Debug("errorId missing from params")
-		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("ErrorID missing from params")}
+		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")}
 	}
 	var getErrorWithSpanReponse []model.ErrorWithSpan

-	// TODO: Optimize this query further
-	query := fmt.Sprintf("SELECT spanID, traceID, errorID, timestamp, serviceName, exceptionType, exceptionMessage, exceptionStacktrace, exceptionEscaped, olderErrorId, newerErrorId FROM (SELECT *, lagInFrame(toNullable(errorID)) over w as olderErrorId, leadInFrame(toNullable(errorID)) over w as newerErrorId FROM %s.%s window w as (ORDER BY exceptionType, serviceName, timestamp rows between unbounded preceding and unbounded following)) WHERE errorID = @errorID", r.traceDB, r.errorTable)
-	args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID)}
+	query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID AND errorID = @errorID LIMIT 1", r.traceDB, r.errorTable)
+	args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}

 	err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...)

 	zap.S().Info(query)

-	if err == sql.ErrNoRows {
-		return nil, nil
-	}
-
 	if err != nil {
 		zap.S().Debug("Error in processing sql query: ", err)
 		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
@@ -2682,22 +2707,17 @@ func (r *ClickHouseReader) GetErrorForId(ctx context.Context, queryParams *model
 	if len(getErrorWithSpanReponse) > 0 {
 		return &getErrorWithSpanReponse[0], nil
 	} else {
-		return &model.ErrorWithSpan{}, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("Error ID not found")}
+		return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("Error/Exception not found")}
 	}

 }

-func (r *ClickHouseReader) GetErrorForType(ctx context.Context, queryParams *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError) {
+func (r *ClickHouseReader) GetErrorFromGroupID(ctx context.Context, queryParams *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError) {

-	if queryParams.ErrorType == "" || queryParams.ServiceName == "" {
-		zap.S().Debug("errorType/serviceName missing from params")
-		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("ErrorType/serviceName missing from params")}
-	}
 	var getErrorWithSpanReponse []model.ErrorWithSpan

-	// TODO: Optimize this query further
-	query := fmt.Sprintf("SELECT spanID, traceID, errorID, timestamp , serviceName, exceptionType, exceptionMessage, exceptionStacktrace, exceptionEscaped, newerErrorId, olderErrorId FROM (SELECT *, lagInFrame(errorID) over w as olderErrorId, leadInFrame(errorID) over w as newerErrorId FROM %s.%s WHERE serviceName = @serviceName AND exceptionType = @errorType window w as (ORDER BY timestamp DESC rows between unbounded preceding and unbounded following))", r.traceDB, r.errorTable)
-	args := []interface{}{clickhouse.Named("serviceName", queryParams.ServiceName), clickhouse.Named("errorType", queryParams.ErrorType)}
+	query := fmt.Sprintf("SELECT * FROM %s.%s WHERE timestamp = @timestamp AND groupID = @groupID LIMIT 1", r.traceDB, r.errorTable)
+	args := []interface{}{clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}

 	err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...)

@@ -2711,11 +2731,173 @@ func (r *ClickHouseReader) GetErrorForType(ctx context.Context, queryParams *mod
 	if len(getErrorWithSpanReponse) > 0 {
 		return &getErrorWithSpanReponse[0], nil
 	} else {
-		return nil, &model.ApiError{Typ: model.ErrorUnavailable, Err: fmt.Errorf("Error/Exception not found")}
+		return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("Error/Exception not found")}
 	}

 }

+func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams *model.GetErrorParams) (*model.NextPrevErrorIDs, *model.ApiError) {
+
+	if queryParams.ErrorID == "" {
+		zap.S().Debug("errorId missing from params")
+		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")}
+	}
+	var err *model.ApiError
+	getNextPrevErrorIDsResponse := model.NextPrevErrorIDs{
+		GroupID: queryParams.GroupID,
+	}
+	getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, err = r.getNextErrorID(ctx, queryParams)
+	if err != nil {
+		zap.S().Debug("Unable to get next error ID due to err: ", err)
+		return nil, err
+	}
+	getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, err = r.getPrevErrorID(ctx, queryParams)
+	if err != nil {
+		zap.S().Debug("Unable to get prev error ID due to err: ", err)
+		return nil, err
+	}
+	return &getNextPrevErrorIDsResponse, nil
+
+}
+
+func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *model.GetErrorParams) (string, time.Time, *model.ApiError) {
+
+	var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+	query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp >= @timestamp AND errorID != @errorID ORDER BY timestamp ASC LIMIT 2", r.traceDB, r.errorTable)
+	args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+	err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
+
+	zap.S().Info(query)
+
+	if err != nil {
+		zap.S().Debug("Error in processing sql query: ", err)
+		return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+	}
+	if len(getNextErrorIDReponse) == 0 {
+		zap.S().Info("NextErrorID not found")
+		return "", time.Time{}, nil
+	} else if len(getNextErrorIDReponse) == 1 {
+		zap.S().Info("NextErrorID found")
+		return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil
+	} else {
+		if getNextErrorIDReponse[0].Timestamp.UnixNano() == getNextErrorIDReponse[1].Timestamp.UnixNano() {
+			var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+			query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID > @errorID ORDER BY errorID ASC LIMIT 1", r.traceDB, r.errorTable)
+			args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+			err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
+
+			zap.S().Info(query)
+
+			if err != nil {
+				zap.S().Debug("Error in processing sql query: ", err)
+				return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+			}
+			if len(getNextErrorIDReponse) == 0 {
+				var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+				query := fmt.Sprintf("SELECT errorID as nextErrorID, timestamp as nextTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp > @timestamp ORDER BY timestamp ASC LIMIT 1", r.traceDB, r.errorTable)
+				args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+				err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...)
+
+				zap.S().Info(query)
+
+				if err != nil {
+					zap.S().Debug("Error in processing sql query: ", err)
+					return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+				}
+
+				if len(getNextErrorIDReponse) == 0 {
+					zap.S().Info("NextErrorID not found")
+					return "", time.Time{}, nil
+				} else {
+					zap.S().Info("NextErrorID found")
+					return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil
+				}
+			} else {
+				zap.S().Info("NextErrorID found")
+				return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil
+			}
+		} else {
+			zap.S().Info("NextErrorID found")
+			return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil
+		}
+	}
+}
+
+func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *model.GetErrorParams) (string, time.Time, *model.ApiError) {
+
+	var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+	query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp <= @timestamp AND errorID != @errorID ORDER BY timestamp DESC LIMIT 2", r.traceDB, r.errorTable)
+	args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+	err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
+
+	zap.S().Info(query)
+
+	if err != nil {
+		zap.S().Debug("Error in processing sql query: ", err)
+		return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+	}
+	if len(getPrevErrorIDReponse) == 0 {
+		zap.S().Info("PrevErrorID not found")
+		return "", time.Time{}, nil
+	} else if len(getPrevErrorIDReponse) == 1 {
+		zap.S().Info("PrevErrorID found")
+		return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil
+	} else {
+		if getPrevErrorIDReponse[0].Timestamp.UnixNano() == getPrevErrorIDReponse[1].Timestamp.UnixNano() {
+			var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+			query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp = @timestamp AND errorID < @errorID ORDER BY errorID DESC LIMIT 1", r.traceDB, r.errorTable)
+			args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+			err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
+
+			zap.S().Info(query)
+
+			if err != nil {
+				zap.S().Debug("Error in processing sql query: ", err)
+				return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+			}
+			if len(getPrevErrorIDReponse) == 0 {
+				var getPrevErrorIDReponse []model.NextPrevErrorIDsDBResponse
+
+				query := fmt.Sprintf("SELECT errorID as prevErrorID, timestamp as prevTimestamp FROM %s.%s WHERE groupID = @groupID AND timestamp < @timestamp ORDER BY timestamp DESC LIMIT 1", r.traceDB, r.errorTable)
+				args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))}
+
+				err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...)
+
+				zap.S().Info(query)
+
+				if err != nil {
+					zap.S().Debug("Error in processing sql query: ", err)
+					return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+				}
+
+				if len(getPrevErrorIDReponse) == 0 {
+					zap.S().Info("PrevErrorID not found")
+					return "", time.Time{}, nil
+				} else {
+					zap.S().Info("PrevErrorID found")
+					return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil
+				}
+			} else {
+				zap.S().Info("PrevErrorID found")
+				return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil
+			}
+		} else {
+			zap.S().Info("PrevErrorID found")
+			return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil
+		}
+	}
+}
+
 func (r *ClickHouseReader) GetMetricAutocompleteTagKey(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError) {

 	var query string
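Note (added for illustration, not part of the commit): a self-contained toy that mirrors the edge case handled by getNextErrorID above. When several occurrences in the same group share a timestamp, ordering by timestamp alone cannot decide which one is "next", so the lookup falls back to ordering by errorID at that timestamp, and finally to the first strictly newer timestamp. All types and data below are made up; the real code issues the corresponding ClickHouse queries shown in the diff.

package main

import (
	"fmt"
	"sort"
	"time"
)

type occurrence struct {
	ID string
	TS time.Time
}

// nextErrorID picks the "next" occurrence relative to cur using the same
// three-step fallback as the reader code above.
func nextErrorID(group []occurrence, cur occurrence) (string, time.Time) {
	// Step 1: candidates at or after cur.TS, excluding cur itself, ordered by timestamp.
	var cands []occurrence
	for _, o := range group {
		if o.ID != cur.ID && !o.TS.Before(cur.TS) {
			cands = append(cands, o)
		}
	}
	sort.Slice(cands, func(i, j int) bool { return cands[i].TS.Before(cands[j].TS) })
	if len(cands) == 0 {
		return "", time.Time{} // no newer occurrence in this group
	}
	if len(cands) == 1 || !cands[0].TS.Equal(cands[1].TS) {
		return cands[0].ID, cands[0].TS // unambiguous by timestamp alone
	}
	// Step 2: tie on timestamp, break it deterministically by errorID at exactly cur.TS.
	var tie []occurrence
	for _, o := range group {
		if o.TS.Equal(cur.TS) && o.ID > cur.ID {
			tie = append(tie, o)
		}
	}
	if len(tie) > 0 {
		sort.Slice(tie, func(i, j int) bool { return tie[i].ID < tie[j].ID })
		return tie[0].ID, tie[0].TS
	}
	// Step 3: nothing larger at this timestamp, take the first strictly newer occurrence.
	for _, o := range cands {
		if o.TS.After(cur.TS) {
			return o.ID, o.TS
		}
	}
	return "", time.Time{}
}

func main() {
	t0 := time.Unix(0, 1656000000000000000)
	group := []occurrence{{"a", t0}, {"b", t0}, {"c", t0}, {"d", t0.Add(time.Second)}}
	id, ts := nextErrorID(group, occurrence{"a", t0})
	fmt.Println(id, ts) // "b": the tie at t0 is broken by errorID
}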
@@ -327,11 +327,13 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
 	router.HandleFunc("/api/v1/getTagFilters", ViewAccess(aH.getTagFilters)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/getFilteredSpans", ViewAccess(aH.getFilteredSpans)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/getFilteredSpans/aggregates", ViewAccess(aH.getFilteredSpanAggregates)).Methods(http.MethodPost)

 	router.HandleFunc("/api/v1/getTagValues", ViewAccess(aH.getTagValues)).Methods(http.MethodPost)
-	router.HandleFunc("/api/v1/errors", ViewAccess(aH.getErrors)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v1/errorWithId", ViewAccess(aH.getErrorForId)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v1/errorWithType", ViewAccess(aH.getErrorForType)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/listErrors", ViewAccess(aH.listErrors)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/countErrors", ViewAccess(aH.countErrors)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/errorFromErrorID", ViewAccess(aH.getErrorFromErrorID)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/errorFromGroupID", ViewAccess(aH.getErrorFromGroupID)).Methods(http.MethodGet)
+	router.HandleFunc("/api/v1/nextPrevErrorIDs", ViewAccess(aH.getNextPrevErrorIDs)).Methods(http.MethodGet)

 	router.HandleFunc("/api/v1/disks", ViewAccess(aH.getDisks)).Methods(http.MethodGet)

@@ -1177,49 +1179,78 @@ func (aH *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

 }

-func (aH *APIHandler) getErrors(w http.ResponseWriter, r *http.Request) {
+func (aH *APIHandler) listErrors(w http.ResponseWriter, r *http.Request) {

-	query, err := parseErrorsRequest(r)
+	query, err := parseListErrorsRequest(r)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
-	result, apiErr := (*aH.reader).GetErrors(r.Context(), query)
+	result, apiErr := (*aH.reader).ListErrors(r.Context(), query)
 	if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
 		return
 	}

 	aH.writeJSON(w, r, result)

 }

-func (aH *APIHandler) getErrorForId(w http.ResponseWriter, r *http.Request) {
+func (aH *APIHandler) countErrors(w http.ResponseWriter, r *http.Request) {

-	query, err := parseErrorRequest(r)
+	query, err := parseCountErrorsRequest(r)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
-	result, apiErr := (*aH.reader).GetErrorForId(r.Context(), query)
-	if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+	result, apiErr := (*aH.reader).CountErrors(r.Context(), query)
+	if apiErr != nil {
+		respondError(w, apiErr, nil)
 		return
 	}

 	aH.writeJSON(w, r, result)

 }

-func (aH *APIHandler) getErrorForType(w http.ResponseWriter, r *http.Request) {
+func (aH *APIHandler) getErrorFromErrorID(w http.ResponseWriter, r *http.Request) {

-	query, err := parseErrorRequest(r)
+	query, err := parseGetErrorRequest(r)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
-	result, apiErr := (*aH.reader).GetErrorForType(r.Context(), query)
-	if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+	result, apiErr := (*aH.reader).GetErrorFromErrorID(r.Context(), query)
+	if apiErr != nil {
+		respondError(w, apiErr, nil)
 		return
 	}

 	aH.writeJSON(w, r, result)
+}
+
+func (aH *APIHandler) getNextPrevErrorIDs(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseGetErrorRequest(r)
+	if aH.handleError(w, err, http.StatusBadRequest) {
+		return
+	}
+	result, apiErr := (*aH.reader).GetNextPrevErrorIDs(r.Context(), query)
+	if apiErr != nil {
+		respondError(w, apiErr, nil)
+		return
+	}
+
+	aH.writeJSON(w, r, result)
+}
+
+func (aH *APIHandler) getErrorFromGroupID(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseGetErrorRequest(r)
+	if aH.handleError(w, err, http.StatusBadRequest) {
+		return
+	}
+	result, apiErr := (*aH.reader).GetErrorFromGroupID(r.Context(), query)
+	if apiErr != nil {
+		respondError(w, apiErr, nil)
+		return
+	}
+
+	aH.writeJSON(w, r, result)
 }

 func (aH *APIHandler) getSpanFilters(w http.ResponseWriter, r *http.Request) {
@@ -360,28 +360,6 @@ func parseFilteredSpanAggregatesRequest(r *http.Request) (*model.GetFilteredSpan
 	return postData, nil
 }

-func parseErrorRequest(r *http.Request) (*model.GetErrorParams, error) {
-
-	params := &model.GetErrorParams{}
-
-	serviceName := r.URL.Query().Get("serviceName")
-	if len(serviceName) != 0 {
-		params.ServiceName = serviceName
-	}
-
-	errorType := r.URL.Query().Get("errorType")
-	if len(errorType) != 0 {
-		params.ErrorType = errorType
-	}
-
-	errorId := r.URL.Query().Get("errorId")
-	if len(errorId) != 0 {
-		params.ErrorID = errorId
-	}
-
-	return params, nil
-}
-
 func parseTagFilterRequest(r *http.Request) (*model.TagFilterParams, error) {
 	var postData *model.TagFilterParams
 	err := json.NewDecoder(r.Body).Decode(&postData)
@@ -427,7 +405,10 @@ func parseTagValueRequest(r *http.Request) (*model.TagFilterParams, error) {

 }

-func parseErrorsRequest(r *http.Request) (*model.GetErrorsParams, error) {
+func parseListErrorsRequest(r *http.Request) (*model.ListErrorsParams, error) {

+	var allowedOrderParams = []string{"exceptionType", "exceptionCount", "firstSeen", "lastSeen", "serviceName"}
+	var allowedOrderDirections = []string{"ascending", "descending"}
+
 	startTime, err := parseTime("start", r)
 	if err != nil {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
params := &model.GetErrorsParams{
|
order := r.URL.Query().Get("order")
|
||||||
|
if len(order) > 0 && !DoesExistInSlice(order, allowedOrderDirections) {
|
||||||
|
return nil, errors.New(fmt.Sprintf("given order: %s is not allowed in query", order))
|
||||||
|
}
|
||||||
|
orderParam := r.URL.Query().Get("orderParam")
|
||||||
|
if len(order) > 0 && !DoesExistInSlice(orderParam, allowedOrderParams) {
|
||||||
|
return nil, errors.New(fmt.Sprintf("given orderParam: %s is not allowed in query", orderParam))
|
||||||
|
}
|
||||||
|
limit := r.URL.Query().Get("limit")
|
||||||
|
offset := r.URL.Query().Get("offset")
|
||||||
|
|
||||||
|
if len(offset) == 0 || len(limit) == 0 {
|
||||||
|
return nil, fmt.Errorf("offset or limit param cannot be empty from the query")
|
||||||
|
}
|
||||||
|
|
||||||
|
limitInt, err := strconv.Atoi(limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("limit param is not in correct format")
|
||||||
|
}
|
||||||
|
offsetInt, err := strconv.Atoi(offset)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("offset param is not in correct format")
|
||||||
|
}
|
||||||
|
|
||||||
|
params := &model.ListErrorsParams{
|
||||||
Start: startTime,
|
Start: startTime,
|
||||||
End: endTime,
|
End: endTime,
|
||||||
|
OrderParam: orderParam,
|
||||||
|
Order: order,
|
||||||
|
Limit: int64(limitInt),
|
||||||
|
Offset: int64(offsetInt),
|
||||||
|
}
|
||||||
|
|
||||||
|
return params, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseCountErrorsRequest(r *http.Request) (*model.CountErrorsParams, error) {
|
||||||
|
|
||||||
|
startTime, err := parseTime("start", r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
endTime, err := parseTimeMinusBuffer("end", r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
params := &model.CountErrorsParams{
|
||||||
|
Start: startTime,
|
||||||
|
End: endTime,
|
||||||
|
}
|
||||||
|
|
||||||
|
return params, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseGetErrorRequest(r *http.Request) (*model.GetErrorParams, error) {
|
||||||
|
|
||||||
|
timestamp, err := parseTime("timestamp", r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
groupID := r.URL.Query().Get("groupID")
|
||||||
|
|
||||||
|
if len(groupID) == 0 {
|
||||||
|
return nil, fmt.Errorf("groupID param cannot be empty from the query")
|
||||||
|
}
|
||||||
|
errorID := r.URL.Query().Get("errorID")
|
||||||
|
|
||||||
|
params := &model.GetErrorParams{
|
||||||
|
Timestamp: timestamp,
|
||||||
|
GroupID: groupID,
|
||||||
|
ErrorID: errorID,
|
||||||
}
|
}
|
||||||
|
|
||||||
return params, nil
|
return params, nil
|
||||||
|
@@ -61,6 +61,10 @@ const (
 	StatusPending = "pending"
 	StatusFailed = "failed"
 	StatusSuccess = "success"
+	ExceptionType = "exceptionType"
+	ExceptionCount = "exceptionCount"
+	LastSeen = "lastSeen"
+	FirstSeen = "firstSeen"
 )
 const (
 	SIGNOZ_METRIC_DBNAME = "signoz_metrics"
@@ -41,9 +41,12 @@ type Reader interface {
 	GetFilteredSpans(ctx context.Context, query *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError)
 	GetFilteredSpansAggregates(ctx context.Context, query *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError)

-	GetErrors(ctx context.Context, params *model.GetErrorsParams) (*[]model.Error, *model.ApiError)
-	GetErrorForId(ctx context.Context, params *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError)
-	GetErrorForType(ctx context.Context, params *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError)
+	ListErrors(ctx context.Context, params *model.ListErrorsParams) (*[]model.Error, *model.ApiError)
+	CountErrors(ctx context.Context, params *model.CountErrorsParams) (uint64, *model.ApiError)
+	GetErrorFromErrorID(ctx context.Context, params *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError)
+	GetErrorFromGroupID(ctx context.Context, params *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError)
+	GetNextPrevErrorIDs(ctx context.Context, params *model.GetErrorParams) (*model.NextPrevErrorIDs, *model.ApiError)

 	// Search Interfaces
 	SearchTraces(ctx context.Context, traceID string) (*[]model.SearchSpansResult, error)

@@ -282,15 +282,24 @@ type GetTTLParams struct {
 	Type string
 }

-type GetErrorsParams struct {
+type ListErrorsParams struct {
+	Start *time.Time
+	End *time.Time
+	Limit int64
+	OrderParam string
+	Order string
+	Offset int64
+}
+
+type CountErrorsParams struct {
 	Start *time.Time
 	End *time.Time
 }

 type GetErrorParams struct {
-	ErrorType string
+	GroupID string
 	ErrorID string
-	ServiceName string
+	Timestamp *time.Time
 }

 type FilterItem struct {
@@ -341,20 +341,36 @@ type Error struct {
 	LastSeen time.Time `json:"lastSeen" ch:"lastSeen"`
 	FirstSeen time.Time `json:"firstSeen" ch:"firstSeen"`
 	ServiceName string `json:"serviceName" ch:"serviceName"`
+	GroupID string `json:"groupID" ch:"groupID"`
 }

 type ErrorWithSpan struct {
 	ErrorID string `json:"errorId" ch:"errorID"`
 	ExceptionType string `json:"exceptionType" ch:"exceptionType"`
 	ExceptionStacktrace string `json:"exceptionStacktrace" ch:"exceptionStacktrace"`
-	ExceptionEscaped string `json:"exceptionEscaped" ch:"exceptionEscaped"`
+	ExceptionEscaped bool `json:"exceptionEscaped" ch:"exceptionEscaped"`
 	ExceptionMsg string `json:"exceptionMessage" ch:"exceptionMessage"`
 	Timestamp time.Time `json:"timestamp" ch:"timestamp"`
 	SpanID string `json:"spanID" ch:"spanID"`
 	TraceID string `json:"traceID" ch:"traceID"`
 	ServiceName string `json:"serviceName" ch:"serviceName"`
-	NewerErrorID string `json:"newerErrorId" ch:"newerErrorId"`
-	OlderErrorID string `json:"olderErrorId" ch:"olderErrorId"`
+	GroupID string `json:"groupID" ch:"groupID"`
+}
+
+type NextPrevErrorIDsDBResponse struct {
+	NextErrorID string `ch:"nextErrorID"`
+	NextTimestamp time.Time `ch:"nextTimestamp"`
+	PrevErrorID string `ch:"prevErrorID"`
+	PrevTimestamp time.Time `ch:"prevTimestamp"`
+	Timestamp time.Time `ch:"timestamp"`
+}
+
+type NextPrevErrorIDs struct {
+	NextErrorID string `json:"nextErrorID"`
+	NextTimestamp time.Time `json:"nextTimestamp"`
+	PrevErrorID string `json:"prevErrorID"`
+	PrevTimestamp time.Time `json:"prevTimestamp"`
+	GroupID string `json:"groupID"`
 }

 type Series struct {
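Note (added for illustration, not part of the commit): the JSON shape returned by /api/v1/nextPrevErrorIDs, derived from the json tags on model.NextPrevErrorIDs above. The struct is re-declared locally only so the example is runnable; all field values are made up.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type NextPrevErrorIDs struct {
	NextErrorID   string    `json:"nextErrorID"`
	NextTimestamp time.Time `json:"nextTimestamp"`
	PrevErrorID   string    `json:"prevErrorID"`
	PrevTimestamp time.Time `json:"prevTimestamp"`
	GroupID       string    `json:"groupID"`
}

func main() {
	sample := NextPrevErrorIDs{
		NextErrorID:   "0b3c4d5e",
		NextTimestamp: time.Unix(0, 1656000001000000000).UTC(),
		PrevErrorID:   "9a8b7c6d",
		PrevTimestamp: time.Unix(0, 1655999999000000000).UTC(),
		GroupID:       "f1e2d3c4",
	}
	out, _ := json.MarshalIndent(sample, "", "  ")
	fmt.Println(string(out)) // prints the JSON document a client would receive
}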