From c76cef47ba366d8c9eb36ed61f5c94fcb5de4c98 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Wed, 3 Apr 2024 12:31:38 +0530
Subject: [PATCH 1/5] fix: remove integrations page view and add event for the same (#4802)

---
 frontend/src/pages/Integrations/Integrations.tsx | 4 ++--
 frontend/src/pages/Integrations/utils.ts         | 1 +
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/frontend/src/pages/Integrations/Integrations.tsx b/frontend/src/pages/Integrations/Integrations.tsx
index 1f1644fbc7..187c77f4f6 100644
--- a/frontend/src/pages/Integrations/Integrations.tsx
+++ b/frontend/src/pages/Integrations/Integrations.tsx
@@ -15,7 +15,7 @@ function Integrations(): JSX.Element {
 	const history = useHistory();
 	const location = useLocation();
 
-	const { trackPageView, trackEvent } = useAnalytics();
+	const { trackEvent } = useAnalytics();
 
 	const selectedIntegration = useMemo(() => urlQuery.get('integration'), [
 		urlQuery,
@@ -42,7 +42,7 @@ function Integrations(): JSX.Element {
 	);
 
 	useEffect(() => {
-		trackPageView(location.pathname);
+		trackEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_LIST_VISITED);
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, []);
 
diff --git a/frontend/src/pages/Integrations/utils.ts b/frontend/src/pages/Integrations/utils.ts
index a244da4c82..0785eb89c0 100644
--- a/frontend/src/pages/Integrations/utils.ts
+++ b/frontend/src/pages/Integrations/utils.ts
@@ -9,6 +9,7 @@ export const handleContactSupport = (isCloudUser: boolean): void => {
 };
 
 export const INTEGRATION_TELEMETRY_EVENTS = {
+	INTEGRATIONS_LIST_VISITED: 'Integrations Page: Visited the list page',
 	INTEGRATIONS_ITEM_LIST_CLICKED: 'Integrations Page: Clicked an integration',
 	INTEGRATIONS_DETAIL_CONNECT:
 		'Integrations Detail Page: Clicked connect integration button',

From 87534b6fb6c200f00b71e840cadd79a706284998 Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Wed, 3 Apr 2024 16:42:00 +0530
Subject: [PATCH 2/5] fix: incorrect error rate query (#4805)

---
 pkg/query-service/app/clickhouseReader/reader.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index 57d7318ee4..d795599845 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -879,7 +879,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
 			zap.L().Error("Error building query with tag params", zap.Error(errStatus))
 			return
 		}
-		query += subQuery
+		errorQuery += subQuery
 		args = append(args, argsSubQuery...)
 		err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
 		if err != nil {
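For context on the one-line fix above: GetServices builds two aggregate queries over the same window, one counting all spans and one counting error spans, and the tag-filter subquery has to be appended to both rather than to the total-count query twice. A minimal sketch of that pattern, with hypothetical SQL and column names (not the actual ClickHouse schema or reader code):

    package main

    import "fmt"

    // buildServiceQueries mirrors the shape of the fixed code: the same tag-filter
    // subquery must be appended to *both* the throughput query and the error-count
    // query, otherwise the error rate is computed over mismatched row sets.
    func buildServiceQueries(subQuery string) (query string, errorQuery string) {
        // Hypothetical SQL, not the real ClickHouse schema.
        query = "SELECT count() AS numCalls FROM spans WHERE serviceName = ?"
        errorQuery = "SELECT count() AS numErrors FROM spans WHERE statusCode = 2 AND serviceName = ?"

        // Before the fix the filter was appended to `query` a second time,
        // leaving `errorQuery` unfiltered; the fix appends it to `errorQuery`.
        query += subQuery
        errorQuery += subQuery
        return query, errorQuery
    }

    func main() {
        q, eq := buildServiceQueries(" AND stringTagMap['deployment'] = 'canary'")
        fmt.Println(q)
        fmt.Println(eq)
    }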
From a7b0ef55add657bdfe66cc75ddd46ed49315863f Mon Sep 17 00:00:00 2001
From: Nityananda Gohain
Date: Wed, 3 Apr 2024 17:52:45 +0530
Subject: [PATCH 3/5] fix: querier v2 synced and tablePanel result processor updated (#4807)

---
 pkg/query-service/app/http_handler.go       |  22 ++
 pkg/query-service/app/querier/v2/helper.go  | 210 +++++++++++++++-----
 pkg/query-service/app/querier/v2/querier.go |  13 ++
 3 files changed, 190 insertions(+), 55 deletions(-)

diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go
index 7f9e6795a7..9244700af0 100644
--- a/pkg/query-service/app/http_handler.go
+++ b/pkg/query-service/app/http_handler.go
@@ -3722,6 +3722,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
 	}
 
 	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+
 		result, err = postProcessResult(result, queryRangeParams)
 	}
 
@@ -3786,6 +3787,12 @@ func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
 	// We apply the functions here it's easier to add new functions
 	applyFunctions(result, queryRangeParams)
 
+	// expressions are executed at query service so the value of time.now in the individual
+	// queries will be different so for table panel we are making it same.
+	if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable {
+		tablePanelResultProcessor(result)
+	}
+
 	for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
 		// The way we distinguish between a formula and a query is by checking if the expression
 		// is the same as the query name
@@ -3838,3 +3845,18 @@ func applyFunctions(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV
 		}
 	}
 }
+
+func tablePanelResultProcessor(results []*v3.Result) {
+	var ts int64
+	for ridx := range results {
+		for sidx := range results[ridx].Series {
+			for pidx := range results[ridx].Series[sidx].Points {
+				if ts == 0 {
+					ts = results[ridx].Series[sidx].Points[pidx].Timestamp
+				} else {
+					results[ridx].Series[sidx].Points[pidx].Timestamp = ts
+				}
+			}
+		}
+	}
+}
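For context on the table-panel change above: each builder query stamps its points with its own evaluation time, so queries executed a few milliseconds apart produce points that no longer share a timestamp and cannot be collapsed into a single table row. A minimal sketch of the normalization, using simplified stand-ins for the v3 result types rather than the actual structs:

    package main

    import "fmt"

    // Point is a simplified stand-in for v3.Point.
    type Point struct {
        Timestamp int64
        Value     float64
    }

    // alignTimestamps copies the first timestamp it sees onto every other point,
    // which is the same idea as tablePanelResultProcessor in the patch.
    func alignTimestamps(series [][]Point) {
        var ts int64
        for i := range series {
            for j := range series[i] {
                if ts == 0 {
                    ts = series[i][j].Timestamp
                } else {
                    series[i][j].Timestamp = ts
                }
            }
        }
    }

    func main() {
        // Query A and query B were evaluated a few milliseconds apart.
        a := []Point{{Timestamp: 1712130000123, Value: 42}}
        b := []Point{{Timestamp: 1712130000187, Value: 7}}
        series := [][]Point{a, b}
        alignTimestamps(series)
        fmt.Println(series[0][0].Timestamp == series[1][0].Timestamp) // true
    }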
diff --git a/pkg/query-service/app/querier/v2/helper.go b/pkg/query-service/app/querier/v2/helper.go
index e564956f19..cc4e83b702 100644
--- a/pkg/query-service/app/querier/v2/helper.go
+++ b/pkg/query-service/app/querier/v2/helper.go
@@ -18,6 +18,61 @@ import (
 	"go.uber.org/zap"
 )
 
+func prepareLogsQuery(ctx context.Context,
+	start,
+	end int64,
+	builderQuery *v3.BuilderQuery,
+	params *v3.QueryRangeParamsV3,
+	preferRPM bool,
+) (string, error) {
+	query := ""
+
+	if params == nil || builderQuery == nil {
+		return query, fmt.Errorf("params and builderQuery cannot be nil")
+	}
+
+	// for ts query with limit replace it as it is already formed
+	if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
+		limitQuery, err := logsV3.PrepareLogsQuery(
+			start,
+			end,
+			params.CompositeQuery.QueryType,
+			params.CompositeQuery.PanelType,
+			builderQuery,
+			logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
+		)
+		if err != nil {
+			return query, err
+		}
+		placeholderQuery, err := logsV3.PrepareLogsQuery(
+			start,
+			end,
+			params.CompositeQuery.QueryType,
+			params.CompositeQuery.PanelType,
+			builderQuery,
+			logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
+		)
+		if err != nil {
+			return query, err
+		}
+		query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
+		return query, err
+	}
+
+	query, err := logsV3.PrepareLogsQuery(
+		start,
+		end,
+		params.CompositeQuery.QueryType,
+		params.CompositeQuery.PanelType,
+		builderQuery,
+		logsV3.Options{PreferRPM: preferRPM},
+	)
+	if err != nil {
+		return query, err
+	}
+	return query, err
+}
+
 func (q *querier) runBuilderQuery(
 	ctx context.Context,
 	builderQuery *v3.BuilderQuery,
@@ -48,54 +103,82 @@ func (q *querier) runBuilderQuery(
 	if builderQuery.DataSource == v3.DataSourceLogs {
 		var query string
 		var err error
-		// for ts query with limit replace it as it is already formed
-		if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
-			limitQuery, err := logsV3.PrepareLogsQuery(
-				start,
-				end,
-				params.CompositeQuery.QueryType,
-				params.CompositeQuery.PanelType,
-				builderQuery,
-				logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
-			)
-			if err != nil {
-				ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
-				return
-			}
-			placeholderQuery, err := logsV3.PrepareLogsQuery(
-				start,
-				end,
-				params.CompositeQuery.QueryType,
-				params.CompositeQuery.PanelType,
-				builderQuery,
-				logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
-			)
-			if err != nil {
-				ch <- channelResult{Err: err, Name: queryName, Query: placeholderQuery, Series: nil}
-				return
-			}
-			query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
-		} else {
-			query, err = logsV3.PrepareLogsQuery(
-				start,
-				end,
-				params.CompositeQuery.QueryType,
-				params.CompositeQuery.PanelType,
-				builderQuery,
-				logsV3.Options{PreferRPM: preferRPM},
-			)
+		if _, ok := cacheKeys[queryName]; !ok {
+			query, err = prepareLogsQuery(ctx, start, end, builderQuery, params, preferRPM)
 			if err != nil {
 				ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
 				return
 			}
-		}
-
-		if err != nil {
-			ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+			series, err := q.execClickHouseQuery(ctx, query)
+			ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
 			return
 		}
-		series, err := q.execClickHouseQuery(ctx, query)
-		ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+		cacheKey := cacheKeys[queryName]
+		var cachedData []byte
+		if !params.NoCache && q.cache != nil {
+			var retrieveStatus status.RetrieveStatus
+			data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+			zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String()))
+			if err == nil {
+				cachedData = data
+			}
+		}
+		misses := q.findMissingTimeRanges(start, end, params.Step, cachedData)
+		missedSeries := make([]*v3.Series, 0)
+		cachedSeries := make([]*v3.Series, 0)
+		for _, miss := range misses {
+			query, err = prepareLogsQuery(ctx, miss.start, miss.end, builderQuery, params, preferRPM)
+			if err != nil {
+				ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+				return
+			}
+			series, err := q.execClickHouseQuery(ctx, query)
+			if err != nil {
+				ch <- channelResult{
+					Err:    err,
+					Name:   queryName,
+					Query:  query,
+					Series: nil,
+				}
+				return
+			}
+			missedSeries = append(missedSeries, series...)
+		}
+		if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+			zap.L().Error("error unmarshalling cached data", zap.Error(err))
+		}
+		mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+		var mergedSeriesData []byte
+		var marshallingErr error
+		missedSeriesLen := len(missedSeries)
+		if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
+			// caching the data
+			mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
+			if marshallingErr != nil {
+				zap.L().Error("error marshalling merged series", zap.Error(marshallingErr))
+			}
+		}
+
+		// response doesn't need everything
+		filterCachedPoints(mergedSeries, start, end)
+
+		ch <- channelResult{
+			Err:    nil,
+			Name:   queryName,
+			Series: mergedSeries,
+		}
+
+		// Cache the seriesList for future queries
+		if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
+			// caching the data
+			err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+			if err != nil {
+				zap.L().Error("error storing merged series", zap.Error(err))
+				return
+			}
+		}
+
 		return
 	}
@@ -211,6 +294,19 @@
 			zap.L().Error("error unmarshalling cached data", zap.Error(err))
 		}
 		mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+		var mergedSeriesData []byte
+		var marshallingErr error
+		missedSeriesLen := len(missedSeries)
+		if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
+			// caching the data
+			mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
+			if marshallingErr != nil {
+				zap.S().Error("error marshalling merged series", zap.Error(marshallingErr))
+			}
+		}
+
+		// response doesn't need everything
+		filterCachedPoints(mergedSeries, params.Start, params.End)
 
 		ch <- channelResult{
 			Err:    nil,
@@ -218,13 +314,8 @@
 			Series: mergedSeries,
 		}
 		// Cache the seriesList for future queries
-		if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
-			mergedSeriesData, err := json.Marshal(mergedSeries)
-			if err != nil {
-				zap.L().Error("error marshalling merged series", zap.Error(err))
-				return
-			}
-			err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+		if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
+			err := q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
 			if err != nil {
 				zap.L().Error("error storing merged series", zap.Error(err))
 				return
@@ -293,18 +384,27 @@ func (q *querier) runBuilderExpression(
 	}
 
 	mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+	var mergedSeriesData []byte
+	missedSeriesLen := len(missedSeries)
+	var marshallingErr error
+	if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
+		// caching the data
+		mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
+		if marshallingErr != nil {
+			zap.S().Error("error marshalling merged series", zap.Error(marshallingErr))
+		}
+	}
+
+	// response doesn't need everything
+	filterCachedPoints(mergedSeries, params.Start, params.End)
+
 	ch <- channelResult{
 		Err:    nil,
 		Name:   queryName,
 		Series: mergedSeries,
 	}
 	// Cache the seriesList for future queries
-	if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
-		mergedSeriesData, err := json.Marshal(mergedSeries)
-		if err != nil {
-			zap.L().Error("error marshalling merged series", zap.Error(err))
-			return
-		}
+	if len(missedSeries) > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
 		err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
 		if err != nil {
 			zap.L().Error("error storing merged series", zap.Error(err))
diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go
index e45153da7d..47933e847d 100644
--- a/pkg/query-service/app/querier/v2/querier.go
+++ b/pkg/query-service/app/querier/v2/querier.go
@@ -241,6 +241,19 @@ func labelsToString(labels map[string]string) string {
 	return fmt.Sprintf("{%s}", strings.Join(labelKVs, ","))
 }
 
+func filterCachedPoints(cachedSeries []*v3.Series, start, end int64) {
+	for _, c := range cachedSeries {
+		points := []v3.Point{}
+		for _, p := range c.Points {
+			if p.Timestamp < start || p.Timestamp > end {
+				continue
+			}
+			points = append(points, p)
+		}
+		c.Points = points
+	}
+}
+
 func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
 	// Merge the missed series with the cached series by timestamp
 	mergedSeries := make([]*v3.Series, 0)
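Taken together, the helper.go and querier.go changes above marshal the merged series once for caching, trim cached points back to the requested window before responding, and only write to the cache when there were misses and marshalling succeeded. A condensed sketch of that flow, with simplified stand-ins for the cache and series types rather than the actual querier interfaces:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type Point struct {
        Timestamp int64
        Value     float64
    }

    type Series struct {
        Points []Point
    }

    // filterToWindow drops cached points that fall outside the requested range,
    // mirroring filterCachedPoints in the patch.
    func filterToWindow(series []*Series, start, end int64) {
        for _, s := range series {
            kept := make([]Point, 0, len(s.Points))
            for _, p := range s.Points {
                if p.Timestamp >= start && p.Timestamp <= end {
                    kept = append(kept, p)
                }
            }
            s.Points = kept
        }
    }

    func main() {
        cached := []*Series{{Points: []Point{{Timestamp: 50, Value: 1}, {Timestamp: 150, Value: 2}}}}
        missed := []*Series{{Points: []Point{{Timestamp: 250, Value: 3}}}}

        // Merge cached and freshly queried series (a stand-in for mergeSerieses).
        merged := append(cached, missed...)

        // Marshal the full merged result *before* trimming, so the cache keeps
        // every point, as the patch does via mergedSeriesData.
        forCache, err := json.Marshal(merged)

        // The response only needs the requested window.
        filterToWindow(merged, 100, 300)
        fmt.Println(len(merged[0].Points), len(merged[1].Points)) // 1 1

        // Store only when something was actually missed and marshalling succeeded.
        if len(missed) > 0 && err == nil {
            _ = forCache // e.g. cache.Store(key, forCache, time.Hour)
        }
    }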
From 6a4aa9a956b4b6536f60e7b47cb75d17bcdc5f55 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Thu, 4 Apr 2024 11:05:58 +0530
Subject: [PATCH 4/5] QB - Logs - Enable TimeShift function (#4792)

* feat: qb - logs - enable time shift function

* feat: qb - logs - enable time shift function

* feat: show functions for logs in v3 version too
---
 .../src/constants/queryFunctionOptions.ts     |  8 +++++-
 .../container/FormAlertRules/QuerySection.tsx |  5 ++--
 .../QBEntityOptions.styles.scss               |  1 +
 .../QBEntityOptions/QBEntityOptions.tsx       |  7 ++++-
 .../QueryBuilder/components/Query/Query.tsx   |  6 ++++-
 .../components/QueryFunctions/Function.tsx    | 19 +++++++++++---
 .../QueryFunctions/QueryFunctions.tsx         | 26 ++++++++++++++++---
 .../queryBuilder/useQueryBuilderOperations.ts |  7 ++++-
 8 files changed, 66 insertions(+), 13 deletions(-)

diff --git a/frontend/src/constants/queryFunctionOptions.ts b/frontend/src/constants/queryFunctionOptions.ts
index b79f673c46..4aa6332d67 100644
--- a/frontend/src/constants/queryFunctionOptions.ts
+++ b/frontend/src/constants/queryFunctionOptions.ts
@@ -2,7 +2,7 @@
 import { QueryFunctionsTypes } from 'types/common/queryBuilder';
 import { SelectOption } from 'types/common/select';
 
-export const queryFunctionOptions: SelectOption[] = [
+export const metricQueryFunctionOptions: SelectOption[] = [
 	{
 		value: QueryFunctionsTypes.CUTOFF_MIN,
 		label: 'Cut Off Min',
@@ -65,6 +65,12 @@ export const queryFunctionOptions: SelectOption[] = [
 	},
 ];
 
+export const logsQueryFunctionOptions: SelectOption[] = [
+	{
+		value: QueryFunctionsTypes.TIME_SHIFT,
+		label: 'Time Shift',
+	},
+];
 interface QueryFunctionConfigType {
 	[key: string]: {
 		showInput: boolean;
diff --git a/frontend/src/container/FormAlertRules/QuerySection.tsx b/frontend/src/container/FormAlertRules/QuerySection.tsx
index 1604cdb929..406a757eda 100644
--- a/frontend/src/container/FormAlertRules/QuerySection.tsx
+++ b/frontend/src/container/FormAlertRules/QuerySection.tsx
@@ -56,8 +56,9 @@ function QuerySection({
 					initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
 				}}
 				showFunctions={
-					alertType === AlertTypes.METRICS_BASED_ALERT &&
-					alertDef.version === ENTITY_VERSION_V4
+					(alertType === AlertTypes.METRICS_BASED_ALERT &&
+						alertDef.version === ENTITY_VERSION_V4) ||
+					alertType === AlertTypes.LOGS_BASED_ALERT
 				}
 				version={alertDef.version || 'v3'}
 			/>
diff --git a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss
index 3bafb59879..f0c7565d29 100644
--- a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss
+++ b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss
@@ -77,6 +77,7 @@
 	.qb-entity-options {
 		.options {
 			border-color: var(--bg-vanilla-300);
+			box-shadow: none;
 
 			.periscope-btn {
 				border-color: var(--bg-vanilla-300);
diff --git a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx
index 8730506a88..fd8e860508 100644
--- a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx
+++ b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx
@@ -17,6 +17,7 @@ import {
 	IBuilderQuery,
 	QueryFunctionProps,
 } from 'types/api/queryBuilder/queryBuilderData';
+import { DataSource } from 'types/common/queryBuilder';
 
 import QueryFunctions from '../QueryFunctions/QueryFunctions';
 
@@ -57,6 +58,8 @@ export default function QBEntityOptions({
 		}
 	};
 
+	const isLogsDataSource = query?.dataSource === DataSource.LOGS;
+
 	return (
@@ -97,12 +100,14 @@
 					{showFunctions &&
-						isMetricsDataSource &&
+						(isMetricsDataSource || isLogsDataSource) &&
 						query &&
 						onQueryFunctionsUpdates && (
 					)}
diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.tsx b/frontend/src/container/QueryBuilder/components/Query/Query.tsx
index 1bb761fde7..fb8b0e1561 100644
--- a/frontend/src/container/QueryBuilder/components/Query/Query.tsx
+++ b/frontend/src/container/QueryBuilder/components/Query/Query.tsx
@@ -36,6 +36,7 @@ import {
 } from 'react';
 import { useLocation } from 'react-use';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
+import { DataSource } from 'types/common/queryBuilder';
 import { transformToUpperCase } from 'utils/transformToUpperCase';
 
 import QBEntityOptions from '../QBEntityOptions/QBEntityOptions';
@@ -324,7 +325,10 @@