Merge branch 'develop' into otel-version-change

CheetoDa 2024-04-04 12:00:09 +05:30 committed by GitHub
commit 1520c1c57d
17 changed files with 329 additions and 85 deletions

View File

@ -10,6 +10,7 @@ import (
"net/http"
_ "net/http/pprof" // http profiler
"os"
"regexp"
"time"
"github.com/gorilla/handlers"
@ -393,13 +394,14 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v3/query_range"
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFromV3 := "/api/v3/query_range"
pathToExtractBodyFromV4 := "/api/v4/query_range"
data := map[string]interface{}{}
var postData *v3.QueryRangeParamsV3
if path == pathToExtractBodyFrom && (r.Method == "POST") {
if (r.Method == "POST") && ((path == pathToExtractBodyFromV3) || (path == pathToExtractBodyFromV4)) {
if r.Body != nil {
bodyBytes, err := io.ReadAll(r.Body)
if err != nil {
@ -417,6 +419,25 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
return nil, false
}
referrer := r.Header.Get("Referer")
dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the referrer", zap.Error(err))
}
alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the alert", zap.Error(err))
}
logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the logs explorer", zap.Error(err))
}
traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the trace explorer", zap.Error(err))
}
signozMetricsUsed := false
signozLogsUsed := false
signozTracesUsed := false
@ -445,6 +466,20 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
data["tracesUsed"] = signozTracesUsed
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
// switch case to set data["screen"] based on the referrer
switch {
case dashboardMatched:
data["screen"] = "panel"
case alertMatched:
data["screen"] = "alert"
case logsExplorerMatched:
data["screen"] = "logs-explorer"
case traceExplorerMatched:
data["screen"] = "traces-explorer"
default:
data["screen"] = "unknown"
return data, true
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false)
}
}
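
For reference, a minimal, self-contained sketch of how these referrer patterns drive the `data["screen"]` classification above. The helper name and the sample URLs are hypothetical, not part of this commit; first match wins, anything else falls through to "unknown":

```go
package main

import (
	"fmt"
	"regexp"
)

// classifyReferrer mirrors the switch above: the first matching
// pattern decides the screen name; unmatched referrers are "unknown".
func classifyReferrer(referrer string) string {
	patterns := []struct{ screen, re string }{
		{"panel", `/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`},
		{"alert", `/alerts/(new|edit)(?:\?.*)?$`},
		{"logs-explorer", `/logs/logs-explorer(?:\?.*)?$`},
		{"traces-explorer", `/traces-explorer(?:\?.*)?$`},
	}
	for _, p := range patterns {
		if ok, err := regexp.MatchString(p.re, referrer); err == nil && ok {
			return p.screen
		}
	}
	return "unknown"
}

func main() {
	fmt.Println(classifyReferrer("https://app.example.com/dashboard/abc-123/edit")) // panel
	fmt.Println(classifyReferrer("https://app.example.com/alerts/new?x=1"))         // alert
	fmt.Println(classifyReferrer("https://app.example.com/services"))               // unknown
}
```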
@ -472,7 +507,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()
queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r)
queryRangeData, metadataExists := extractQueryRangeData(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w)
@ -480,7 +515,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range queryRangeV3data {
for key, value := range queryRangeData {
data[key] = value
}
}

View File

@ -2,7 +2,7 @@
import { QueryFunctionsTypes } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
export const queryFunctionOptions: SelectOption<string, string>[] = [
export const metricQueryFunctionOptions: SelectOption<string, string>[] = [
{
value: QueryFunctionsTypes.CUTOFF_MIN,
label: 'Cut Off Min',
@ -65,6 +65,12 @@ export const queryFunctionOptions: SelectOption<string, string>[] = [
},
];
export const logsQueryFunctionOptions: SelectOption<string, string>[] = [
{
value: QueryFunctionsTypes.TIME_SHIFT,
label: 'Time Shift',
},
];
interface QueryFunctionConfigType {
[key: string]: {
showInput: boolean;

View File

@ -56,8 +56,9 @@ function QuerySection({
initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
}}
showFunctions={
alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
/>

View File

@ -77,6 +77,7 @@
.qb-entity-options {
.options {
border-color: var(--bg-vanilla-300);
box-shadow: none;
.periscope-btn {
border-color: var(--bg-vanilla-300);

View File

@ -17,6 +17,7 @@ import {
IBuilderQuery,
QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import QueryFunctions from '../QueryFunctions/QueryFunctions';
@ -57,6 +58,8 @@ export default function QBEntityOptions({
}
};
const isLogsDataSource = query?.dataSource === DataSource.LOGS;
return (
<Col span={24}>
<div className="qb-entity-options">
@ -97,12 +100,14 @@ export default function QBEntityOptions({
</Button>
{showFunctions &&
isMetricsDataSource &&
(isMetricsDataSource || isLogsDataSource) &&
query &&
onQueryFunctionsUpdates && (
<QueryFunctions
query={query}
queryFunctions={query.functions}
onChange={onQueryFunctionsUpdates}
maxFunctions={isLogsDataSource ? 1 : 3}
/>
)}
</Button.Group>

View File

@ -36,6 +36,7 @@ import {
} from 'react';
import { useLocation } from 'react-use';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { transformToUpperCase } from 'utils/transformToUpperCase';
import QBEntityOptions from '../QBEntityOptions/QBEntityOptions';
@ -324,7 +325,10 @@ export const Query = memo(function Query({
<QBEntityOptions
isMetricsDataSource={isMetricsDataSource}
showFunctions={
(version && version === ENTITY_VERSION_V4) || showFunctions || false
(version && version === ENTITY_VERSION_V4) ||
query.dataSource === DataSource.LOGS ||
showFunctions ||
false
}
isCollapsed={isCollapse}
entityType="query"

View File

@ -2,15 +2,21 @@
import { Button, Flex, Input, Select } from 'antd';
import cx from 'classnames';
import {
queryFunctionOptions,
logsQueryFunctionOptions,
metricQueryFunctionOptions,
queryFunctionsTypesConfig,
} from 'constants/queryFunctionOptions';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { debounce, isNil } from 'lodash-es';
import { X } from 'lucide-react';
import { QueryFunctionProps } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
interface FunctionProps {
query: IBuilderQuery;
funcData: QueryFunctionProps;
index: any;
handleUpdateFunctionArgs: any;
@ -19,6 +25,7 @@ interface FunctionProps {
}
export default function Function({
query,
funcData,
index,
handleUpdateFunctionArgs,
@ -44,6 +51,12 @@ export default function Function({
500,
);
// update the logic when we start supporting functions for traces
const functionOptions =
query.dataSource === DataSource.LOGS
? logsQueryFunctionOptions
: metricQueryFunctionOptions;
return (
<Flex className="query-function">
<Select
@ -62,7 +75,7 @@ export default function Function({
boxShadow: `4px 10px 16px 2px rgba(0, 0, 0, 0.20)`,
}}
placement="bottomRight"
options={queryFunctionOptions}
options={functionOptions}
/>
{showInput && (

View File

@ -6,19 +6,29 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { cloneDeep, pullAt } from 'lodash-es';
import { Plus } from 'lucide-react';
import { useState } from 'react';
import { QueryFunctionProps } from 'types/api/queryBuilder/queryBuilderData';
import { QueryFunctionsTypes } from 'types/common/queryBuilder';
import {
IBuilderQuery,
QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, QueryFunctionsTypes } from 'types/common/queryBuilder';
import Function from './Function';
const defaultFunctionStruct: QueryFunctionProps = {
const defaultMetricFunctionStruct: QueryFunctionProps = {
name: QueryFunctionsTypes.CUTOFF_MIN,
args: [],
};
const defaultLogFunctionStruct: QueryFunctionProps = {
name: QueryFunctionsTypes.TIME_SHIFT,
args: [],
};
interface QueryFunctionsProps {
query: IBuilderQuery;
queryFunctions: QueryFunctionProps[];
onChange: (functions: QueryFunctionProps[]) => void;
maxFunctions: number;
}
// SVG component
@ -71,8 +81,10 @@ function FunctionIcon({
}
export default function QueryFunctions({
query,
queryFunctions,
onChange,
maxFunctions = 3,
}: QueryFunctionsProps): JSX.Element {
const [functions, setFunctions] = useState<QueryFunctionProps[]>(
queryFunctions,
@ -81,6 +93,11 @@ export default function QueryFunctions({
const isDarkMode = useIsDarkMode();
const handleAddNewFunction = (): void => {
const defaultFunctionStruct =
query.dataSource === DataSource.LOGS
? defaultLogFunctionStruct
: defaultMetricFunctionStruct;
const updatedFunctionsArr = [
...functions,
{
@ -149,6 +166,7 @@ export default function QueryFunctions({
<div className="query-functions-list">
{functions.map((func, index) => (
<Function
query={query}
funcData={func}
index={index}
// eslint-disable-next-line react/no-array-index-key
@ -170,7 +188,7 @@ export default function QueryFunctions({
>
<Button
className="periscope-btn add-function-btn"
disabled={functions && functions.length >= 3}
disabled={functions && functions.length >= maxFunctions}
onClick={handleAddNewFunction}
>
<Plus size={14} color={!isDarkMode ? '#0B0C0E' : 'white'} />

View File

@ -294,7 +294,10 @@ export const useQueryOperations: UseQueryOperations = ({
...query,
};
if (newQuery.dataSource === DataSource.METRICS) {
if (
newQuery.dataSource === DataSource.METRICS ||
newQuery.dataSource === DataSource.LOGS
) {
newQuery.functions = functions;
}
@ -304,6 +307,7 @@ export const useQueryOperations: UseQueryOperations = ({
);
const isMetricsDataSource = query.dataSource === DataSource.METRICS;
const isLogsDataSource = query.dataSource === DataSource.LOGS;
const isTracePanelType = panelType === PANEL_TYPES.TRACE;
@ -346,6 +350,7 @@ export const useQueryOperations: UseQueryOperations = ({
return {
isTracePanelType,
isMetricsDataSource,
isLogsDataSource,
operators,
spaceAggregationOptions,
listOfAdditionalFilters,

View File

@ -15,7 +15,7 @@ function Integrations(): JSX.Element {
const history = useHistory();
const location = useLocation();
const { trackPageView, trackEvent } = useAnalytics();
const { trackEvent } = useAnalytics();
const selectedIntegration = useMemo(() => urlQuery.get('integration'), [
urlQuery,
@ -42,7 +42,7 @@ function Integrations(): JSX.Element {
);
useEffect(() => {
trackPageView(location.pathname);
trackEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_LIST_VISITED);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

View File

@ -9,6 +9,7 @@ export const handleContactSupport = (isCloudUser: boolean): void => {
};
export const INTEGRATION_TELEMETRY_EVENTS = {
INTEGRATIONS_LIST_VISITED: 'Integrations Page: Visited the list page',
INTEGRATIONS_ITEM_LIST_CLICKED: 'Integrations Page: Clicked an integration',
INTEGRATIONS_DETAIL_CONNECT:
'Integrations Detail Page: Clicked connect integration button',

View File

@ -50,6 +50,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/dao"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
@ -879,7 +880,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
zap.L().Error("Error building query with tag params", zap.Error(errStatus))
return
}
query += subQuery
errorQuery += subQuery
args = append(args, argsSubQuery...)
err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
if err != nil {
@ -3618,6 +3619,15 @@ func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedV
return &savedViewsInfo, nil
}
func (r *ClickHouseReader) GetUsers(ctx context.Context) ([]model.UserPayload, error) {
users, apiErr := dao.DB().GetUsers(ctx)
if apiErr != nil {
return nil, apiErr.Err
}
return users, nil
}
func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) {
// response will contain top level fields from the otel log model
response := model.GetFieldsResponse{

View File

@ -3722,6 +3722,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
result, err = postProcessResult(result, queryRangeParams)
}
@ -3786,6 +3787,12 @@ func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
// We apply the functions here; it's easier to add new functions
applyFunctions(result, queryRangeParams)
// expressions are executed at query service, so the value of time.Now in the
// individual queries will differ; for the table panel we make them the same.
if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable {
tablePanelResultProcessor(result)
}
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
// The way we distinguish between a formula and a query is by checking if the expression
// is the same as the query name
@ -3838,3 +3845,18 @@ func applyFunctions(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV
}
}
}
func tablePanelResultProcessor(results []*v3.Result) {
var ts int64
for ridx := range results {
for sidx := range results[ridx].Series {
for pidx := range results[ridx].Series[sidx].Points {
if ts == 0 {
ts = results[ridx].Series[sidx].Points[pidx].Timestamp
} else {
results[ridx].Series[sidx].Points[pidx].Timestamp = ts
}
}
}
}
}
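
A small illustration of what the alignment does, with hypothetical values and assuming the `v3.Result` / `v3.Series` / `v3.Point` shapes used above:

```go
// Two builder queries evaluated time.Now independently, so their
// table rows carry timestamps 1000 and 1003.
results := []*v3.Result{
	{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1000, Value: 1}}}}},
	{Series: []*v3.Series{{Points: []v3.Point{{Timestamp: 1003, Value: 2}}}}},
}
tablePanelResultProcessor(results)
// Every point now carries the first timestamp seen (1000),
// so the table panel can line the rows up.
```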

View File

@ -18,6 +18,61 @@ import (
"go.uber.org/zap"
)
func prepareLogsQuery(ctx context.Context,
start,
end int64,
builderQuery *v3.BuilderQuery,
params *v3.QueryRangeParamsV3,
preferRPM bool,
) (string, error) {
query := ""
if params == nil || builderQuery == nil {
return query, fmt.Errorf("params and builderQuery cannot be nil")
}
// for a time-series query with a limit, replace the placeholder since the limit query is already formed
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
)
if err != nil {
return query, err
}
placeholderQuery, err := logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
)
if err != nil {
return query, err
}
query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
return query, err
}
query, err := logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{PreferRPM: preferRPM},
)
if err != nil {
return query, err
}
return query, err
}
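
The two-pass limit flow above is easier to see with the queries inlined. A runnable toy of the splice; both query strings are fabricated for illustration, only the `#LIMIT_PLACEHOLDER` substitution is taken from the code:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// First pass: the "limit" query that picks the top-N group values.
	limitQuery := "SELECT host FROM logs GROUP BY host ORDER BY count() DESC LIMIT 10"
	// Second pass: the real query, with a placeholder where the top-N set goes.
	placeholderQuery := "SELECT ts, count() FROM logs WHERE host IN (#LIMIT_PLACEHOLDER) GROUP BY ts"
	query := strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
	fmt.Println(query)
}
```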
func (q *querier) runBuilderQuery(
ctx context.Context,
builderQuery *v3.BuilderQuery,
@ -48,54 +103,82 @@ func (q *querier) runBuilderQuery(
if builderQuery.DataSource == v3.DataSourceLogs {
var query string
var err error
// for a time-series query with a limit, replace the placeholder since the limit query is already formed
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
return
}
placeholderQuery, err := logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: placeholderQuery, Series: nil}
return
}
query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
} else {
query, err = logsV3.PrepareLogsQuery(
start,
end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
logsV3.Options{PreferRPM: preferRPM},
)
if _, ok := cacheKeys[queryName]; !ok {
query, err = prepareLogsQuery(ctx, start, end, builderQuery, params, preferRPM)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
return
}
}
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
series, err := q.execClickHouseQuery(ctx, query)
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
return
}
series, err := q.execClickHouseQuery(ctx, query)
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
cacheKey := cacheKeys[queryName]
var cachedData []byte
if !params.NoCache && q.cache != nil {
var retrieveStatus status.RetrieveStatus
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String()))
if err == nil {
cachedData = data
}
}
misses := q.findMissingTimeRanges(start, end, params.Step, cachedData)
missedSeries := make([]*v3.Series, 0)
cachedSeries := make([]*v3.Series, 0)
for _, miss := range misses {
query, err = prepareLogsQuery(ctx, miss.start, miss.end, builderQuery, params, preferRPM)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
return
}
series, err := q.execClickHouseQuery(ctx, query)
if err != nil {
ch <- channelResult{
Err: err,
Name: queryName,
Query: query,
Series: nil,
}
return
}
missedSeries = append(missedSeries, series...)
}
if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
zap.L().Error("error unmarshalling cached data", zap.Error(err))
}
mergedSeries := mergeSerieses(cachedSeries, missedSeries)
var mergedSeriesData []byte
var marshallingErr error
missedSeriesLen := len(missedSeries)
if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
// caching the data
mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
if marshallingErr != nil {
zap.L().Error("error marshalling merged series", zap.Error(marshallingErr))
}
}
// the response only needs points inside the requested window
filterCachedPoints(mergedSeries, start, end)
ch <- channelResult{
Err: nil,
Name: queryName,
Series: mergedSeries,
}
// Cache the seriesList for future queries
if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
// caching the data
err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
if err != nil {
zap.L().Error("error storing merged series", zap.Error(err))
return
}
}
return
}
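
With this change the logs branch mirrors the cache flow the metrics and expression branches already use. A condensed, non-runnable sketch of that flow, using identifiers from the surrounding code with error handling elided:

```go
// 1. Reuse cached data when allowed.
cachedData, _, _ := q.cache.Retrieve(cacheKey, true)
cachedSeries := []*v3.Series{}
_ = json.Unmarshal(cachedData, &cachedSeries)

// 2. Query ClickHouse only for the time ranges the cache doesn't cover.
missedSeries := make([]*v3.Series, 0)
for _, miss := range q.findMissingTimeRanges(start, end, params.Step, cachedData) {
	query, _ := prepareLogsQuery(ctx, miss.start, miss.end, builderQuery, params, preferRPM)
	series, _ := q.execClickHouseQuery(ctx, query)
	missedSeries = append(missedSeries, series...)
}

// 3. Merge, marshal the full merged set for the cache *before* trimming,
// trim to [start, end] for the response, then store.
mergedSeries := mergeSerieses(cachedSeries, missedSeries)
mergedSeriesData, _ := json.Marshal(mergedSeries)
filterCachedPoints(mergedSeries, start, end)
ch <- channelResult{Name: queryName, Series: mergedSeries}
_ = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
```

Marshalling before `filterCachedPoints` is the point of the new `mergedSeriesData` variable: the cache keeps the full merged window while the response returns only the requested range.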
@ -211,6 +294,19 @@ func (q *querier) runBuilderQuery(
zap.L().Error("error unmarshalling cached data", zap.Error(err))
}
mergedSeries := mergeSerieses(cachedSeries, missedSeries)
var mergedSeriesData []byte
var marshallingErr error
missedSeriesLen := len(missedSeries)
if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
// caching the data
mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
if marshallingErr != nil {
zap.L().Error("error marshalling merged series", zap.Error(marshallingErr))
}
}
// the response only needs points inside the requested window
filterCachedPoints(mergedSeries, params.Start, params.End)
ch <- channelResult{
Err: nil,
@ -218,13 +314,8 @@ func (q *querier) runBuilderQuery(
Series: mergedSeries,
}
// Cache the seriesList for future queries
if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
mergedSeriesData, err := json.Marshal(mergedSeries)
if err != nil {
zap.L().Error("error marshalling merged series", zap.Error(err))
return
}
err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
err := q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
if err != nil {
zap.L().Error("error storing merged series", zap.Error(err))
return
@ -293,18 +384,27 @@ func (q *querier) runBuilderExpression(
}
mergedSeries := mergeSerieses(cachedSeries, missedSeries)
var mergedSeriesData []byte
missedSeriesLen := len(missedSeries)
var marshallingErr error
if missedSeriesLen > 0 && !params.NoCache && q.cache != nil {
// caching the data
mergedSeriesData, marshallingErr = json.Marshal(mergedSeries)
if marshallingErr != nil {
zap.L().Error("error marshalling merged series", zap.Error(marshallingErr))
}
}
// response doesn't need everything
filterCachedPoints(mergedSeries, params.Start, params.End)
ch <- channelResult{
Err: nil,
Name: queryName,
Series: mergedSeries,
}
// Cache the seriesList for future queries
if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
mergedSeriesData, err := json.Marshal(mergedSeries)
if err != nil {
zap.L().Error("error marshalling merged series", zap.Error(err))
return
}
if len(missedSeries) > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil {
err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
if err != nil {
zap.L().Error("error storing merged series", zap.Error(err))

View File

@ -241,6 +241,19 @@ func labelsToString(labels map[string]string) string {
return fmt.Sprintf("{%s}", strings.Join(labelKVs, ","))
}
func filterCachedPoints(cachedSeries []*v3.Series, start, end int64) {
for _, c := range cachedSeries {
points := []v3.Point{}
for _, p := range c.Points {
if p.Timestamp < start || p.Timestamp > end {
continue
}
points = append(points, p)
}
c.Points = points
}
}
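
For example (hypothetical points, assuming the `v3.Series` / `v3.Point` shapes above), trimming to the window [10, 20]:

```go
s := &v3.Series{Points: []v3.Point{
	{Timestamp: 5}, {Timestamp: 10}, {Timestamp: 15}, {Timestamp: 25},
}}
filterCachedPoints([]*v3.Series{s}, 10, 20)
// s.Points now holds only the points at 10 and 15;
// 5 and 25 fall outside the inclusive [start, end] window.
```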
func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
// Merge the missed series with the cached series by timestamp
mergedSeries := make([]*v3.Series, 0)

View File

@ -96,6 +96,7 @@ type Reader interface {
GetLogAttributeKeys(ctx context.Context, req *v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error)
GetLogAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error)
GetLogAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error)
GetUsers(ctx context.Context) ([]model.UserPayload, error)
// Connection needed for rules, not ideal but required
GetConn() clickhouse.Conn

View File

@ -51,7 +51,6 @@ const (
var SAAS_EVENTS_LIST = map[string]struct{}{
TELEMETRY_EVENT_NUMBER_OF_SERVICES: {},
TELEMETRY_EVENT_ACTIVE_USER: {},
TELEMETRY_EVENT_HEART_BEAT: {},
TELEMETRY_EVENT_LANGUAGE: {},
TELEMETRY_EVENT_SERVICE: {},
@ -61,7 +60,7 @@ var SAAS_EVENTS_LIST = map[string]struct{}{
TELEMETRY_EVENT_DASHBOARDS_ALERTS: {},
TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY: {},
TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY: {},
// TELEMETRY_EVENT_QUERY_RANGE_API: {}, // this event is not part of SAAS_EVENTS_LIST as it may cause too many events to be sent
TELEMETRY_EVENT_QUERY_RANGE_API: {},
}
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
@ -194,10 +193,7 @@ func createTelemetry() {
rand.Seed(time.Now().UnixNano())
data := map[string]interface{}{}
telemetry.SetTelemetryEnabled(constants.IsTelemetryEnabled())
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "", true, false)
ticker := time.NewTicker(HEART_BEAT_DURATION)
activeUserTicker := time.NewTicker(ACTIVE_USER_DURATION)
@ -291,8 +287,16 @@ func createTelemetry() {
for key, value := range tsInfo {
data[key] = value
}
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "", true, false)
users, apiErr := telemetry.reader.GetUsers(context.Background())
if apiErr == nil {
for _, user := range users {
if user.Email == DEFAULT_CLOUD_EMAIL {
continue
}
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, user.Email, true, false)
}
}
alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background())
if err == nil {
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
@ -317,14 +321,19 @@ func createTelemetry() {
"tracesSavedViews": savedViewsInfo.TracesSavedViews,
}
// send event only if there are dashboards, alerts, channels, or saved views
if dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0 {
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "", true, false)
if (dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0) && apiErr == nil {
for _, user := range users {
if user.Email == DEFAULT_CLOUD_EMAIL {
continue
}
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, user.Email, true, false)
}
}
}
}
}
}
if err != nil {
if err != nil || apiErr != nil {
errMsg := ""
if err != nil {
errMsg = err.Error()
} else {
errMsg = apiErr.Err.Error()
}
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": errMsg}, "", true, false)
}