diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 7345f5b338..ba875f8c99 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -133,7 +133,7 @@ services:
     #   - ./data/clickhouse-3/:/var/lib/clickhouse/
 
   alertmanager:
-    image: signoz/alertmanager:0.23.4
+    image: signoz/alertmanager:0.23.5
     volumes:
       - ./data/alertmanager:/data
     command:
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 61e03804f4..b27e23c2b4 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -54,7 +54,7 @@ services:
 
   alertmanager:
     container_name: signoz-alertmanager
-    image: signoz/alertmanager:0.23.4
+    image: signoz/alertmanager:0.23.5
     volumes:
       - ./data/alertmanager:/data
     depends_on:
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 91474969e4..2b0652f1cf 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -149,7 +149,7 @@ services:
     #   - ./user_scripts:/var/lib/clickhouse/user_scripts/
 
   alertmanager:
-    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.4}
+    image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
     container_name: signoz-alertmanager
     volumes:
      - ./data/alertmanager:/data
diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go
index 32bb22435f..6defd85201 100644
--- a/ee/query-service/app/api/api.go
+++ b/ee/query-service/app/api/api.go
@@ -10,6 +10,7 @@ import (
 	"go.signoz.io/signoz/ee/query-service/license"
 	"go.signoz.io/signoz/ee/query-service/usage"
 	baseapp "go.signoz.io/signoz/pkg/query-service/app"
+	"go.signoz.io/signoz/pkg/query-service/app/integrations"
 	"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
 	"go.signoz.io/signoz/pkg/query-service/cache"
 	baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
@@ -31,6 +32,7 @@ type APIHandlerOptions struct {
 	UsageManager                  *usage.Manager
 	FeatureFlags                  baseint.FeatureLookup
 	LicenseManager                *license.Manager
+	IntegrationsController        *integrations.Controller
 	LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
 	Cache                         cache.Cache
 	// Querier Influx Interval
@@ -56,6 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
 		AppDao:                        opts.AppDao,
 		RuleManager:                   opts.RulesManager,
 		FeatureFlags:                  opts.FeatureFlags,
+		IntegrationsController:        opts.IntegrationsController,
 		LogsParsingPipelineController: opts.LogsParsingPipelineController,
 		Cache:                         opts.Cache,
 		FluxInterval:                  opts.FluxInterval,
diff --git a/ee/query-service/app/api/auth.go b/ee/query-service/app/api/auth.go
index a469b99e33..9ec99a4cc1 100644
--- a/ee/query-service/app/api/auth.go
+++ b/ee/query-service/app/api/auth.go
@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	requestBody, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.S().Errorf("received no input in api\n", err)
+		zap.L().Error("received no input in api", zap.Error(err))
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 
 	err = json.Unmarshal(requestBody, &req)
 	if err != nil {
-		zap.S().Errorf("received invalid user registration request", zap.Error(err))
+		zap.L().Error("received invalid user registration request", zap.Error(err))
 		RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
 		return
 	}
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	// get invite object
 	invite, err := baseauth.ValidateInvite(ctx, req)
 	if err != nil {
-		zap.S().Errorf("failed to validate invite token", err)
+		zap.L().Error("failed to validate invite token", zap.Error(err))
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
 
 	if invite == nil {
-		zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
+		zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
 		RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
 		return
 	}
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 
 	// get auth domain from email domain
 	domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
 	if apierr != nil {
-		zap.S().Errorf("failed to get domain from email", apierr)
+		zap.L().Error("failed to get domain from email", zap.Error(apierr))
 		RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
 	}
@@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 	ctx := context.Background()
 
 	if !ah.CheckFeature(model.SSO) {
-		zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
+		zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
 		return
 	}
 
 	q := r.URL.Query()
 	if errType := q.Get("error"); errType != "" {
-		zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
+		zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
 		return
 	}
 
 	relayState := q.Get("state")
-	zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
+	zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
 
 	parsedState, err := url.Parse(relayState)
 	if err != nil || relayState == "" {
-		zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 
 	identity, err := callbackHandler.HandleCallback(r)
 	if err != nil {
-		zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
 
 	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
 	if err != nil {
-		zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()
 
 	if !ah.CheckFeature(model.SSO) {
-		zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
+		zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
 		return
 	}
 
 	err := r.ParseForm()
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	// the relay state is sent when a login request is submitted to
 	// Idp.
 	relayState := r.FormValue("RelayState")
-	zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState))
+	zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
 
 	parsedState, err := url.Parse(relayState)
 	if err != nil || relayState == "" {
-		zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 
 	sp, err := domain.PrepareSamlRequest(parsedState)
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
 
 	assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
 
 	if assertionInfo.WarningInfo.InvalidTime {
-		zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
 
 	email := assertionInfo.NameID
 	if email == "" {
-		zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
+		zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
 
 	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go
index c6fe43a6bb..51cfddefb1 100644
--- a/ee/query-service/app/api/license.go
+++ b/ee/query-service/app/api/license.go
@@ -12,6 +12,20 @@ import (
 	"go.uber.org/zap"
 )
 
+type DayWiseBreakdown struct {
+	Type      string        `json:"type"`
+	Breakdown []DayWiseData `json:"breakdown"`
+}
+
+type DayWiseData struct {
+	Timestamp int64   `json:"timestamp"`
+	Count     float64 `json:"count"`
+	Size      float64 `json:"size"`
+	UnitPrice float64 `json:"unitPrice"`
+	Quantity  float64 `json:"quantity"`
+	Total     float64 `json:"total"`
+}
+
 type tierBreakdown struct {
 	UnitPrice float64 `json:"unitPrice"`
 	Quantity  float64 `json:"quantity"`
@@ -21,9 +35,10 @@ type tierBreakdown struct {
 }
 
 type usageResponse struct {
-	Type  string          `json:"type"`
-	Unit  string          `json:"unit"`
-	Tiers []tierBreakdown `json:"tiers"`
+	Type             string           `json:"type"`
+	Unit             string           `json:"unit"`
+	Tiers            []tierBreakdown  `json:"tiers"`
+	DayWiseBreakdown DayWiseBreakdown `json:"dayWiseBreakdown"`
 }
 
 type details struct {
@@ -176,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 	url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey)
 	req, err := http.NewRequest("GET", url, nil)
 	if err != nil {
-		zap.S().Error("Error while creating request for trial details", err)
+		zap.L().Error("Error while creating request for trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid blocking the UI
 		ah.Respond(w, resp)
@@ -185,7 +200,7 @@
 	req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
 	trialResp, err := hClient.Do(req)
 	if err != nil {
-		zap.S().Error("Error while fetching trial details", err)
+		zap.L().Error("Error while fetching trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
@@ -196,7 +211,7 @@
 
 	trialRespBody, err := io.ReadAll(trialResp.Body)
 	if err != nil || trialResp.StatusCode != http.StatusOK {
-		zap.S().Error("Error while fetching trial details", err)
+		zap.L().Error("Error while fetching trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
@@ -207,7 +222,7 @@
 	var trialRespData model.SubscriptionServerResp
 
 	if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil {
-		zap.S().Error("Error while decoding trial details", err)
+		zap.L().Error("Error while decoding trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
diff --git a/ee/query-service/app/api/metrics.go b/ee/query-service/app/api/metrics.go
index 81af7035b7..7c0e320f45 100644
--- a/ee/query-service/app/api/metrics.go
+++ b/ee/query-service/app/api/metrics.go
@@ -18,14 +18,14 @@ import (
 
 func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
 	if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
-		zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
+		zap.L().Info("CustomMetricsFunction feature is not enabled in this plan")
 		ah.APIHandler.QueryRangeMetricsV2(w, r)
 		return
 	}
 
 	metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
 	if apiErrorObj != nil {
-		zap.S().Errorf(apiErrorObj.Err.Error())
+		zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err))
 		RespondError(w, apiErrorObj, nil)
 		return
 	}
diff --git a/ee/query-service/app/api/pat.go b/ee/query-service/app/api/pat.go
index ea43f47fb0..3ff8be74a2 100644
--- a/ee/query-service/app/api/pat.go
+++ b/ee/query-service/app/api/pat.go
@@ -43,8 +43,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	pat := model.PAT{
-		Name: req.Name,
-		Role: req.Role,
+		Name:      req.Name,
+		Role:      req.Role,
 		ExpiresAt: req.ExpiresInDays,
 	}
 	err = validatePATRequest(pat)
@@ -65,7 +65,7 @@
 		pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
 	}
 
-	zap.S().Debugf("Got Create PAT request: %+v", pat)
+	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
 	var apierr basemodel.BaseApiError
 	if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
 		RespondError(w, apierr, nil)
@@ -115,7 +115,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	req.UpdatedByUserID = user.Id
 	id := mux.Vars(r)["id"]
 	req.UpdatedAt = time.Now().Unix()
-	zap.S().Debugf("Got Update PAT request: %+v", req)
+	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
 	var apierr basemodel.BaseApiError
 	if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
 		RespondError(w, apierr, nil)
@@ -135,7 +135,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
 		}, nil)
 		return
 	}
-	zap.S().Infof("Get PATs for user: %+v", user.Id)
+	zap.L().Info("Get PATs for user", zap.String("user_id", user.Id))
 	pats, apierr := ah.AppDao().ListPATs(ctx)
 	if apierr != nil {
 		RespondError(w, apierr, nil)
@@ -156,7 +156,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	zap.S().Debugf("Revoke PAT with id: %+v", id)
+	zap.L().Info("Revoke PAT with id", zap.String("id", id))
 	if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil {
 		RespondError(w, apierr, nil)
 		return
diff --git a/ee/query-service/app/api/traces.go b/ee/query-service/app/api/traces.go
index 22d66f7a82..ee18b2f50b 100644
--- a/ee/query-service/app/api/traces.go
+++ b/ee/query-service/app/api/traces.go
@@ -15,7 +15,7 @@ import (
 
 func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
 	if !ah.CheckFeature(basemodel.SmartTraceDetail) {
-		zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
+		zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
 		ah.APIHandler.SearchTraces(w, r)
 		return
 	}
@@ -26,7 +26,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
 	}
 	spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
 	if err != nil {
-		zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
+		zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err))
 		return
 	}
 	result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)
diff --git a/ee/query-service/app/db/metrics.go b/ee/query-service/app/db/metrics.go
index 3bafc6a638..c7b41b17f5 100644
--- a/ee/query-service/app/db/metrics.go
+++ b/ee/query-service/app/db/metrics.go
@@ -22,7 +22,7 @@
import ( func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) { defer utils.Elapsed("GetMetricResult")() - zap.S().Infof("Executing metric result query: %s", query) + zap.L().Info("Executing metric result query: ", zap.String("query", query)) var hash string // If getSubTreeSpans function is used in the clickhouse query @@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) } rows, err := r.conn.Query(ctx, query) - zap.S().Debug(query) if err != nil { - zap.S().Debug("Error in processing query: ", err) + zap.L().Error("Error in processing query", zap.Error(err)) return nil, "", fmt.Errorf("error in processing query") } @@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()) } default: - zap.S().Errorf("invalid var found in metric builder query result", v, colName) + zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName)) } } sort.Strings(groupBy) @@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) } // err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash) // if err != nil { - // zap.S().Error("Error in dropping temporary table: ", err) + // zap.L().Error("Error in dropping temporary table: ", err) // return nil, err // } if hash == "" { @@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) { - zap.S().Debugf("Executing getSubTreeSpans function") + zap.L().Debug("Executing getSubTreeSpans function") // str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;` @@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash) if err != nil { - zap.S().Error("Error in dropping temporary table: ", err) + zap.L().Error("Error in dropping temporary table", zap.Error(err)) return query, hash, err } // Create temporary table to store the getSubTreeSpans() results - zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash)) err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)") if err != nil { - zap.S().Error("Error in creating 
temporary table: ", err) + zap.L().Error("Error in creating temporary table", zap.Error(err)) return query, hash, err } var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse getSpansSubQuery := subtreeInput // Execute the subTree query - zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery) + zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery)) err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery) - // zap.S().Info(getSpansSubQuery) + // zap.L().Info(getSpansSubQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, fmt.Errorf("Error in processing sql query") } @@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu if len(getSpansSubQueryDBResponses) == 0 { return query, hash, fmt.Errorf("No spans found for the given query") } - zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery) + zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery)) err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, fmt.Errorf("Error in processing sql query") } // Process model to fetch the spans - zap.S().Debugf("Processing model to fetch the spans") + zap.L().Debug("Processing model to fetch the spans") searchSpanResponses := []basemodel.SearchSpanResponseItem{} for _, item := range searchScanResponses { var jsonItem basemodel.SearchSpanResponseItem @@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu } // Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash // Use map to store pointer to the spans to avoid duplicates and save memory - zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses) if err != nil { - zap.S().Error("Error in getSubTreeAlgorithm function: ", err) + zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err)) return query, hash, err } - zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash)) if err != nil { - zap.S().Error("Error in preparing batch statement: ", err) + zap.L().Error("Error in preparing batch statement", zap.Error(err)) return query, hash, err } for _, span := range treeSearchResponse { @@ -251,14 +250,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu span.Events, ) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, err } } - zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Inserting the subtree spans in temporary 
table getSubTreeSpans", zap.String("hash", hash)) err = statement.Send() if err != nil { - zap.S().Error("Error in sending statement: ", err) + zap.L().Error("Error in sending statement", zap.Error(err)) return query, hash, err } return query, hash, nil @@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub spans = append(spans, span) } - zap.S().Debug("Building Tree") + zap.L().Debug("Building Tree") roots, err := buildSpanTrees(&spans) if err != nil { return nil, err @@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub // For each root, get the subtree spans for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses { targetSpan := &model.SpanForTraceDetails{} - // zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) + // zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) // Search target span object in the tree for _, root := range roots { targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID) @@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub break } if err != nil { - zap.S().Error("Error during BreadthFirstSearch(): ", err) + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) return nil, err } } diff --git a/ee/query-service/app/db/trace.go b/ee/query-service/app/db/trace.go index 529a9a93fd..c6fe9045cf 100644 --- a/ee/query-service/app/db/trace.go +++ b/ee/query-service/app/db/trace.go @@ -49,7 +49,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI break } if err != nil { - zap.S().Error("Error during BreadthFirstSearch(): ", err) + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) return nil, err } } @@ -186,7 +186,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra // If the parent span is not found, add current span to list of roots if parent == nil { - // zap.S().Debug("Parent Span not found parent_id: ", span.ParentID) + // zap.L().Debug("Parent Span not found parent_id: ", span.ParentID) roots = append(roots, span) span.ParentID = "" continue diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index f8c7633417..c742eef01b 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -35,6 +35,7 @@ import ( baseapp "go.signoz.io/signoz/pkg/query-service/app" "go.signoz.io/signoz/pkg/query-service/app/dashboards" baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer" + "go.signoz.io/signoz/pkg/query-service/app/integrations" "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" "go.signoz.io/signoz/pkg/query-service/app/opamp" opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model" @@ -133,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { var reader interfaces.DataConnector storage := os.Getenv("STORAGE") if storage == "clickhouse" { - zap.S().Info("Using ClickHouse as datastore ...") + zap.L().Info("Using ClickHouse as datastore ...") qb := db.NewDataConnector( localDB, serverOptions.PromConfigPath, @@ -171,13 +172,22 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { } // initiate opamp - _, err = opAmpModel.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH) + _, err = 
opAmpModel.InitDB(localDB) if err != nil { return nil, err } + integrationsController, err := integrations.NewController(localDB) + if err != nil { + return nil, fmt.Errorf( + "couldn't create integrations controller: %w", err, + ) + } + // ingestion pipelines manager - logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite") + logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController( + localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations, + ) if err != nil { return nil, err } @@ -233,6 +243,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { UsageManager: usageManager, FeatureFlags: lm, LicenseManager: lm, + IntegrationsController: integrationsController, LogsParsingPipelineController: logParsingPipelineController, Cache: c, FluxInterval: fluxInterval, @@ -278,6 +289,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, r := mux.NewRouter() + r.Use(baseapp.LogCommentEnricher) r.Use(setTimeoutMiddleware) r.Use(s.analyticsMiddleware) r.Use(loggingMiddlewarePrivate) @@ -310,6 +322,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e } am := baseapp.NewAuthMiddleware(getUserFromRequest) + r.Use(baseapp.LogCommentEnricher) r.Use(setTimeoutMiddleware) r.Use(s.analyticsMiddleware) r.Use(loggingMiddleware) @@ -317,6 +330,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e apiHandler.RegisterRoutes(r, am) apiHandler.RegisterMetricsRoutes(r, am) apiHandler.RegisterLogsRoutes(r, am) + apiHandler.RegisterIntegrationRoutes(r, am) apiHandler.RegisterQueryRangeV3Routes(r, am) apiHandler.RegisterQueryRangeV4Routes(r, am) @@ -405,30 +419,33 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface signozMetricsUsed := false signozLogsUsed := false - dataSources := []string{} + signozTracesUsed := false if postData != nil { if postData.CompositeQuery != nil { data["queryType"] = postData.CompositeQuery.QueryType data["panelType"] = postData.CompositeQuery.PanelType - signozLogsUsed, signozMetricsUsed = telemetry.GetInstance().CheckSigNozSignals(postData) + signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData) } } - if signozMetricsUsed || signozLogsUsed { + if signozMetricsUsed || signozLogsUsed || signozTracesUsed { if signozMetricsUsed { - dataSources = append(dataSources, "metrics") telemetry.GetInstance().AddActiveMetricsUser() } if signozLogsUsed { - dataSources = append(dataSources, "logs") telemetry.GetInstance().AddActiveLogsUser() } - data["dataSources"] = dataSources + if signozTracesUsed { + telemetry.GetInstance().AddActiveTracesUser() + } + data["metricsUsed"] = signozMetricsUsed + data["logsUsed"] = signozLogsUsed + data["tracesUsed"] = signozTracesUsed userEmail, err := baseauth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail) } } return data, true @@ -508,7 +525,7 @@ func (s *Server) initListeners() error { return err } - zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) // listen on private port to support internal services privateHostPort := 
s.serverOptions.PrivateHostPort @@ -521,7 +538,7 @@ func (s *Server) initListeners() error { if err != nil { return err } - zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort)) return nil } @@ -533,7 +550,7 @@ func (s *Server) Start() error { if !s.serverOptions.DisableRules { s.ruleManager.Start() } else { - zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE") + zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE") } err := s.initListeners() @@ -547,23 +564,23 @@ func (s *Server) Start() error { } go func() { - zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) + zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) switch err := s.httpServer.Serve(s.httpConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do default: - zap.S().Error("Could not start HTTP server", zap.Error(err)) + zap.L().Error("Could not start HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable }() go func() { - zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort)) + zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort)) err = http.ListenAndServe(baseconst.DebugHttpPort, nil) if err != nil { - zap.S().Error("Could not start pprof server", zap.Error(err)) + zap.L().Error("Could not start pprof server", zap.Error(err)) } }() @@ -573,14 +590,14 @@ func (s *Server) Start() error { } go func() { - zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) + zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) switch err := s.privateHTTP.Serve(s.privateConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do - zap.S().Info("private http server closed") + zap.L().Info("private http server closed") default: - zap.S().Error("Could not start private HTTP server", zap.Error(err)) + zap.L().Error("Could not start private HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable @@ -588,10 +605,10 @@ func (s *Server) Start() error { }() go func() { - zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) + zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) err := s.opampServer.Start(baseconst.OpAmpWsEndpoint) if err != nil { - zap.S().Info("opamp ws server failed to start", err) + zap.L().Error("opamp ws server failed to start", zap.Error(err)) s.unavailableChannel <- healthcheck.Unavailable } }() @@ -667,7 +684,7 @@ func makeRulesManager( return nil, fmt.Errorf("rule manager error: %v", err) } - zap.S().Info("rules manager is ready") + zap.L().Info("rules manager is ready") return manager, nil } diff --git a/ee/query-service/auth/auth.go b/ee/query-service/auth/auth.go index 8c06384549..d45d050cca 100644 --- a/ee/query-service/auth/auth.go +++ b/ee/query-service/auth/auth.go @@ -17,25 +17,25 @@ import ( func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) { patToken := r.Header.Get("SIGNOZ-API-KEY") if len(patToken) > 0 { - zap.S().Debugf("Received 
a non-zero length PAT token") + zap.L().Debug("Received a non-zero length PAT token") ctx := context.Background() dao := apiHandler.AppDao() pat, err := dao.GetPAT(ctx, patToken) if err == nil && pat != nil { - zap.S().Debugf("Found valid PAT: %+v", pat) + zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat)) if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 { - zap.S().Debugf("PAT has expired: %+v", pat) + zap.L().Info("PAT has expired: ", zap.Any("pat", pat)) return nil, fmt.Errorf("PAT has expired") } group, apiErr := dao.GetGroupByName(ctx, pat.Role) if apiErr != nil { - zap.S().Debugf("Error while getting group for PAT: %+v", apiErr) + zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr)) return nil, apiErr } user, err := dao.GetUser(ctx, pat.UserID) if err != nil { - zap.S().Debugf("Error while getting user for PAT: %+v", err) + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) return nil, err } telemetry.GetInstance().SetPatTokenUser() @@ -48,7 +48,7 @@ func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel }, nil } if err != nil { - zap.S().Debugf("Error while getting user for PAT: %+v", err) + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) return nil, err } } diff --git a/ee/query-service/dao/sqlite/auth.go b/ee/query-service/dao/sqlite/auth.go index 664323eaaf..4418b04cbf 100644 --- a/ee/query-service/dao/sqlite/auth.go +++ b/ee/query-service/dao/sqlite/auth.go @@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) ( domain, apierr := m.GetDomainByEmail(ctx, email) if apierr != nil { - zap.S().Errorf("failed to get domain from email", apierr) + zap.L().Error("failed to get domain from email", zap.Error(apierr)) return nil, model.InternalErrorStr("failed to get domain from email") } hash, err := baseauth.PasswordHash(utils.GeneratePassowrd()) if err != nil { - zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err)) + zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err)) return nil, model.InternalErrorStr("failed to generate password hash") } group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup) if apiErr != nil { - zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err) + zap.L().Error("GetGroupByName failed", zap.Error(apiErr)) return nil, apiErr } @@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) ( user, apiErr = m.CreateUser(ctx, user, false) if apiErr != nil { - zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err) + zap.L().Error("CreateUser failed", zap.Error(apiErr)) return nil, apiErr } @@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st userPayload, apierr := m.GetUserByEmail(ctx, email) if !apierr.IsNil() { - zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error()) + zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error())) return "", model.BadRequestStr("invalid user email received from the auth provider") } @@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st newUser, apiErr := m.createUserForSAMLRequest(ctx, email) user = newUser if apiErr != nil { - zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error()) + zap.L().Error("failed to 
create user with email received from auth provider", zap.Error(apiErr)) return "", apiErr } } else { @@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st tokenStore, err := baseauth.GenerateJWTForUser(user) if err != nil { - zap.S().Errorf("failed to generate token for SSO login user", err) + zap.L().Error("failed to generate token for SSO login user", zap.Error(err)) return "", model.InternalErrorStr("failed to generate token for the user") } @@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( // do nothing, just skip sso ssoAvailable = false default: - zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) - return resp, model.BadRequest(err) + zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) + return resp, model.BadRequestStr(err.Error()) } } @@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( if len(emailComponents) > 0 { emailDomain = emailComponents[1] } - zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError()) + zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError())) return resp, apierr } @@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( escapedUrl, _ := url.QueryUnescape(sourceUrl) siteUrl, err := url.Parse(escapedUrl) if err != nil { - zap.S().Errorf("failed to parse referer", err) + zap.L().Error("failed to parse referer", zap.Error(err)) return resp, model.InternalError(fmt.Errorf("failed to generate login request")) } @@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl) if err != nil { - zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err) + zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err)) return resp, model.InternalError(err) } diff --git a/ee/query-service/dao/sqlite/domain.go b/ee/query-service/dao/sqlite/domain.go index b515af49c9..fbaa4fe332 100644 --- a/ee/query-service/dao/sqlite/domain.go +++ b/ee/query-service/dao/sqlite/domain.go @@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url if domainIdStr != "" { domainId, err := uuid.Parse(domainIdStr) if err != nil { - zap.S().Errorf("failed to parse domainId from relay state", err) + zap.L().Error("failed to parse domainId from relay state", zap.Error(err)) return nil, fmt.Errorf("failed to parse domainId from IdP response") } domain, err = m.GetDomain(ctx, domainId) if (err != nil) || domain == nil { - zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error()) + zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err)) return nil, fmt.Errorf("invalid credentials") } } @@ -64,7 +64,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url domainFromDB, err := m.GetDomainByName(ctx, domainNameStr) domain = domainFromDB if (err != nil) || domain == nil { - zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error()) + zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err)) return nil, fmt.Errorf("invalid 
credentials") } } @@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo for _, s := range stored { domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId} if err := domain.LoadConfig(s.Data); err != nil { - zap.S().Errorf("ListDomains() failed", zap.Error(err)) + zap.L().Error("ListDomains() failed", zap.Error(err)) } domains = append(domains, domain) } @@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba configJson, err := json.Marshal(domain) if err != nil { - zap.S().Errorf("failed to unmarshal domain config", zap.Error(err)) + zap.L().Error("failed to unmarshal domain config", zap.Error(err)) return model.InternalError(fmt.Errorf("domain creation failed")) } @@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba time.Now().Unix()) if err != nil { - zap.S().Errorf("failed to insert domain in db", zap.Error(err)) + zap.L().Error("failed to insert domain in db", zap.Error(err)) return model.InternalError(fmt.Errorf("domain creation failed")) } @@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError { if domain.Id == uuid.Nil { - zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) return model.InternalError(fmt.Errorf("domain update failed")) } configJson, err := json.Marshal(domain) if err != nil { - zap.S().Errorf("domain update failed", zap.Error(err)) + zap.L().Error("domain update failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain update failed")) } @@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba domain.Id) if err != nil { - zap.S().Errorf("domain update failed", zap.Error(err)) + zap.L().Error("domain update failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain update failed")) } @@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError { if id == uuid.Nil { - zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) return model.InternalError(fmt.Errorf("domain delete failed")) } @@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas id) if err != nil { - zap.S().Errorf("domain delete failed", zap.Error(err)) + zap.L().Error("domain delete failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain delete failed")) } diff --git a/ee/query-service/dao/sqlite/pat.go b/ee/query-service/dao/sqlite/pat.go index b2af1640c3..75169db685 100644 --- a/ee/query-service/dao/sqlite/pat.go +++ b/ee/query-service/dao/sqlite/pat.go @@ -26,12 +26,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem p.Revoked, ) if err != nil { - zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err)) return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) } id, err := result.LastInsertId() if err != nil { - zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err)) 
+ zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err)) return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) } p.Id = strconv.Itoa(int(id)) @@ -62,7 +62,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemo p.UpdatedByUserID, id) if err != nil { - zap.S().Errorf("Failed to update PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT update failed")) } return nil @@ -74,7 +74,7 @@ func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed lastUsed, token) if err != nil { - zap.S().Errorf("Failed to update PAT last used in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT last used update failed")) } return nil @@ -84,7 +84,7 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApi pats := []model.PAT{} if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil { - zap.S().Errorf("Failed to fetch PATs err: %v", zap.Error(err)) + zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err)) return nil, model.InternalError(fmt.Errorf("failed to fetch PATs")) } for i := range pats { @@ -129,7 +129,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) base "UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3", userID, updatedAt, id) if err != nil { - zap.S().Errorf("Failed to revoke PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT revoke failed")) } return nil diff --git a/ee/query-service/integrations/signozio/signozio.go b/ee/query-service/integrations/signozio/signozio.go index c1ad5e57e4..c18cfb6572 100644 --- a/ee/query-service/integrations/signozio/signozio.go +++ b/ee/query-service/integrations/signozio/signozio.go @@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString)) if err != nil { - zap.S().Errorf("failed to connect to license.signoz.io", err) + zap.L().Error("failed to connect to license.signoz.io", zap.Error(err)) return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection")) } httpBody, err := io.ReadAll(httpResponse.Body) if err != nil { - zap.S().Errorf("failed to read activation response from license.signoz.io", err) + zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err)) return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io")) } @@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) result := ActivationResult{} err = json.Unmarshal(httpBody, &result) if err != nil { - zap.S().Errorf("failed to marshal activation response from license.signoz.io", err) + zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err)) return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response")) } diff --git a/ee/query-service/license/db.go b/ee/query-service/license/db.go index 8d2f7065ff..bf71e9376d 100644 --- 
a/ee/query-service/license/db.go +++ b/ee/query-service/license/db.go @@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error { l.ValidationMessage) if err != nil { - zap.S().Errorf("error in inserting license data: ", zap.Error(err)) + zap.L().Error("error in inserting license data: ", zap.Error(err)) return fmt.Errorf("failed to insert license in db: %v", err) } @@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context, _, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key) if err != nil { - zap.S().Errorf("error in updating license: ", zap.Error(err)) + zap.L().Error("error in updating license: ", zap.Error(err)) return fmt.Errorf("failed to update license in db: %v", err) } diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go index dcfa8235b1..56cb685fec 100644 --- a/ee/query-service/license/manager.go +++ b/ee/query-service/license/manager.go @@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) { err := lm.InitFeatures(lm.activeFeatures) if err != nil { - zap.S().Panicf("Couldn't activate features: %v", err) + zap.L().Panic("Couldn't activate features", zap.Error(err)) } if !lm.validatorRunning { // we want to make sure only one validator runs, @@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error { if active != nil { lm.SetActive(active) } else { - zap.S().Info("No active license found, defaulting to basic plan") + zap.L().Info("No active license found, defaulting to basic plan") // if no active license is found, we default to basic(free) plan with all default features lm.activeFeatures = model.BasicPlan setDefaultFeatures(lm) err := lm.InitFeatures(lm.activeFeatures) if err != nil { - zap.S().Error("Couldn't initialize features: ", err) + zap.L().Error("Couldn't initialize features", zap.Error(err)) return err } } @@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) { // Validate validates the current active license func (lm *Manager) Validate(ctx context.Context) (reterr error) { - zap.S().Info("License validation started") + zap.L().Info("License validation started") if lm.activeLicense == nil { return nil } @@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { lm.lastValidated = time.Now().Unix() if reterr != nil { - zap.S().Errorf("License validation completed with error", reterr) + zap.L().Error("License validation completed with error", zap.Error(reterr)) atomic.AddUint64(&lm.failedAttempts, 1) telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, map[string]interface{}{"err": reterr.Error()}, "") } else { - zap.S().Info("License validation completed with no errors") + zap.L().Info("License validation completed with no errors") } lm.mutex.Unlock() @@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId) if apiError != nil { - zap.S().Errorf("failed to validate license", apiError) + zap.L().Error("failed to validate license", zap.Error(apiError.Err)) return apiError.Err } @@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { } if err := l.ParsePlan(); err != nil { - zap.S().Errorf("failed to parse updated license", zap.Error(err)) + zap.L().Error("failed to parse updated license", zap.Error(err)) return err } @@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { if err != nil { // unexpected db write issue but we 
can let the user continue // and wait for update to work in next cycle. - zap.S().Errorf("failed to validate license", zap.Error(err)) + zap.L().Error("failed to validate license", zap.Error(err)) } } @@ -270,7 +270,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m response, apiError := validate.ActivateLicense(key, "") if apiError != nil { - zap.S().Errorf("failed to activate license", zap.Error(apiError.Err)) + zap.L().Error("failed to activate license", zap.Error(apiError.Err)) return nil, apiError } @@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m err := l.ParsePlan() if err != nil { - zap.S().Errorf("failed to activate license", zap.Error(err)) + zap.L().Error("failed to activate license", zap.Error(err)) return nil, model.InternalError(err) } // store the license before activating it err = lm.repo.InsertLicense(ctx, l) if err != nil { - zap.S().Errorf("failed to activate license", zap.Error(err)) + zap.L().Error("failed to activate license", zap.Error(err)) return nil, model.InternalError(err) } diff --git a/ee/query-service/main.go b/ee/query-service/main.go index 427f78059b..3323e5bdbd 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -14,10 +14,10 @@ import ( semconv "go.opentelemetry.io/otel/semconv/v1.4.0" "go.signoz.io/signoz/ee/query-service/app" "go.signoz.io/signoz/pkg/query-service/auth" - "go.signoz.io/signoz/pkg/query-service/constants" baseconst "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/version" "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder" zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync" @@ -27,18 +27,19 @@ import ( ) func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { - config := zap.NewDevelopmentConfig() + config := zap.NewProductionConfig() ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt) defer stop() - config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder - otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig) - consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig) - defaultLogLevel := zapcore.DebugLevel - config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder + config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder config.EncoderConfig.TimeKey = "timestamp" config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder + otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig) + consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig) + defaultLogLevel := zapcore.InfoLevel + res := resource.NewWithAttributes( semconv.SchemaURL, semconv.ServiceNameKey.String("query-service"), @@ -48,14 +49,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel), ) - if enableQueryServiceLogOTLPExport == true { - conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30)) + if enableQueryServiceLogOTLPExport { + ctx, _ := context.WithTimeout(ctx, time.Second*30) + conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials())) if err != nil { - log.Println("failed to connect to otlp collector to export query service logs with error:", err) + 
log.Fatalf("failed to establish connection: %v", err) } else { logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize) if err != nil { - logExportBatchSizeInt = 1000 + logExportBatchSizeInt = 512 } ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{ BatchSize: logExportBatchSizeInt, @@ -113,7 +115,6 @@ func main() { zap.ReplaceGlobals(loggerMgr) defer loggerMgr.Sync() // flushes buffer, if any - logger := loggerMgr.Sugar() version.PrintVersion() serverOptions := &app.ServerOptions{ @@ -137,22 +138,22 @@ func main() { auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET") if len(auth.JwtSecret) == 0 { - zap.S().Warn("No JWT secret key is specified.") + zap.L().Warn("No JWT secret key is specified.") } else { - zap.S().Info("No JWT secret key set successfully.") + zap.L().Info("JWT secret key set successfully.") } server, err := app.NewServer(serverOptions) if err != nil { - logger.Fatal("Failed to create server", zap.Error(err)) + zap.L().Fatal("Failed to create server", zap.Error(err)) } if err := server.Start(); err != nil { - logger.Fatal("Could not start servers", zap.Error(err)) + zap.L().Fatal("Could not start server", zap.Error(err)) } if err := auth.InitAuthCache(context.Background()); err != nil { - logger.Fatal("Failed to initialize auth cache", zap.Error(err)) + zap.L().Fatal("Failed to initialize auth cache", zap.Error(err)) } signalsChannel := make(chan os.Signal, 1) @@ -161,9 +162,9 @@ func main() { for { select { case status := <-server.HealthCheckStatus(): - logger.Info("Received HealthCheck status: ", zap.Int("status", int(status))) + zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status))) case <-signalsChannel: - logger.Fatal("Received OS Interrupt Signal ... ") + zap.L().Fatal("Received OS Interrupt Signal ... 
") server.Stop() } } diff --git a/ee/query-service/model/domain.go b/ee/query-service/model/domain.go index beadd66a51..4d5ff66df2 100644 --- a/ee/query-service/model/domain.go +++ b/ee/query-service/model/domain.go @@ -9,8 +9,8 @@ import ( "github.com/google/uuid" "github.com/pkg/errors" saml2 "github.com/russellhaering/gosaml2" - "go.signoz.io/signoz/ee/query-service/sso/saml" "go.signoz.io/signoz/ee/query-service/sso" + "go.signoz.io/signoz/ee/query-service/sso/saml" basemodel "go.signoz.io/signoz/pkg/query-service/model" "go.uber.org/zap" ) @@ -24,16 +24,16 @@ const ( // OrgDomain identify org owned web domains for auth and other purposes type OrgDomain struct { - Id uuid.UUID `json:"id"` - Name string `json:"name"` - OrgId string `json:"orgId"` - SsoEnabled bool `json:"ssoEnabled"` - SsoType SSOType `json:"ssoType"` + Id uuid.UUID `json:"id"` + Name string `json:"name"` + OrgId string `json:"orgId"` + SsoEnabled bool `json:"ssoEnabled"` + SsoType SSOType `json:"ssoType"` - SamlConfig *SamlConfig `json:"samlConfig"` + SamlConfig *SamlConfig `json:"samlConfig"` GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"` - Org *basemodel.Organization + Org *basemodel.Organization } func (od *OrgDomain) String() string { @@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string { return "" } -// PrepareGoogleOAuthProvider creates GoogleProvider that is used in -// requesting OAuth and also used in processing response from google +// PrepareGoogleOAuthProvider creates GoogleProvider that is used in +// requesting OAuth and also used in processing response from google func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) { if od.GoogleAuthConfig == nil { return nil, fmt.Errorf("Google auth is not setup correctly for this domain") @@ -137,38 +137,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro } func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { - fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1) - + // build redirect url from window.location sent by frontend redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path) // prepare state that gets relayed back when the auth provider // calls back our url. here we pass the app url (where signoz runs) // and the domain Id. The domain Id helps in identifying sso config - // when the call back occurs and the app url is useful in redirecting user - // back to the right path. + // when the call back occurs and the app url is useful in redirecting user + // back to the right path. // why do we need to pass app url? the callback typically is handled by backend // and sometimes backend might right at a different port or is unaware of frontend // endpoint (unless SITE_URL param is set). hence, we receive this build sso request - // along with frontend window.location and use it to relay the information through - // auth provider to the backend (HandleCallback or HandleSSO method). + // along with frontend window.location and use it to relay the information through + // auth provider to the backend (HandleCallback or HandleSSO method). 
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId) - - switch (od.SsoType) { + switch od.SsoType { case SAML: sp, err := od.PrepareSamlRequest(siteUrl) if err != nil { return "", err } - + return sp.BuildAuthURL(relayState) - + case GoogleAuth: - + googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl) if err != nil { return "", err @@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { return googleProvider.BuildAuthURL(relayState) default: - zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) - return "", fmt.Errorf("unsupported SSO config for the domain") + zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) + return "", fmt.Errorf("unsupported SSO config for the domain") } - } diff --git a/ee/query-service/model/plans.go b/ee/query-service/model/plans.go index 5b6f230550..09a88bbf9f 100644 --- a/ee/query-service/model/plans.go +++ b/ee/query-service/model/plans.go @@ -90,6 +90,13 @@ var BasicPlan = basemodel.FeatureSet{ UsageLimit: -1, Route: "", }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, basemodel.Feature{ Name: basemodel.AlertChannelMsTeams, Active: false, @@ -177,6 +184,13 @@ var ProPlan = basemodel.FeatureSet{ UsageLimit: -1, Route: "", }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, basemodel.Feature{ Name: basemodel.AlertChannelMsTeams, Active: true, @@ -264,6 +278,13 @@ var EnterprisePlan = basemodel.FeatureSet{ UsageLimit: -1, Route: "", }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, basemodel.Feature{ Name: basemodel.AlertChannelMsTeams, Active: true, @@ -279,17 +300,17 @@ var EnterprisePlan = basemodel.FeatureSet{ Route: "", }, basemodel.Feature{ - Name: Onboarding, - Active: true, - Usage: 0, + Name: Onboarding, + Active: true, + Usage: 0, UsageLimit: -1, - Route: "", + Route: "", }, basemodel.Feature{ - Name: ChatSupport, - Active: true, - Usage: 0, + Name: ChatSupport, + Active: true, + Usage: 0, UsageLimit: -1, - Route: "", + Route: "", }, } diff --git a/ee/query-service/sso/saml/request.go b/ee/query-service/sso/saml/request.go index 01af7afe28..c9788d0ff3 100644 --- a/ee/query-service/sso/saml/request.go +++ b/ee/query-service/sso/saml/request.go @@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (* IDPCertificateStore: certStore, SPKeyStore: randomKeyStore, } - zap.S().Debugf("SAML request:", sp) + zap.L().Debug("SAML request", zap.Any("sp", sp)) return sp, nil } diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go index 99158b4345..72535c9ae5 100644 --- a/ee/query-service/usage/manager.go +++ b/ee/query-service/usage/manager.go @@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() { // check if license is present or not license, err := lm.licenseRepo.GetActiveLicense(ctx) if err != nil { - zap.S().Errorf("failed to get active license: %v", zap.Error(err)) + zap.L().Error("failed to get active license", zap.Error(err)) return } if license == nil { // we will not start the usage reporting if license is not present. 
- zap.S().Info("no license present, skipping usage reporting") + zap.L().Info("no license present, skipping usage reporting") return } @@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() { dbusages := []model.UsageDB{} err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) if err != nil && !strings.Contains(err.Error(), "doesn't exist") { - zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err)) + zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) return } for _, u := range dbusages { @@ -133,16 +133,16 @@ func (lm *Manager) UploadUsage() { } if len(usages) <= 0 { - zap.S().Info("no snapshots to upload, skipping.") + zap.L().Info("no snapshots to upload, skipping.") return } - zap.S().Info("uploading usage data") + zap.L().Info("uploading usage data") orgName := "" orgNames, orgError := lm.modelDao.GetOrgs(ctx) if orgError != nil { - zap.S().Errorf("failed to get org data: %v", zap.Error(orgError)) + zap.L().Error("failed to get org data: %v", zap.Error(orgError)) } if len(orgNames) == 1 { orgName = orgNames[0].Name @@ -152,14 +152,14 @@ func (lm *Manager) UploadUsage() { for _, usage := range usages { usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data)) if err != nil { - zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err)) + zap.L().Error("error while decrypting usage data: %v", zap.Error(err)) return } usageData := model.Usage{} err = json.Unmarshal(usageDataBytes, &usageData) if err != nil { - zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err)) + zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err)) return } @@ -184,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload for i := 1; i <= MaxRetries; i++ { apiErr := licenseserver.SendUsage(ctx, payload) if apiErr != nil && i == MaxRetries { - zap.S().Errorf("retries stopped : %v", zap.Error(apiErr)) + zap.L().Error("retries stopped : %v", zap.Error(apiErr)) // not returning error here since it is captured in the failed count return } else if apiErr != nil { // sleeping for exponential backoff sleepDuration := RetryInterval * time.Duration(i) - zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err)) + zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err)) time.Sleep(sleepDuration) } else { break @@ -201,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload func (lm *Manager) Stop() { lm.scheduler.Stop() - zap.S().Debug("sending usage data before shutting down") + zap.L().Info("sending usage data before shutting down") // send usage before shutting down lm.UploadUsage() diff --git a/frontend/package.json b/frontend/package.json index 293b4903fb..e7d1861cd4 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -107,6 +107,7 @@ "react-virtuoso": "4.0.3", "redux": "^4.0.5", "redux-thunk": "^2.3.0", + "rehype-raw": "7.0.0", "stream": "^0.0.2", "style-loader": "1.3.0", "styled-components": "^5.3.11", @@ -203,6 +204,7 @@ "jest-styled-components": "^7.0.8", "lint-staged": "^12.5.0", "msw": "1.3.2", + "npm-run-all": "latest", "portfinder-sync": "^0.0.2", "prettier": "2.2.1", "raw-loader": "4.0.2", @@ -216,8 +218,7 @@ "ts-node": "^10.2.1", "typescript-plugin-css-modules": "5.0.1", "webpack-bundle-analyzer": "^4.5.0", - 
"webpack-cli": "^4.9.2", - "npm-run-all": "latest" + "webpack-cli": "^4.9.2" }, "lint-staged": { "*.(js|jsx|ts|tsx)": [ diff --git a/frontend/public/Icons/cable-car.svg b/frontend/public/Icons/cable-car.svg new file mode 100644 index 0000000000..0c7318debd --- /dev/null +++ b/frontend/public/Icons/cable-car.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/configure.svg b/frontend/public/Icons/configure.svg new file mode 100644 index 0000000000..088dfa9447 --- /dev/null +++ b/frontend/public/Icons/configure.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/group.svg b/frontend/public/Icons/group.svg new file mode 100644 index 0000000000..e293cebcd0 --- /dev/null +++ b/frontend/public/Icons/group.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/redis-logo.svg b/frontend/public/Icons/redis-logo.svg new file mode 100644 index 0000000000..424f1e575f --- /dev/null +++ b/frontend/public/Icons/redis-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/locales/en-GB/alerts.json b/frontend/public/locales/en-GB/alerts.json index 5b102e147d..0901b14d19 100644 --- a/frontend/public/locales/en-GB/alerts.json +++ b/frontend/public/locales/en-GB/alerts.json @@ -37,11 +37,16 @@ "text_condition1": "Send a notification when", "text_condition2": "the threshold", "text_condition3": "during the last", + "option_1min": "1 min", "option_5min": "5 mins", "option_10min": "10 mins", "option_15min": "15 mins", + "option_30min": "30 mins", "option_60min": "60 mins", "option_4hours": "4 hours", + "option_3hours": "3 hours", + "option_6hours": "6 hours", + "option_12hours": "12 hours", "option_24hours": "24 hours", "field_threshold": "Alert Threshold", "option_allthetimes": "all the times", @@ -111,5 +116,8 @@ "exceptions_based_alert": "Exceptions-based Alert", "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", "field_unit": "Threshold unit", + "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", + "text_for": "minutes", "selected_query_placeholder": "Select query" } diff --git a/frontend/public/locales/en-GB/dashboard.json b/frontend/public/locales/en-GB/dashboard.json index 6179004aff..bc7969d053 100644 --- a/frontend/public/locales/en-GB/dashboard.json +++ b/frontend/public/locales/en-GB/dashboard.json @@ -25,5 +25,5 @@ "dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.", "dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.", "your_graph_build_with": "Your graph built with", - "dashboar_ok_confirm": "query will be saved. Press OK to confirm." + "dashboard_ok_confirm": "query will be saved. Press OK to confirm." 
} diff --git a/frontend/public/locales/en-GB/organizationsettings.json b/frontend/public/locales/en-GB/organizationsettings.json index deae9666ee..74654d9b46 100644 --- a/frontend/public/locales/en-GB/organizationsettings.json +++ b/frontend/public/locales/en-GB/organizationsettings.json @@ -14,6 +14,5 @@ "delete_domain_message": "Are you sure you want to delete this domain?", "delete_domain": "Delete Domain", "add_domain": "Add Domains", - "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly", - "invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually" + "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly" } diff --git a/frontend/public/locales/en/alerts.json b/frontend/public/locales/en/alerts.json index 455ade61e3..597cc24096 100644 --- a/frontend/public/locales/en/alerts.json +++ b/frontend/public/locales/en/alerts.json @@ -37,11 +37,16 @@ "text_condition1": "Send a notification when", "text_condition2": "the threshold", "text_condition3": "during the last", + "option_1min": "1 min", "option_5min": "5 mins", "option_10min": "10 mins", "option_15min": "15 mins", + "option_30min": "30 mins", "option_60min": "60 mins", + "option_3hours": "3 hours", "option_4hours": "4 hours", + "option_6hours": "6 hours", + "option_12hours": "12 hours", "option_24hours": "24 hours", "field_threshold": "Alert Threshold", "option_allthetimes": "all the times", @@ -111,5 +116,8 @@ "exceptions_based_alert": "Exceptions-based Alert", "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", "field_unit": "Threshold unit", + "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", + "text_for": "minutes", "selected_query_placeholder": "Select query" } diff --git a/frontend/public/locales/en/billings.json b/frontend/public/locales/en/billings.json new file mode 100644 index 0000000000..fb706e002f --- /dev/null +++ b/frontend/public/locales/en/billings.json @@ -0,0 +1,14 @@ +{ + "days_remaining": "days remaining in your billing period.", + "billing": "Billing", + "manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.", + "enterprise_cloud": "Enterprise Cloud", + "enterprise": "Enterprise", + "card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.", + "upgrade_plan": "Upgrade Plan", + "manage_billing": "Manage Billing", + "upgrade_now_text": "Upgrade now to have uninterrupted access", + "billing_start_info": "Your billing will start only after the trial period", + "checkout_plans": "Check out features in paid plans", + "here": "here" +} diff --git a/frontend/public/locales/en/channels.json b/frontend/public/locales/en/channels.json index 63094aa911..9ab31d697c 100644 --- a/frontend/public/locales/en/channels.json +++ b/frontend/public/locales/en/channels.json @@ -23,6 +23,12 @@ "field_opsgenie_api_key": "API Key", "field_opsgenie_description": "Description", "placeholder_opsgenie_description": "Description", + "help_email_to": "Email address(es) to send alerts to (comma separated)", + "field_email_to": "To", + "placeholder_email_to": "To", + "help_email_html": "Send email in html format", + "field_email_html": "Email body template", + "placeholder_email_html": "Email 
body template", "field_webhook_username": "User Name (optional)", "field_webhook_password": "Password (optional)", "field_pager_routing_key": "Routing Key", diff --git a/frontend/public/locales/en/dashboard.json b/frontend/public/locales/en/dashboard.json index a74f23d228..9c0529cd73 100644 --- a/frontend/public/locales/en/dashboard.json +++ b/frontend/public/locales/en/dashboard.json @@ -28,5 +28,5 @@ "dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.", "dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.", "your_graph_build_with": "Your graph built with", - "dashboar_ok_confirm": "query will be saved. Press OK to confirm." + "dashboard_ok_confirm": "query will be saved. Press OK to confirm." } diff --git a/frontend/public/locales/en/organizationsettings.json b/frontend/public/locales/en/organizationsettings.json index deae9666ee..74654d9b46 100644 --- a/frontend/public/locales/en/organizationsettings.json +++ b/frontend/public/locales/en/organizationsettings.json @@ -14,6 +14,5 @@ "delete_domain_message": "Are you sure you want to delete this domain?", "delete_domain": "Delete Domain", "add_domain": "Add Domains", - "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly", - "invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually" + "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly" } diff --git a/frontend/public/locales/en/titles.json b/frontend/public/locales/en/titles.json index 85da13a12a..e707c998f7 100644 --- a/frontend/public/locales/en/titles.json +++ b/frontend/public/locales/en/titles.json @@ -4,6 +4,10 @@ "SERVICE_METRICS": "SigNoz | Service Metrics", "SERVICE_MAP": "SigNoz | Service Map", "GET_STARTED": "SigNoz | Get Started", + "GET_STARTED_APPLICATION_MONITORING": "SigNoz | Get Started | APM", + "GET_STARTED_LOGS_MANAGEMENT": "SigNoz | Get Started | Logs", + "GET_STARTED_INFRASTRUCTURE_MONITORING": "SigNoz | Get Started | Infrastructure", + "GET_STARTED_AWS_MONITORING": "SigNoz | Get Started | AWS", "TRACE": "SigNoz | Trace", "TRACE_DETAIL": "SigNoz | Trace Detail", "TRACES_EXPLORER": "SigNoz | Traces Explorer", @@ -40,8 +44,9 @@ "LIST_LICENSES": "SigNoz | List of Licenses", "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", "SUPPORT": "SigNoz | Support", - "LOGS_SAVE_VIEWS": "SigNoz | Logs Save Views", - "TRACES_SAVE_VIEWS": "SigNoz | Traces Save Views", + "LOGS_SAVE_VIEWS": "SigNoz | Logs Saved Views", + "TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views", "DEFAULT": "Open source Observability Platform | SigNoz", - "SHORTCUTS": "SigNoz | Shortcuts" + "SHORTCUTS": "SigNoz | Shortcuts", + "INTEGRATIONS_INSTALLED": "SigNoz | Integrations" } diff --git a/frontend/scripts/typecheck-staged.sh b/frontend/scripts/typecheck-staged.sh index 7da93c088e..0990e81ba4 100644 --- a/frontend/scripts/typecheck-staged.sh +++ b/frontend/scripts/typecheck-staged.sh @@ -9,7 +9,7 @@ done # create temporary tsconfig which includes only passed files str="{ \"extends\": \"./tsconfig.json\", - \"include\": [\"src/types/global.d.ts\",\"src/typings/window.ts\", \"src/typings/chartjs-adapter-date-fns.d.ts\", \"src/typings/environment.ts\" ,\"src/container/OnboardingContainer/typings.d.ts\",$files] + \"include\": [ 
\"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files] }" echo $str > tsconfig.tmp diff --git a/frontend/src/AppRoutes/pageComponents.ts b/frontend/src/AppRoutes/pageComponents.ts index 6d26f9b55a..bea07a7e51 100644 --- a/frontend/src/AppRoutes/pageComponents.ts +++ b/frontend/src/AppRoutes/pageComponents.ts @@ -190,3 +190,18 @@ export const WorkspaceBlocked = Loadable( export const ShortcutsPage = Loadable( () => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'), ); + +export const InstalledIntegrations = Loadable( + () => + import( + /* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage' + ), +); + +export const IntegrationsMarketPlace = Loadable( + // eslint-disable-next-line sonarjs/no-identical-functions + () => + import( + /* webpackChunkName: "IntegrationsMarketPlace" */ 'pages/IntegrationsModulePage' + ), +); diff --git a/frontend/src/AppRoutes/routes.ts b/frontend/src/AppRoutes/routes.ts index c0332448e7..360c74d8da 100644 --- a/frontend/src/AppRoutes/routes.ts +++ b/frontend/src/AppRoutes/routes.ts @@ -1,6 +1,4 @@ import ROUTES from 'constants/routes'; -import Shortcuts from 'pages/Shortcuts/Shortcuts'; -import WorkspaceBlocked from 'pages/WorkspaceLocked'; import { RouteProps } from 'react-router-dom'; import { @@ -16,6 +14,8 @@ import { EditRulesPage, ErrorDetails, IngestionSettings, + InstalledIntegrations, + IntegrationsMarketPlace, LicensePage, ListAllALertsPage, LiveLogs, @@ -35,6 +35,7 @@ import { ServiceMetricsPage, ServicesTablePage, SettingsPage, + ShortcutsPage, SignupPage, SomethingWentWrong, StatusPage, @@ -45,6 +46,7 @@ import { TracesSaveViews, UnAuthorized, UsageExplorerPage, + WorkspaceBlocked, } from './pageComponents'; const routes: AppRoutes[] = [ @@ -57,7 +59,7 @@ const routes: AppRoutes[] = [ }, { path: ROUTES.GET_STARTED, - exact: true, + exact: false, component: Onboarding, isPrivate: true, key: 'GET_STARTED', @@ -331,10 +333,24 @@ const routes: AppRoutes[] = [ { path: ROUTES.SHORTCUTS, exact: true, - component: Shortcuts, + component: ShortcutsPage, isPrivate: true, key: 'SHORTCUTS', }, + { + path: ROUTES.INTEGRATIONS_INSTALLED, + exact: true, + component: InstalledIntegrations, + isPrivate: true, + key: 'INTEGRATIONS_INSTALLED', + }, + { + path: ROUTES.INTEGRATIONS_MARKETPLACE, + exact: true, + component: IntegrationsMarketPlace, + isPrivate: true, + key: 'INTEGRATIONS_MARKETPLACE', + }, ]; export const SUPPORT_ROUTE: AppRoutes = { @@ -358,6 +374,8 @@ export const oldRoutes = [ '/logs/old-logs-explorer', '/logs-explorer', '/logs-explorer/live', + '/logs-save-views', + '/traces-save-views', '/settings/api-keys', ]; @@ -366,6 +384,8 @@ export const oldNewRoutesMapping: Record = { '/logs/old-logs-explorer': '/logs/old-logs-explorer', '/logs-explorer': '/logs/logs-explorer', '/logs-explorer/live': '/logs/logs-explorer/live', + '/logs-save-views': '/logs/saved-views', + '/traces-save-views': '/traces/saved-views', '/settings/api-keys': '/settings/access-tokens', }; diff --git a/frontend/src/api/Integrations/getAllIntegrations.ts b/frontend/src/api/Integrations/getAllIntegrations.ts new file mode 100644 index 0000000000..8aec6ef9cc --- /dev/null +++ b/frontend/src/api/Integrations/getAllIntegrations.ts @@ -0,0 +1,7 @@ +import axios from 'api'; +import { 
AxiosResponse } from 'axios'; +import { AllIntegrationsProps } from 'types/api/integrations/types'; + +export const getAllIntegrations = (): Promise< + AxiosResponse +> => axios.get(`/integrations`); diff --git a/frontend/src/api/Integrations/getIntegration.ts b/frontend/src/api/Integrations/getIntegration.ts new file mode 100644 index 0000000000..84fb696343 --- /dev/null +++ b/frontend/src/api/Integrations/getIntegration.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { + GetIntegrationPayloadProps, + GetIntegrationProps, +} from 'types/api/integrations/types'; + +export const getIntegration = ( + props: GetIntegrationPayloadProps, +): Promise> => + axios.get(`/integrations/${props.integrationId}`); diff --git a/frontend/src/api/Integrations/getIntegrationStatus.ts b/frontend/src/api/Integrations/getIntegrationStatus.ts new file mode 100644 index 0000000000..fbfbca2782 --- /dev/null +++ b/frontend/src/api/Integrations/getIntegrationStatus.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { + GetIntegrationPayloadProps, + GetIntegrationStatusProps, +} from 'types/api/integrations/types'; + +export const getIntegrationStatus = ( + props: GetIntegrationPayloadProps, +): Promise> => + axios.get(`/integrations/${props.integrationId}/connection_status`); diff --git a/frontend/src/api/Integrations/installIntegration.ts b/frontend/src/api/Integrations/installIntegration.ts new file mode 100644 index 0000000000..609ec00545 --- /dev/null +++ b/frontend/src/api/Integrations/installIntegration.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + InstalledIntegrationsSuccessResponse, + InstallIntegrationKeyProps, +} from 'types/api/integrations/types'; + +const installIntegration = async ( + props: InstallIntegrationKeyProps, +): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.post('/integrations/install', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default installIntegration; diff --git a/frontend/src/api/Integrations/uninstallIntegration.ts b/frontend/src/api/Integrations/uninstallIntegration.ts new file mode 100644 index 0000000000..f2a9760bfc --- /dev/null +++ b/frontend/src/api/Integrations/uninstallIntegration.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + UninstallIntegrationProps, + UninstallIntegrationSuccessResponse, +} from 'types/api/integrations/types'; + +const unInstallIntegration = async ( + props: UninstallIntegrationProps, +): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.post('/integrations/uninstall', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default unInstallIntegration; diff --git a/frontend/src/api/SAML/listAllDomain.ts b/frontend/src/api/SAML/listAllDomain.ts index dea73e4311..41620f7d3e 100644 --- 
a/frontend/src/api/SAML/listAllDomain.ts +++ b/frontend/src/api/SAML/listAllDomain.ts @@ -8,7 +8,7 @@ const listAllDomain = async ( props: Props, ): Promise | ErrorResponse> => { try { - const response = await axios.get(`orgs/${props.orgId}/domains`); + const response = await axios.get(`/orgs/${props.orgId}/domains`); return { statusCode: 200, diff --git a/frontend/src/api/apiV1.ts b/frontend/src/api/apiV1.ts index 2e7df02395..4fba137e18 100644 --- a/frontend/src/api/apiV1.ts +++ b/frontend/src/api/apiV1.ts @@ -2,6 +2,7 @@ const apiV1 = '/api/v1/'; export const apiV2 = '/api/v2/'; export const apiV3 = '/api/v3/'; +export const apiV4 = '/api/v4/'; export const apiAlertManager = '/api/alertmanager'; export default apiV1; diff --git a/frontend/src/api/billing/getUsage.ts b/frontend/src/api/billing/getUsage.ts index 1cb5be5640..da7b6ebd63 100644 --- a/frontend/src/api/billing/getUsage.ts +++ b/frontend/src/api/billing/getUsage.ts @@ -13,6 +13,7 @@ export interface UsageResponsePayloadProps { billTotal: number; }; discount: number; + subscriptionStatus?: string; } const getUsage = async ( diff --git a/frontend/src/api/channels/createEmail.ts b/frontend/src/api/channels/createEmail.ts new file mode 100644 index 0000000000..cde74b9c6d --- /dev/null +++ b/frontend/src/api/channels/createEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createEmail'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/channels', { + name: props.name, + email_configs: [ + { + send_resolved: true, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/frontend/src/api/channels/editEmail.ts b/frontend/src/api/channels/editEmail.ts new file mode 100644 index 0000000000..f20e5eb8f9 --- /dev/null +++ b/frontend/src/api/channels/editEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editEmail'; + +const editEmail = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + email_configs: [ + { + send_resolved: true, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editEmail; diff --git a/frontend/src/api/channels/testEmail.ts b/frontend/src/api/channels/testEmail.ts new file mode 100644 index 0000000000..825836abea --- /dev/null +++ b/frontend/src/api/channels/testEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createEmail'; + +const 
testEmail = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + email_configs: [ + { + send_resolved: true, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testEmail; diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index bde915f201..92a06363a1 100644 --- a/frontend/src/api/index.ts +++ b/frontend/src/api/index.ts @@ -9,7 +9,7 @@ import { ENVIRONMENT } from 'constants/env'; import { LOCALSTORAGE } from 'constants/localStorage'; import store from 'store'; -import apiV1, { apiAlertManager, apiV2, apiV3 } from './apiV1'; +import apiV1, { apiAlertManager, apiV2, apiV3, apiV4 } from './apiV1'; import { Logout } from './utils'; const interceptorsResponse = ( @@ -114,6 +114,7 @@ ApiV2Instance.interceptors.request.use(interceptorsRequestResponse); export const ApiV3Instance = axios.create({ baseURL: `${ENVIRONMENT.baseURL}${apiV3}`, }); + ApiV3Instance.interceptors.response.use( interceptorsResponse, interceptorRejected, @@ -121,6 +122,18 @@ ApiV3Instance.interceptors.response.use( ApiV3Instance.interceptors.request.use(interceptorsRequestResponse); // +// axios V4 +export const ApiV4Instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV4}`, +}); + +ApiV4Instance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); +ApiV4Instance.interceptors.request.use(interceptorsRequestResponse); +// + AxiosAlertManagerInstance.interceptors.response.use( interceptorsResponse, interceptorRejected, diff --git a/frontend/src/api/metrics/getQueryRange.ts b/frontend/src/api/metrics/getQueryRange.ts index 984d381e10..40deb021bc 100644 --- a/frontend/src/api/metrics/getQueryRange.ts +++ b/frontend/src/api/metrics/getQueryRange.ts @@ -1,6 +1,7 @@ -import { ApiV3Instance as axios } from 'api'; +import { ApiV3Instance, ApiV4Instance } from 'api'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { AxiosError } from 'axios'; +import { ENTITY_VERSION_V4 } from 'constants/app'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { MetricRangePayloadV3, @@ -9,10 +10,23 @@ import { export const getMetricsQueryRange = async ( props: QueryRangePayload, + version: string, signal: AbortSignal, ): Promise | ErrorResponse> => { try { - const response = await axios.post('/query_range', props, { signal }); + if (version && version === ENTITY_VERSION_V4) { + const response = await ApiV4Instance.post('/query_range', props, { signal }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + params: props, + }; + } + + const response = await ApiV3Instance.post('/query_range', props, { signal }); return { statusCode: 200, diff --git a/frontend/src/api/queryBuilder/getAggregateAttribute.ts b/frontend/src/api/queryBuilder/getAggregateAttribute.ts index e493bb460a..f13c3da4a8 100644 --- a/frontend/src/api/queryBuilder/getAggregateAttribute.ts +++ b/frontend/src/api/queryBuilder/getAggregateAttribute.ts @@ -24,7 +24,7 @@ export const getAggregateAttribute = async ({ const response: AxiosResponse<{ data: IQueryAutocompleteResponse; }> = await ApiV3Instance.get( - `autocomplete/aggregate_attributes?${createQueryParams({ + `/autocomplete/aggregate_attributes?${createQueryParams({ 
aggregateOperator, searchText, dataSource, diff --git a/frontend/src/api/queryBuilder/getAttributeKeys.ts b/frontend/src/api/queryBuilder/getAttributeKeys.ts index 99edc630c8..9cc127bb71 100644 --- a/frontend/src/api/queryBuilder/getAttributeKeys.ts +++ b/frontend/src/api/queryBuilder/getAttributeKeys.ts @@ -25,7 +25,7 @@ export const getAggregateKeys = async ({ const response: AxiosResponse<{ data: IQueryAutocompleteResponse; }> = await ApiV3Instance.get( - `autocomplete/attribute_keys?${createQueryParams({ + `/autocomplete/attribute_keys?${createQueryParams({ aggregateOperator, searchText, dataSource, diff --git a/frontend/src/api/saveView/deleteView.ts b/frontend/src/api/saveView/deleteView.ts index e58e731d10..9317c8331a 100644 --- a/frontend/src/api/saveView/deleteView.ts +++ b/frontend/src/api/saveView/deleteView.ts @@ -2,4 +2,4 @@ import axios from 'api'; import { DeleteViewPayloadProps } from 'types/api/saveViews/types'; export const deleteView = (uuid: string): Promise => - axios.delete(`explorer/views/${uuid}`); + axios.delete(`/explorer/views/${uuid}`); diff --git a/frontend/src/api/saveView/getAllViews.ts b/frontend/src/api/saveView/getAllViews.ts index bdafb96b61..4a54d6af0d 100644 --- a/frontend/src/api/saveView/getAllViews.ts +++ b/frontend/src/api/saveView/getAllViews.ts @@ -6,4 +6,4 @@ import { DataSource } from 'types/common/queryBuilder'; export const getAllViews = ( sourcepage: DataSource, ): Promise> => - axios.get(`explorer/views?sourcePage=${sourcepage}`); + axios.get(`/explorer/views?sourcePage=${sourcepage}`); diff --git a/frontend/src/api/saveView/saveView.ts b/frontend/src/api/saveView/saveView.ts index a0c7ba5bf4..60a552f0bb 100644 --- a/frontend/src/api/saveView/saveView.ts +++ b/frontend/src/api/saveView/saveView.ts @@ -8,7 +8,7 @@ export const saveView = ({ viewName, extraData, }: SaveViewProps): Promise> => - axios.post('explorer/views', { + axios.post('/explorer/views', { name: viewName, sourcePage, compositeQuery, diff --git a/frontend/src/api/saveView/updateView.ts b/frontend/src/api/saveView/updateView.ts index 6ee745ffc2..b48b73f275 100644 --- a/frontend/src/api/saveView/updateView.ts +++ b/frontend/src/api/saveView/updateView.ts @@ -11,7 +11,7 @@ export const updateView = ({ sourcePage, viewKey, }: UpdateViewProps): Promise => - axios.put(`explorer/views/${viewKey}`, { + axios.put(`/explorer/views/${viewKey}`, { name: viewName, compositeQuery, extraData, diff --git a/frontend/src/assets/Integrations/ConfigureIcon.tsx b/frontend/src/assets/Integrations/ConfigureIcon.tsx new file mode 100644 index 0000000000..84ddef5de0 --- /dev/null +++ b/frontend/src/assets/Integrations/ConfigureIcon.tsx @@ -0,0 +1,23 @@ +import { Color } from '@signozhq/design-tokens'; +import { useIsDarkMode } from 'hooks/useDarkMode'; + +function ConfigureIcon(): JSX.Element { + const isDarkMode = useIsDarkMode(); + return ( + + + + + + + ); +} + +export default ConfigureIcon; diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx index 8be31b78e9..a29f0180b4 100644 --- a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx +++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx @@ -115,6 +115,9 @@ function CustomTimePicker({ const handleOpenChange = (newOpen: boolean): void => { setOpen(newOpen); + if (!newOpen) { + setCustomDTPickerVisible?.(false); + } }; const debouncedHandleInputChange = debounce((inputValue): void => { diff --git 
a/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx b/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx index 3141158f7f..4a41bec4f5 100644 --- a/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx +++ b/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx @@ -1,6 +1,6 @@ import './CustomTimePicker.styles.scss'; -import { Button, DatePicker } from 'antd'; +import { Button } from 'antd'; import cx from 'classnames'; import ROUTES from 'constants/routes'; import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; @@ -9,12 +9,10 @@ import { Option, RelativeDurationSuggestionOptions, } from 'container/TopNav/DateTimeSelectionV2/config'; -import dayjs, { Dayjs } from 'dayjs'; import { Dispatch, SetStateAction, useMemo } from 'react'; -import { useSelector } from 'react-redux'; import { useLocation } from 'react-router-dom'; -import { AppState } from 'store/reducers'; -import { GlobalReducer } from 'types/reducer/globalTime'; + +import RangePickerModal from './RangePickerModal'; interface CustomTimePickerPopoverContentProps { options: any[]; @@ -40,35 +38,12 @@ function CustomTimePickerPopoverContent({ handleGoLive, selectedTime, }: CustomTimePickerPopoverContentProps): JSX.Element { - const { RangePicker } = DatePicker; const { pathname } = useLocation(); - const { maxTime, minTime } = useSelector( - (state) => state.globalTime, - ); - const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [ pathname, ]); - const disabledDate = (current: Dayjs): boolean => { - const currentDay = dayjs(current); - return currentDay.isAfter(dayjs()); - }; - - const onPopoverClose = (visible: boolean): void => { - if (!visible) { - setCustomDTPickerVisible(false); - } - setIsOpen(visible); - }; - - const onModalOkHandler = (date_time: any): void => { - if (date_time?.[1]) { - onPopoverClose(false); - } - onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER); - }; function getTimeChips(options: Option[]): JSX.Element { return (
@@ -105,26 +80,32 @@ function CustomTimePickerPopoverContent({ }} className={cx( 'date-time-options-btn', - selectedTime === option.value && 'active', + customDateTimeVisible + ? option.value === 'custom' && 'active' + : selectedTime === option.value && 'active', )} > {option.label} ))}
-
+
{selectedTime === 'custom' || customDateTimeVisible ? ( - ) : ( -
+
RELATIVE TIMES
{getTimeChips(RelativeDurationSuggestionOptions)}
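The hunk above replaces the inline RangePicker markup with the new RangePickerModal component added below. A minimal sketch of one way the popover content could delegate to it — the wrapper name CustomRangeSection and the relativeTimeChips prop are illustrative only; the RangePickerModal props are taken from the interface introduced in the next file:

import { Dispatch, SetStateAction } from 'react';

import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';

import RangePickerModal from './RangePickerModal';

interface CustomRangeSectionProps {
	selectedTime: string;
	customDateTimeVisible: boolean;
	setCustomDTPickerVisible: Dispatch<SetStateAction<boolean>>;
	setIsOpen: Dispatch<SetStateAction<boolean>>;
	onCustomDateHandler: (
		dateTimeRange: DateTimeRangeType,
		lexicalContext?: LexicalContext,
	) => void;
	// stand-in for the getTimeChips(RelativeDurationSuggestionOptions) output
	relativeTimeChips: JSX.Element;
}

// Renders the extracted RangePickerModal when the "custom" option is active and
// falls back to the relative-time chips otherwise.
function CustomRangeSection({
	selectedTime,
	customDateTimeVisible,
	setCustomDTPickerVisible,
	setIsOpen,
	onCustomDateHandler,
	relativeTimeChips,
}: CustomRangeSectionProps): JSX.Element {
	if (selectedTime === 'custom' || customDateTimeVisible) {
		return (
			<RangePickerModal
				setCustomDTPickerVisible={setCustomDTPickerVisible}
				setIsOpen={setIsOpen}
				onCustomDateHandler={onCustomDateHandler}
				selectedTime={selectedTime}
			/>
		);
	}
	return relativeTimeChips;
}

export default CustomRangeSection;

Moving the date-range handling into RangePickerModal keeps the globalTime bounds, the future-date guard, and the OK handler in one place, so the popover only decides which branch to render.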
diff --git a/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss b/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss new file mode 100644 index 0000000000..58ebe060d4 --- /dev/null +++ b/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss @@ -0,0 +1,4 @@ +.custom-date-picker { + display: flex; + flex-direction: column; +} diff --git a/frontend/src/components/CustomTimePicker/RangePickerModal.tsx b/frontend/src/components/CustomTimePicker/RangePickerModal.tsx new file mode 100644 index 0000000000..24ba0e2b01 --- /dev/null +++ b/frontend/src/components/CustomTimePicker/RangePickerModal.tsx @@ -0,0 +1,68 @@ +import './RangePickerModal.styles.scss'; + +import { DatePicker } from 'antd'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; +import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config'; +import dayjs, { Dayjs } from 'dayjs'; +import { Dispatch, SetStateAction } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +interface RangePickerModalProps { + setCustomDTPickerVisible: Dispatch>; + setIsOpen: Dispatch>; + onCustomDateHandler: ( + dateTimeRange: DateTimeRangeType, + lexicalContext?: LexicalContext | undefined, + ) => void; + selectedTime: string; +} + +function RangePickerModal(props: RangePickerModalProps): JSX.Element { + const { + setCustomDTPickerVisible, + setIsOpen, + onCustomDateHandler, + selectedTime, + } = props; + const { RangePicker } = DatePicker; + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); + + const disabledDate = (current: Dayjs): boolean => { + const currentDay = dayjs(current); + return currentDay.isAfter(dayjs()); + }; + + const onPopoverClose = (visible: boolean): void => { + if (!visible) { + setCustomDTPickerVisible(false); + } + setIsOpen(visible); + }; + + const onModalOkHandler = (date_time: any): void => { + if (date_time?.[1]) { + onPopoverClose(false); + } + onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER); + }; + return ( +
+ +
+ ); +} + +export default RangePickerModal; diff --git a/frontend/src/components/LogDetail/LogDetails.styles.scss b/frontend/src/components/LogDetail/LogDetails.styles.scss index 0dcdc1e5c1..c8ac0be91f 100644 --- a/frontend/src/components/LogDetail/LogDetails.styles.scss +++ b/frontend/src/components/LogDetail/LogDetails.styles.scss @@ -18,6 +18,8 @@ } .ant-drawer-body { + display: flex; + flex-direction: column; padding: 16px; } diff --git a/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss index e3da355621..2a6822dc00 100644 --- a/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss +++ b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss @@ -1,10 +1,13 @@ .query-builder-search-wrapper { - margin-top: 10px; - height: 46px; - border: 1px solid var(--bg-slate-400); - border-bottom: none; + margin-top: 10px; + border: 1px solid var(--bg-slate-400); + border-bottom: none; - .ant-select-selector { - border: none !important; - } -} \ No newline at end of file + .ant-select-selector { + border: none !important; + + input { + font-size: 12px; + } + } +} diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index 74deef6cdf..2b828d663c 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -9,6 +9,7 @@ import dayjs from 'dayjs'; import dompurify from 'dompurify'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; +import { useIsDarkMode } from 'hooks/useDarkMode'; // utils import { FlatLogData } from 'lib/logs/flatLogData'; import { useCallback, useMemo, useState } from 'react'; @@ -19,9 +20,8 @@ import { ILog } from 'types/api/logs/log'; // components import AddToQueryHOC, { AddToQueryHOCProps } from '../AddToQueryHOC'; import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButtons'; -import LogStateIndicator, { - LogType, -} from '../LogStateIndicator/LogStateIndicator'; +import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; +import { getLogIndicatorType } from '../LogStateIndicator/utils'; // styles import { Container, @@ -37,12 +37,17 @@ const convert = new Convert(); interface LogFieldProps { fieldKey: string; fieldValue: string; + linesPerRow?: number; } -type LogSelectedFieldProps = LogFieldProps & +type LogSelectedFieldProps = Omit & Pick; -function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element { +function LogGeneralField({ + fieldKey, + fieldValue, + linesPerRow = 1, +}: LogFieldProps): JSX.Element { const html = useMemo( () => ({ __html: convert.toHtml(dompurify.sanitize(fieldValue)), @@ -55,7 +60,11 @@ function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element { {`${fieldKey} : `} - + 1 ? 
linesPerRow : undefined} + /> ); } @@ -92,6 +101,7 @@ type ListLogViewProps = { onSetActiveLog: (log: ILog) => void; onAddToQuery: AddToQueryHOCProps['onAddToQuery']; activeLog?: ILog | null; + linesPerRow: number; }; function ListLogView({ @@ -100,6 +110,7 @@ function ListLogView({ onSetActiveLog, onAddToQuery, activeLog, + linesPerRow, }: ListLogViewProps): JSX.Element { const flattenLogData = useMemo(() => FlatLogData(logData), [logData]); @@ -114,6 +125,8 @@ function ListLogView({ onClearActiveLog: handleClearActiveContextLog, } = useActiveLog(); + const isDarkMode = useIsDarkMode(); + const handlerClearActiveContextLog = useCallback( (event: React.MouseEvent | React.KeyboardEvent) => { event.preventDefault(); @@ -149,7 +162,7 @@ function ListLogView({ [flattenLogData.timestamp], ); - const logType = logData?.attributes_string?.log_level || LogType.INFO; + const logType = getLogIndicatorType(logData); const handleMouseEnter = (): void => { setHasActionButtons(true); @@ -163,6 +176,7 @@ function ListLogView({ <>
- + {flattenLogData.stream && ( )} @@ -219,4 +237,8 @@ ListLogView.defaultProps = { activeLog: null, }; +LogGeneralField.defaultProps = { + linesPerRow: 1, +}; + export default ListLogView; diff --git a/frontend/src/components/Logs/ListLogView/styles.ts b/frontend/src/components/Logs/ListLogView/styles.ts index 79812c4400..52cc2b20d4 100644 --- a/frontend/src/components/Logs/ListLogView/styles.ts +++ b/frontend/src/components/Logs/ListLogView/styles.ts @@ -1,23 +1,33 @@ +import { Color } from '@signozhq/design-tokens'; import { Card, Typography } from 'antd'; import styled from 'styled-components'; -import { getActiveLogBackground } from 'utils/logs'; + +interface LogTextProps { + linesPerRow?: number; +} export const Container = styled(Card)<{ $isActiveLog: boolean; + $isDarkMode: boolean; }>` width: 100% !important; margin-bottom: 0.3rem; cursor: pointer; .ant-card-body { padding: 0.3rem 0.6rem; - } - ${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)} + ${({ $isActiveLog, $isDarkMode }): string => + $isActiveLog + ? `background-color: ${ + $isDarkMode ? Color.BG_SLATE_500 : Color.BG_VANILLA_300 + } !important` + : ''} + } `; export const Text = styled(Typography.Text)` &&& { - min-width: 1.5rem; + min-width: 2.5rem; white-space: nowrap; } `; @@ -35,11 +45,19 @@ export const LogContainer = styled.div` gap: 6px; `; -export const LogText = styled.div` +export const LogText = styled.div` display: inline-block; text-overflow: ellipsis; overflow: hidden; - white-space: nowrap; + ${({ linesPerRow }): string => + linesPerRow + ? `-webkit-line-clamp: ${linesPerRow}; + line-clamp: ${linesPerRow}; + display: -webkit-box; + -webkit-box-orient: vertical; + white-space: normal; ` + : 'white-space: nowrap;'}; + }; `; export const SelectedLog = styled.div` diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss index 6d2429b592..a00c7f6761 100644 --- a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss @@ -10,15 +10,27 @@ background-color: transparent; &.INFO { - background-color: #1d212d; + background-color: var(--bg-slate-400); } - &.WARNING { - background-color: #ffcd56; + &.WARNING, &.WARN { + background-color: var(--bg-amber-500); } &.ERROR { - background-color: #e5484d; + background-color: var(--bg-cherry-500); + } + + &.TRACE { + background-color: var(--bg-robin-300); + } + + &.DEBUG { + background-color: var(--bg-forest-500); + } + + &.FATAL { + background-color: var(--bg-sakura-500); } } diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx new file mode 100644 index 0000000000..d924c27426 --- /dev/null +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx @@ -0,0 +1,45 @@ +import { render } from '@testing-library/react'; + +import LogStateIndicator from './LogStateIndicator'; + +describe('LogStateIndicator', () => { + it('renders correctly with default props', () => { + const { container } = render(); + const indicator = container.firstChild as HTMLElement; + expect(indicator.classList.contains('log-state-indicator')).toBe(true); + expect(indicator.classList.contains('isActive')).toBe(false); + expect(container.querySelector('.line')).toBeTruthy(); + expect(container.querySelector('.line')?.classList.contains('INFO')).toBe( 
+ true, + ); + }); + + it('renders correctly when isActive is true', () => { + const { container } = render(); + const indicator = container.firstChild as HTMLElement; + expect(indicator.classList.contains('isActive')).toBe(true); + }); + + it('renders correctly with different types', () => { + const { container: containerInfo } = render( + , + ); + expect(containerInfo.querySelector('.line')?.classList.contains('INFO')).toBe( + true, + ); + + const { container: containerWarning } = render( + , + ); + expect( + containerWarning.querySelector('.line')?.classList.contains('WARNING'), + ).toBe(true); + + const { container: containerError } = render( + , + ); + expect( + containerError.querySelector('.line')?.classList.contains('ERROR'), + ).toBe(true); + }); +}); diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx index 4c9b7de903..5355e38017 100644 --- a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx @@ -2,11 +2,40 @@ import './LogStateIndicator.styles.scss'; import cx from 'classnames'; +export const SEVERITY_TEXT_TYPE = { + TRACE: 'TRACE', + TRACE2: 'TRACE2', + TRACE3: 'TRACE3', + TRACE4: 'TRACE4', + DEBUG: 'DEBUG', + DEBUG2: 'DEBUG2', + DEBUG3: 'DEBUG3', + DEBUG4: 'DEBUG4', + INFO: 'INFO', + INFO2: 'INFO2', + INFO3: 'INFO3', + INFO4: 'INFO4', + WARN: 'WARN', + WARN2: 'WARN2', + WARN3: 'WARN3', + WARN4: 'WARN4', + WARNING: 'WARNING', + ERROR: 'ERROR', + ERROR2: 'ERROR2', + ERROR3: 'ERROR3', + ERROR4: 'ERROR4', + FATAL: 'FATAL', + FATAL2: 'FATAL2', + FATAL3: 'FATAL3', + FATAL4: 'FATAL4', +} as const; + export const LogType = { INFO: 'INFO', WARNING: 'WARNING', ERROR: 'ERROR', -}; +} as const; + function LogStateIndicator({ type, isActive, diff --git a/frontend/src/components/Logs/LogStateIndicator/utils.test.ts b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts new file mode 100644 index 0000000000..65f6b9664d --- /dev/null +++ b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts @@ -0,0 +1,89 @@ +import { ILog } from 'types/api/logs/log'; + +import { getLogIndicatorType, getLogIndicatorTypeForTable } from './utils'; + +describe('getLogIndicatorType', () => { + it('should return severity type for valid log with severityText', () => { + const log = { + date: '2024-02-29T12:34:46Z', + timestamp: 1646115296, + id: '123456', + traceId: '987654', + spanId: '54321', + traceFlags: 0, + severityText: 'INFO', + severityNumber: 2, + body: 'Sample log Message', + resources_string: {}, + attributesString: {}, + attributes_string: {}, + attributesInt: {}, + attributesFloat: {}, + severity_text: 'INFO', + }; + expect(getLogIndicatorType(log)).toBe('INFO'); + }); + + it('should return log level if severityText is missing', () => { + const log: ILog = { + date: '2024-02-29T12:34:58Z', + timestamp: 1646115296, + id: '123456', + traceId: '987654', + spanId: '54321', + traceFlags: 0, + severityNumber: 2, + body: 'Sample log', + resources_string: {}, + attributesString: {}, + attributes_string: {}, + attributesInt: {}, + attributesFloat: {}, + severity_text: 'FATAL', + severityText: '', + }; + expect(getLogIndicatorType(log)).toBe('FATAL'); + }); +}); + +describe('getLogIndicatorTypeForTable', () => { + it('should return severity type for valid log with severityText', () => { + const log = { + date: '2024-02-29T12:34:56Z', + timestamp: 1646115296, + id: '123456', + traceId: '987654', + spanId: 
'54321', + traceFlags: 0, + severity_number: 2, + body: 'Sample log message', + resources_string: {}, + attributesString: {}, + attributes_string: {}, + attributesInt: {}, + attributesFloat: {}, + severity_text: 'WARN', + }; + expect(getLogIndicatorTypeForTable(log)).toBe('WARN'); + }); + + it('should return log level if severityText is missing', () => { + const log = { + date: '2024-02-29T12:34:56Z', + timestamp: 1646115296, + id: '123456', + traceId: '987654', + spanId: '54321', + traceFlags: 0, + severityNumber: 2, + body: 'Sample log message', + resources_string: {}, + attributesString: {}, + attributes_string: {}, + attributesInt: {}, + attributesFloat: {}, + log_level: 'INFO', + }; + expect(getLogIndicatorTypeForTable(log)).toBe('INFO'); + }); +}); diff --git a/frontend/src/components/Logs/LogStateIndicator/utils.ts b/frontend/src/components/Logs/LogStateIndicator/utils.ts new file mode 100644 index 0000000000..7bfe7a430a --- /dev/null +++ b/frontend/src/components/Logs/LogStateIndicator/utils.ts @@ -0,0 +1,57 @@ +import { ILog } from 'types/api/logs/log'; + +import { LogType, SEVERITY_TEXT_TYPE } from './LogStateIndicator'; + +const getSeverityType = (severityText: string): string => { + switch (severityText) { + case SEVERITY_TEXT_TYPE.TRACE: + case SEVERITY_TEXT_TYPE.TRACE2: + case SEVERITY_TEXT_TYPE.TRACE3: + case SEVERITY_TEXT_TYPE.TRACE4: + return SEVERITY_TEXT_TYPE.TRACE; + case SEVERITY_TEXT_TYPE.DEBUG: + case SEVERITY_TEXT_TYPE.DEBUG2: + case SEVERITY_TEXT_TYPE.DEBUG3: + case SEVERITY_TEXT_TYPE.DEBUG4: + return SEVERITY_TEXT_TYPE.DEBUG; + case SEVERITY_TEXT_TYPE.INFO: + case SEVERITY_TEXT_TYPE.INFO2: + case SEVERITY_TEXT_TYPE.INFO3: + case SEVERITY_TEXT_TYPE.INFO4: + return SEVERITY_TEXT_TYPE.INFO; + case SEVERITY_TEXT_TYPE.WARN: + case SEVERITY_TEXT_TYPE.WARN2: + case SEVERITY_TEXT_TYPE.WARN3: + case SEVERITY_TEXT_TYPE.WARN4: + case SEVERITY_TEXT_TYPE.WARNING: + return SEVERITY_TEXT_TYPE.WARN; + case SEVERITY_TEXT_TYPE.ERROR: + case SEVERITY_TEXT_TYPE.ERROR2: + case SEVERITY_TEXT_TYPE.ERROR3: + case SEVERITY_TEXT_TYPE.ERROR4: + return SEVERITY_TEXT_TYPE.ERROR; + case SEVERITY_TEXT_TYPE.FATAL: + case SEVERITY_TEXT_TYPE.FATAL2: + case SEVERITY_TEXT_TYPE.FATAL3: + case SEVERITY_TEXT_TYPE.FATAL4: + return SEVERITY_TEXT_TYPE.FATAL; + default: + return SEVERITY_TEXT_TYPE.INFO; + } +}; + +export const getLogIndicatorType = (logData: ILog): string => { + if (logData.severity_text) { + return getSeverityType(logData.severity_text); + } + return logData.attributes_string?.log_level || LogType.INFO; +}; + +export const getLogIndicatorTypeForTable = ( + log: Record, +): string => { + if (log.severity_text) { + return getSeverityType(log.severity_text as string); + } + return (log.log_level as string) || LogType.INFO; +}; diff --git a/frontend/src/components/Logs/RawLogView/index.tsx b/frontend/src/components/Logs/RawLogView/index.tsx index 94c9dbe1bb..099e0fcc25 100644 --- a/frontend/src/components/Logs/RawLogView/index.tsx +++ b/frontend/src/components/Logs/RawLogView/index.tsx @@ -23,9 +23,8 @@ import { } from 'react'; import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButtons'; -import LogStateIndicator, { - LogType, -} from '../LogStateIndicator/LogStateIndicator'; +import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; +import { getLogIndicatorType } from '../LogStateIndicator/utils'; // styles import { RawLogContent, RawLogViewContainer } from './styles'; import { RawLogViewProps } from './types'; @@ -64,7 +63,7 @@ function RawLogView({ 
const severityText = data.severity_text ? `${data.severity_text} |` : ''; - const logType = data?.attributes_string?.log_level || LogType.INFO; + const logType = getLogIndicatorType(data); const updatedSelecedFields = useMemo( () => selectedFields.filter((e) => e.name !== 'id'), @@ -164,7 +163,11 @@ function RawLogView({ > + $isHightlightedLog + ? `background-color: ${ + $isDarkMode ? Color.BG_SLATE_500 : Color.BG_VANILLA_300 + }; + transition: background-color 2s ease-in;` + : ''} `; export const ExpandIconWrapper = styled(Col)` diff --git a/frontend/src/components/Logs/TableView/config.ts b/frontend/src/components/Logs/TableView/config.ts index 73b5f9a4c3..7a267dc624 100644 --- a/frontend/src/components/Logs/TableView/config.ts +++ b/frontend/src/components/Logs/TableView/config.ts @@ -14,12 +14,12 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties { lineHeight: '18px', letterSpacing: '-0.07px', marginBottom: '0px', + minWidth: '10rem', }; } export const defaultTableStyle: CSSProperties = { minWidth: '40rem', - maxWidth: '40rem', }; export const defaultListViewPanelStyle: CSSProperties = { diff --git a/frontend/src/components/Logs/TableView/types.ts b/frontend/src/components/Logs/TableView/types.ts index 3176101d9d..36a796ac0f 100644 --- a/frontend/src/components/Logs/TableView/types.ts +++ b/frontend/src/components/Logs/TableView/types.ts @@ -23,6 +23,7 @@ export type UseTableViewProps = { onOpenLogsContext?: (log: ILog) => void; onClickExpand?: (log: ILog) => void; activeLog?: ILog | null; + activeLogIndex?: number; activeContextLog?: ILog | null; isListViewPanel?: boolean; } & LogsTableViewProps; diff --git a/frontend/src/components/Logs/TableView/useTableView.tsx b/frontend/src/components/Logs/TableView/useTableView.tsx index 9db2332635..259e046370 100644 --- a/frontend/src/components/Logs/TableView/useTableView.tsx +++ b/frontend/src/components/Logs/TableView/useTableView.tsx @@ -7,12 +7,10 @@ import dayjs from 'dayjs'; import dompurify from 'dompurify'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { FlatLogData } from 'lib/logs/flatLogData'; -import { defaultTo } from 'lodash-es'; import { useMemo } from 'react'; -import LogStateIndicator, { - LogType, -} from '../LogStateIndicator/LogStateIndicator'; +import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; +import { getLogIndicatorTypeForTable } from '../LogStateIndicator/utils'; import { defaultListViewPanelStyle, defaultTableStyle, @@ -84,7 +82,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => { children: (
{ if ( maxLinesPerRow && @@ -122,38 +120,36 @@ export default function LogsFormatOptionsMenu({ {selectedItem && ( <> - {selectedItem === 'raw' && ( - <> -
-
-
max lines per row
-
- - - -
+ <> +
+
+
max lines per row
+
+ + +
- - )} +
+
{!addNewColumn &&
} @@ -221,8 +217,6 @@ export default function LogsFormatOptionsMenu({ className="column-name" key={value} onClick={(eve): void => { - console.log('coluimn name', label, value); - eve.stopPropagation(); if (addColumn && addColumn?.onSelect) { diff --git a/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx b/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx index cd6a5fdc33..20be0677bd 100644 --- a/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx +++ b/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx @@ -1,10 +1,12 @@ /* eslint-disable no-restricted-syntax */ /* eslint-disable react/jsx-props-no-spreading */ /* eslint-disable @typescript-eslint/explicit-function-return-type */ + import ReactMarkdown from 'react-markdown'; import { CodeProps } from 'react-markdown/lib/ast-to-react'; import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; import { a11yDark } from 'react-syntax-highlighter/dist/cjs/styles/prism'; +import rehypeRaw from 'rehype-raw'; import CodeCopyBtn from './CodeCopyBtn/CodeCopyBtn'; @@ -74,6 +76,10 @@ const interpolateMarkdown = ( return interpolatedContent; }; +function CustomTag({ color }: { color: string }): JSX.Element { + return

This is custom element

; +} + function MarkdownRenderer({ markdownContent, variables, @@ -85,12 +91,14 @@ function MarkdownRenderer({ return ( {interpolatedMarkdown} diff --git a/frontend/src/constants/app.ts b/frontend/src/constants/app.ts index 8529db4e4d..d260806856 100644 --- a/frontend/src/constants/app.ts +++ b/frontend/src/constants/app.ts @@ -13,3 +13,6 @@ export const SIGNOZ_UPGRADE_PLAN_URL = 'https://upgrade.signoz.io/upgrade-from-app'; export const DASHBOARD_TIME_IN_DURATION = 'refreshInterval'; + +export const DEFAULT_ENTITY_VERSION = 'v3'; +export const ENTITY_VERSION_V4 = 'v4'; diff --git a/frontend/src/constants/localStorage.ts b/frontend/src/constants/localStorage.ts index 296735b286..85f46ab892 100644 --- a/frontend/src/constants/localStorage.ts +++ b/frontend/src/constants/localStorage.ts @@ -16,4 +16,6 @@ export enum LOCALSTORAGE { CHAT_SUPPORT = 'CHAT_SUPPORT', IS_IDENTIFIED_USER = 'IS_IDENTIFIED_USER', DASHBOARD_VARIABLES = 'DASHBOARD_VARIABLES', + SHOW_EXPLORER_TOOLBAR = 'SHOW_EXPLORER_TOOLBAR', + PINNED_ATTRIBUTES = 'PINNED_ATTRIBUTES', } diff --git a/frontend/src/constants/query.ts b/frontend/src/constants/query.ts index d3bd2729d1..31ec5fcd20 100644 --- a/frontend/src/constants/query.ts +++ b/frontend/src/constants/query.ts @@ -27,5 +27,6 @@ export enum QueryParams { viewName = 'viewName', viewKey = 'viewKey', expandedWidgetId = 'expandedWidgetId', + integration = 'integration', pagination = 'pagination', } diff --git a/frontend/src/constants/queryBuilder.ts b/frontend/src/constants/queryBuilder.ts index 936bfccdde..0999b634ba 100644 --- a/frontend/src/constants/queryBuilder.ts +++ b/frontend/src/constants/queryBuilder.ts @@ -36,6 +36,11 @@ import { v4 as uuid } from 'uuid'; import { logsAggregateOperatorOptions, metricAggregateOperatorOptions, + metricsGaugeAggregateOperatorOptions, + metricsGaugeSpaceAggregateOperatorOptions, + metricsHistogramSpaceAggregateOperatorOptions, + metricsSumAggregateOperatorOptions, + metricsSumSpaceAggregateOperatorOptions, tracesAggregateOperatorOptions, } from './queryBuilderOperators'; @@ -74,6 +79,18 @@ export const mapOfOperators = { traces: tracesAggregateOperatorOptions, }; +export const metricsOperatorsByType = { + Sum: metricsSumAggregateOperatorOptions, + Gauge: metricsGaugeAggregateOperatorOptions, +}; + +export const metricsSpaceAggregationOperatorsByType = { + Sum: metricsSumSpaceAggregateOperatorOptions, + Gauge: metricsGaugeSpaceAggregateOperatorOptions, + Histogram: metricsHistogramSpaceAggregateOperatorOptions, + ExponentialHistogram: metricsHistogramSpaceAggregateOperatorOptions, +}; + export const mapOfQueryFilters: Record = { metrics: [ // eslint-disable-next-line sonarjs/no-duplicate-string @@ -148,6 +165,9 @@ export const initialQueryBuilderFormValues: IBuilderQuery = { queryName: createNewBuilderItemName({ existNames: [], sourceNames: alphabet }), aggregateOperator: MetricAggregateOperator.COUNT, aggregateAttribute: initialAutocompleteData, + timeAggregation: MetricAggregateOperator.RATE, + spaceAggregation: MetricAggregateOperator.SUM, + functions: [], filters: { items: [], op: 'AND' }, expression: createNewBuilderItemName({ existNames: [], @@ -160,7 +180,7 @@ export const initialQueryBuilderFormValues: IBuilderQuery = { orderBy: [], groupBy: [], legend: '', - reduceTo: 'sum', + reduceTo: 'avg', }; const initialQueryBuilderFormLogsValues: IBuilderQuery = { @@ -268,6 +288,14 @@ export enum PANEL_TYPES { EMPTY_WIDGET = 'EMPTY_WIDGET', } +// eslint-disable-next-line @typescript-eslint/naming-convention +export enum ATTRIBUTE_TYPES { + 
SUM = 'Sum', + GAUGE = 'Gauge', + HISTOGRAM = 'Histogram', + EXPONENTIAL_HISTOGRAM = 'ExponentialHistogram', +} + export type IQueryBuilderState = 'search'; export const QUERY_BUILDER_SEARCH_VALUES = { diff --git a/frontend/src/constants/queryBuilderOperators.ts b/frontend/src/constants/queryBuilderOperators.ts index 7c5cff2b69..581d517875 100644 --- a/frontend/src/constants/queryBuilderOperators.ts +++ b/frontend/src/constants/queryBuilderOperators.ts @@ -302,3 +302,126 @@ export const logsAggregateOperatorOptions: SelectOption[] = [ label: 'Rate_max', }, ]; + +export const metricsSumAggregateOperatorOptions: SelectOption< + string, + string +>[] = [ + { + value: MetricAggregateOperator.RATE, + label: 'Rate', + }, + { + value: MetricAggregateOperator.INCREASE, + label: 'Increase', + }, +]; + +export const metricsGaugeAggregateOperatorOptions: SelectOption< + string, + string +>[] = [ + { + value: MetricAggregateOperator.LATEST, + label: 'Latest', + }, + { + value: MetricAggregateOperator.SUM, + label: 'Sum', + }, + { + value: MetricAggregateOperator.AVG, + label: 'Avg', + }, + { + value: MetricAggregateOperator.MIN, + label: 'Min', + }, + { + value: MetricAggregateOperator.MAX, + label: 'Max', + }, + { + value: MetricAggregateOperator.COUNT, + label: 'Count', + }, + { + value: MetricAggregateOperator.COUNT_DISTINCT, + label: 'Count Distinct', + }, +]; + +export const metricsSumSpaceAggregateOperatorOptions: SelectOption< + string, + string +>[] = [ + { + value: MetricAggregateOperator.SUM, + label: 'Sum', + }, + { + value: MetricAggregateOperator.AVG, + label: 'Avg', + }, + { + value: MetricAggregateOperator.MIN, + label: 'Min', + }, + { + value: MetricAggregateOperator.MAX, + label: 'Max', + }, +]; + +export const metricsGaugeSpaceAggregateOperatorOptions: SelectOption< + string, + string +>[] = [ + { + value: MetricAggregateOperator.SUM, + label: 'Sum', + }, + { + value: MetricAggregateOperator.AVG, + label: 'Avg', + }, + { + value: MetricAggregateOperator.MIN, + label: 'Min', + }, + { + value: MetricAggregateOperator.MAX, + label: 'Max', + }, +]; + +export const metricsHistogramSpaceAggregateOperatorOptions: SelectOption< + string, + string +>[] = [ + { + value: MetricAggregateOperator.P50, + label: 'P50', + }, + { + value: MetricAggregateOperator.P75, + label: 'P75', + }, + { + value: MetricAggregateOperator.P90, + label: 'P90', + }, + { + value: MetricAggregateOperator.P95, + label: 'P95', + }, + { + value: MetricAggregateOperator.P99, + label: 'P99', + }, +]; + +export const metricsEmptyTimeAggregateOperatorOptions: SelectOption< + string, + string +>[] = []; diff --git a/frontend/src/constants/queryFunctionOptions.ts b/frontend/src/constants/queryFunctionOptions.ts new file mode 100644 index 0000000000..b79f673c46 --- /dev/null +++ b/frontend/src/constants/queryFunctionOptions.ts @@ -0,0 +1,137 @@ +/* eslint-disable sonarjs/no-duplicate-string */ +import { QueryFunctionsTypes } from 'types/common/queryBuilder'; +import { SelectOption } from 'types/common/select'; + +export const queryFunctionOptions: SelectOption[] = [ + { + value: QueryFunctionsTypes.CUTOFF_MIN, + label: 'Cut Off Min', + }, + { + value: QueryFunctionsTypes.CUTOFF_MAX, + label: 'Cut Off Max', + }, + { + value: QueryFunctionsTypes.CLAMP_MIN, + label: 'Clamp Min', + }, + { + value: QueryFunctionsTypes.CLAMP_MAX, + label: 'Clamp Max', + }, + { + value: QueryFunctionsTypes.ABSOLUTE, + label: 'Absolute', + }, + { + value: QueryFunctionsTypes.LOG_2, + label: 'Log2', + }, + { + value: QueryFunctionsTypes.LOG_10, + label: 
'Log10', + }, + { + value: QueryFunctionsTypes.CUMULATIVE_SUM, + label: 'Cumulative Sum', + }, + { + value: QueryFunctionsTypes.EWMA_3, + label: 'EWMA 3', + }, + { + value: QueryFunctionsTypes.EWMA_5, + label: 'EWMA 5', + }, + { + value: QueryFunctionsTypes.EWMA_7, + label: 'EWMA 7', + }, + { + value: QueryFunctionsTypes.MEDIAN_3, + label: 'Median 3', + }, + { + value: QueryFunctionsTypes.MEDIAN_5, + label: 'Median 5', + }, + { + value: QueryFunctionsTypes.MEDIAN_7, + label: 'Median 7', + }, + { + value: QueryFunctionsTypes.TIME_SHIFT, + label: 'Time Shift', + }, +]; + +interface QueryFunctionConfigType { + [key: string]: { + showInput: boolean; + inputType?: string; + placeholder?: string; + }; +} + +export const queryFunctionsTypesConfig: QueryFunctionConfigType = { + cutOffMin: { + showInput: true, + inputType: 'text', + placeholder: 'Threshold', + }, + cutOffMax: { + showInput: true, + inputType: 'text', + placeholder: 'Threshold', + }, + clampMin: { + showInput: true, + inputType: 'text', + placeholder: 'Threshold', + }, + clampMax: { + showInput: true, + inputType: 'text', + placeholder: 'Threshold', + }, + absolute: { + showInput: false, + }, + log2: { + showInput: false, + }, + log10: { + showInput: false, + }, + cumSum: { + showInput: false, + }, + ewma3: { + showInput: true, + inputType: 'text', + placeholder: 'Alpha', + }, + ewma5: { + showInput: true, + inputType: 'text', + placeholder: 'Alpha', + }, + ewma7: { + showInput: true, + inputType: 'text', + placeholder: 'Alpha', + }, + median3: { + showInput: false, + }, + median5: { + showInput: false, + }, + median7: { + showInput: false, + }, + timeShift: { + showInput: true, + inputType: 'text', + }, +}; diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts index 2f7c650912..0b087ff8cd 100644 --- a/frontend/src/constants/routes.ts +++ b/frontend/src/constants/routes.ts @@ -7,6 +7,11 @@ const ROUTES = { TRACE_DETAIL: '/trace/:id', TRACES_EXPLORER: '/traces-explorer', GET_STARTED: '/get-started', + GET_STARTED_APPLICATION_MONITORING: '/get-started/application-monitoring', + GET_STARTED_LOGS_MANAGEMENT: '/get-started/logs-management', + GET_STARTED_INFRASTRUCTURE_MONITORING: + '/get-started/infrastructure-monitoring', + GET_STARTED_AWS_MONITORING: '/get-started/aws-monitoring', USAGE_EXPLORER: '/usage-explorer', APPLICATION: '/services', ALL_DASHBOARD: '/dashboard', @@ -42,10 +47,13 @@ const ROUTES = { TRACE_EXPLORER: '/trace-explorer', BILLING: '/billing', SUPPORT: '/support', - LOGS_SAVE_VIEWS: '/logs-save-views', - TRACES_SAVE_VIEWS: '/traces-save-views', + LOGS_SAVE_VIEWS: '/logs/saved-views', + TRACES_SAVE_VIEWS: '/traces/saved-views', WORKSPACE_LOCKED: '/workspace-locked', SHORTCUTS: '/shortcuts', + INTEGRATIONS_BASE: '/integrations', + INTEGRATIONS_INSTALLED: '/integrations/installed', + INTEGRATIONS_MARKETPLACE: '/integrations/marketplace', } as const; export default ROUTES; diff --git a/frontend/src/constants/shortcuts/DashboardShortcuts.ts b/frontend/src/constants/shortcuts/DashboardShortcuts.ts new file mode 100644 index 0000000000..ee861708f7 --- /dev/null +++ b/frontend/src/constants/shortcuts/DashboardShortcuts.ts @@ -0,0 +1,17 @@ +import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS'; + +const userOS = getUserOperatingSystem(); + +export const DashboardShortcuts = { + SaveChanges: 's+meta', + DiscardChanges: 'd+meta', +}; + +export const DashboardShortcutsName = { + SaveChanges: `${userOS === UserOperatingSystem.MACOS ? 
'cmd' : 'ctrl'}+s`, +}; + +export const DashboardShortcutsDescription = { + SaveChanges: 'Save Changes', + DiscardChanges: 'Discard Changes', +}; diff --git a/frontend/src/constants/shortcuts/QBShortcuts.ts b/frontend/src/constants/shortcuts/QBShortcuts.ts new file mode 100644 index 0000000000..56fea081df --- /dev/null +++ b/frontend/src/constants/shortcuts/QBShortcuts.ts @@ -0,0 +1,17 @@ +import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS'; + +const userOS = getUserOperatingSystem(); + +export const QBShortcuts = { + StageAndRunQuery: 'enter+meta', +}; + +export const QBShortcutsName = { + StageAndRunQuery: `${ + userOS === UserOperatingSystem.MACOS ? 'cmd' : 'ctrl' + }+enter`, +}; + +export const QBShortcutsDescription = { + StageAndRunQuery: 'Stage and Run the query', +}; diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx index d44a2d26b8..5d18e7d307 100644 --- a/frontend/src/container/AppLayout/index.tsx +++ b/frontend/src/container/AppLayout/index.tsx @@ -231,7 +231,12 @@ function AppLayout(props: AppLayoutProps): JSX.Element { const routeKey = useMemo(() => getRouteKey(pathname), [pathname]); const pageTitle = t(routeKey); const renderFullScreen = - pathname === ROUTES.GET_STARTED || pathname === ROUTES.WORKSPACE_LOCKED; + pathname === ROUTES.GET_STARTED || + pathname === ROUTES.WORKSPACE_LOCKED || + pathname === ROUTES.GET_STARTED_APPLICATION_MONITORING || + pathname === ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING || + pathname === ROUTES.GET_STARTED_LOGS_MANAGEMENT || + pathname === ROUTES.GET_STARTED_AWS_MONITORING; const [showTrialExpiryBanner, setShowTrialExpiryBanner] = useState(false); diff --git a/frontend/src/container/BillingContainer/BillingContainer.styles.scss b/frontend/src/container/BillingContainer/BillingContainer.styles.scss index afb9e80253..05a672b18c 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.styles.scss +++ b/frontend/src/container/BillingContainer/BillingContainer.styles.scss @@ -1,13 +1,29 @@ .billing-container { - padding: 16px 0; - width: 100%; + padding-top: 36px; + width: 65%; .billing-summary { margin: 24px 8px; } .billing-details { - margin: 36px 8px; + margin: 24px 0px; + + .ant-table-title { + color: var(--bg-vanilla-400); + background-color: rgb(27, 28, 32); + } + + .ant-table-cell { + background-color: var(--bg-ink-400); + border-color: var(--bg-slate-500); + } + + .ant-table-tbody { + td { + border-color: var(--bg-slate-500); + } + } } .upgrade-plan-benefits { @@ -24,6 +40,15 @@ } } } + + .empty-graph-card { + .ant-card-body { + height: 40vh; + display: flex; + justify-content: center; + align-items: center; + } + } } .ant-skeleton.ant-skeleton-element.ant-skeleton-active { @@ -34,3 +59,20 @@ .ant-skeleton.ant-skeleton-element .ant-skeleton-input { min-width: 100% !important; } + +.lightMode { + .billing-container { + .billing-details { + .ant-table-cell { + background: var(--bg-vanilla-100); + border-color: var(--bg-vanilla-200); + } + + .ant-table-tbody { + td { + border-color: var(--bg-vanilla-200); + } + } + } + } +} diff --git a/frontend/src/container/BillingContainer/BillingContainer.test.tsx b/frontend/src/container/BillingContainer/BillingContainer.test.tsx index b4eadd433b..1988df313b 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.test.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.test.tsx @@ -12,13 +12,36 @@ import BillingContainer from './BillingContainer'; const lisenceUrl = 
'http://localhost/api/v2/licenses'; +jest.mock('uplot', () => { + const paths = { + spline: jest.fn(), + bars: jest.fn(), + }; + + const uplotMock = jest.fn(() => ({ + paths, + })); + + return { + paths, + default: uplotMock, + }; +}); + +window.ResizeObserver = + window.ResizeObserver || + jest.fn().mockImplementation(() => ({ + disconnect: jest.fn(), + observe: jest.fn(), + unobserve: jest.fn(), + })); + describe('BillingContainer', () => { test('Component should render', async () => { act(() => { render(); }); - const unit = screen.getAllByText(/unit/i); - expect(unit[1]).toBeInTheDocument(); + const dataInjection = screen.getByRole('columnheader', { name: /data ingested/i, }); @@ -32,24 +55,15 @@ describe('BillingContainer', () => { }); expect(cost).toBeInTheDocument(); - const total = screen.getByRole('cell', { - name: /total/i, - }); - expect(total).toBeInTheDocument(); - const manageBilling = screen.getByRole('button', { - name: /manage billing/i, + name: 'manage_billing', }); expect(manageBilling).toBeInTheDocument(); - const dollar = screen.getByRole('cell', { - name: /\$0/i, - }); + const dollar = screen.getByText(/\$0/i); expect(dollar).toBeInTheDocument(); - const currentBill = screen.getByRole('heading', { - name: /current bill total/i, - }); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); }); @@ -61,9 +75,7 @@ describe('BillingContainer', () => { const freeTrailText = await screen.findByText('Free Trial'); expect(freeTrailText).toBeInTheDocument(); - const currentBill = await screen.findByRole('heading', { - name: /current bill total/i, - }); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); const dollar0 = await screen.findByText(/\$0/i); @@ -73,18 +85,14 @@ describe('BillingContainer', () => { ); expect(onTrail).toBeInTheDocument(); - const numberOfDayRemaining = await screen.findByText( - /1 days remaining in your billing period./i, - ); + const numberOfDayRemaining = await screen.findByText(/1 days_remaining/i); expect(numberOfDayRemaining).toBeInTheDocument(); const upgradeButton = await screen.findAllByRole('button', { - name: /upgrade/i, + name: /upgrade_plan/i, }); expect(upgradeButton[1]).toBeInTheDocument(); expect(upgradeButton.length).toBe(2); - const checkPaidPlan = await screen.findByText( - /Check out features in paid plans/i, - ); + const checkPaidPlan = await screen.findByText(/checkout_plans/i); expect(checkPaidPlan).toBeInTheDocument(); const link = screen.getByRole('link', { name: /here/i }); @@ -102,9 +110,7 @@ describe('BillingContainer', () => { render(); }); - const currentBill = await screen.findByRole('heading', { - name: /current bill total/i, - }); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); const dollar0 = await screen.findByText(/\$0/i); @@ -116,17 +122,17 @@ describe('BillingContainer', () => { expect(onTrail).toBeInTheDocument(); const receivedCardDetails = await screen.findByText( - /We have received your card details, your billing will only start after the end of your free trial period./i, + /card_details_recieved_and_billing_info/i, ); expect(receivedCardDetails).toBeInTheDocument(); const manageBillingButton = await screen.findByRole('button', { - name: /manage billing/i, + name: /manage_billing/i, }); expect(manageBillingButton).toBeInTheDocument(); const dayRemainingInBillingPeriod = await screen.findByText( - /1 days remaining in your billing period./i, + /1 days_remaining/i, ); 
expect(dayRemainingInBillingPeriod).toBeInTheDocument(); }); @@ -137,45 +143,30 @@ describe('BillingContainer', () => { res(ctx.status(200), ctx.json(notOfTrailResponse)), ), ); - render(); + const { findByText } = render(); const billingPeriodText = `Your current billing period is from ${getFormattedDate( billingSuccessResponse.data.billingPeriodStart, )} to ${getFormattedDate(billingSuccessResponse.data.billingPeriodEnd)}`; - const billingPeriod = await screen.findByRole('heading', { - name: new RegExp(billingPeriodText, 'i'), - }); + const billingPeriod = await findByText(billingPeriodText); expect(billingPeriod).toBeInTheDocument(); - const currentBill = await screen.findByRole('heading', { - name: /current bill total/i, - }); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); - const dollar0 = await screen.findAllByText(/\$1278.3/i); - expect(dollar0[0]).toBeInTheDocument(); - expect(dollar0.length).toBe(2); + const dollar0 = await screen.findByText(/\$1,278.3/i); + expect(dollar0).toBeInTheDocument(); const metricsRow = await screen.findByRole('row', { - name: /metrics Million 4012 0.1 \$ 401.2/i, + name: /metrics 4012 Million 0.1 \$ 401.2/i, }); expect(metricsRow).toBeInTheDocument(); const logRow = await screen.findByRole('row', { - name: /Logs GB 497 0.4 \$ 198.8/i, + name: /Logs 497 GB 0.4 \$ 198.8/i, }); expect(logRow).toBeInTheDocument(); - - const totalBill = await screen.findByRole('cell', { - name: /\$1278/i, - }); - expect(totalBill).toBeInTheDocument(); - - const totalBillRow = await screen.findByRole('row', { - name: /total \$1278/i, - }); - expect(totalBillRow).toBeInTheDocument(); }); test('Should render corrent day remaining in billing period', async () => { @@ -186,7 +177,7 @@ describe('BillingContainer', () => { ); render(); const dayRemainingInBillingPeriod = await screen.findByText( - /11 days remaining in your billing period./i, + /11 days_remaining/i, ); expect(dayRemainingInBillingPeriod).toBeInTheDocument(); }); diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index e419c581ed..9b45801356 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -2,19 +2,33 @@ import './BillingContainer.styles.scss'; import { CheckCircleOutlined } from '@ant-design/icons'; -import { Button, Col, Row, Skeleton, Table, Tag, Typography } from 'antd'; +import { Color } from '@signozhq/design-tokens'; +import { + Alert, + Button, + Card, + Col, + Flex, + Row, + Skeleton, + Table, + Tag, + Typography, +} from 'antd'; import { ColumnsType } from 'antd/es/table'; import updateCreditCardApi from 'api/billing/checkout'; -import getUsage from 'api/billing/getUsage'; +import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage'; import manageCreditCardApi from 'api/billing/manage'; +import Spinner from 'components/Spinner'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; import useAnalytics from 'hooks/analytics/useAnalytics'; import useAxiosError from 'hooks/useAxiosError'; import useLicense from 'hooks/useLicense'; import { useNotifications } from 'hooks/useNotifications'; -import { pick } from 'lodash-es'; +import { isEmpty, pick } from 'lodash-es'; import { useCallback, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; import { useMutation, useQuery } from 
'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; @@ -22,8 +36,11 @@ import { ErrorResponse, SuccessResponse } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; import { License } from 'types/api/licenses/def'; import AppReducer from 'types/reducer/app'; +import { isCloudUser } from 'utils/app'; import { getFormattedDate, getRemainingDays } from 'utils/timeUtils'; +import { BillingUsageGraph } from './BillingUsageGraph/BillingUsageGraph'; + interface DataType { key: string; name: string; @@ -33,6 +50,11 @@ interface DataType { cost: string; } +enum SubscriptionStatus { + PastDue = 'past_due', + Active = 'active', +} + const renderSkeletonInput = (): JSX.Element => ( = [ }, ]; +// eslint-disable-next-line sonarjs/cognitive-complexity export default function BillingContainer(): JSX.Element { - const daysRemainingStr = 'days remaining in your billing period.'; + const { t } = useTranslation(['billings']); + const daysRemainingStr = t('days_remaining'); const [headerText, setHeaderText] = useState(''); const [billAmount, setBillAmount] = useState(0); - const [totalBillAmount, setTotalBillAmount] = useState(0); const [activeLicense, setActiveLicense] = useState(null); const [daysRemaining, setDaysRemaining] = useState(0); const [isFreeTrial, setIsFreeTrial] = useState(false); const [data, setData] = useState([]); - const billCurrency = '$'; + const [apiResponse, setApiResponse] = useState< + Partial + >({}); const { trackEvent } = useAnalytics(); @@ -120,10 +145,15 @@ export default function BillingContainer(): JSX.Element { const handleError = useAxiosError(); + const isCloudUserVal = isCloudUser(); + const processUsageData = useCallback( (data: any): void => { + if (isEmpty(data?.payload)) { + return; + } const { - details: { breakdown = [], total, billTotal }, + details: { breakdown = [], billTotal }, billingPeriodStart, billingPeriodEnd, } = data?.payload || {}; @@ -141,8 +171,7 @@ export default function BillingContainer(): JSX.Element { formattedUsageData.push({ key: `${index}${i}`, name: i === 0 ? element?.type : '', - unit: element?.unit, - dataIngested: tier.quantity, + dataIngested: `${tier.quantity} ${element?.unit}`, pricePerUnit: tier.unitPrice, cost: `$ ${tier.tierCost}`, }); @@ -152,7 +181,6 @@ export default function BillingContainer(): JSX.Element { } setData(formattedUsageData); - setTotalBillAmount(total); if (!licensesData?.payload?.onTrial) { const remainingDays = getRemainingDays(billingPeriodEnd) - 1; @@ -165,11 +193,16 @@ export default function BillingContainer(): JSX.Element { setDaysRemaining(remainingDays > 0 ? remainingDays : 0); setBillAmount(billTotal); } + + setApiResponse(data?.payload || {}); }, [licensesData?.payload?.onTrial], ); - const { isLoading } = useQuery( + const isSubscriptionPastDue = + apiResponse.subscriptionStatus === SubscriptionStatus.PastDue; + + const { isLoading, isFetching: isFetchingBillingData } = useQuery( [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], { queryFn: () => getUsage(activeLicense?.key || ''), @@ -208,11 +241,6 @@ export default function BillingContainer(): JSX.Element { key: 'name', render: (text): JSX.Element =>
{text}
, }, - { - title: 'Unit', - dataIndex: 'unit', - key: 'unit', - }, { title: 'Data Ingested', dataIndex: 'dataIngested', @@ -230,24 +258,6 @@ export default function BillingContainer(): JSX.Element { }, ]; - const renderSummary = (): JSX.Element => ( - - - - Total - - -   -   -   - - - ${totalBillAmount} - - - - ); - const renderTableSkeleton = (): JSX.Element => ( + !isLoading && !isFetchingBillingData ? ( + + ) : ( + + + + ), + [apiResponse, billAmount, isLoading, isFetchingBillingData], + ); + + const { Text } = Typography; + const subscriptionPastDueMessage = (): JSX.Element => ( + + {`We were not able to process payments for your account. Please update your card details `} + + {t('here')} + + {` if your payment information has changed. Email us at `} + cloud-support@signoz.io + {` otherwise. Be sure to provide this information immediately to avoid interruption to your service.`} + + ); + return (
- + + {t('billing')} + + + {t('manage_billing_and_costs')} + + + + -
- - {headerText} - - - {licensesData?.payload?.onTrial && - licensesData?.payload?.trialConvertedToSubscription && ( - - We have received your card details, your billing will only start after - the end of your free trial period. - - )} - - - + + + + {isCloudUserVal ? t('enterprise_cloud') : t('enterprise')}{' '} + {isFreeTrial ? Free Trial : ''} + + {!isLoading && !isFetchingBillingData ? ( + + {daysRemaining} {daysRemainingStr} + + ) : null} + - - + -
- - Current bill total - + {licensesData?.payload?.onTrial && + licensesData?.payload?.trialConvertedToSubscription && ( + + {t('card_details_recieved_and_billing_info')} + + )} - - {billCurrency} - {billAmount}   - {isFreeTrial ? Free Trial : ''} - + {!isLoading && !isFetchingBillingData ? ( + headerText && ( + + ) + ) : ( + + )} - - {daysRemaining} {daysRemainingStr} - -
+ {isSubscriptionPastDue && + (!isLoading && !isFetchingBillingData ? ( + + ) : ( + + ))} + + +
- {!isLoading && ( + {!isLoading && !isFetchingBillingData && (
)} - {isLoading && renderTableSkeleton()} + {(isLoading || isFetchingBillingData) && renderTableSkeleton()} {isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription && ( @@ -423,16 +476,16 @@ export default function BillingContainer(): JSX.Element { - Upgrade now to have uninterrupted access + {t('upgrade_now_text')} - Your billing will start only after the trial period + {t('Your billing will start only after the trial period')} - Check out features in paid plans   + {t('checkout_plans')}   - here + {t('here')} @@ -453,7 +506,7 @@ export default function BillingContainer(): JSX.Element { loading={isLoadingBilling || isLoadingManageBilling} onClick={handleBilling} > - Upgrade Plan + {t('upgrade_plan')} diff --git a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss new file mode 100644 index 0000000000..e5722d4f4a --- /dev/null +++ b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss @@ -0,0 +1,29 @@ +.billing-graph-card { + .ant-card-body { + height: 40vh; + .uplot-graph-container { + padding: 8px; + } + } + .total-spent { + font-family: 'SF Mono' monospace; + font-size: 16px; + font-style: normal; + font-weight: 600; + line-height: 24px; + } + + .total-spent-title { + font-size: 12px; + font-weight: 500; + line-height: 22px; + letter-spacing: 0.48px; + color: rgba(255, 255, 255, 0.5); + } +} + +.lightMode { + .total-spent-title { + color: var(--bg-ink-100); + } +} diff --git a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx new file mode 100644 index 0000000000..be77ebba95 --- /dev/null +++ b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx @@ -0,0 +1,201 @@ +import './BillingUsageGraph.styles.scss'; +import '../../../lib/uPlotLib/uPlotLib.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Card, Flex, Typography } from 'antd'; +import Uplot from 'components/Uplot'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useResizeObserver } from 'hooks/useDimensions'; +import tooltipPlugin from 'lib/uPlotLib/plugins/tooltipPlugin'; +import getAxes from 'lib/uPlotLib/utils/getAxes'; +import getRenderer from 'lib/uPlotLib/utils/getRenderer'; +import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; +import { getXAxisScale } from 'lib/uPlotLib/utils/getXAxisScale'; +import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale'; +import { useMemo, useRef } from 'react'; +import uPlot from 'uplot'; + +import { + convertDataToMetricRangePayload, + fillMissingValuesForQuantities, +} from './utils'; + +interface BillingUsageGraphProps { + data: any; + billAmount: number; +} +const paths = ( + u: any, + seriesIdx: number, + idx0: number, + idx1: number, + extendGap: boolean, + buildClip: boolean, +): uPlot.Series.PathBuilder => { + const s = u.series[seriesIdx]; + const style = s.drawStyle; + const interp = s.lineInterpolation; + + const renderer = getRenderer(style, interp); + + return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip); +}; + +const calculateStartEndTime = ( + data: any, +): { startTime: number; endTime: number } => { + const timestamps: number[] = []; + data?.details?.breakdown?.forEach((breakdown: any) => { + breakdown?.dayWiseBreakdown?.breakdown.forEach((entry: any) => { + 
timestamps.push(entry?.timestamp); + }); + }); + const billingTime = [data?.billingPeriodStart, data?.billingPeriodEnd]; + const startTime: number = Math.min(...timestamps, ...billingTime); + const endTime: number = Math.max(...timestamps, ...billingTime); + return { startTime, endTime }; +}; + +export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element { + const { data, billAmount } = props; + const graphCompatibleData = useMemo( + () => convertDataToMetricRangePayload(data), + [data], + ); + const chartData = getUPlotChartData(graphCompatibleData); + const graphRef = useRef(null); + const isDarkMode = useIsDarkMode(); + const containerDimensions = useResizeObserver(graphRef); + + const { startTime, endTime } = useMemo(() => calculateStartEndTime(data), [ + data, + ]); + + const getGraphSeries = (color: string, label: string): any => ({ + drawStyle: 'bars', + paths, + lineInterpolation: 'spline', + show: true, + label, + fill: color, + stroke: color, + width: 2, + spanGaps: true, + points: { + size: 5, + show: false, + stroke: color, + }, + }); + + const uPlotSeries: any = useMemo( + () => [ + { label: 'Timestamp', stroke: 'purple' }, + getGraphSeries( + '#7CEDBE', + graphCompatibleData.data.result[0]?.legend as string, + ), + getGraphSeries( + '#4E74F8', + graphCompatibleData.data.result[1]?.legend as string, + ), + getGraphSeries( + '#F24769', + graphCompatibleData.data.result[2]?.legend as string, + ), + ], + [graphCompatibleData.data.result], + ); + + const axesOptions = getAxes(isDarkMode, ''); + + const optionsForChart: uPlot.Options = useMemo( + () => ({ + id: 'billing-usage-breakdown', + series: uPlotSeries, + width: containerDimensions.width, + height: containerDimensions.height - 30, + axes: [ + { + ...axesOptions[0], + grid: { + ...axesOptions.grid, + show: false, + stroke: isDarkMode ? Color.BG_VANILLA_400 : Color.BG_INK_400, + }, + }, + { + ...axesOptions[1], + stroke: isDarkMode ? Color.BG_SLATE_200 : Color.BG_INK_400, + }, + ], + scales: { + x: { + ...getXAxisScale(startTime - 86400, endTime), // Minus 86400 from startTime to decrease a day to have a buffer start + }, + y: { + ...getYAxisScale({ + series: graphCompatibleData?.data.newResult.data.result, + yAxisUnit: '', + softMax: null, + softMin: null, + }), + }, + }, + legend: { + show: true, + live: false, + isolate: true, + }, + cursor: { + lock: false, + focus: { + prox: 1e6, + bias: 1, + }, + }, + focus: { + alpha: 0.3, + }, + padding: [32, 32, 16, 16], + plugins: [ + tooltipPlugin( + fillMissingValuesForQuantities(graphCompatibleData, chartData[0]), + '', + true, + ), + ], + }), + [ + axesOptions, + chartData, + containerDimensions.height, + containerDimensions.width, + endTime, + graphCompatibleData, + isDarkMode, + startTime, + uPlotSeries, + ], + ); + + const numberFormatter = new Intl.NumberFormat('en-US'); + + return ( + + + + + TOTAL SPENT + + + ${numberFormatter.format(billAmount)} + + + +
+ +
+
+ ); +} diff --git a/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts b/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts new file mode 100644 index 0000000000..d40c8a6097 --- /dev/null +++ b/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts @@ -0,0 +1,87 @@ +import { isEmpty, isNull } from 'lodash-es'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; + +export const convertDataToMetricRangePayload = ( + data: any, +): MetricRangePayloadProps => { + const emptyStateData = { + data: { + newResult: { data: { result: [], resultType: '' } }, + result: [], + resultType: '', + }, + }; + if (isEmpty(data)) { + return emptyStateData; + } + const { + details: { breakdown = [] }, + } = data || {}; + + if (isNull(breakdown) || breakdown.length === 0) { + return emptyStateData; + } + + const payload = breakdown.map((info: any) => { + const metric = info.type; + const sortedBreakdownData = (info?.dayWiseBreakdown?.breakdown || []).sort( + (a: any, b: any) => a.timestamp - b.timestamp, + ); + const values = (sortedBreakdownData || []).map((categoryInfo: any) => [ + categoryInfo.timestamp, + categoryInfo.total, + ]); + const queryName = info.type; + const legend = info.type; + const { unit } = info; + const quantity = sortedBreakdownData.map( + (categoryInfo: any) => categoryInfo.quantity, + ); + return { metric, values, queryName, legend, quantity, unit }; + }); + + const sortedData = payload.sort((a: any, b: any) => { + const sumA = a.values.reduce((acc: any, val: any) => acc + val[1], 0); + const avgA = a.values.length ? sumA / a.values.length : 0; + const sumB = b.values.reduce((acc: any, val: any) => acc + val[1], 0); + const avgB = b.values.length ? sumB / b.values.length : 0; + + return sumA === sumB ? avgB - avgA : sumB - sumA; + }); + + return { + data: { + newResult: { data: { result: sortedData, resultType: '' } }, + result: sortedData, + resultType: '', + }, + }; +}; + +export function fillMissingValuesForQuantities( + data: any, + timestampArray: number[], +): MetricRangePayloadProps { + const { result } = data.data; + + const transformedResultArr: any[] = []; + result.forEach((item: any) => { + const timestampToQuantityMap: { [timestamp: number]: number } = {}; + item.values.forEach((val: number[], index: number) => { + timestampToQuantityMap[val[0]] = item.quantity[index]; + }); + + const quantityArray = timestampArray.map( + (timestamp: number) => timestampToQuantityMap[timestamp] ?? null, + ); + transformedResultArr.push({ ...item, quantity: quantityArray }); + }); + + return { + data: { + newResult: { data: { result: transformedResultArr, resultType: '' } }, + result: transformedResultArr, + resultType: '', + }, + }; +} diff --git a/frontend/src/container/CreateAlertChannels/config.ts b/frontend/src/container/CreateAlertChannels/config.ts index e15c1d7e08..3ee3882cc1 100644 --- a/frontend/src/container/CreateAlertChannels/config.ts +++ b/frontend/src/container/CreateAlertChannels/config.ts @@ -64,6 +64,16 @@ export interface OpsgenieChannel extends Channel { priority?: string; } +export interface EmailChannel extends Channel { + // comma separated list of email addresses to send alerts to + to: string; + // HTML body of the email notification. + html: string; + // Further headers email header key/value pairs. + // [ headers: { : , ... 
} ] + headers: Record<string, string>; +} + export const ValidatePagerChannel = (p: PagerChannel): string => { if (!p) { return 'Received unexpected input for this channel, please contact your administrator '; } diff --git a/frontend/src/container/CreateAlertChannels/defaults.ts b/frontend/src/container/CreateAlertChannels/defaults.ts index 3068d8dd0c..f687164a72 100644 --- a/frontend/src/container/CreateAlertChannels/defaults.ts +++ b/frontend/src/container/CreateAlertChannels/defaults.ts @@ -1,4 +1,4 @@ -import { OpsgenieChannel, PagerChannel } from './config'; +import { EmailChannel, OpsgenieChannel, PagerChannel } from './config'; export const PagerInitialConfig: Partial<PagerChannel> = { description: `[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} @@ -50,3 +50,399 @@ export const OpsgenieInitialConfig: Partial<OpsgenieChannel> = { priority: '{{ if eq (index .Alerts 0).Labels.severity "critical" }}P1{{ else if eq (index .Alerts 0).Labels.severity "warning" }}P2{{ else if eq (index .Alerts 0).Labels.severity "info" }}P3{{ else }}P4{{ end }}', }; + +export const EmailInitialConfig: Partial<EmailChannel> = { + send_resolved: true, + html: ` + + + + + + {{ template "__subject" . }} + + + +
+ + + + + +
+
+ + + {{ if gt (len .Alerts.Firing) 0 }} + + + + + +
+ {{ else }} + + {{ end }} + {{ .Alerts | len }} alert{{ if gt (len .Alerts) 1 }}s{{ end }} for {{ range .GroupLabels.SortedPairs }} + {{ .Name }}={{ .Value }} + {{ end }} +
+ + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + {{ range .Alerts.Firing }} + + + + {{ end }} + {{ if gt (len .Alerts.Resolved) 0 }} + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + + + + {{ end }} + {{ range .Alerts.Resolved }} + + + + {{ end }} +
+ [{{ .Alerts.Firing | len }}] Firing +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+
+
+
+ [{{ .Alerts.Resolved | len }}] Resolved +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+
+
+ + `, +}; diff --git a/frontend/src/container/CreateAlertChannels/index.tsx b/frontend/src/container/CreateAlertChannels/index.tsx index d8426f71b9..51a0b6214e 100644 --- a/frontend/src/container/CreateAlertChannels/index.tsx +++ b/frontend/src/container/CreateAlertChannels/index.tsx @@ -1,9 +1,11 @@ import { Form } from 'antd'; +import createEmail from 'api/channels/createEmail'; import createMsTeamsApi from 'api/channels/createMsTeams'; import createOpsgenie from 'api/channels/createOpsgenie'; import createPagerApi from 'api/channels/createPager'; import createSlackApi from 'api/channels/createSlack'; import createWebhookApi from 'api/channels/createWebhook'; +import testEmail from 'api/channels/testEmail'; import testMsTeamsApi from 'api/channels/testMsTeams'; import testOpsGenie from 'api/channels/testOpsgenie'; import testPagerApi from 'api/channels/testPager'; @@ -18,6 +20,7 @@ import { useTranslation } from 'react-i18next'; import { ChannelType, + EmailChannel, MsTeamsChannel, OpsgenieChannel, PagerChannel, @@ -25,7 +28,11 @@ import { ValidatePagerChannel, WebhookChannel, } from './config'; -import { OpsgenieInitialConfig, PagerInitialConfig } from './defaults'; +import { + EmailInitialConfig, + OpsgenieInitialConfig, + PagerInitialConfig, +} from './defaults'; import { isChannelType } from './utils'; function CreateAlertChannels({ @@ -42,7 +49,8 @@ function CreateAlertChannels({ WebhookChannel & PagerChannel & MsTeamsChannel & - OpsgenieChannel + OpsgenieChannel & + EmailChannel > >({ text: `{{ range .Alerts -}} @@ -94,6 +102,14 @@ function CreateAlertChannels({ ...OpsgenieInitialConfig, })); } + + // reset config to email defaults + if (value === ChannelType.Email && currentType !== value) { + setSelectedConfig((selectedConfig) => ({ + ...selectedConfig, + ...EmailInitialConfig, + })); + } }, [type, selectedConfig], ); @@ -293,6 +309,43 @@ function CreateAlertChannels({ setSavingState(false); }, [prepareOpsgenieRequest, t, notifications]); + const prepareEmailRequest = useCallback( + () => ({ + name: selectedConfig?.name || '', + send_resolved: true, + to: selectedConfig?.to || '', + html: selectedConfig?.html || '', + headers: selectedConfig?.headers || {}, + }), + [selectedConfig], + ); + + const onEmailHandler = useCallback(async () => { + setSavingState(true); + try { + const request = prepareEmailRequest(); + const response = await createEmail(request); + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + } else { + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + } + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + } + setSavingState(false); + }, [prepareEmailRequest, t, notifications]); + const prepareMsTeamsRequest = useCallback( () => ({ webhook_url: selectedConfig?.webhook_url || '', @@ -339,6 +392,7 @@ function CreateAlertChannels({ [ChannelType.Pagerduty]: onPagerHandler, [ChannelType.Opsgenie]: onOpsgenieHandler, [ChannelType.MsTeams]: onMsTeamsHandler, + [ChannelType.Email]: onEmailHandler, }; if (isChannelType(value)) { @@ -360,6 +414,7 @@ function CreateAlertChannels({ onPagerHandler, onOpsgenieHandler, onMsTeamsHandler, + onEmailHandler, notifications, t, ], @@ -392,6 +447,10 @@ function CreateAlertChannels({ request = prepareOpsgenieRequest(); response = await testOpsGenie(request); break; + case 
ChannelType.Email: + request = prepareEmailRequest(); + response = await testEmail(request); + break; default: notifications.error({ message: 'Error', @@ -427,6 +486,7 @@ function CreateAlertChannels({ prepareOpsgenieRequest, prepareSlackRequest, prepareMsTeamsRequest, + prepareEmailRequest, notifications, ], ); @@ -455,6 +515,7 @@ function CreateAlertChannels({ ...selectedConfig, ...PagerInitialConfig, ...OpsgenieInitialConfig, + ...EmailInitialConfig, }, }} /> diff --git a/frontend/src/container/CreateAlertRule/defaults.ts b/frontend/src/container/CreateAlertRule/defaults.ts index 8517d9b18c..677f4accc4 100644 --- a/frontend/src/container/CreateAlertRule/defaults.ts +++ b/frontend/src/container/CreateAlertRule/defaults.ts @@ -1,9 +1,9 @@ +import { ENTITY_VERSION_V4 } from 'constants/app'; import { initialQueryBuilderFormValuesMap, initialQueryPromQLData, PANEL_TYPES, } from 'constants/queryBuilder'; -import ROUTES from 'constants/routes'; import { AlertTypes } from 'types/api/alerts/alertTypes'; import { AlertDef, @@ -25,6 +25,7 @@ const defaultAnnotations = { export const alertDefaults: AlertDef = { alertType: AlertTypes.METRICS_BASED_ALERT, + version: ENTITY_VERSION_V4, condition: { compositeQuery: { builderQueries: { @@ -78,7 +79,6 @@ export const logAlertDefaults: AlertDef = { }, labels: { severity: 'warning', - details: `${window.location.protocol}//${window.location.host}${ROUTES.LOGS_EXPLORER}`, }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, @@ -109,7 +109,6 @@ export const traceAlertDefaults: AlertDef = { }, labels: { severity: 'warning', - details: `${window.location.protocol}//${window.location.host}/traces`, }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, @@ -140,7 +139,6 @@ export const exceptionAlertDefaults: AlertDef = { }, labels: { severity: 'warning', - details: `${window.location.protocol}//${window.location.host}/exceptions`, }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, diff --git a/frontend/src/container/CreateAlertRule/index.tsx b/frontend/src/container/CreateAlertRule/index.tsx index 9ce1634d13..a5924531b2 100644 --- a/frontend/src/container/CreateAlertRule/index.tsx +++ b/frontend/src/container/CreateAlertRule/index.tsx @@ -1,7 +1,9 @@ import { Form, Row } from 'antd'; +import { ENTITY_VERSION_V4 } from 'constants/app'; import FormAlertRules from 'container/FormAlertRules'; import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam'; import { useEffect, useState } from 'react'; +import { useLocation } from 'react-router-dom'; import { AlertTypes } from 'types/api/alerts/alertTypes'; import { AlertDef } from 'types/api/alerts/def'; @@ -20,6 +22,10 @@ function CreateRules(): JSX.Element { AlertTypes.METRICS_BASED_ALERT, ); + const location = useLocation(); + const queryParams = new URLSearchParams(location.search); + const version = queryParams.get('version'); + const compositeQuery = useGetCompositeQueryParam(); const [formInstance] = Form.useForm(); @@ -37,7 +43,10 @@ function CreateRules(): JSX.Element { setInitValues(exceptionAlertDefaults); break; default: - setInitValues(alertDefaults); + setInitValues({ + ...alertDefaults, + version: version || ENTITY_VERSION_V4, + }); } }; @@ -52,6 +61,7 @@ function CreateRules(): JSX.Element { if (alertType) { onSelectType(alertType); } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [compositeQuery]); if (!initValues) { diff --git a/frontend/src/container/DownloadV2/DownloadV2.styles.scss 
b/frontend/src/container/DownloadV2/DownloadV2.styles.scss new file mode 100644 index 0000000000..850c1c7d16 --- /dev/null +++ b/frontend/src/container/DownloadV2/DownloadV2.styles.scss @@ -0,0 +1,84 @@ +.download-logs-popover { + .ant-popover-inner { + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + padding: 12px 18px 12px 14px; + + .download-logs-content { + display: flex; + flex-direction: column; + gap: 8px; + align-items: flex-start; + + .action-btns { + padding: 4px 0px !important; + width: 159px; + display: flex; + align-items: center; + color: var(--bg-vanilla-400); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; + gap: 6px; + + .ant-btn-icon { + margin-inline-end: 0px; + } + } + + .action-btns:hover { + &.ant-btn-text { + background-color: rgba(171, 189, 255, 0.04) !important; + } + } + + .export-heading { + color: #52575c; + font-size: 11px; + font-style: normal; + font-weight: 600; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + } + } + } +} + +.lightMode { + .download-logs-popover { + .ant-popover-inner { + border: 1px solid var(--bg-vanilla-300); + background: linear-gradient( + 139deg, + rgba(255, 255, 255, 0.8) 0%, + rgba(255, 255, 255, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2); + + .download-logs-content { + .action-btns { + color: var(--bg-ink-400); + } + .action-btns:hover { + &.ant-btn-text { + background-color: var(--bg-vanilla-300) !important; + } + } + .export-heading { + color: var(--bg-ink-200); + } + } + } + } +} diff --git a/frontend/src/container/DownloadV2/DownloadV2.tsx b/frontend/src/container/DownloadV2/DownloadV2.tsx new file mode 100644 index 0000000000..95630efcb9 --- /dev/null +++ b/frontend/src/container/DownloadV2/DownloadV2.tsx @@ -0,0 +1,84 @@ +import './DownloadV2.styles.scss'; + +import { Button, Popover, Typography } from 'antd'; +import { Excel } from 'antd-table-saveas-excel'; +import { FileDigit, FileDown, Sheet } from 'lucide-react'; +import { unparse } from 'papaparse'; + +import { DownloadProps } from './DownloadV2.types'; + +function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element { + const downloadExcelFile = (): void => { + const headers = Object.keys(Object.assign({}, ...data)).map((item) => { + const updatedTitle = item + .split('_') + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); + return { + title: updatedTitle, + dataIndex: item, + }; + }); + const excel = new Excel(); + excel + .addSheet(fileName) + .addColumns(headers) + .addDataSource(data, { + str2Percent: true, + }) + .saveAs(`${fileName}.xlsx`); + }; + + const downloadCsvFile = (): void => { + const csv = unparse(data); + const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' }); + const csvUrl = URL.createObjectURL(csvBlob); + const downloadLink = document.createElement('a'); + downloadLink.href = csvUrl; + downloadLink.download = `${fileName}.csv`; + downloadLink.click(); + downloadLink.remove(); + }; + + return ( + + Export As + + +
+ } + > +
)} -
-
- - showSearch - placeholder="Select a view" - loading={viewsIsLoading || isRefetching} - value={viewName || undefined} - onSelect={handleSelect} - style={{ - minWidth: 170, - }} - dropdownStyle={dropdownStyle} - className="views-dropdown" - allowClear={{ - clearIcon: , - }} - onClear={handleClearSelect} - ref={ref} - > - {viewsData?.data?.data?.map((view) => { - const extraData = - view.extraData !== '' ? JSON.parse(view.extraData) : ''; - let bgColor = getRandomColor(); - if (extraData !== '') { - bgColor = extraData.color; - } - return ( - -
- {' '} - {view.name} -
-
- ); - })} - - - -
- -
- -
- - - + {viewsData?.data?.data?.map((view) => { + const extraData = + view.extraData !== '' ? JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + return ( + +
+ {' '} + {view.name} +
+
+ ); + })} + - - - +
+ +
+ +
+ + + + + + + + + + + +
-
+ )} + + >; } -ExplorerOptions.defaultProps = { isLoading: false }; +ExplorerOptions.defaultProps = { + isLoading: false, + isExplorerOptionHidden: false, + setIsExplorerOptionHidden: undefined, +}; export default ExplorerOptions; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss new file mode 100644 index 0000000000..e45b9e893c --- /dev/null +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss @@ -0,0 +1,55 @@ +.explorer-option-droppable-container { + position: fixed; + bottom: 0; + width: -webkit-fill-available; + height: 24px; + display: flex; + justify-content: center; + border-radius: 10px 10px 0px 0px; + // box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + // backdrop-filter: blur(20px); + + .explorer-actions-btn { + display: flex; + gap: 8px; + margin-right: 8px; + + .action-btn { + display: flex; + justify-content: center; + align-items: center; + border-radius: 10px 10px 0px 0px; + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + height: 24px !important; + border: none; + } + } + + .explorer-show-btn { + border-radius: 10px 10px 0px 0px; + border: 1px solid var(--bg-slate-400); + background: rgba(22, 24, 29, 0.40); + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + align-self: center; + padding: 8px 12px; + height: 24px !important; + + .menu-bar { + border-radius: 50px; + background: var(--bg-slate-200); + height: 4px; + width: 50px; + } + } +} + +.lightMode { + .explorer-option-droppable-container { + + .explorer-show-btn { + background: var(--bg-vanilla-200); + } + } +} \ No newline at end of file diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx new file mode 100644 index 0000000000..f5e7faf0dc --- /dev/null +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx @@ -0,0 +1,78 @@ +/* eslint-disable no-nested-ternary */ +import './ExplorerOptionsHideArea.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Button, Tooltip } from 'antd'; +import { Disc3, X } from 'lucide-react'; +import { Dispatch, SetStateAction } from 'react'; +import { DataSource } from 'types/common/queryBuilder'; + +import { setExplorerToolBarVisibility } from './utils'; + +interface DroppableAreaProps { + isQueryUpdated: boolean; + isExplorerOptionHidden?: boolean; + sourcepage: DataSource; + setIsExplorerOptionHidden?: Dispatch>; + handleClearSelect: () => void; + onUpdateQueryHandler: () => void; +} + +function ExplorerOptionsHideArea({ + isQueryUpdated, + isExplorerOptionHidden, + sourcepage, + setIsExplorerOptionHidden, + handleClearSelect, + onUpdateQueryHandler, +}: DroppableAreaProps): JSX.Element { + const handleShowExplorerOption = (): void => { + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(false); + setExplorerToolBarVisibility(true, sourcepage); + } + }; + + return ( +
+ {isExplorerOptionHidden && ( + <> + {isQueryUpdated && ( +
+ +
+ )} + + + )} +
+ ); +} + +ExplorerOptionsHideArea.defaultProps = { + isExplorerOptionHidden: undefined, + setIsExplorerOptionHidden: undefined, +}; + +export default ExplorerOptionsHideArea; diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts index e3ac710609..d94e64161e 100644 --- a/frontend/src/container/ExplorerOptions/utils.ts +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -1,5 +1,6 @@ import { Color } from '@signozhq/design-tokens'; import { showErrorNotification } from 'components/ExplorerCard/utils'; +import { LOCALSTORAGE } from 'constants/localStorage'; import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; @@ -67,3 +68,54 @@ export const generateRGBAFromHex = (hex: string, opacity: number): string => hex.slice(3, 5), 16, )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; + +export const getExplorerToolBarVisibility = (dataSource: string): boolean => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar === null) { + const parsedShowExplorerToolbar: { + [DataSource.LOGS]: boolean; + [DataSource.TRACES]: boolean; + [DataSource.METRICS]: boolean; + } = { + [DataSource.METRICS]: true, + [DataSource.TRACES]: true, + [DataSource.LOGS]: true, + }; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return true; + } + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar || '{}'); + return parsedShowExplorerToolbar[dataSource]; + } catch (error) { + console.error(error); + return false; + } +}; + +export const setExplorerToolBarVisibility = ( + value: boolean, + dataSource: string, +): void => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar) { + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar); + parsedShowExplorerToolbar[dataSource] = value; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return; + } + } catch (error) { + console.error(error); + } +}; diff --git a/frontend/src/container/FormAlertChannels/Settings/Email.tsx b/frontend/src/container/FormAlertChannels/Settings/Email.tsx new file mode 100644 index 0000000000..398e172a57 --- /dev/null +++ b/frontend/src/container/FormAlertChannels/Settings/Email.tsx @@ -0,0 +1,48 @@ +import { Form, Input } from 'antd'; +import { Dispatch, SetStateAction } from 'react'; +import { useTranslation } from 'react-i18next'; + +import { EmailChannel } from '../../CreateAlertChannels/config'; + +function EmailForm({ setSelectedConfig }: EmailFormProps): JSX.Element { + const { t } = useTranslation('channels'); + + const handleInputChange = (field: string) => ( + event: React.ChangeEvent, + ): void => { + setSelectedConfig((value) => ({ + ...value, + [field]: event.target.value, + })); + }; + + return ( + <> + + + + + {/* +