Merge branch 'develop' into main-public

Commit fe2ddf9d60
@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	requestBody, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.S().Errorf("received no input in api\n", err)
+		zap.L().Error("received no input in api", zap.Error(err))
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	err = json.Unmarshal(requestBody, &req)

 	if err != nil {
-		zap.S().Errorf("received invalid user registration request", zap.Error(err))
+		zap.L().Error("received invalid user registration request", zap.Error(err))
 		RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
 		return
 	}
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	// get invite object
 	invite, err := baseauth.ValidateInvite(ctx, req)
 	if err != nil {
-		zap.S().Errorf("failed to validate invite token", err)
+		zap.L().Error("failed to validate invite token", zap.Error(err))
 		RespondError(w, model.BadRequest(err), nil)
 		return
 	}

 	if invite == nil {
-		zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
+		zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
 		RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
 		return
 	}
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 	// get auth domain from email domain
 	domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
 	if apierr != nil {
-		zap.S().Errorf("failed to get domain from email", apierr)
+		zap.L().Error("failed to get domain from email", zap.Error(apierr))
 		RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
 	}

@@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
 	ctx := context.Background()

 	if !ah.CheckFeature(model.SSO) {
-		zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
+		zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
 		return
 	}

 	q := r.URL.Query()
 	if errType := q.Get("error"); errType != "" {
-		zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
+		zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
 		return
 	}

 	relayState := q.Get("state")
-	zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
+	zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))

 	parsedState, err := url.Parse(relayState)
 	if err != nil || relayState == "" {
-		zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)

 	identity, err := callbackHandler.HandleCallback(r)
 	if err != nil {
-		zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
 	if err != nil {
-		zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	ctx := context.Background()

 	if !ah.CheckFeature(model.SSO) {
-		zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
+		zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
 		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
 		return
 	}

 	err := r.ParseForm()
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
 	// the relay state is sent when a login request is submitted to
 	// Idp.
 	relayState := r.FormValue("RelayState")
-	zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState))
+	zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))

 	parsedState, err := url.Parse(relayState)
 	if err != nil || relayState == "" {
-		zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
+		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
@@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {

 	sp, err := domain.PrepareSamlRequest(parsedState)
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	if assertionInfo.WarningInfo.InvalidTime {
-		zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
+		zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	email := assertionInfo.NameID
 	if email == "" {
-		zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
+		zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
 		handleSsoError(w, r, redirectUri)
 		return
 	}

 	nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
 	if err != nil {
-		zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
+		zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
 		handleSsoError(w, r, redirectUri)
 		return
 	}
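Most hunks in this commit apply the same migration: logging calls on zap's sugared logger, obtained through zap.S() and driven by printf-style messages with loose trailing arguments, are replaced by the structured logger obtained through zap.L(), which takes a fixed message plus typed fields. A minimal, self-contained Go sketch of the pattern (illustrative only; the logger construction and the "example.org" value are assumptions, not taken from this commit):

    package main

    import (
    	"errors"

    	"go.uber.org/zap"
    )

    func main() {
    	logger, _ := zap.NewProduction()
    	zap.ReplaceGlobals(logger) // after this, zap.L() and zap.S() return this logger
    	defer logger.Sync()

    	err := errors.New("connection refused")

    	// Sugared style (being removed): printf formatting, untyped trailing args.
    	zap.S().Errorf("failed to validate invite token: %v", err)

    	// Structured style (being introduced): constant message, typed fields.
    	zap.L().Error("failed to validate invite token",
    		zap.Error(err),
    		zap.String("domain", "example.org"),
    	)
    }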
@@ -191,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 	url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey)
 	req, err := http.NewRequest("GET", url, nil)
 	if err != nil {
-		zap.S().Error("Error while creating request for trial details", err)
+		zap.L().Error("Error while creating request for trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid blocking the UI
 		ah.Respond(w, resp)
@@ -200,7 +200,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 	req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
 	trialResp, err := hClient.Do(req)
 	if err != nil {
-		zap.S().Error("Error while fetching trial details", err)
+		zap.L().Error("Error while fetching trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
@@ -211,7 +211,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 	trialRespBody, err := io.ReadAll(trialResp.Body)

 	if err != nil || trialResp.StatusCode != http.StatusOK {
-		zap.S().Error("Error while fetching trial details", err)
+		zap.L().Error("Error while fetching trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
@@ -222,7 +222,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
 	var trialRespData model.SubscriptionServerResp

 	if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil {
-		zap.S().Error("Error while decoding trial details", err)
+		zap.L().Error("Error while decoding trial details", zap.Error(err))
 		// If there is an error in fetching trial details, we will still return the license details
 		// to avoid incorrectly blocking the UI
 		ah.Respond(w, resp)
@@ -18,14 +18,14 @@ import (

 func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
 	if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
-		zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
+		zap.L().Info("CustomMetricsFunction feature is not enabled in this plan")
 		ah.APIHandler.QueryRangeMetricsV2(w, r)
 		return
 	}
 	metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)

 	if apiErrorObj != nil {
-		zap.S().Errorf(apiErrorObj.Err.Error())
+		zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err))
 		RespondError(w, apiErrorObj, nil)
 		return
 	}
@@ -43,8 +43,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	pat := model.PAT{
-		Name: req.Name,
-		Role: req.Role,
+		Name:      req.Name,
+		Role:      req.Role,
+		ExpiresAt: req.ExpiresInDays,
 	}
 	err = validatePATRequest(pat)
@@ -65,7 +65,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
 		pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
 	}

-	zap.S().Debugf("Got Create PAT request: %+v", pat)
+	zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
 	var apierr basemodel.BaseApiError
 	if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
 		RespondError(w, apierr, nil)
@@ -115,7 +115,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
 	req.UpdatedByUserID = user.Id
 	id := mux.Vars(r)["id"]
 	req.UpdatedAt = time.Now().Unix()
-	zap.S().Debugf("Got Update PAT request: %+v", req)
+	zap.L().Info("Got Update PAT request", zap.Any("pat", req))
 	var apierr basemodel.BaseApiError
 	if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
 		RespondError(w, apierr, nil)
@@ -135,7 +135,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
 		}, nil)
 		return
 	}
-	zap.S().Infof("Get PATs for user: %+v", user.Id)
+	zap.L().Info("Get PATs for user", zap.String("user_id", user.Id))
 	pats, apierr := ah.AppDao().ListPATs(ctx)
 	if apierr != nil {
 		RespondError(w, apierr, nil)
@@ -156,7 +156,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	zap.S().Debugf("Revoke PAT with id: %+v", id)
+	zap.L().Info("Revoke PAT with id", zap.String("id", id))
 	if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil {
 		RespondError(w, apierr, nil)
 		return
@@ -15,7 +15,7 @@ import (
 func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

 	if !ah.CheckFeature(basemodel.SmartTraceDetail) {
-		zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
+		zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
 		ah.APIHandler.SearchTraces(w, r)
 		return
 	}
@@ -26,7 +26,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
 	}
 	spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
 	if err != nil {
-		zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
+		zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err))
 		return
 	}
 	result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)
@@ -22,7 +22,7 @@ import (
 func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {

 	defer utils.Elapsed("GetMetricResult")()
-	zap.S().Infof("Executing metric result query: %s", query)
+	zap.L().Info("Executing metric result query: ", zap.String("query", query))

 	var hash string
 	// If getSubTreeSpans function is used in the clickhouse query
@@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
 	}

 	rows, err := r.conn.Query(ctx, query)
-	zap.S().Debug(query)
 	if err != nil {
-		zap.S().Debug("Error in processing query: ", err)
+		zap.L().Error("Error in processing query", zap.Error(err))
 		return nil, "", fmt.Errorf("error in processing query")
 	}

@@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
 				groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
 			}
 		default:
-			zap.S().Errorf("invalid var found in metric builder query result", v, colName)
+			zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
 		}
 	}
 	sort.Strings(groupBy)
@@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
 	}
 	// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
 	// if err != nil {
-	// 	zap.S().Error("Error in dropping temporary table: ", err)
+	// 	zap.L().Error("Error in dropping temporary table: ", err)
 	// 	return nil, err
 	// }
 	if hash == "" {
@@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)

 func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {

-	zap.S().Debugf("Executing getSubTreeSpans function")
+	zap.L().Debug("Executing getSubTreeSpans function")

 	// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`

@@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu

 	err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
 	if err != nil {
-		zap.S().Error("Error in dropping temporary table: ", err)
+		zap.L().Error("Error in dropping temporary table", zap.Error(err))
 		return query, hash, err
 	}

 	// Create temporary table to store the getSubTreeSpans() results
-	zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash)
+	zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
 	err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
 	if err != nil {
-		zap.S().Error("Error in creating temporary table: ", err)
+		zap.L().Error("Error in creating temporary table", zap.Error(err))
 		return query, hash, err
 	}

 	var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
 	getSpansSubQuery := subtreeInput
 	// Execute the subTree query
-	zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery)
+	zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
 	err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)

-	// zap.S().Info(getSpansSubQuery)
+	// zap.L().Info(getSpansSubQuery)

 	if err != nil {
-		zap.S().Debug("Error in processing sql query: ", err)
+		zap.L().Error("Error in processing sql query", zap.Error(err))
 		return query, hash, fmt.Errorf("Error in processing sql query")
 	}

@@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
 	if len(getSpansSubQueryDBResponses) == 0 {
 		return query, hash, fmt.Errorf("No spans found for the given query")
 	}
-	zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery)
+	zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
 	err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)

 	if err != nil {
-		zap.S().Debug("Error in processing sql query: ", err)
+		zap.L().Error("Error in processing sql query", zap.Error(err))
 		return query, hash, fmt.Errorf("Error in processing sql query")
 	}

 	// Process model to fetch the spans
-	zap.S().Debugf("Processing model to fetch the spans")
+	zap.L().Debug("Processing model to fetch the spans")
 	searchSpanResponses := []basemodel.SearchSpanResponseItem{}
 	for _, item := range searchScanResponses {
 		var jsonItem basemodel.SearchSpanResponseItem
@@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
 	}
 	// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
 	// Use map to store pointer to the spans to avoid duplicates and save memory
-	zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash)
+	zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))

 	treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
 	if err != nil {
-		zap.S().Error("Error in getSubTreeAlgorithm function: ", err)
+		zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
 		return query, hash, err
 	}
-	zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash)
+	zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
 	statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
 	if err != nil {
-		zap.S().Error("Error in preparing batch statement: ", err)
+		zap.L().Error("Error in preparing batch statement", zap.Error(err))
 		return query, hash, err
 	}
 	for _, span := range treeSearchResponse {
@@ -251,14 +250,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
 			span.Events,
 		)
 		if err != nil {
-			zap.S().Debug("Error in processing sql query: ", err)
+			zap.L().Error("Error in processing sql query", zap.Error(err))
 			return query, hash, err
 		}
 	}
-	zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash)
+	zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
 	err = statement.Send()
 	if err != nil {
-		zap.S().Error("Error in sending statement: ", err)
+		zap.L().Error("Error in sending statement", zap.Error(err))
 		return query, hash, err
 	}
 	return query, hash, nil
@@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
 		spans = append(spans, span)
 	}

-	zap.S().Debug("Building Tree")
+	zap.L().Debug("Building Tree")
 	roots, err := buildSpanTrees(&spans)
 	if err != nil {
 		return nil, err
@@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
 	// For each root, get the subtree spans
 	for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
 		targetSpan := &model.SpanForTraceDetails{}
-		// zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
+		// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
 		// Search target span object in the tree
 		for _, root := range roots {
 			targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
@@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
 				break
 			}
 			if err != nil {
-				zap.S().Error("Error during BreadthFirstSearch(): ", err)
+				zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
 				return nil, err
 			}
 		}
@@ -49,7 +49,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
 			break
 		}
 		if err != nil {
-			zap.S().Error("Error during BreadthFirstSearch(): ", err)
+			zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
 			return nil, err
 		}
 	}
@@ -186,7 +186,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra

 		// If the parent span is not found, add current span to list of roots
 		if parent == nil {
-			// zap.S().Debug("Parent Span not found parent_id: ", span.ParentID)
+			// zap.L().Debug("Parent Span not found parent_id: ", span.ParentID)
 			roots = append(roots, span)
 			span.ParentID = ""
 			continue
@@ -134,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	var reader interfaces.DataConnector
 	storage := os.Getenv("STORAGE")
 	if storage == "clickhouse" {
-		zap.S().Info("Using ClickHouse as datastore ...")
+		zap.L().Info("Using ClickHouse as datastore ...")
 		qb := db.NewDataConnector(
 			localDB,
 			serverOptions.PromConfigPath,
@@ -419,30 +419,33 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface

 	signozMetricsUsed := false
 	signozLogsUsed := false
 	dataSources := []string{}
+	signozTracesUsed := false
 	if postData != nil {

 		if postData.CompositeQuery != nil {
 			data["queryType"] = postData.CompositeQuery.QueryType
 			data["panelType"] = postData.CompositeQuery.PanelType

-			signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData)
+			signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
 		}
 	}

-	if signozMetricsUsed || signozLogsUsed {
+	if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
 		if signozMetricsUsed {
 			dataSources = append(dataSources, "metrics")
 			telemetry.GetInstance().AddActiveMetricsUser()
 		}
 		if signozLogsUsed {
 			dataSources = append(dataSources, "logs")
 			telemetry.GetInstance().AddActiveLogsUser()
 		}
 		data["dataSources"] = dataSources
+		if signozTracesUsed {
+			telemetry.GetInstance().AddActiveTracesUser()
+		}
 		data["metricsUsed"] = signozMetricsUsed
 		data["logsUsed"] = signozLogsUsed
+		data["tracesUsed"] = signozTracesUsed
 		userEmail, err := baseauth.GetEmailFromJwt(r.Context())
 		if err == nil {
-			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true)
+			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail)
 		}
 	}
 	return data, true
@@ -522,7 +525,7 @@ func (s *Server) initListeners() error {
 		return err
 	}

-	zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
+	zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))

 	// listen on private port to support internal services
 	privateHostPort := s.serverOptions.PrivateHostPort
@@ -535,7 +538,7 @@ func (s *Server) initListeners() error {
 	if err != nil {
 		return err
 	}
-	zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
+	zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))

 	return nil
 }
@@ -547,7 +550,7 @@ func (s *Server) Start() error {
 	if !s.serverOptions.DisableRules {
 		s.ruleManager.Start()
 	} else {
-		zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE")
+		zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
 	}

 	err := s.initListeners()
@@ -561,23 +564,23 @@ func (s *Server) Start() error {
 	}

 	go func() {
-		zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
+		zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))

 		switch err := s.httpServer.Serve(s.httpConn); err {
 		case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
 			// normal exit, nothing to do
 		default:
-			zap.S().Error("Could not start HTTP server", zap.Error(err))
+			zap.L().Error("Could not start HTTP server", zap.Error(err))
 		}
 		s.unavailableChannel <- healthcheck.Unavailable
 	}()

 	go func() {
-		zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
+		zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))

 		err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
 		if err != nil {
-			zap.S().Error("Could not start pprof server", zap.Error(err))
+			zap.L().Error("Could not start pprof server", zap.Error(err))
 		}
 	}()

@@ -587,14 +590,14 @@ func (s *Server) Start() error {
 	}

 	go func() {
-		zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
+		zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))

 		switch err := s.privateHTTP.Serve(s.privateConn); err {
 		case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
 			// normal exit, nothing to do
-			zap.S().Info("private http server closed")
+			zap.L().Info("private http server closed")
 		default:
-			zap.S().Error("Could not start private HTTP server", zap.Error(err))
+			zap.L().Error("Could not start private HTTP server", zap.Error(err))
 		}

 		s.unavailableChannel <- healthcheck.Unavailable
@@ -602,10 +605,10 @@ func (s *Server) Start() error {
 	}()

 	go func() {
-		zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
+		zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
 		err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
 		if err != nil {
-			zap.S().Info("opamp ws server failed to start", err)
+			zap.L().Error("opamp ws server failed to start", zap.Error(err))
 			s.unavailableChannel <- healthcheck.Unavailable
 		}
 	}()
@@ -681,7 +684,7 @@ func makeRulesManager(
 		return nil, fmt.Errorf("rule manager error: %v", err)
 	}

-	zap.S().Info("rules manager is ready")
+	zap.L().Info("rules manager is ready")

 	return manager, nil
 }
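The goroutines in Server.Start() above share one shutdown pattern: a clean close of the server or listener is treated as a non-event, and only an unexpected Serve error is logged before the health check is flagged. Condensed into a standalone Go sketch (the function name, package name and channel type are stand-ins chosen for illustration, not the exact fields of Server):

    package server

    import (
    	"net"
    	"net/http"

    	"github.com/soheilhy/cmux"
    	"go.uber.org/zap"
    )

    // serveAndReport mirrors the switch used in Server.Start(): a closed server
    // or listener is a normal exit; anything else is logged, and the health
    // check is notified either way.
    func serveAndReport(srv *http.Server, ln net.Listener, unavailable chan<- struct{}) {
    	switch err := srv.Serve(ln); err {
    	case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
    		// normal exit, nothing to do
    	default:
    		zap.L().Error("Could not start HTTP server", zap.Error(err))
    	}
    	unavailable <- struct{}{}
    }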
@@ -17,25 +17,25 @@ import (
 func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) {
 	patToken := r.Header.Get("SIGNOZ-API-KEY")
 	if len(patToken) > 0 {
-		zap.S().Debugf("Received a non-zero length PAT token")
+		zap.L().Debug("Received a non-zero length PAT token")
 		ctx := context.Background()
 		dao := apiHandler.AppDao()

 		pat, err := dao.GetPAT(ctx, patToken)
 		if err == nil && pat != nil {
-			zap.S().Debugf("Found valid PAT: %+v", pat)
+			zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat))
 			if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
-				zap.S().Debugf("PAT has expired: %+v", pat)
+				zap.L().Info("PAT has expired: ", zap.Any("pat", pat))
 				return nil, fmt.Errorf("PAT has expired")
 			}
 			group, apiErr := dao.GetGroupByName(ctx, pat.Role)
 			if apiErr != nil {
-				zap.S().Debugf("Error while getting group for PAT: %+v", apiErr)
+				zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr))
 				return nil, apiErr
 			}
 			user, err := dao.GetUser(ctx, pat.UserID)
 			if err != nil {
-				zap.S().Debugf("Error while getting user for PAT: %+v", err)
+				zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
 				return nil, err
 			}
 			telemetry.GetInstance().SetPatTokenUser()
@@ -48,7 +48,7 @@ func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel
 			}, nil
 		}
 		if err != nil {
-			zap.S().Debugf("Error while getting user for PAT: %+v", err)
+			zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
 			return nil, err
 		}
 	}
@@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
 	domain, apierr := m.GetDomainByEmail(ctx, email)

 	if apierr != nil {
-		zap.S().Errorf("failed to get domain from email", apierr)
+		zap.L().Error("failed to get domain from email", zap.Error(apierr))
 		return nil, model.InternalErrorStr("failed to get domain from email")
 	}

 	hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
 	if err != nil {
-		zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
+		zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
 		return nil, model.InternalErrorStr("failed to generate password hash")
 	}

 	group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
 	if apiErr != nil {
-		zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
+		zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
 		return nil, apiErr
 	}

@@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (

 	user, apiErr = m.CreateUser(ctx, user, false)
 	if apiErr != nil {
-		zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
+		zap.L().Error("CreateUser failed", zap.Error(apiErr))
 		return nil, apiErr
 	}

@@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st

 	userPayload, apierr := m.GetUserByEmail(ctx, email)
 	if !apierr.IsNil() {
-		zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error())
+		zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
 		return "", model.BadRequestStr("invalid user email received from the auth provider")
 	}

@@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
 		newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
 		user = newUser
 		if apiErr != nil {
-			zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error())
+			zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
 			return "", apiErr
 		}
 	} else {
@@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st

 	tokenStore, err := baseauth.GenerateJWTForUser(user)
 	if err != nil {
-		zap.S().Errorf("failed to generate token for SSO login user", err)
+		zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
 		return "", model.InternalErrorStr("failed to generate token for the user")
 	}

@@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
 			// do nothing, just skip sso
 			ssoAvailable = false
 		default:
-			zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
-			return resp, model.BadRequest(err)
+			zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
+			return resp, model.BadRequestStr(err.Error())
 		}
 	}

@@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
 		if len(emailComponents) > 0 {
 			emailDomain = emailComponents[1]
 		}
-		zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError())
+		zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
 		return resp, apierr
 	}

@@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
 	escapedUrl, _ := url.QueryUnescape(sourceUrl)
 	siteUrl, err := url.Parse(escapedUrl)
 	if err != nil {
-		zap.S().Errorf("failed to parse referer", err)
+		zap.L().Error("failed to parse referer", zap.Error(err))
 		return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
 	}

@@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
 	resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)

 	if err != nil {
-		zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err)
+		zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
 		return resp, model.InternalError(err)
 	}

@@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
 	if domainIdStr != "" {
 		domainId, err := uuid.Parse(domainIdStr)
 		if err != nil {
-			zap.S().Errorf("failed to parse domainId from relay state", err)
+			zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
 			return nil, fmt.Errorf("failed to parse domainId from IdP response")
 		}

 		domain, err = m.GetDomain(ctx, domainId)
 		if (err != nil) || domain == nil {
-			zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error())
+			zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
 			return nil, fmt.Errorf("invalid credentials")
 		}
 	}
@@ -64,7 +64,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
 		domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
 		domain = domainFromDB
 		if (err != nil) || domain == nil {
-			zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error())
+			zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
 			return nil, fmt.Errorf("invalid credentials")
 		}
 	}
@@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
 	for _, s := range stored {
 		domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId}
 		if err := domain.LoadConfig(s.Data); err != nil {
-			zap.S().Errorf("ListDomains() failed", zap.Error(err))
+			zap.L().Error("ListDomains() failed", zap.Error(err))
 		}
 		domains = append(domains, domain)
 	}
@@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba

 	configJson, err := json.Marshal(domain)
 	if err != nil {
-		zap.S().Errorf("failed to unmarshal domain config", zap.Error(err))
+		zap.L().Error("failed to unmarshal domain config", zap.Error(err))
 		return model.InternalError(fmt.Errorf("domain creation failed"))
 	}

@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
 		time.Now().Unix())

 	if err != nil {
-		zap.S().Errorf("failed to insert domain in db", zap.Error(err))
+		zap.L().Error("failed to insert domain in db", zap.Error(err))
 		return model.InternalError(fmt.Errorf("domain creation failed"))
 	}

@@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
 func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {

 	if domain.Id == uuid.Nil {
-		zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
+		zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
 		return model.InternalError(fmt.Errorf("domain update failed"))
 	}

 	configJson, err := json.Marshal(domain)
 	if err != nil {
-		zap.S().Errorf("domain update failed", zap.Error(err))
+		zap.L().Error("domain update failed", zap.Error(err))
 		return model.InternalError(fmt.Errorf("domain update failed"))
 	}

@@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
 		domain.Id)

 	if err != nil {
-		zap.S().Errorf("domain update failed", zap.Error(err))
+		zap.L().Error("domain update failed", zap.Error(err))
 		return model.InternalError(fmt.Errorf("domain update failed"))
 	}

@@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
 func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {

 	if id == uuid.Nil {
-		zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
+		zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
 		return model.InternalError(fmt.Errorf("domain delete failed"))
 	}

@@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
 		id)

 	if err != nil {
-		zap.S().Errorf("domain delete failed", zap.Error(err))
+		zap.L().Error("domain delete failed", zap.Error(err))
 		return model.InternalError(fmt.Errorf("domain delete failed"))
 	}

@@ -26,12 +26,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem
 		p.Revoked,
 	)
 	if err != nil {
-		zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
+		zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
 		return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
 	}
 	id, err := result.LastInsertId()
 	if err != nil {
-		zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
+		zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
 		return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
 	}
 	p.Id = strconv.Itoa(int(id))
@@ -62,7 +62,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemo
 		p.UpdatedByUserID,
 		id)
 	if err != nil {
-		zap.S().Errorf("Failed to update PAT in db, err: %v", zap.Error(err))
+		zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
 		return model.InternalError(fmt.Errorf("PAT update failed"))
 	}
 	return nil
@@ -74,7 +74,7 @@ func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed
 		lastUsed,
 		token)
 	if err != nil {
-		zap.S().Errorf("Failed to update PAT last used in db, err: %v", zap.Error(err))
+		zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
 		return model.InternalError(fmt.Errorf("PAT last used update failed"))
 	}
 	return nil
@@ -84,7 +84,7 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApi
 	pats := []model.PAT{}

 	if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
-		zap.S().Errorf("Failed to fetch PATs err: %v", zap.Error(err))
+		zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
 		return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
 	}
 	for i := range pats {
@@ -129,7 +129,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) base
 		"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
 		userID, updatedAt, id)
 	if err != nil {
-		zap.S().Errorf("Failed to revoke PAT in db, err: %v", zap.Error(err))
+		zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
 		return model.InternalError(fmt.Errorf("PAT revoke failed"))
 	}
 	return nil
@@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
 	httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))

 	if err != nil {
-		zap.S().Errorf("failed to connect to license.signoz.io", err)
+		zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
 		return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
 	}

 	httpBody, err := io.ReadAll(httpResponse.Body)
 	if err != nil {
-		zap.S().Errorf("failed to read activation response from license.signoz.io", err)
+		zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
 		return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
 	}

@@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
 	result := ActivationResult{}
 	err = json.Unmarshal(httpBody, &result)
 	if err != nil {
-		zap.S().Errorf("failed to marshal activation response from license.signoz.io", err)
+		zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
 		return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
 	}

@@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
 		l.ValidationMessage)

 	if err != nil {
-		zap.S().Errorf("error in inserting license data: ", zap.Error(err))
+		zap.L().Error("error in inserting license data: ", zap.Error(err))
 		return fmt.Errorf("failed to insert license in db: %v", err)
 	}

@@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context,
 	_, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key)

 	if err != nil {
-		zap.S().Errorf("error in updating license: ", zap.Error(err))
+		zap.L().Error("error in updating license: ", zap.Error(err))
 		return fmt.Errorf("failed to update license in db: %v", err)
 	}

@@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) {

 	err := lm.InitFeatures(lm.activeFeatures)
 	if err != nil {
-		zap.S().Panicf("Couldn't activate features: %v", err)
+		zap.L().Panic("Couldn't activate features", zap.Error(err))
 	}
 	if !lm.validatorRunning {
 		// we want to make sure only one validator runs,
@@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error {
 	if active != nil {
 		lm.SetActive(active)
 	} else {
-		zap.S().Info("No active license found, defaulting to basic plan")
+		zap.L().Info("No active license found, defaulting to basic plan")
 		// if no active license is found, we default to basic(free) plan with all default features
 		lm.activeFeatures = model.BasicPlan
 		setDefaultFeatures(lm)
 		err := lm.InitFeatures(lm.activeFeatures)
 		if err != nil {
-			zap.S().Error("Couldn't initialize features: ", err)
+			zap.L().Error("Couldn't initialize features", zap.Error(err))
 			return err
 		}
 	}
@@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) {

 // Validate validates the current active license
 func (lm *Manager) Validate(ctx context.Context) (reterr error) {
-	zap.S().Info("License validation started")
+	zap.L().Info("License validation started")
 	if lm.activeLicense == nil {
 		return nil
 	}
@@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {

 		lm.lastValidated = time.Now().Unix()
 		if reterr != nil {
-			zap.S().Errorf("License validation completed with error", reterr)
+			zap.L().Error("License validation completed with error", zap.Error(reterr))
 			atomic.AddUint64(&lm.failedAttempts, 1)
 			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
 				map[string]interface{}{"err": reterr.Error()}, "")
 		} else {
-			zap.S().Info("License validation completed with no errors")
+			zap.L().Info("License validation completed with no errors")
 		}

 		lm.mutex.Unlock()
@@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {

 	response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
 	if apiError != nil {
-		zap.S().Errorf("failed to validate license", apiError)
+		zap.L().Error("failed to validate license", zap.Error(apiError.Err))
 		return apiError.Err
 	}

@@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
 	}

 	if err := l.ParsePlan(); err != nil {
-		zap.S().Errorf("failed to parse updated license", zap.Error(err))
+		zap.L().Error("failed to parse updated license", zap.Error(err))
 		return err
 	}

@@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
 	if err != nil {
 		// unexpected db write issue but we can let the user continue
 		// and wait for update to work in next cycle.
-		zap.S().Errorf("failed to validate license", zap.Error(err))
+		zap.L().Error("failed to validate license", zap.Error(err))
 	}
 }

@@ -270,7 +270,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m

 	response, apiError := validate.ActivateLicense(key, "")
 	if apiError != nil {
-		zap.S().Errorf("failed to activate license", zap.Error(apiError.Err))
+		zap.L().Error("failed to activate license", zap.Error(apiError.Err))
 		return nil, apiError
 	}

@@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
 	err := l.ParsePlan()

 	if err != nil {
-		zap.S().Errorf("failed to activate license", zap.Error(err))
+		zap.L().Error("failed to activate license", zap.Error(err))
 		return nil, model.InternalError(err)
 	}

 	// store the license before activating it
 	err = lm.repo.InsertLicense(ctx, l)
 	if err != nil {
-		zap.S().Errorf("failed to activate license", zap.Error(err))
+		zap.L().Error("failed to activate license", zap.Error(err))
 		return nil, model.InternalError(err)
 	}

@@ -14,10 +14,10 @@ import (
 	semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
 	"go.signoz.io/signoz/ee/query-service/app"
 	"go.signoz.io/signoz/pkg/query-service/auth"
-	"go.signoz.io/signoz/pkg/query-service/constants"
+	baseconst "go.signoz.io/signoz/pkg/query-service/constants"
 	"go.signoz.io/signoz/pkg/query-service/version"
 	"google.golang.org/grpc"
+	"google.golang.org/grpc/credentials/insecure"

 	zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
 	zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
@@ -27,18 +27,19 @@ import (
 )

 func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
-	config := zap.NewDevelopmentConfig()
+	config := zap.NewProductionConfig()
 	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
 	defer stop()

-	config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder
-	otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
-	consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig)
-	defaultLogLevel := zapcore.DebugLevel
-	config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
+	config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
+	config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
 	config.EncoderConfig.TimeKey = "timestamp"
 	config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder

+	otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
+	consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
+	defaultLogLevel := zapcore.InfoLevel

 	res := resource.NewWithAttributes(
 		semconv.SchemaURL,
 		semconv.ServiceNameKey.String("query-service"),
@@ -48,14 +49,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
 		zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
 	)

-	if enableQueryServiceLogOTLPExport == true {
-		conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30))
+	if enableQueryServiceLogOTLPExport {
+		ctx, _ := context.WithTimeout(ctx, time.Second*30)
+		conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
 		if err != nil {
-			log.Println("failed to connect to otlp collector to export query service logs with error:", err)
+			log.Fatalf("failed to establish connection: %v", err)
 		} else {
 			logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
 			if err != nil {
-				logExportBatchSizeInt = 1000
+				logExportBatchSizeInt = 512
 			}
 			ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
 				BatchSize: logExportBatchSizeInt,
@@ -113,7 +115,6 @@ func main() {
 	zap.ReplaceGlobals(loggerMgr)
 	defer loggerMgr.Sync() // flushes buffer, if any

-	logger := loggerMgr.Sugar()
 	version.PrintVersion()

 	serverOptions := &app.ServerOptions{
@@ -137,22 +138,22 @@ func main() {
 	auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET")

 	if len(auth.JwtSecret) == 0 {
-		zap.S().Warn("No JWT secret key is specified.")
+		zap.L().Warn("No JWT secret key is specified.")
 	} else {
-		zap.S().Info("No JWT secret key set successfully.")
+		zap.L().Info("JWT secret key set successfully.")
 	}

 	server, err := app.NewServer(serverOptions)
 	if err != nil {
-		logger.Fatal("Failed to create server", zap.Error(err))
+		zap.L().Fatal("Failed to create server", zap.Error(err))
 	}

 	if err := server.Start(); err != nil {
-		logger.Fatal("Could not start servers", zap.Error(err))
+		zap.L().Fatal("Could not start server", zap.Error(err))
 	}

 	if err := auth.InitAuthCache(context.Background()); err != nil {
-		logger.Fatal("Failed to initialize auth cache", zap.Error(err))
+		zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
 	}

 	signalsChannel := make(chan os.Signal, 1)
@@ -161,9 +162,9 @@ func main() {
 	for {
 		select {
 		case status := <-server.HealthCheckStatus():
-			logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
+			zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status)))
 		case <-signalsChannel:
-			logger.Fatal("Received OS Interrupt Signal ... ")
+			zap.L().Fatal("Received OS Interrupt Signal ... ")
 			server.Stop()
 		}
 	}
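With initZapLog switched to zap.NewProductionConfig, a JSON console encoder and an Info default level, Debug output from the rest of the codebase is suppressed once the logger is installed globally. A short illustrative fragment (assumed to run inside this file's main() after the change above; the address value is made up):

    loggerMgr := initZapLog(false) // false: skip the OTLP log-export branch
    zap.ReplaceGlobals(loggerMgr)  // zap.L() everywhere now uses this logger
    defer loggerMgr.Sync()

    zap.L().Debug("dropped: below the InfoLevel default")
    zap.L().Info("Query server started listening", zap.String("addr", "0.0.0.0:8080"))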
@ -9,8 +9,8 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
saml2 "github.com/russellhaering/gosaml2"
"go.signoz.io/signoz/ee/query-service/sso/saml"
"go.signoz.io/signoz/ee/query-service/sso"
"go.signoz.io/signoz/ee/query-service/sso/saml"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@ -24,16 +24,16 @@ const (

// OrgDomain identify org owned web domains for auth and other purposes
type OrgDomain struct {
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`

SamlConfig *SamlConfig `json:"samlConfig"`
SamlConfig *SamlConfig `json:"samlConfig"`
GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`

Org *basemodel.Organization
Org *basemodel.Organization
}

func (od *OrgDomain) String() string {
@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string {
return ""
}

// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
@ -137,38 +138,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro
}

func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {

fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)

// build redirect url from window.location sent by frontend
redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)

// prepare state that gets relayed back when the auth provider
// calls back our url. here we pass the app url (where signoz runs)
// and the domain Id. The domain Id helps in identifying sso config
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// why do we need to pass app url? the callback typically is handled by backend
// and sometimes backend might right at a different port or is unaware of frontend
// endpoint (unless SITE_URL param is set). hence, we receive this build sso request
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)

switch (od.SsoType) {
switch od.SsoType {
case SAML:

sp, err := od.PrepareSamlRequest(siteUrl)
if err != nil {
return "", err
}

return sp.BuildAuthURL(relayState)

case GoogleAuth:

googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl)
if err != nil {
return "", err
@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
return googleProvider.BuildAuthURL(relayState)

default:
zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
}

}
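The comment block in `BuildSsoUrl` above explains that the frontend's `window.location` and the domain id are folded into the relay state so the SSO callback can find the right config and redirect the user back. A minimal sketch of composing and splitting such a relay state, using hypothetical values and an illustrative parse helper that is not part of the diff:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// buildRelayState mirrors the pattern above: app URL plus a domainId
// query parameter, with '-' in the UUID swapped for ':'.
func buildRelayState(siteUrl *url.URL, domainId string) string {
	redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)
	fmtDomainId := strings.Replace(domainId, "-", ":", -1)
	return fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)
}

// parseRelayState is an illustrative inverse for the callback side.
func parseRelayState(state string) (redirect string, domainId string, err error) {
	u, err := url.Parse(state)
	if err != nil {
		return "", "", err
	}
	domainId = strings.Replace(u.Query().Get("domainId"), ":", "-", -1)
	u.RawQuery = ""
	return u.String(), domainId, nil
}

func main() {
	// Hypothetical values; the real code takes these from the frontend
	// request and the stored org domain.
	site, _ := url.Parse("https://signoz.example.com/signup")
	state := buildRelayState(site, "3f2a6c1e-0000-0000-0000-000000000000")
	fmt.Println(state)
	fmt.Println(parseRelayState(state))
}
```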
@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*
IDPCertificateStore: certStore,
SPKeyStore: randomKeyStore,
}
zap.S().Debugf("SAML request:", sp)
zap.L().Debug("SAML request", zap.Any("sp", sp))
return sp, nil
}
@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() {
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(ctx)
if err != nil {
zap.S().Errorf("failed to get active license: %v", zap.Error(err))
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.S().Info("no license present, skipping usage reporting")
zap.L().Info("no license present, skipping usage reporting")
return
}

@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err))
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
}
for _, u := range dbusages {
@ -133,16 +133,16 @@ func (lm *Manager) UploadUsage() {
}

if len(usages) <= 0 {
zap.S().Info("no snapshots to upload, skipping.")
zap.L().Info("no snapshots to upload, skipping.")
return
}

zap.S().Info("uploading usage data")
zap.L().Info("uploading usage data")

orgName := ""
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.S().Errorf("failed to get org data: %v", zap.Error(orgError))
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
@ -152,14 +152,14 @@ func (lm *Manager) UploadUsage() {
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err))
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}

usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err))
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}

@ -184,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err))
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
@ -201,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
func (lm *Manager) Stop() {
lm.scheduler.Stop()

zap.S().Debug("sending usage data before shutting down")
zap.L().Info("sending usage data before shutting down")
// send usage before shutting down
lm.UploadUsage()
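`UploadUsageWithExponentalBackOff` above retries `SendUsage` up to `MaxRetries` times and sleeps `RetryInterval * i` between attempts, which is a linear rather than strictly exponential schedule. A minimal sketch of that retry loop, with a hypothetical `send` function and made-up retry constants standing in for the license-server call:

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

const (
	maxRetries    = 3                      // assumption: real value lives in the usage manager
	retryInterval = 200 * time.Millisecond // assumption: kept small for the example
)

// send stands in for licenseserver.SendUsage; it fails for the first two attempts.
func send(attempt int) error {
	if attempt < 3 {
		return errors.New("upstream unavailable")
	}
	return nil
}

func uploadWithBackoff() {
	for i := 1; i <= maxRetries; i++ {
		err := send(i)
		if err != nil && i == maxRetries {
			// Give up after the last attempt, as the code above does.
			fmt.Println("retries stopped:", err)
			return
		} else if err != nil {
			// Sleep grows linearly with the attempt number; a true exponential
			// schedule would use retryInterval * 2^(i-1) instead.
			sleep := retryInterval * time.Duration(i)
			fmt.Printf("retrying after %v: %v\n", sleep, err)
			time.Sleep(sleep)
		} else {
			break
		}
	}
}

func main() { uploadWithBackoff() }
```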
1 frontend/public/Icons/cable-car.svg Normal file
@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_2022_1972)" stroke="#fff" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M6.667 2h.006M9.333 1.333h.007M1.333 6l13.334-3.333M8 8V4.333M11.333 8H4.667a2 2 0 00-2 2v2.667a2 2 0 002 2h6.666a2 2 0 002-2V10a2 2 0 00-2-2zM6 8v3.333M10 8v3.333M2.667 11.334h10.666"/></g><defs><clipPath id="prefix__clip0_2022_1972"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>

1 frontend/public/Icons/configure.svg Normal file
@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round"><path d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z" stroke-linejoin="round"/><path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"/></g></svg>

1 frontend/public/Icons/group.svg Normal file
@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M2 4.667V3.333C2 2.6 2.6 2 3.333 2h1.334M11.333 2h1.334C13.4 2 14 2.6 14 3.333v1.334M14 11.334v1.333C14 13.4 13.4 14 12.667 14h-1.334M4.667 14H3.333C2.6 14 2 13.4 2 12.667v-1.333M8.667 4.667H5.333a.667.667 0 00-.666.666v2c0 .368.298.667.666.667h3.334a.667.667 0 00.666-.667v-2a.667.667 0 00-.666-.667zM10.667 8H7.333a.667.667 0 00-.666.667v2c0 .368.298.666.666.666h3.334a.667.667 0 00.666-.666v-2A.667.667 0 0010.667 8z"/></g></svg>
@ -37,11 +37,16 @@
|
||||
"text_condition1": "Send a notification when",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_1min": "1 min",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_3hours": "3 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
@ -112,6 +117,7 @@
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
|
||||
"field_unit": "Threshold unit",
|
||||
"text_alert_on_absent": "Send a notification if data is missing for",
|
||||
"text_alert_frequency": "Run alert every",
|
||||
"text_for": "minutes",
|
||||
"selected_query_placeholder": "Select query"
|
||||
}
|
||||
|
@ -14,6 +14,5 @@
|
||||
"delete_domain_message": "Are you sure you want to delete this domain?",
|
||||
"delete_domain": "Delete Domain",
|
||||
"add_domain": "Add Domains",
|
||||
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
|
||||
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
|
||||
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
|
||||
}
|
||||
|
@ -37,11 +37,16 @@
|
||||
"text_condition1": "Send a notification when",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_1min": "1 min",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_3hours": "3 hours",
|
||||
"option_4hours": "4 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
@ -112,6 +117,7 @@
|
||||
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
|
||||
"field_unit": "Threshold unit",
|
||||
"text_alert_on_absent": "Send a notification if data is missing for",
|
||||
"text_alert_frequency": "Run alert every",
|
||||
"text_for": "minutes",
|
||||
"selected_query_placeholder": "Select query"
|
||||
}
|
||||
|
14
frontend/public/locales/en/billings.json
Normal file
14
frontend/public/locales/en/billings.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"days_remaining": "days remaining in your billing period.",
|
||||
"billing": "Billing",
|
||||
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
|
||||
"enterprise_cloud": "Enterprise Cloud",
|
||||
"enterprise": "Enterprise",
|
||||
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
|
||||
"upgrade_plan": "Upgrade Plan",
|
||||
"manage_billing": "Manage Billing",
|
||||
"upgrade_now_text": "Upgrade now to have uninterrupted access",
|
||||
"billing_start_info": "Your billing will start only after the trial period",
|
||||
"checkout_plans": "Check out features in paid plans",
|
||||
"here": "here"
|
||||
}
|
@ -14,6 +14,5 @@
|
||||
"delete_domain_message": "Are you sure you want to delete this domain?",
|
||||
"delete_domain": "Delete Domain",
|
||||
"add_domain": "Add Domains",
|
||||
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
|
||||
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
|
||||
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
|
||||
}
|
||||
|
@ -13,6 +13,7 @@ export interface UsageResponsePayloadProps {
|
||||
billTotal: number;
|
||||
};
|
||||
discount: number;
|
||||
subscriptionStatus?: string;
|
||||
}
|
||||
|
||||
const getUsage = async (
|
||||
|
23
frontend/src/assets/Integrations/ConfigureIcon.tsx
Normal file
23
frontend/src/assets/Integrations/ConfigureIcon.tsx
Normal file
@ -0,0 +1,23 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
|
||||
function ConfigureIcon(): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
return (
|
||||
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g
|
||||
stroke={isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500}
|
||||
strokeWidth="1.333"
|
||||
strokeLinecap="round"
|
||||
>
|
||||
<path
|
||||
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
|
||||
strokeLinejoin="round"
|
||||
/>
|
||||
<path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z" />
|
||||
</g>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
export default ConfigureIcon;
|
@ -1,10 +1,13 @@
|
||||
.query-builder-search-wrapper {
|
||||
margin-top: 10px;
|
||||
height: 46px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
border-bottom: none;
|
||||
margin-top: 10px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
border-bottom: none;
|
||||
|
||||
.ant-select-selector {
|
||||
border: none !important;
|
||||
}
|
||||
}
|
||||
.ant-select-selector {
|
||||
border: none !important;
|
||||
|
||||
input {
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -37,12 +37,17 @@ const convert = new Convert();
|
||||
interface LogFieldProps {
|
||||
fieldKey: string;
|
||||
fieldValue: string;
|
||||
linesPerRow?: number;
|
||||
}
|
||||
|
||||
type LogSelectedFieldProps = LogFieldProps &
|
||||
type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
|
||||
Pick<AddToQueryHOCProps, 'onAddToQuery'>;
|
||||
|
||||
function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
|
||||
function LogGeneralField({
|
||||
fieldKey,
|
||||
fieldValue,
|
||||
linesPerRow = 1,
|
||||
}: LogFieldProps): JSX.Element {
|
||||
const html = useMemo(
|
||||
() => ({
|
||||
__html: convert.toHtml(dompurify.sanitize(fieldValue)),
|
||||
@ -55,7 +60,11 @@ function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
|
||||
<Text ellipsis type="secondary" className="log-field-key">
|
||||
{`${fieldKey} : `}
|
||||
</Text>
|
||||
<LogText dangerouslySetInnerHTML={html} className="log-value" />
|
||||
<LogText
|
||||
dangerouslySetInnerHTML={html}
|
||||
className="log-value"
|
||||
linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
|
||||
/>
|
||||
</TextContainer>
|
||||
);
|
||||
}
|
||||
@ -92,6 +101,7 @@ type ListLogViewProps = {
|
||||
onSetActiveLog: (log: ILog) => void;
|
||||
onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
|
||||
activeLog?: ILog | null;
|
||||
linesPerRow: number;
|
||||
};
|
||||
|
||||
function ListLogView({
|
||||
@ -100,6 +110,7 @@ function ListLogView({
|
||||
onSetActiveLog,
|
||||
onAddToQuery,
|
||||
activeLog,
|
||||
linesPerRow,
|
||||
}: ListLogViewProps): JSX.Element {
|
||||
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
|
||||
|
||||
@ -179,7 +190,11 @@ function ListLogView({
|
||||
/>
|
||||
<div>
|
||||
<LogContainer>
|
||||
<LogGeneralField fieldKey="Log" fieldValue={flattenLogData.body} />
|
||||
<LogGeneralField
|
||||
fieldKey="Log"
|
||||
fieldValue={flattenLogData.body}
|
||||
linesPerRow={linesPerRow}
|
||||
/>
|
||||
{flattenLogData.stream && (
|
||||
<LogGeneralField fieldKey="Stream" fieldValue={flattenLogData.stream} />
|
||||
)}
|
||||
@ -222,4 +237,8 @@ ListLogView.defaultProps = {
|
||||
activeLog: null,
|
||||
};
|
||||
|
||||
LogGeneralField.defaultProps = {
|
||||
linesPerRow: 1,
|
||||
};
|
||||
|
||||
export default ListLogView;
|
||||
|
@ -2,6 +2,10 @@ import { Color } from '@signozhq/design-tokens';
|
||||
import { Card, Typography } from 'antd';
|
||||
import styled from 'styled-components';
|
||||
|
||||
interface LogTextProps {
|
||||
linesPerRow?: number;
|
||||
}
|
||||
|
||||
export const Container = styled(Card)<{
|
||||
$isActiveLog: boolean;
|
||||
$isDarkMode: boolean;
|
||||
@ -23,7 +27,7 @@ export const Container = styled(Card)<{
|
||||
|
||||
export const Text = styled(Typography.Text)`
|
||||
&&& {
|
||||
min-width: 1.5rem;
|
||||
min-width: 2.5rem;
|
||||
white-space: nowrap;
|
||||
}
|
||||
`;
|
||||
@ -41,11 +45,19 @@ export const LogContainer = styled.div`
|
||||
gap: 6px;
|
||||
`;
|
||||
|
||||
export const LogText = styled.div`
|
||||
export const LogText = styled.div<LogTextProps>`
|
||||
display: inline-block;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
${({ linesPerRow }): string =>
|
||||
linesPerRow
|
||||
? `-webkit-line-clamp: ${linesPerRow};
|
||||
line-clamp: ${linesPerRow};
|
||||
display: -webkit-box;
|
||||
-webkit-box-orient: vertical;
|
||||
white-space: normal; `
|
||||
: 'white-space: nowrap;'};
|
||||
};
|
||||
`;
|
||||
|
||||
export const SelectedLog = styled.div`
|
||||
|
@ -27,7 +27,7 @@
|
||||
line-height: 18px;
|
||||
letter-spacing: 0.08em;
|
||||
text-align: left;
|
||||
color: var(--bg-slate-200, #52575c);
|
||||
color: #52575c;
|
||||
}
|
||||
|
||||
.menu-items {
|
||||
@ -65,7 +65,7 @@
|
||||
padding: 12px;
|
||||
|
||||
.title {
|
||||
color: var(--bg-slate-200, #52575c);
|
||||
color: #52575c;
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
@ -149,7 +149,7 @@
|
||||
}
|
||||
|
||||
.title {
|
||||
color: var(--bg-slate-200, #52575c);
|
||||
color: #52575c;
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
|
@ -120,38 +120,36 @@ export default function LogsFormatOptionsMenu({
|
||||
|
||||
{selectedItem && (
|
||||
<>
|
||||
{selectedItem === 'raw' && (
|
||||
<>
|
||||
<div className="horizontal-line" />
|
||||
<div className="max-lines-per-row">
|
||||
<div className="title"> max lines per row </div>
|
||||
<div className="raw-format max-lines-per-row-input">
|
||||
<button
|
||||
type="button"
|
||||
className="periscope-btn"
|
||||
onClick={decrementMaxLinesPerRow}
|
||||
>
|
||||
{' '}
|
||||
<Minus size={12} />{' '}
|
||||
</button>
|
||||
<InputNumber
|
||||
min={1}
|
||||
max={10}
|
||||
value={maxLinesPerRow}
|
||||
onChange={handleLinesPerRowChange}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className="periscope-btn"
|
||||
onClick={incrementMaxLinesPerRow}
|
||||
>
|
||||
{' '}
|
||||
<Plus size={12} />{' '}
|
||||
</button>
|
||||
</div>
|
||||
<>
|
||||
<div className="horizontal-line" />
|
||||
<div className="max-lines-per-row">
|
||||
<div className="title"> max lines per row </div>
|
||||
<div className="raw-format max-lines-per-row-input">
|
||||
<button
|
||||
type="button"
|
||||
className="periscope-btn"
|
||||
onClick={decrementMaxLinesPerRow}
|
||||
>
|
||||
{' '}
|
||||
<Minus size={12} />{' '}
|
||||
</button>
|
||||
<InputNumber
|
||||
min={1}
|
||||
max={10}
|
||||
value={maxLinesPerRow}
|
||||
onChange={handleLinesPerRowChange}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className="periscope-btn"
|
||||
onClick={incrementMaxLinesPerRow}
|
||||
>
|
||||
{' '}
|
||||
<Plus size={12} />{' '}
|
||||
</button>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
|
||||
<div className="selected-item-content-container active">
|
||||
{!addNewColumn && <div className="horizontal-line" />}
|
||||
|
@ -27,5 +27,6 @@ export enum QueryParams {
|
||||
viewName = 'viewName',
|
||||
viewKey = 'viewKey',
|
||||
expandedWidgetId = 'expandedWidgetId',
|
||||
integration = 'integration',
|
||||
pagination = 'pagination',
|
||||
}
|
||||
|
@ -56,14 +56,14 @@ describe('BillingContainer', () => {
|
||||
expect(cost).toBeInTheDocument();
|
||||
|
||||
const manageBilling = screen.getByRole('button', {
|
||||
name: /manage billing/i,
|
||||
name: 'manage_billing',
|
||||
});
|
||||
expect(manageBilling).toBeInTheDocument();
|
||||
|
||||
const dollar = screen.getByText(/\$0/i);
|
||||
expect(dollar).toBeInTheDocument();
|
||||
|
||||
const currentBill = screen.getByText('Billing');
|
||||
const currentBill = screen.getByText('billing');
|
||||
expect(currentBill).toBeInTheDocument();
|
||||
});
|
||||
|
||||
@ -75,7 +75,7 @@ describe('BillingContainer', () => {
|
||||
const freeTrailText = await screen.findByText('Free Trial');
|
||||
expect(freeTrailText).toBeInTheDocument();
|
||||
|
||||
const currentBill = screen.getByText('Billing');
|
||||
const currentBill = screen.getByText('billing');
|
||||
expect(currentBill).toBeInTheDocument();
|
||||
|
||||
const dollar0 = await screen.findByText(/\$0/i);
|
||||
@ -85,18 +85,14 @@ describe('BillingContainer', () => {
|
||||
);
|
||||
expect(onTrail).toBeInTheDocument();
|
||||
|
||||
const numberOfDayRemaining = await screen.findByText(
|
||||
/1 days remaining in your billing period./i,
|
||||
);
|
||||
const numberOfDayRemaining = await screen.findByText(/1 days_remaining/i);
|
||||
expect(numberOfDayRemaining).toBeInTheDocument();
|
||||
const upgradeButton = await screen.findAllByRole('button', {
|
||||
name: /upgrade/i,
|
||||
name: /upgrade_plan/i,
|
||||
});
|
||||
expect(upgradeButton[1]).toBeInTheDocument();
|
||||
expect(upgradeButton.length).toBe(2);
|
||||
const checkPaidPlan = await screen.findByText(
|
||||
/Check out features in paid plans/i,
|
||||
);
|
||||
const checkPaidPlan = await screen.findByText(/checkout_plans/i);
|
||||
expect(checkPaidPlan).toBeInTheDocument();
|
||||
|
||||
const link = screen.getByRole('link', { name: /here/i });
|
||||
@ -114,7 +110,7 @@ describe('BillingContainer', () => {
|
||||
render(<BillingContainer />);
|
||||
});
|
||||
|
||||
const currentBill = screen.getByText('Billing');
|
||||
const currentBill = screen.getByText('billing');
|
||||
expect(currentBill).toBeInTheDocument();
|
||||
|
||||
const dollar0 = await screen.findByText(/\$0/i);
|
||||
@ -126,17 +122,17 @@ describe('BillingContainer', () => {
|
||||
expect(onTrail).toBeInTheDocument();
|
||||
|
||||
const receivedCardDetails = await screen.findByText(
|
||||
/We have received your card details, your billing will only start after the end of your free trial period./i,
|
||||
/card_details_recieved_and_billing_info/i,
|
||||
);
|
||||
expect(receivedCardDetails).toBeInTheDocument();
|
||||
|
||||
const manageBillingButton = await screen.findByRole('button', {
|
||||
name: /manage billing/i,
|
||||
name: /manage_billing/i,
|
||||
});
|
||||
expect(manageBillingButton).toBeInTheDocument();
|
||||
|
||||
const dayRemainingInBillingPeriod = await screen.findByText(
|
||||
/1 days remaining in your billing period./i,
|
||||
/1 days_remaining/i,
|
||||
);
|
||||
expect(dayRemainingInBillingPeriod).toBeInTheDocument();
|
||||
});
|
||||
@ -156,7 +152,7 @@ describe('BillingContainer', () => {
|
||||
const billingPeriod = await findByText(billingPeriodText);
|
||||
expect(billingPeriod).toBeInTheDocument();
|
||||
|
||||
const currentBill = screen.getByText('Billing');
|
||||
const currentBill = screen.getByText('billing');
|
||||
expect(currentBill).toBeInTheDocument();
|
||||
|
||||
const dollar0 = await screen.findByText(/\$1,278.3/i);
|
||||
@ -181,7 +177,7 @@ describe('BillingContainer', () => {
|
||||
);
|
||||
render(<BillingContainer />);
|
||||
const dayRemainingInBillingPeriod = await screen.findByText(
|
||||
/11 days remaining in your billing period./i,
|
||||
/11 days_remaining/i,
|
||||
);
|
||||
expect(dayRemainingInBillingPeriod).toBeInTheDocument();
|
||||
});
|
||||
|
@ -17,7 +17,7 @@ import {
|
||||
} from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import updateCreditCardApi from 'api/billing/checkout';
|
||||
import getUsage from 'api/billing/getUsage';
|
||||
import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage';
|
||||
import manageCreditCardApi from 'api/billing/manage';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
@ -26,8 +26,9 @@ import useAnalytics from 'hooks/analytics/useAnalytics';
|
||||
import useAxiosError from 'hooks/useAxiosError';
|
||||
import useLicense from 'hooks/useLicense';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { pick } from 'lodash-es';
|
||||
import { isEmpty, pick } from 'lodash-es';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@ -49,6 +50,11 @@ interface DataType {
|
||||
cost: string;
|
||||
}
|
||||
|
||||
enum SubscriptionStatus {
|
||||
PastDue = 'past_due',
|
||||
Active = 'active',
|
||||
}
|
||||
|
||||
const renderSkeletonInput = (): JSX.Element => (
|
||||
<Skeleton.Input
|
||||
style={{ marginTop: '10px', height: '40px', width: '100%' }}
|
||||
@ -116,15 +122,19 @@ const dummyColumns: ColumnsType<DataType> = [
|
||||
},
|
||||
];
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export default function BillingContainer(): JSX.Element {
|
||||
const daysRemainingStr = 'days remaining in your billing period.';
|
||||
const { t } = useTranslation(['billings']);
|
||||
const daysRemainingStr = t('days_remaining');
|
||||
const [headerText, setHeaderText] = useState('');
|
||||
const [billAmount, setBillAmount] = useState(0);
|
||||
const [activeLicense, setActiveLicense] = useState<License | null>(null);
|
||||
const [daysRemaining, setDaysRemaining] = useState(0);
|
||||
const [isFreeTrial, setIsFreeTrial] = useState(false);
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
const [apiResponse, setApiResponse] = useState<any>({});
|
||||
const [apiResponse, setApiResponse] = useState<
|
||||
Partial<UsageResponsePayloadProps>
|
||||
>({});
|
||||
|
||||
const { trackEvent } = useAnalytics();
|
||||
|
||||
@ -139,6 +149,9 @@ export default function BillingContainer(): JSX.Element {
|
||||
|
||||
const processUsageData = useCallback(
|
||||
(data: any): void => {
|
||||
if (isEmpty(data?.payload)) {
|
||||
return;
|
||||
}
|
||||
const {
|
||||
details: { breakdown = [], billTotal },
|
||||
billingPeriodStart,
|
||||
@ -186,6 +199,9 @@ export default function BillingContainer(): JSX.Element {
|
||||
[licensesData?.payload?.onTrial],
|
||||
);
|
||||
|
||||
const isSubscriptionPastDue =
|
||||
apiResponse.subscriptionStatus === SubscriptionStatus.PastDue;
|
||||
|
||||
const { isLoading, isFetching: isFetchingBillingData } = useQuery(
|
||||
[REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId],
|
||||
{
|
||||
@ -342,14 +358,27 @@ export default function BillingContainer(): JSX.Element {
|
||||
[apiResponse, billAmount, isLoading, isFetchingBillingData],
|
||||
);
|
||||
|
||||
const { Text } = Typography;
|
||||
const subscriptionPastDueMessage = (): JSX.Element => (
|
||||
<Typography>
|
||||
{`We were not able to process payments for your account. Please update your card details `}
|
||||
<Text type="danger" onClick={handleBilling} style={{ cursor: 'pointer' }}>
|
||||
{t('here')}
|
||||
</Text>
|
||||
{` if your payment information has changed. Email us at `}
|
||||
<Text type="secondary">cloud-support@signoz.io</Text>
|
||||
{` otherwise. Be sure to provide this information immediately to avoid interruption to your service.`}
|
||||
</Typography>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="billing-container">
|
||||
<Flex vertical style={{ marginBottom: 16 }}>
|
||||
<Typography.Text style={{ fontWeight: 500, fontSize: 18 }}>
|
||||
Billing
|
||||
{t('billing')}
|
||||
</Typography.Text>
|
||||
<Typography.Text color={Color.BG_VANILLA_400}>
|
||||
Manage your billing information, invoices, and monitor costs.
|
||||
{t('manage_billing_and_costs')}
|
||||
</Typography.Text>
|
||||
</Flex>
|
||||
|
||||
@ -361,7 +390,7 @@ export default function BillingContainer(): JSX.Element {
|
||||
<Flex justify="space-between" align="center">
|
||||
<Flex vertical>
|
||||
<Typography.Title level={5} style={{ marginTop: 2, fontWeight: 500 }}>
|
||||
{isCloudUserVal ? 'Enterprise Cloud' : 'Enterprise'}{' '}
|
||||
{isCloudUserVal ? t('enterprise_cloud') : t('enterprise')}{' '}
|
||||
{isFreeTrial ? <Tag color="success"> Free Trial </Tag> : ''}
|
||||
</Typography.Title>
|
||||
{!isLoading && !isFetchingBillingData ? (
|
||||
@ -378,8 +407,8 @@ export default function BillingContainer(): JSX.Element {
|
||||
onClick={handleBilling}
|
||||
>
|
||||
{isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription
|
||||
? 'Upgrade Plan'
|
||||
: 'Manage Billing'}
|
||||
? t('upgrade_plan')
|
||||
: t('manage_billing')}
|
||||
</Button>
|
||||
</Flex>
|
||||
|
||||
@ -389,21 +418,34 @@ export default function BillingContainer(): JSX.Element {
|
||||
ellipsis
|
||||
style={{ fontWeight: '300', color: '#49aa19', fontSize: 12 }}
|
||||
>
|
||||
We have received your card details, your billing will only start after
|
||||
the end of your free trial period.
|
||||
{t('card_details_recieved_and_billing_info')}
|
||||
</Typography.Text>
|
||||
)}
|
||||
|
||||
{!isLoading && !isFetchingBillingData ? (
|
||||
<Alert
|
||||
message={headerText}
|
||||
type="info"
|
||||
showIcon
|
||||
style={{ marginTop: 12 }}
|
||||
/>
|
||||
headerText && (
|
||||
<Alert
|
||||
message={headerText}
|
||||
type="info"
|
||||
showIcon
|
||||
style={{ marginTop: 12 }}
|
||||
/>
|
||||
)
|
||||
) : (
|
||||
<Skeleton.Input active style={{ height: 20, marginTop: 20 }} />
|
||||
)}
|
||||
|
||||
{isSubscriptionPastDue &&
|
||||
(!isLoading && !isFetchingBillingData ? (
|
||||
<Alert
|
||||
message={subscriptionPastDueMessage()}
|
||||
type="error"
|
||||
showIcon
|
||||
style={{ marginTop: 12 }}
|
||||
/>
|
||||
) : (
|
||||
<Skeleton.Input active style={{ height: 20, marginTop: 20 }} />
|
||||
))}
|
||||
</Card>
|
||||
|
||||
<BillingUsageGraphCallback />
|
||||
@ -434,16 +476,16 @@ export default function BillingContainer(): JSX.Element {
|
||||
<Col span={20} className="plan-benefits">
|
||||
<Typography.Text className="plan-benefit">
|
||||
<CheckCircleOutlined />
|
||||
Upgrade now to have uninterrupted access
|
||||
{t('upgrade_now_text')}
|
||||
</Typography.Text>
|
||||
<Typography.Text className="plan-benefit">
|
||||
<CheckCircleOutlined />
|
||||
Your billing will start only after the trial period
|
||||
{t('Your billing will start only after the trial period')}
|
||||
</Typography.Text>
|
||||
<Typography.Text className="plan-benefit">
|
||||
<CheckCircleOutlined />
|
||||
<span>
|
||||
Check out features in paid plans
|
||||
{t('checkout_plans')}
|
||||
<a
|
||||
href="https://signoz.io/pricing/"
|
||||
style={{
|
||||
@ -452,7 +494,7 @@ export default function BillingContainer(): JSX.Element {
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
here
|
||||
{t('here')}
|
||||
</a>
|
||||
</span>
|
||||
</Typography.Text>
|
||||
@ -464,7 +506,7 @@ export default function BillingContainer(): JSX.Element {
|
||||
loading={isLoadingBilling || isLoadingManageBilling}
|
||||
onClick={handleBilling}
|
||||
>
|
||||
Upgrade Plan
|
||||
{t('upgrade_plan')}
|
||||
</Button>
|
||||
</Col>
|
||||
</Row>
|
||||
|
@ -3,9 +3,7 @@ import '../../../lib/uPlotLib/uPlotLib.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Card, Flex, Typography } from 'antd';
|
||||
import { getComponentForPanelType } from 'constants/panelTypes';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { PropsTypePropsMap } from 'container/GridPanelSwitch/types';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import tooltipPlugin from 'lib/uPlotLib/plugins/tooltipPlugin';
|
||||
@ -14,7 +12,7 @@ import getRenderer from 'lib/uPlotLib/utils/getRenderer';
|
||||
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { getXAxisScale } from 'lib/uPlotLib/utils/getXAxisScale';
|
||||
import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale';
|
||||
import { FC, useMemo, useRef } from 'react';
|
||||
import { useMemo, useRef } from 'react';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
import {
|
||||
@ -43,6 +41,21 @@ const paths = (
|
||||
return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip);
|
||||
};
|
||||
|
||||
const calculateStartEndTime = (
|
||||
data: any,
|
||||
): { startTime: number; endTime: number } => {
|
||||
const timestamps: number[] = [];
|
||||
data?.details?.breakdown?.forEach((breakdown: any) => {
|
||||
breakdown?.dayWiseBreakdown?.breakdown.forEach((entry: any) => {
|
||||
timestamps.push(entry?.timestamp);
|
||||
});
|
||||
});
|
||||
const billingTime = [data?.billingPeriodStart, data?.billingPeriodEnd];
|
||||
const startTime: number = Math.min(...timestamps, ...billingTime);
|
||||
const endTime: number = Math.max(...timestamps, ...billingTime);
|
||||
return { startTime, endTime };
|
||||
};
|
||||
|
||||
export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
|
||||
const { data, billAmount } = props;
|
||||
const graphCompatibleData = useMemo(
|
||||
@ -54,11 +67,9 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const containerDimensions = useResizeObserver(graphRef);
|
||||
|
||||
const { billingPeriodStart: startTime, billingPeriodEnd: endTime } = data;
|
||||
|
||||
const Component = getComponentForPanelType(PANEL_TYPES.BAR) as FC<
|
||||
PropsTypePropsMap[PANEL_TYPES]
|
||||
>;
|
||||
const { startTime, endTime } = useMemo(() => calculateStartEndTime(data), [
|
||||
data,
|
||||
]);
|
||||
|
||||
const getGraphSeries = (color: string, label: string): any => ({
|
||||
drawStyle: 'bars',
|
||||
@ -183,7 +194,7 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
|
||||
</Flex>
|
||||
</Flex>
|
||||
<div ref={graphRef} style={{ height: '100%', paddingBottom: 48 }}>
|
||||
<Component data={chartData} options={optionsForChart} />
|
||||
<Uplot data={chartData} options={optionsForChart} />
|
||||
</div>
|
||||
</Card>
|
||||
);
|
||||
|
84
frontend/src/container/DownloadV2/DownloadV2.styles.scss
Normal file
84
frontend/src/container/DownloadV2/DownloadV2.styles.scss
Normal file
@ -0,0 +1,84 @@
|
||||
.download-logs-popover {
|
||||
.ant-popover-inner {
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: linear-gradient(
|
||||
139deg,
|
||||
rgba(18, 19, 23, 0.8) 0%,
|
||||
rgba(18, 19, 23, 0.9) 98.68%
|
||||
);
|
||||
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
||||
backdrop-filter: blur(20px);
|
||||
padding: 12px 18px 12px 14px;
|
||||
|
||||
.download-logs-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
align-items: flex-start;
|
||||
|
||||
.action-btns {
|
||||
padding: 4px 0px !important;
|
||||
width: 159px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
color: var(--bg-vanilla-400);
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: normal;
|
||||
letter-spacing: 0.14px;
|
||||
gap: 6px;
|
||||
|
||||
.ant-btn-icon {
|
||||
margin-inline-end: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
.action-btns:hover {
|
||||
&.ant-btn-text {
|
||||
background-color: rgba(171, 189, 255, 0.04) !important;
|
||||
}
|
||||
}
|
||||
|
||||
.export-heading {
|
||||
color: #52575c;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 18px; /* 163.636% */
|
||||
letter-spacing: 0.88px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.download-logs-popover {
|
||||
.ant-popover-inner {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: linear-gradient(
|
||||
139deg,
|
||||
rgba(255, 255, 255, 0.8) 0%,
|
||||
rgba(255, 255, 255, 0.9) 98.68%
|
||||
);
|
||||
|
||||
box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2);
|
||||
|
||||
.download-logs-content {
|
||||
.action-btns {
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
.action-btns:hover {
|
||||
&.ant-btn-text {
|
||||
background-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
}
|
||||
.export-heading {
|
||||
color: var(--bg-ink-200);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
84
frontend/src/container/DownloadV2/DownloadV2.tsx
Normal file
84
frontend/src/container/DownloadV2/DownloadV2.tsx
Normal file
@ -0,0 +1,84 @@
|
||||
import './DownloadV2.styles.scss';
|
||||
|
||||
import { Button, Popover, Typography } from 'antd';
|
||||
import { Excel } from 'antd-table-saveas-excel';
|
||||
import { FileDigit, FileDown, Sheet } from 'lucide-react';
|
||||
import { unparse } from 'papaparse';
|
||||
|
||||
import { DownloadProps } from './DownloadV2.types';
|
||||
|
||||
function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
|
||||
const downloadExcelFile = (): void => {
|
||||
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
|
||||
const updatedTitle = item
|
||||
.split('_')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
return {
|
||||
title: updatedTitle,
|
||||
dataIndex: item,
|
||||
};
|
||||
});
|
||||
const excel = new Excel();
|
||||
excel
|
||||
.addSheet(fileName)
|
||||
.addColumns(headers)
|
||||
.addDataSource(data, {
|
||||
str2Percent: true,
|
||||
})
|
||||
.saveAs(`${fileName}.xlsx`);
|
||||
};
|
||||
|
||||
const downloadCsvFile = (): void => {
|
||||
const csv = unparse(data);
|
||||
const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
|
||||
const csvUrl = URL.createObjectURL(csvBlob);
|
||||
const downloadLink = document.createElement('a');
|
||||
downloadLink.href = csvUrl;
|
||||
downloadLink.download = `${fileName}.csv`;
|
||||
downloadLink.click();
|
||||
downloadLink.remove();
|
||||
};
|
||||
|
||||
return (
|
||||
<Popover
|
||||
trigger={['click']}
|
||||
placement="bottomRight"
|
||||
rootClassName="download-logs-popover"
|
||||
arrow={false}
|
||||
content={
|
||||
<div className="download-logs-content">
|
||||
<Typography.Text className="export-heading">Export As</Typography.Text>
|
||||
<Button
|
||||
icon={<Sheet size={14} />}
|
||||
type="text"
|
||||
onClick={downloadExcelFile}
|
||||
className="action-btns"
|
||||
>
|
||||
Excel (.xlsx)
|
||||
</Button>
|
||||
<Button
|
||||
icon={<FileDigit size={14} />}
|
||||
type="text"
|
||||
onClick={downloadCsvFile}
|
||||
className="action-btns"
|
||||
>
|
||||
CSV
|
||||
</Button>
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
loading={isLoading}
|
||||
icon={<FileDown size={14} />}
|
||||
/>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
|
||||
Download.defaultProps = {
|
||||
isLoading: undefined,
|
||||
};
|
||||
|
||||
export default Download;
|
10
frontend/src/container/DownloadV2/DownloadV2.types.ts
Normal file
10
frontend/src/container/DownloadV2/DownloadV2.types.ts
Normal file
@ -0,0 +1,10 @@
|
||||
export type DownloadOptions = {
|
||||
isDownloadEnabled: boolean;
|
||||
fileName: string;
|
||||
};
|
||||
|
||||
export type DownloadProps = {
|
||||
data: Record<string, string>[];
|
||||
isLoading?: boolean;
|
||||
fileName: string;
|
||||
};
|
@ -1,11 +1,7 @@
|
||||
import { DndContext, DragEndEvent } from '@dnd-kit/core';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
import ExplorerOptions, { ExplorerOptionsProps } from './ExplorerOptions';
|
||||
import {
|
||||
getExplorerToolBarVisibility,
|
||||
setExplorerToolBarVisibility,
|
||||
} from './utils';
|
||||
import { getExplorerToolBarVisibility } from './utils';
|
||||
|
||||
type ExplorerOptionsWrapperProps = Omit<
|
||||
ExplorerOptionsProps,
|
||||
@ -27,29 +23,16 @@ function ExplorerOptionWrapper({
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent): void => {
|
||||
const { active, over } = event;
|
||||
if (
|
||||
over !== null &&
|
||||
active.id === 'explorer-options-draggable' &&
|
||||
over.id === 'explorer-options-droppable'
|
||||
) {
|
||||
setIsExplorerOptionHidden(true);
|
||||
setExplorerToolBarVisibility(false, sourcepage);
|
||||
}
|
||||
};
|
||||
return (
|
||||
<DndContext onDragEnd={handleDragEnd}>
|
||||
<ExplorerOptions
|
||||
disabled={disabled}
|
||||
query={query}
|
||||
isLoading={isLoading}
|
||||
onExport={onExport}
|
||||
sourcepage={sourcepage}
|
||||
isExplorerOptionHidden={isExplorerOptionHidden}
|
||||
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
|
||||
/>
|
||||
</DndContext>
|
||||
<ExplorerOptions
|
||||
disabled={disabled}
|
||||
query={query}
|
||||
isLoading={isLoading}
|
||||
onExport={onExport}
|
||||
sourcepage={sourcepage}
|
||||
isExplorerOptionHidden={isExplorerOptionHidden}
|
||||
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
.explorer-update {
|
||||
position: fixed;
|
||||
bottom: 24px;
|
||||
left: calc(50% - 225px);
|
||||
left: calc(50% - 250px);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
@ -37,21 +37,24 @@
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.explorer-options {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
padding: 10px 12px;
|
||||
border-radius: 50px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: rgba(22, 24, 29, 0.6);
|
||||
box-shadow: 4px 4px 16px 4px rgba(0, 0, 0, 0.25);
|
||||
backdrop-filter: blur(20px);
|
||||
position: fixed;
|
||||
bottom: 24px;
|
||||
left: calc(50% + 240px);
|
||||
padding: 10px 12px;
|
||||
transform: translate(calc(-50% - 120px), 0);
|
||||
transition: left 0.2s linear;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
border-radius: 50px;
|
||||
background: rgba(22, 24, 29, 0.6);
|
||||
box-shadow: 4px 4px 16px 4px rgba(0, 0, 0, 0.25);
|
||||
backdrop-filter: blur(20px);
|
||||
|
||||
cursor: default;
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
z-index: 1;
|
||||
.ant-select-selector {
|
||||
padding: 0 !important;
|
||||
}
|
||||
@ -236,9 +239,9 @@
|
||||
|
||||
.lightMode {
|
||||
.explorer-options {
|
||||
background: transparent;
|
||||
box-shadow: none;
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: rgba(255, 255, 255, 0.8);
|
||||
box-shadow: 4px 4px 16px 4px rgba(255, 255, 255, 0.55);
|
||||
backdrop-filter: blur(20px);
|
||||
|
||||
hr {
|
||||
|
@ -1,7 +1,6 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
import './ExplorerOptions.styles.scss';
|
||||
|
||||
import { useDraggable } from '@dnd-kit/core';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import {
|
||||
Button,
|
||||
@ -32,7 +31,15 @@ import useErrorNotification from 'hooks/useErrorNotification';
|
||||
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery';
|
||||
import { Check, ConciergeBell, Disc3, Plus, X, XCircle } from 'lucide-react';
|
||||
import {
|
||||
Check,
|
||||
ConciergeBell,
|
||||
Disc3,
|
||||
PanelBottomClose,
|
||||
Plus,
|
||||
X,
|
||||
XCircle,
|
||||
} from 'lucide-react';
|
||||
import {
|
||||
CSSProperties,
|
||||
Dispatch,
|
||||
@ -51,12 +58,13 @@ import { DataSource } from 'types/common/queryBuilder';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
|
||||
import ExplorerOptionsDroppableArea from './ExplorerOptionsDroppableArea';
|
||||
import ExplorerOptionsHideArea from './ExplorerOptionsHideArea';
|
||||
import {
|
||||
DATASOURCE_VS_ROUTES,
|
||||
generateRGBAFromHex,
|
||||
getRandomColor,
|
||||
saveNewViewHandler,
|
||||
setExplorerToolBarVisibility,
|
||||
} from './utils';
|
||||
|
||||
const allowedRoles = [USER_ROLES.ADMIN, USER_ROLES.AUTHOR, USER_ROLES.EDITOR];
|
||||
@ -79,7 +87,6 @@ function ExplorerOptions({
|
||||
const history = useHistory();
|
||||
const ref = useRef<RefSelectProps>(null);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const [isDragEnabled, setIsDragEnabled] = useState(false);
|
||||
|
||||
const onModalToggle = useCallback((value: boolean) => {
|
||||
setIsExport(value);
|
||||
@ -271,31 +278,18 @@ function ExplorerOptions({
|
||||
[isDarkMode],
|
||||
);
|
||||
|
||||
const {
|
||||
attributes,
|
||||
listeners,
|
||||
setNodeRef,
|
||||
transform,
|
||||
isDragging,
|
||||
} = useDraggable({
|
||||
id: 'explorer-options-draggable',
|
||||
disabled: isDragEnabled,
|
||||
});
|
||||
const hideToolbar = (): void => {
|
||||
setExplorerToolBarVisibility(false, sourcepage);
|
||||
if (setIsExplorerOptionHidden) {
|
||||
setIsExplorerOptionHidden(true);
|
||||
}
|
||||
};
|
||||
|
||||
const isEditDeleteSupported = allowedRoles.includes(role as string);
|
||||
|
||||
const style: React.CSSProperties | undefined = transform
|
||||
? {
|
||||
transform: `translate3d(${transform.x - 338}px, ${transform.y}px, 0)`,
|
||||
width: `${400 - transform.y * 6}px`,
|
||||
maxWidth: '440px', // initial width of the explorer options
|
||||
overflow: 'hidden',
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return (
|
||||
<>
|
||||
{isQueryUpdated && !isExplorerOptionHidden && !isDragging && (
|
||||
{isQueryUpdated && !isExplorerOptionHidden && (
|
||||
<div
|
||||
className={cx(
|
||||
isEditDeleteSupported ? '' : 'hide-update',
|
||||
@ -330,12 +324,7 @@ function ExplorerOptions({
|
||||
background: extraData
|
||||
? `linear-gradient(90deg, rgba(0,0,0,0) -5%, ${rgbaColor} 9%, rgba(0,0,0,0) 30%)`
|
||||
: 'transparent',
|
||||
backdropFilter: 'blur(20px)',
|
||||
...style,
|
||||
}}
|
||||
ref={setNodeRef}
|
||||
{...listeners}
|
||||
{...attributes}
|
||||
>
|
||||
<div className="view-options">
|
||||
<Select<string, { key: string; value: string }>
|
||||
@ -352,9 +341,6 @@ function ExplorerOptions({
|
||||
allowClear={{
|
||||
clearIcon: <XCircle size={16} style={{ marginTop: '-3px' }} />,
|
||||
}}
|
||||
onDropdownVisibleChange={(open): void => {
|
||||
setIsDragEnabled(open);
|
||||
}}
|
||||
onClear={handleClearSelect}
|
||||
ref={ref}
|
||||
>
|
||||
@ -410,11 +396,17 @@ function ExplorerOptions({
|
||||
<Plus size={16} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip title="Hide">
|
||||
<Button disabled={disabled} shape="circle" onClick={hideToolbar}>
|
||||
<PanelBottomClose size={16} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ExplorerOptionsDroppableArea
|
||||
<ExplorerOptionsHideArea
|
||||
isExplorerOptionHidden={isExplorerOptionHidden}
|
||||
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
|
||||
sourcepage={sourcepage}
|
||||
|
@ -49,7 +49,7 @@
|
||||
.explorer-option-droppable-container {
|
||||
|
||||
.explorer-show-btn {
|
||||
background: var(--bg-vanilla-400);
|
||||
background: var(--bg-vanilla-200);
|
||||
}
|
||||
}
|
||||
}
|
@ -1,7 +1,6 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import './ExplorerOptionsDroppableArea.styles.scss';
|
||||
import './ExplorerOptionsHideArea.styles.scss';
|
||||
|
||||
import { useDroppable } from '@dnd-kit/core';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Tooltip } from 'antd';
|
||||
import { Disc3, X } from 'lucide-react';
|
||||
@ -19,7 +18,7 @@ interface DroppableAreaProps {
|
||||
onUpdateQueryHandler: () => void;
|
||||
}
|
||||
|
||||
function ExplorerOptionsDroppableArea({
|
||||
function ExplorerOptionsHideArea({
|
||||
isQueryUpdated,
|
||||
isExplorerOptionHidden,
|
||||
sourcepage,
|
||||
@ -27,10 +26,6 @@ function ExplorerOptionsDroppableArea({
|
||||
handleClearSelect,
|
||||
onUpdateQueryHandler,
|
||||
}: DroppableAreaProps): JSX.Element {
|
||||
const { setNodeRef } = useDroppable({
|
||||
id: 'explorer-options-droppable',
|
||||
});
|
||||
|
||||
const handleShowExplorerOption = (): void => {
|
||||
if (setIsExplorerOptionHidden) {
|
||||
setIsExplorerOptionHidden(false);
|
||||
@ -39,7 +34,7 @@ function ExplorerOptionsDroppableArea({
|
||||
};
|
||||
|
||||
return (
|
||||
<div ref={setNodeRef} className="explorer-option-droppable-container">
|
||||
<div className="explorer-option-droppable-container">
|
||||
{isExplorerOptionHidden && (
|
||||
<>
|
||||
{isQueryUpdated && (
|
||||
@ -75,9 +70,9 @@ function ExplorerOptionsDroppableArea({
|
||||
);
|
||||
}
|
||||
|
||||
ExplorerOptionsDroppableArea.defaultProps = {
|
||||
ExplorerOptionsHideArea.defaultProps = {
|
||||
isExplorerOptionHidden: undefined,
|
||||
setIsExplorerOptionHidden: undefined,
|
||||
};
|
||||
|
||||
export default ExplorerOptionsDroppableArea;
|
||||
export default ExplorerOptionsHideArea;
|
@ -1,5 +1,6 @@
|
||||
import {
|
||||
Checkbox,
|
||||
Collapse,
|
||||
Form,
|
||||
InputNumber,
|
||||
InputNumberProps,
|
||||
@ -19,12 +20,18 @@ import {
|
||||
AlertDef,
|
||||
defaultCompareOp,
|
||||
defaultEvalWindow,
|
||||
defaultFrequency,
|
||||
defaultMatchType,
|
||||
} from 'types/api/alerts/def';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { FormContainer, InlineSelect, StepHeading } from './styles';
|
||||
import {
|
||||
FormContainer,
|
||||
InlineSelect,
|
||||
StepHeading,
|
||||
VerticalLine,
|
||||
} from './styles';
|
||||
|
||||
function RuleOptions({
|
||||
alertDef,
|
||||
@ -200,6 +207,35 @@ function RuleOptions({
|
||||
});
|
||||
};
|
||||
|
||||
const onChangeFrequency = (value: string | unknown): void => {
|
||||
const freq = (value as string) || alertDef.frequency;
|
||||
setAlertDef({
|
||||
...alertDef,
|
||||
frequency: freq,
|
||||
});
|
||||
};
|
||||
|
||||
const renderFrequency = (): JSX.Element => (
|
||||
<InlineSelect
|
||||
getPopupContainer={popupContainer}
|
||||
defaultValue={defaultFrequency}
|
||||
style={{ minWidth: '120px' }}
|
||||
value={alertDef.frequency}
|
||||
onChange={onChangeFrequency}
|
||||
>
|
||||
<Select.Option value="1m0s">{t('option_1min')}</Select.Option>
|
||||
<Select.Option value="5m0s">{t('option_5min')}</Select.Option>
|
||||
<Select.Option value="10m0s">{t('option_10min')}</Select.Option>
|
||||
<Select.Option value="15m0s">{t('option_15min')}</Select.Option>
|
||||
<Select.Option value="30m0s">{t('option_30min')}</Select.Option>
|
||||
<Select.Option value="1h0m0s">{t('option_60min')}</Select.Option>
|
||||
<Select.Option value="3h0m0s">{t('option_3hours')}</Select.Option>
|
||||
<Select.Option value="6h0m0s">{t('option_6hours')}</Select.Option>
|
||||
<Select.Option value="12h0m0s">{t('option_12hours')}</Select.Option>
|
||||
<Select.Option value="24h0m0s">{t('option_24hours')}</Select.Option>
|
||||
</InlineSelect>
|
||||
);
|
||||
|
||||
const selectedCategory = getCategoryByOptionId(currentQuery?.unit || '');
|
||||
|
||||
const categorySelectOptions = getCategorySelectOptionByName(
|
||||
@ -238,42 +274,57 @@ function RuleOptions({
|
||||
/>
|
||||
</Form.Item>
|
||||
</Space>
|
||||
<Space direction="horizontal" align="center">
|
||||
<Form.Item noStyle name={['condition', 'alertOnAbsent']}>
|
||||
<Checkbox
|
||||
checked={alertDef?.condition?.alertOnAbsent}
|
||||
onChange={(e): void => {
|
||||
setAlertDef({
|
||||
...alertDef,
|
||||
condition: {
|
||||
...alertDef.condition,
|
||||
alertOnAbsent: e.target.checked,
|
||||
},
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Typography.Text>{t('text_alert_on_absent')}</Typography.Text>
|
||||
<Collapse>
|
||||
<Collapse.Panel header={t('More options')} key="1">
|
||||
<Space direction="vertical" size="large">
|
||||
<VerticalLine>
|
||||
<Space direction="horizontal" align="center">
|
||||
<Typography.Text>{t('text_alert_frequency')}</Typography.Text>
|
||||
{renderFrequency()}
|
||||
</Space>
|
||||
</VerticalLine>
|
||||
|
||||
<Form.Item noStyle name={['condition', 'absentFor']}>
|
||||
<InputNumber
|
||||
min={1}
|
||||
value={alertDef?.condition?.absentFor}
|
||||
onChange={(value): void => {
|
||||
setAlertDef({
|
||||
...alertDef,
|
||||
condition: {
|
||||
...alertDef.condition,
|
||||
absentFor: Number(value) || 0,
|
||||
},
|
||||
});
|
||||
}}
|
||||
type="number"
|
||||
onWheel={(e): void => e.currentTarget.blur()}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Typography.Text>{t('text_for')}</Typography.Text>
|
||||
</Space>
|
||||
<VerticalLine>
|
||||
<Space direction="horizontal" align="center">
|
||||
<Form.Item noStyle name={['condition', 'alertOnAbsent']}>
|
||||
<Checkbox
|
||||
checked={alertDef?.condition?.alertOnAbsent}
|
||||
onChange={(e): void => {
|
||||
setAlertDef({
|
||||
...alertDef,
|
||||
condition: {
|
||||
...alertDef.condition,
|
||||
alertOnAbsent: e.target.checked,
|
||||
},
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Typography.Text>{t('text_alert_on_absent')}</Typography.Text>
|
||||
|
||||
<Form.Item noStyle name={['condition', 'absentFor']}>
|
||||
<InputNumber
|
||||
min={1}
|
||||
value={alertDef?.condition?.absentFor}
|
||||
onChange={(value): void => {
|
||||
setAlertDef({
|
||||
...alertDef,
|
||||
condition: {
|
||||
...alertDef.condition,
|
||||
absentFor: Number(value) || 0,
|
||||
},
|
||||
});
|
||||
}}
|
||||
type="number"
|
||||
onWheel={(e): void => e.currentTarget.blur()}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Typography.Text>{t('text_for')}</Typography.Text>
|
||||
</Space>
|
||||
</VerticalLine>
|
||||
</Space>
|
||||
</Collapse.Panel>
|
||||
</Collapse>
|
||||
</Space>
|
||||
</FormContainer>
|
||||
</>
|
||||
|
@ -67,6 +67,13 @@ export const SeveritySelect = styled(Select)`
|
||||
width: 25% !important;
|
||||
`;
|
||||
|
||||
export const VerticalLine = styled.div`
|
||||
border-left: 2px solid #e8e8e8; /* Adjust color and thickness as desired */
|
||||
padding-left: 20px; /* Adjust spacing to content as needed */
|
||||
margin-left: 20px; /* Adjust margin as desired */
|
||||
height: 100%; /* Adjust based on your layout needs */
|
||||
`;
|
||||
|
||||
export const InputSmall = styled(Input)`
|
||||
width: 40% !important;
|
||||
`;
|
||||
|
@ -71,6 +71,7 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
|
||||
key={log.id}
|
||||
logData={log}
|
||||
selectedFields={selectedFields}
|
||||
linesPerRow={options.maxLines}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={onSetActiveLog}
|
||||
/>
|
||||
|
@ -8,4 +8,5 @@
|
||||
line-height: 18px;
|
||||
letter-spacing: -0.005em;
|
||||
text-align: left;
|
||||
min-height: 500px;
|
||||
}
|
||||
|
@ -90,6 +90,7 @@ function LogsExplorerList({
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={onSetActiveLog}
|
||||
activeLog={activeLog}
|
||||
linesPerRow={options.maxLines}
|
||||
/>
|
||||
);
|
||||
},
|
||||
|
@ -14,6 +14,7 @@ import {
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
|
||||
import Download from 'container/DownloadV2/DownloadV2';
|
||||
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
|
||||
import GoToTop from 'container/GoToTop';
|
||||
import LogsExplorerChart from 'container/LogsExplorerChart';
|
||||
@ -21,6 +22,7 @@ import LogsExplorerList from 'container/LogsExplorerList';
|
||||
import LogsExplorerTable from 'container/LogsExplorerTable';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
||||
import dayjs from 'dayjs';
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils';
|
||||
import { LogTimeRange } from 'hooks/logs/types';
|
||||
@ -33,8 +35,9 @@ import useClickOutside from 'hooks/useClickOutside';
|
||||
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
|
||||
import { defaultTo, isEmpty } from 'lodash-es';
|
||||
import { defaultTo, isEmpty, omit } from 'lodash-es';
|
||||
import { Sliders } from 'lucide-react';
|
||||
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
@ -523,6 +526,23 @@ function LogsExplorerViews({
|
||||
},
|
||||
});
|
||||
|
||||
const flattenLogData = useMemo(
|
||||
() =>
|
||||
logs.map((log) => {
|
||||
const timestamp =
|
||||
typeof log.timestamp === 'string'
|
||||
? dayjs(log.timestamp).format()
|
||||
: dayjs(log.timestamp / 1e6).format();
|
||||
|
||||
return FlatLogData({
|
||||
timestamp,
|
||||
body: log.body,
|
||||
...omit(log, 'timestamp', 'body'),
|
||||
});
|
||||
}),
|
||||
[logs],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="logs-explorer-views-container">
|
||||
{showHistogram && (
|
||||
@ -578,6 +598,11 @@ function LogsExplorerViews({
|
||||
<div className="logs-actions-container">
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<div className="tab-options">
|
||||
<Download
|
||||
data={flattenLogData}
|
||||
isLoading={isFetching}
|
||||
fileName="log_data"
|
||||
/>
|
||||
<div className="format-options-container" ref={menuRef}>
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
|
@ -74,6 +74,7 @@ function LogsTable(props: LogsTableProps): JSX.Element {
|
||||
key={log.id}
|
||||
logData={log}
|
||||
selectedFields={selected}
|
||||
linesPerRow={linesPerRow}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={onSetActiveLog}
|
||||
/>
|
||||
|
@ -23,7 +23,12 @@ function DashboardDescription(): JSX.Element {
|
||||
handleDashboardLockToggle,
|
||||
} = useDashboard();
|
||||
|
||||
const selectedData = selectedDashboard?.data || ({} as DashboardData);
|
||||
const selectedData = selectedDashboard
|
||||
? {
|
||||
...selectedDashboard.data,
|
||||
uuid: selectedDashboard.uuid,
|
||||
}
|
||||
: ({} as DashboardData);
|
||||
|
||||
const { title = '', tags, description } = selectedData || {};
|
||||
|
||||
|
@ -2,14 +2,13 @@ import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { WarningOutlined } from '@ant-design/icons';
|
||||
import { Input, Popover, Select, Tooltip, Typography } from 'antd';
|
||||
import { Input, Popover, Select, Typography } from 'antd';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { debounce } from 'lodash-es';
|
||||
import map from 'lodash-es/map';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { memo, useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
@ -52,7 +51,6 @@ function VariableItem({
|
||||
onValueUpdate,
|
||||
lastUpdatedVar,
|
||||
}: VariableItemProps): JSX.Element {
|
||||
const { isDashboardLocked } = useDashboard();
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
@ -222,84 +220,77 @@ function VariableItem({
|
||||
}, [variableData.type, variableData.customValue]);
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
placement="top"
|
||||
title={isDashboardLocked ? 'Dashboard is locked' : ''}
|
||||
>
|
||||
<VariableContainer className="variable-item">
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
</Typography.Text>
|
||||
<VariableValue>
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
<Input
|
||||
placeholder="Enter value"
|
||||
disabled={isDashboardLocked}
|
||||
<VariableContainer className="variable-item">
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
</Typography.Text>
|
||||
<VariableValue>
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
<Input
|
||||
placeholder="Enter value"
|
||||
bordered={false}
|
||||
key={variableData.selectedValue?.toString()}
|
||||
defaultValue={variableData.selectedValue?.toString()}
|
||||
onChange={(e): void => {
|
||||
debouncedHandleChange(e.target.value || '');
|
||||
}}
|
||||
style={{
|
||||
width:
|
||||
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
!errorMessage &&
|
||||
optionsData && (
|
||||
<Select
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
defaultValue={selectValue}
|
||||
onChange={handleChange}
|
||||
bordered={false}
|
||||
key={variableData.selectedValue?.toString()}
|
||||
defaultValue={variableData.selectedValue?.toString()}
|
||||
onChange={(e): void => {
|
||||
debouncedHandleChange(e.target.value || '');
|
||||
}}
|
||||
style={{
|
||||
width:
|
||||
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
!errorMessage &&
|
||||
optionsData && (
|
||||
<Select
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
defaultValue={selectValue}
|
||||
onChange={handleChange}
|
||||
bordered={false}
|
||||
placeholder="Select value"
|
||||
placement="bottomRight"
|
||||
mode={mode}
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
disabled={isDashboardLocked}
|
||||
getPopupContainer={popupContainer}
|
||||
>
|
||||
{enableSelectAll && (
|
||||
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
|
||||
ALL
|
||||
</Select.Option>
|
||||
)}
|
||||
{map(optionsData, (option) => (
|
||||
<Select.Option
|
||||
data-testid={`option-${option}`}
|
||||
key={option.toString()}
|
||||
value={option}
|
||||
>
|
||||
{option.toString()}
|
||||
</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
)
|
||||
)}
|
||||
{variableData.type !== 'TEXTBOX' && errorMessage && (
|
||||
<span style={{ margin: '0 0.5rem' }}>
|
||||
<Popover
|
||||
placement="top"
|
||||
content={<Typography>{errorMessage}</Typography>}
|
||||
>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
)}
|
||||
</VariableValue>
|
||||
</VariableContainer>
|
||||
</Tooltip>
|
||||
placeholder="Select value"
|
||||
placement="bottomRight"
|
||||
mode={mode}
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
getPopupContainer={popupContainer}
|
||||
>
|
||||
{enableSelectAll && (
|
||||
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
|
||||
ALL
|
||||
</Select.Option>
|
||||
)}
|
||||
{map(optionsData, (option) => (
|
||||
<Select.Option
|
||||
data-testid={`option-${option}`}
|
||||
key={option.toString()}
|
||||
value={option}
|
||||
>
|
||||
{option.toString()}
|
||||
</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
)
|
||||
)}
|
||||
{variableData.type !== 'TEXTBOX' && errorMessage && (
|
||||
<span style={{ margin: '0 0.5rem' }}>
|
||||
<Popover
|
||||
placement="top"
|
||||
content={<Typography>{errorMessage}</Typography>}
|
||||
>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
)}
|
||||
</VariableValue>
|
||||
</VariableContainer>
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -133,6 +133,7 @@ function WidgetGraph({
|
||||
softMax,
|
||||
softMin,
|
||||
panelType: selectedGraph,
|
||||
currentQuery,
|
||||
}),
|
||||
[
|
||||
widgetId,
|
||||
@ -148,6 +149,7 @@ function WidgetGraph({
|
||||
softMax,
|
||||
softMin,
|
||||
selectedGraph,
|
||||
currentQuery,
|
||||
],
|
||||
);
|
||||
|
||||
|
@ -0,0 +1,24 @@
|
||||
## Install otel-collector in your Kubernetes infra
|
||||
|
||||
|
||||
Add the SigNoz Helm Chart repository
|
||||
```bash
|
||||
helm repo add signoz https://charts.signoz.io
|
||||
```
|
||||
|
||||
|
||||
If the chart is already present, update the chart to the latest using:
|
||||
```bash
|
||||
helm repo update
|
||||
```
|
||||
|
||||
|
||||
Install the Kubernetes Infrastructure chart provided by SigNoz
|
||||
```bash
|
||||
helm install my-release signoz/k8s-infra \
|
||||
--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
|
||||
--set otelInsecure=false \
|
||||
--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
|
||||
--set global.clusterName=<CLUSTER_NAME>
|
||||
```
|
||||
- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster; a filled-in example follows.
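For instance, with a sample cluster name (the name `prod-us-east-1` below is only an illustration; keep the region and ingestion key placeholders exactly as provided):

```bash
helm install my-release signoz/k8s-infra \
--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
--set otelInsecure=false \
--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
--set global.clusterName=prod-us-east-1

# Optional sanity check: the release should be deployed and its pods running
# (assumes pod names include the release name, as is typical for Helm charts).
helm status my-release
kubectl get pods | grep my-release
```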
|
@ -0,0 +1,64 @@
|
||||
|
||||
|
||||
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
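Note: building the extension with PECL also needs the PHP development headers and the PECL/PEAR tooling. The package names below are an assumption for Debian/Ubuntu; they differ across distributions and PHP versions.

```bash
# Debian/Ubuntu example (package names are an assumption; adjust for your distro):
sudo apt-get install php-dev php-pear

# Confirm the build tooling is available:
phpize --version
pecl version
```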
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
||||
|
||||
|
@ -0,0 +1,16 @@
|
||||
### Set environment variables and run app
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME={MYAPP} \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- `<COLLECTOR_ENDPOINT>` - the endpoint at which the collector is running, e.g. `http://localhost:4317`
- `<your-run-command>` - the run command for your PHP application
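For example, with a collector running locally and PHP's built-in web server as the run command (the service name, port, and entry script below are placeholders chosen for illustration):

```bash
# 4318 is the collector's default OTLP/HTTP port (4317 is the gRPC port).
env OTEL_PHP_AUTOLOAD_ENABLED=true \
    OTEL_SERVICE_NAME=my-php-app \
    OTEL_TRACES_EXPORTER=otlp \
    OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
    OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
    OTEL_PROPAGATORS=baggage,tracecontext \
    php -S localhost:8080 index.php
```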
|
@ -0,0 +1,60 @@
|
||||
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,16 @@
|
||||
### Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME={{MYAPP}} \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
|
||||
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- `<your-run-command>` - the run command for your PHP application; an export-style alternative is shown below.
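If you prefer not to prefix the run command every time, the same variables can be exported once per shell session instead (a sketch; the placeholders are identical to the command above):

```bash
export OTEL_PHP_AUTOLOAD_ENABLED=true
export OTEL_SERVICE_NAME={{MYAPP}}
export OTEL_TRACES_EXPORTER=otlp
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
export OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443
export OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}}
export OTEL_PROPAGATORS=baggage,tracecontext

<your-run-command>
```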
|
@ -0,0 +1,96 @@
|
||||
## Setup OpenTelemetry Binary as an agent
|
||||
|
||||
|
||||
### Step 1: Download otel-collector tar.gz
|
||||
```bash
|
||||
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
|
||||
```bash
|
||||
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib
|
||||
```
|
||||
|
||||
|
||||
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
|
||||
```yaml
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
grpc:
|
||||
endpoint: 0.0.0.0:4317
|
||||
http:
|
||||
endpoint: 0.0.0.0:4318
|
||||
hostmetrics:
|
||||
collection_interval: 60s
|
||||
scrapers:
|
||||
cpu: {}
|
||||
disk: {}
|
||||
load: {}
|
||||
filesystem: {}
|
||||
memory: {}
|
||||
network: {}
|
||||
paging: {}
|
||||
process:
|
||||
mute_process_name_error: true
|
||||
mute_process_exe_error: true
|
||||
mute_process_io_error: true
|
||||
processes: {}
|
||||
prometheus:
|
||||
config:
|
||||
global:
|
||||
scrape_interval: 60s
|
||||
scrape_configs:
|
||||
- job_name: otel-collector-binary
|
||||
static_configs:
|
||||
- targets:
|
||||
# - localhost:8888
|
||||
processors:
|
||||
batch:
|
||||
send_batch_size: 1000
|
||||
timeout: 10s
|
||||
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
|
||||
resourcedetection:
|
||||
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
timeout: 2s
|
||||
system:
|
||||
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
|
||||
extensions:
|
||||
health_check: {}
|
||||
zpages: {}
|
||||
exporters:
|
||||
otlp:
|
||||
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
|
||||
tls:
|
||||
insecure: false
|
||||
headers:
|
||||
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
|
||||
logging:
|
||||
verbosity: normal
|
||||
service:
|
||||
telemetry:
|
||||
metrics:
|
||||
address: 0.0.0.0:8888
|
||||
extensions: [health_check, zpages]
|
||||
pipelines:
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
metrics/internal:
|
||||
receivers: [prometheus, hostmetrics]
|
||||
processors: [resourcedetection, batch]
|
||||
exporters: [otlp]
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
```
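The `env` resource detector in the config above reads the standard `OTEL_RESOURCE_ATTRIBUTES` variable, so you can attach custom labels to the collected host metrics when starting the collector. The attribute keys and values below are only examples:

```bash
# Comma-separated key=value pairs; pick labels that make sense for your hosts.
export OTEL_RESOURCE_ATTRIBUTES="deployment.environment=staging,team=payments"
./otelcol-contrib --config ./config.yaml
```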
|
||||
|
||||
|
@ -0,0 +1,62 @@
|
||||
|
||||
|
||||
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,41 @@
|
||||
|
||||
|
||||
Once you are done instrumenting your PHP application, you can run it using the commands below
|
||||
|
||||
|
||||
|
||||
### Step 1: Run OTel Collector
|
||||
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
|
||||
|
||||
```bash
|
||||
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
|
||||
```
|
||||
|
||||
|
||||
#### (Optional Step): View last 50 lines of `otelcol` logs
|
||||
```bash
|
||||
tail -f -n 50 otelcol-output.log
|
||||
```
|
||||
|
||||
#### (Optional Step): Stop `otelcol`
|
||||
```bash
|
||||
kill "$(< otel-pid)"
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME=<SERVICE_NAME> \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,60 @@
|
||||
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,16 @@
|
||||
### Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME={{MYAPP}} \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
|
||||
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,96 @@
|
||||
## Setup OpenTelemetry Binary as an agent
|
||||
|
||||
|
||||
### Step 1: Download otel-collector tar.gz
|
||||
```bash
|
||||
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
|
||||
```bash
|
||||
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib
|
||||
```
|
||||
|
||||
|
||||
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
|
||||
```yaml
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
grpc:
|
||||
endpoint: 0.0.0.0:4317
|
||||
http:
|
||||
endpoint: 0.0.0.0:4318
|
||||
hostmetrics:
|
||||
collection_interval: 60s
|
||||
scrapers:
|
||||
cpu: {}
|
||||
disk: {}
|
||||
load: {}
|
||||
filesystem: {}
|
||||
memory: {}
|
||||
network: {}
|
||||
paging: {}
|
||||
process:
|
||||
mute_process_name_error: true
|
||||
mute_process_exe_error: true
|
||||
mute_process_io_error: true
|
||||
processes: {}
|
||||
prometheus:
|
||||
config:
|
||||
global:
|
||||
scrape_interval: 60s
|
||||
scrape_configs:
|
||||
- job_name: otel-collector-binary
|
||||
static_configs:
|
||||
- targets:
|
||||
# - localhost:8888
|
||||
processors:
|
||||
batch:
|
||||
send_batch_size: 1000
|
||||
timeout: 10s
|
||||
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
|
||||
resourcedetection:
|
||||
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
timeout: 2s
|
||||
system:
|
||||
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
|
||||
extensions:
|
||||
health_check: {}
|
||||
zpages: {}
|
||||
exporters:
|
||||
otlp:
|
||||
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
|
||||
tls:
|
||||
insecure: false
|
||||
headers:
|
||||
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
|
||||
logging:
|
||||
verbosity: normal
|
||||
service:
|
||||
telemetry:
|
||||
metrics:
|
||||
address: 0.0.0.0:8888
|
||||
extensions: [health_check, zpages]
|
||||
pipelines:
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
metrics/internal:
|
||||
receivers: [prometheus, hostmetrics]
|
||||
processors: [resourcedetection, batch]
|
||||
exporters: [otlp]
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
```
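Since the `health_check` extension is enabled in this config, you can verify the collector is up once it is running (see the next step); `13133` is the extension's default port and is assumed unchanged here:

```bash
curl -s http://localhost:13133/ && echo "collector is healthy"
```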
|
||||
|
||||
|
@ -0,0 +1,62 @@
|
||||
|
||||
|
||||
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,41 @@
|
||||
|
||||
|
||||
Once you are done instrumenting your PHP application, you can run it using the commands below
|
||||
|
||||
|
||||
|
||||
### Step 1: Run OTel Collector
|
||||
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
|
||||
|
||||
```bash
|
||||
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
|
||||
```
|
||||
|
||||
|
||||
#### (Optional Step): View last 50 lines of `otelcol` logs
|
||||
```bash
|
||||
tail -f -n 50 otelcol-output.log
|
||||
```
|
||||
|
||||
#### (Optional Step): Stop `otelcol`
|
||||
```bash
|
||||
kill "$(< otel-pid)"
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME=<SERVICE_NAME> \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,60 @@
|
||||
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,16 @@
|
||||
### Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME={{MYAPP}} \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
|
||||
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,96 @@
|
||||
### Setup OpenTelemetry Binary as an agent
|
||||
|
||||
|
||||
### Step 1: Download otel-collector tar.gz
|
||||
```bash
|
||||
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
|
||||
```bash
|
||||
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib
|
||||
```
|
||||
|
||||
|
||||
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
|
||||
```yaml
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
grpc:
|
||||
endpoint: 0.0.0.0:4317
|
||||
http:
|
||||
endpoint: 0.0.0.0:4318
|
||||
hostmetrics:
|
||||
collection_interval: 60s
|
||||
scrapers:
|
||||
cpu: {}
|
||||
disk: {}
|
||||
load: {}
|
||||
filesystem: {}
|
||||
memory: {}
|
||||
network: {}
|
||||
paging: {}
|
||||
process:
|
||||
mute_process_name_error: true
|
||||
mute_process_exe_error: true
|
||||
mute_process_io_error: true
|
||||
processes: {}
|
||||
prometheus:
|
||||
config:
|
||||
global:
|
||||
scrape_interval: 60s
|
||||
scrape_configs:
|
||||
- job_name: otel-collector-binary
|
||||
static_configs:
|
||||
- targets:
|
||||
# - localhost:8888
|
||||
processors:
|
||||
batch:
|
||||
send_batch_size: 1000
|
||||
timeout: 10s
|
||||
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
|
||||
resourcedetection:
|
||||
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
timeout: 2s
|
||||
system:
|
||||
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
|
||||
extensions:
|
||||
health_check: {}
|
||||
zpages: {}
|
||||
exporters:
|
||||
otlp:
|
||||
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
|
||||
tls:
|
||||
insecure: false
|
||||
headers:
|
||||
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
|
||||
logging:
|
||||
verbosity: normal
|
||||
service:
|
||||
telemetry:
|
||||
metrics:
|
||||
address: 0.0.0.0:8888
|
||||
extensions: [health_check, zpages]
|
||||
pipelines:
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
metrics/internal:
|
||||
receivers: [prometheus, hostmetrics]
|
||||
processors: [resourcedetection, batch]
|
||||
exporters: [otlp]
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
```
|
||||
|
||||
|
@ -0,0 +1,62 @@
|
||||
|
||||
|
||||
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,41 @@
|
||||
|
||||
|
||||
Once you are done instrumenting your PHP application, you can run it using the commands below
|
||||
|
||||
|
||||
|
||||
### Step 1: Run OTel Collector
|
||||
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
|
||||
|
||||
```bash
|
||||
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
|
||||
```
|
||||
|
||||
|
||||
#### (Optional Step): View last 50 lines of `otelcol` logs
|
||||
```bash
|
||||
tail -f -n 50 otelcol-output.log
|
||||
```
|
||||
|
||||
#### (Optional Step): Stop `otelcol`
|
||||
```bash
|
||||
kill "$(< otel-pid)"
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME=<SERVICE_NAME> \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,60 @@
|
||||
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,16 @@
|
||||
### Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME={{MYAPP}} \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
|
||||
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -0,0 +1,96 @@
|
||||
## Setup OpenTelemetry Binary as an agent
|
||||
|
||||
|
||||
### Step 1: Download otel-collector tar.gz
|
||||
```bash
|
||||
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
|
||||
```bash
|
||||
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib
|
||||
```
|
||||
|
||||
|
||||
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
|
||||
```yaml
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
grpc:
|
||||
endpoint: 0.0.0.0:4317
|
||||
http:
|
||||
endpoint: 0.0.0.0:4318
|
||||
hostmetrics:
|
||||
collection_interval: 60s
|
||||
scrapers:
|
||||
cpu: {}
|
||||
disk: {}
|
||||
load: {}
|
||||
filesystem: {}
|
||||
memory: {}
|
||||
network: {}
|
||||
paging: {}
|
||||
process:
|
||||
mute_process_name_error: true
|
||||
mute_process_exe_error: true
|
||||
mute_process_io_error: true
|
||||
processes: {}
|
||||
prometheus:
|
||||
config:
|
||||
global:
|
||||
scrape_interval: 60s
|
||||
scrape_configs:
|
||||
- job_name: otel-collector-binary
|
||||
static_configs:
|
||||
- targets:
|
||||
# - localhost:8888
|
||||
processors:
|
||||
batch:
|
||||
send_batch_size: 1000
|
||||
timeout: 10s
|
||||
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
|
||||
resourcedetection:
|
||||
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
timeout: 2s
|
||||
system:
|
||||
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
|
||||
extensions:
|
||||
health_check: {}
|
||||
zpages: {}
|
||||
exporters:
|
||||
otlp:
|
||||
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
|
||||
tls:
|
||||
insecure: false
|
||||
headers:
|
||||
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
|
||||
logging:
|
||||
verbosity: normal
|
||||
service:
|
||||
telemetry:
|
||||
metrics:
|
||||
address: 0.0.0.0:8888
|
||||
extensions: [health_check, zpages]
|
||||
pipelines:
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
metrics/internal:
|
||||
receivers: [prometheus, hostmetrics]
|
||||
processors: [resourcedetection, batch]
|
||||
exporters: [otlp]
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [otlp]
|
||||
```
|
||||
|
||||
|
@ -0,0 +1,62 @@
|
||||
|
||||
|
||||
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
|
||||
|
||||
### Step 1: Setup Development Environment
|
||||
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you also need the build tools, which can be installed using the following command:
|
||||
|
||||
**Linux**:
|
||||
```bash
|
||||
sudo apt-get install gcc make autoconf
|
||||
```
|
||||
|
||||
**MacOS (Homebrew)**:
|
||||
```bash
|
||||
brew install gcc make autoconf
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 2: Build the extension
|
||||
|
||||
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
|
||||
|
||||
```bash
|
||||
pecl install opentelemetry
|
||||
```
|
||||
|
||||
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
|
||||
|
||||
```bash
|
||||
[opentelemetry]
|
||||
extension=opentelemetry.so
|
||||
```
|
||||
|
||||
Verify that the extension is enabled by running:
|
||||
|
||||
```bash
|
||||
php -m | grep opentelemetry
|
||||
```
|
||||
|
||||
Running the above command will **output**:
|
||||
|
||||
```bash
|
||||
opentelemetry
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Step 3: Add the dependencies
|
||||
|
||||
Add dependencies required to perform automatic instrumentation using this command :
|
||||
|
||||
```bash
|
||||
composer config allow-plugins.php-http/discovery false
|
||||
composer require \
|
||||
open-telemetry/sdk \
|
||||
open-telemetry/exporter-otlp \
|
||||
php-http/guzzle7-adapter \
|
||||
open-telemetry/transport-grpc
|
||||
```
|
@ -0,0 +1,41 @@
|
||||
|
||||
|
||||
Once you are done instrumenting your PHP application, you can run it using the commands below
|
||||
|
||||
|
||||
|
||||
### Step 1: Run OTel Collector
|
||||
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
|
||||
|
||||
```bash
|
||||
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
|
||||
```
|
||||
|
||||
|
||||
#### (Optional Step): View last 50 lines of `otelcol` logs
|
||||
```bash
|
||||
tail -f -n 50 otelcol-output.log
|
||||
```
|
||||
|
||||
#### (Optional Step): Stop `otelcol`
|
||||
```bash
|
||||
kill "$(< otel-pid)"
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Running your PHP application
|
||||
|
||||
We will pass environment variables at the runtime:
|
||||
|
||||
```bash
|
||||
env OTEL_PHP_AUTOLOAD_ENABLED=true \
|
||||
OTEL_SERVICE_NAME=<SERVICE_NAME> \
|
||||
OTEL_TRACES_EXPORTER=otlp \
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
|
||||
OTEL_PROPAGATORS=baggage,tracecontext \
|
||||
<your-run-command>
|
||||
```
|
||||
|
||||
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
|
||||
- <your-run-command> - Run command for your PHP application
|
@ -403,6 +403,38 @@ import APM_javascript_reactjs_macOsARM64_quickStart_runApplication from '../Modu
|
||||
import APM_javascript_reactjs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-installOtelCollector.md';
|
||||
import APM_javascript_reactjs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-instrumentApplication.md';
|
||||
import APM_javascript_reactjs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-runApplication.md';
|
||||
// PHP-Kubernetes
|
||||
import APM_php_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md';
|
||||
import APM_php_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md';
|
||||
import APM_php_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md';
|
||||
// PHP-LinuxAMD64-quickstart
|
||||
import APM_php_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md';
|
||||
import APM_php_linuxAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md';
|
||||
// PHP-LinuxAMD64-recommended
|
||||
import APM_php_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md';
|
||||
import APM_php_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md';
|
||||
import APM_php_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md';
|
||||
// PHP-LinuxARM64-quickstart
|
||||
import APM_php_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md';
|
||||
import APM_php_linuxARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md';
|
||||
// PHP-LinuxARM64-recommended
|
||||
import APM_php_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md';
|
||||
import APM_php_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md';
|
||||
import APM_php_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md';
|
||||
// PHP-MacOsAMD64-quickstart
|
||||
import APM_php_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md';
|
||||
import APM_php_macOsAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md';
|
||||
// PHP-MacOsAMD64-recommended
|
||||
import APM_php_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md';
|
||||
import APM_php_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md';
|
||||
import APM_php_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md';
|
||||
// PHP-MacOsARM64-quickstart
|
||||
import APM_php_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md';
|
||||
import APM_php_macOsARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md';
|
||||
// PHP-MacOsARM64-recommended
|
||||
import APM_php_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md';
|
||||
import APM_php_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md';
|
||||
import APM_php_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md';
|
||||
/// ////// Javascript Done
|
||||
/// ///// Python Start
|
||||
// Django
|
||||
@ -575,7 +607,6 @@ import APM_python_other_macOsARM64_recommendedSteps_setupOtelCollector from '../
|
||||
import APM_python_other_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-instrumentApplication.md';
|
||||
import APM_python_other_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-runApplication.md';
|
||||
// ----------------------------------------------------------------------------
|
||||
/// ////// Go Done
|
||||
/// ///// ROR Start
|
||||
// ROR-Kubernetes
|
||||
import APM_rails_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/RubyOnRails/md-docs/Kubernetes/ror-kubernetes-installOtelCollector.md';
|
||||
@ -1546,4 +1577,36 @@ export const ApmDocFilePaths = {
|
||||
APM_swift_macOsARM64_recommendedSteps_setupOtelCollector,
|
||||
APM_swift_macOsARM64_recommendedSteps_instrumentApplication,
|
||||
APM_swift_macOsARM64_recommendedSteps_runApplication,
|
||||
|
||||
APM_php_kubernetes_recommendedSteps_setupOtelCollector,
|
||||
APM_php_kubernetes_recommendedSteps_instrumentApplication,
|
||||
APM_php_kubernetes_recommendedSteps_runApplication,
|
||||
|
||||
APM_php_linuxAMD64_quickStart_instrumentApplication,
|
||||
APM_php_linuxAMD64_quickStart_runApplication,
|
||||
|
||||
APM_php_linuxAMD64_recommendedSteps_setupOtelCollector,
|
||||
APM_php_linuxAMD64_recommendedSteps_instrumentApplication,
|
||||
APM_php_linuxAMD64_recommendedSteps_runApplication,
|
||||
|
||||
APM_php_linuxARM64_quickStart_instrumentApplication,
|
||||
APM_php_linuxARM64_quickStart_runApplication,
|
||||
|
||||
APM_php_linuxARM64_recommendedSteps_setupOtelCollector,
|
||||
APM_php_linuxARM64_recommendedSteps_instrumentApplication,
|
||||
APM_php_linuxARM64_recommendedSteps_runApplication,
|
||||
|
||||
APM_php_macOsAMD64_quickStart_instrumentApplication,
|
||||
APM_php_macOsAMD64_quickStart_runApplication,
|
||||
|
||||
APM_php_macOsAMD64_recommendedSteps_setupOtelCollector,
|
||||
APM_php_macOsAMD64_recommendedSteps_instrumentApplication,
|
||||
APM_php_macOsAMD64_recommendedSteps_runApplication,
|
||||
|
||||
APM_php_macOsARM64_quickStart_instrumentApplication,
|
||||
APM_php_macOsARM64_quickStart_runApplication,
|
||||
|
||||
APM_php_macOsARM64_recommendedSteps_setupOtelCollector,
|
||||
APM_php_macOsARM64_recommendedSteps_instrumentApplication,
|
||||
APM_php_macOsARM64_recommendedSteps_runApplication,
|
||||
};
|
||||
|
@ -132,6 +132,11 @@ const supportedLanguages = [
|
||||
id: 'swift',
|
||||
imgURL: `/Logos/swift.png`,
|
||||
},
|
||||
{
|
||||
name: 'php',
|
||||
id: 'php',
|
||||
imgURL: `/Logos/php.png`,
|
||||
},
|
||||
];
|
||||
|
||||
export const defaultLogsType = {
|
||||
@ -293,7 +298,8 @@ export const getSupportedFrameworks = ({
|
||||
(moduleID === ModulesMap.APM && dataSourceName === '.NET') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'rust') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'elixir') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'swift')
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'swift') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'php')
|
||||
) {
|
||||
return [];
|
||||
}
|
||||
@ -322,7 +328,8 @@ export const hasFrameworks = ({
|
||||
(moduleID === ModulesMap.APM && dataSourceName === '.NET') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'rust') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'elixir') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'swift')
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'swift') ||
|
||||
(moduleID === ModulesMap.APM && dataSourceName === 'php')
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
@ -279,9 +279,6 @@ function PendingInvitesContainer(): JSX.Element {
|
||||
</Typography.Title>
|
||||
|
||||
<Space>
|
||||
<Typography.Text type="warning">
|
||||
{t('invite_link_share_manually')}
|
||||
</Typography.Text>
|
||||
<Button
|
||||
icon={<PlusOutlined />}
|
||||
type="primary"
|
||||
|
@ -1,11 +1,7 @@
import { Select, Spin } from 'antd';
import { getAggregateKeys } from 'api/queryBuilder/getAttributeKeys';
// ** Constants
import {
idDivider,
QueryBuilderKeys,
selectValueDivider,
} from 'constants/queryBuilder';
import { idDivider, QueryBuilderKeys } from 'constants/queryBuilder';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
import useDebounce from 'hooks/useDebounce';
@ -83,11 +79,7 @@ export const GroupByFilter = memo(function GroupByFilter({
dataType={item.dataType || ''}
/>
),
value: `${transformStringWithPrefix({
str: item.key,
prefix: item.type || '',
condition: !item.isColumn,
})}${selectValueDivider}${item.id}`,
value: `${item.id}`,
})) || [];

setOptionsData(options);
@ -135,7 +127,8 @@ export const GroupByFilter = memo(function GroupByFilter({
const keys = await getAttributeKeys();

const groupByValues: BaseAutocompleteData[] = values.map((item) => {
const [currentValue, id] = item.value.split(selectValueDivider);
const id = item.value;
const currentValue = item.value.split(idDivider)[0];

if (id && id.includes(idDivider)) {
const attribute = keys.find((item) => item.id === id);
@ -174,11 +167,7 @@ export const GroupByFilter = memo(function GroupByFilter({
condition: !item.isColumn,
}),
)}`,
value: `${transformStringWithPrefix({
str: item.key,
prefix: item.type || '',
condition: !item.isColumn,
})}${selectValueDivider}${item.id}`,
value: `${item.id}`,
}),
);

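With the option value reduced to the bare attribute id, the display value is recovered by splitting on idDivider. A rough sketch of that round trip; the divider value and the id layout here are assumptions for illustration only:

```ts
// Illustration only: idDivider and the id format below are assumed, not taken from the codebase.
const idDivider = '--';
const optionValue = 'service.name--string--tag'; // hypothetical BaseAutocompleteData id

const currentValue = optionValue.split(idDivider)[0]; // -> 'service.name' (shown to the user)
const looksLikeFullId = optionValue.includes(idDivider); // true -> resolve the attribute by id
```
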
@ -5,7 +5,7 @@ import { parse } from 'papaparse';
import { orderByValueDelimiter } from '../OrderByFilter/utils';

// eslint-disable-next-line no-useless-escape
export const tagRegexp = /^\s*(.*?)\s*(IN|NOT_IN|LIKE|NOT_LIKE|REGEX|NOT_REGEX|=|!=|EXISTS|NOT_EXISTS|CONTAINS|NOT_CONTAINS|>=|>|<=|<|HAS|NHAS)\s*(.*)$/g;
export const tagRegexp = /^\s*(.*?)\s*(\bIN\b|\bNOT_IN\b|\bLIKE\b|\bNOT_LIKE\b|\bREGEX\b|\bNOT_REGEX\b|=|!=|\bEXISTS\b|\bNOT_EXISTS\b|\bCONTAINS\b|\bNOT_CONTAINS\b|>=|>|<=|<|\bHAS\b|\bNHAS\b)\s*(.*)$/gi;

export function isInNInOperator(value: string): boolean {
return value === OPERATORS.IN || value === OPERATORS.NIN;
@ -25,8 +25,8 @@ export function getTagToken(tag: string): ITagToken {
const [, matchTagKey, matchTagOperator, matchTagValue] = match;
return {
tagKey: matchTagKey,
tagOperator: matchTagOperator,
tagValue: isInNInOperator(matchTagOperator)
tagOperator: matchTagOperator.toUpperCase(),
tagValue: isInNInOperator(matchTagOperator.toUpperCase())
? parse(matchTagValue).data.flat()
: matchTagValue,
} as ITagToken;

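The reworked tagRegexp wraps each word operator in \b and adds the i flag, and getTagToken normalises whatever case the user typed back to upper case. A small standalone sketch of that behaviour with hypothetical inputs (the regex is reused verbatim from above):

```ts
// Standalone illustration of the updated tokenizer; the regex mirrors tagRegexp above.
const tagRegexp = /^\s*(.*?)\s*(\bIN\b|\bNOT_IN\b|\bLIKE\b|\bNOT_LIKE\b|\bREGEX\b|\bNOT_REGEX\b|=|!=|\bEXISTS\b|\bNOT_EXISTS\b|\bCONTAINS\b|\bNOT_CONTAINS\b|>=|>|<=|<|\bHAS\b|\bNHAS\b)\s*(.*)$/gi;

function tokenize(tag: string): { key: string; op: string; value: string } | null {
	tagRegexp.lastIndex = 0; // the global flag makes exec() stateful
	const match = tagRegexp.exec(tag);
	if (!match) return null;
	const [, key, op, value] = match;
	return { key, op: op.toUpperCase(), value };
}

// Lower-case operators are now recognised and normalised:
tokenize('service.name in frontend,driver');
// -> { key: 'service.name', op: 'IN', value: 'frontend,driver' }

// The word boundaries keep the now case-insensitive operators from matching inside a key:
tokenize('index_time >= 10');
// -> { key: 'index_time', op: '>=', value: '10' }, not an 'IN' split of "index_time"
```
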
@ -0,0 +1,20 @@
.resourceAttributesFilter-container {
display: flex;
align-items: center;
justify-content: stretch;
flex-wrap: wrap;
gap: 8px;
margin-bottom: 16px;

.resource-attributes-selector {
flex: 1;
}

.environment-selector {
min-width: 200px;
}

.ant-form-item {
margin-bottom: 0;
}
}

@ -1,10 +1,17 @@
import './ResourceAttributesFilter.styles.scss';

import { CloseCircleFilled } from '@ant-design/icons';
import { Button, Select, Spin } from 'antd';
import useResourceAttribute, {
isResourceEmpty,
} from 'hooks/useResourceAttribute';
import { convertMetricKeyToTrace } from 'hooks/useResourceAttribute/utils';
import { ReactNode, useMemo } from 'react';
import {
convertMetricKeyToTrace,
getEnvironmentTagKeys,
getEnvironmentTagValues,
} from 'hooks/useResourceAttribute/utils';
import { ReactNode, useEffect, useMemo, useState } from 'react';
import { SelectOption } from 'types/common/select';
import { popupContainer } from 'utils/selectPopupContainer';
import { v4 as uuid } from 'uuid';

@ -22,60 +29,129 @@ function ResourceAttributesFilter({
handleClearAll,
handleFocus,
handleChange,
handleEnvironmentChange,
selectedQuery,
optionsData,
loading,
} = useResourceAttribute();

const isEmpty = useMemo(
() => isResourceEmpty(queries, staging, selectedQuery),
[queries, selectedQuery, staging],
const [environments, setEnvironments] = useState<
SelectOption<string, string>[]
>([]);

const [selectedEnvironments, setSelectedEnvironments] = useState<string[]>([]);

const queriesExcludingEnvironment = useMemo(
() =>
queries.filter(
(query) => query.tagKey !== 'resource_deployment_environment',
),
[queries],
);

return (
<SearchContainer>
<div>
{queries.map((query) => (
<QueryChip key={query.id} queryData={query} onClose={handleClose} />
))}
{staging.map((query, idx) => (
<QueryChipItem key={uuid()}>
{idx === 0 ? convertMetricKeyToTrace(query) : query}
</QueryChipItem>
))}
</div>
<Select
getPopupContainer={popupContainer}
placeholder={!isEmpty && 'Search and Filter based on resource attributes.'}
onChange={handleChange}
bordered={false}
value={selectedQuery as never}
style={{ flex: 1 }}
options={optionsData.options}
mode={optionsData?.mode}
showArrow={!!suffixIcon}
onClick={handleFocus}
onBlur={handleBlur}
onClear={handleClearAll}
suffixIcon={suffixIcon}
notFoundContent={
loading ? (
<span>
<Spin size="small" /> Loading...
</span>
) : (
<span>
No resource attributes available to filter. Please refer docs to send
attributes.
</span>
)
}
/>
const isEmpty = useMemo(
() => isResourceEmpty(queriesExcludingEnvironment, staging, selectedQuery),
[queriesExcludingEnvironment, selectedQuery, staging],
);

{queries.length || staging.length || selectedQuery.length ? (
<Button onClick={handleClearAll} icon={<CloseCircleFilled />} type="text" />
) : null}
</SearchContainer>
useEffect(() => {
const resourceDeploymentEnvironmentQuery = queries.filter(
(query) => query.tagKey === 'resource_deployment_environment',
);

if (resourceDeploymentEnvironmentQuery?.length > 0) {
setSelectedEnvironments(resourceDeploymentEnvironmentQuery[0].tagValue);
} else {
setSelectedEnvironments([]);
}
}, [queries]);

useEffect(() => {
getEnvironmentTagKeys().then((tagKeys) => {
if (tagKeys && Array.isArray(tagKeys) && tagKeys.length > 0) {
getEnvironmentTagValues().then((tagValues) => {
setEnvironments(tagValues);
});
}
});
}, []);

return (
<div className="resourceAttributesFilter-container">
<div className="environment-selector">
<Select
getPopupContainer={popupContainer}
key={selectedEnvironments.join('')}
showSearch
mode="multiple"
value={selectedEnvironments}
placeholder="Select Environment/s"
data-testId="resource-environment-filter"
style={{ minWidth: 200, height: 34 }}
onChange={handleEnvironmentChange}
onBlur={handleBlur}
>
{environments.map((opt) => (
<Select.Option key={opt.value} value={opt.value}>
{opt.label}
</Select.Option>
))}
</Select>
</div>

<div className="resource-attributes-selector">
<SearchContainer>
<div>
{queriesExcludingEnvironment.map((query) => (
<QueryChip key={query.id} queryData={query} onClose={handleClose} />
))}
{staging.map((query, idx) => (
<QueryChipItem key={uuid()}>
{idx === 0 ? convertMetricKeyToTrace(query) : query}
</QueryChipItem>
))}
</div>
<Select
getPopupContainer={popupContainer}
placeholder={
!isEmpty && 'Search and Filter based on resource attributes.'
}
onChange={handleChange}
bordered={false}
value={selectedQuery as never}
style={{ flex: 1 }}
options={optionsData.options}
mode={optionsData?.mode}
data-testId="resource-attributes-filter"
showArrow={!!suffixIcon}
onClick={handleFocus}
onBlur={handleBlur}
onClear={handleClearAll}
suffixIcon={suffixIcon}
notFoundContent={
loading ? (
<span>
<Spin size="small" /> Loading...
</span>
) : (
<span>
No resource attributes available to filter. Please refer docs to send
attributes.
</span>
)
}
/>

{queries.length || staging.length || selectedQuery.length ? (
<Button
onClick={handleClearAll}
icon={<CloseCircleFilled />}
type="text"
/>
) : null}
</SearchContainer>
</div>
</div>
);
}

@ -12,7 +12,10 @@ function QueryChip({ queryData, onClose }: IQueryChipProps): JSX.Element {
<QueryChipContainer>
<QueryChipItem>{convertMetricKeyToTrace(queryData.tagKey)}</QueryChipItem>
<QueryChipItem>{queryData.operator}</QueryChipItem>
<QueryChipItem closable onClose={onCloseHandler}>
<QueryChipItem
closable={queryData.tagKey !== 'resource_deployment_environment'}
onClose={onCloseHandler}
>
{queryData.tagValue.join(', ')}
</QueryChipItem>
</QueryChipContainer>

@ -7,9 +7,10 @@ export const SearchContainer = styled.div`
display: flex;
align-items: center;
gap: 0.2rem;
padding: 0.2rem;
margin: 1rem 0;
border: 1px solid #ccc5;
padding: 0 0.2rem;
border: 1px solid #454c58;
box-sizing: border-box;
border-radius: 3px;
`;

export const QueryChipContainer = styled.span`

@ -8,13 +8,12 @@ import {

export const useGetIntegrationStatus = ({
integrationId,
enabled,
}: GetIntegrationPayloadProps): UseQueryResult<
AxiosResponse<GetIntegrationStatusProps>,
AxiosError
> =>
useQuery<AxiosResponse<GetIntegrationStatusProps>, AxiosError>({
queryKey: ['Integration', integrationId, Date.now()],
queryKey: ['integration-connection-status', integrationId],
queryFn: () => getIntegrationStatus({ integrationId }),
enabled,
refetchInterval: 5000,
});

@ -1,4 +1,7 @@
import { QUERY_BUILDER_OPERATORS_BY_TYPES } from 'constants/queryBuilder';
import {
OPERATORS,
QUERY_BUILDER_OPERATORS_BY_TYPES,
} from 'constants/queryBuilder';
import { getRemovePrefixFromKey } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
import { useMemo } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
@ -16,9 +19,14 @@ export const useOperators = (
): IOperators =>
useMemo(() => {
const currentKey = keys?.find((el) => el.key === getRemovePrefixFromKey(key));
const strippedKey = key.split(' ')[0];

// eslint-disable-next-line no-nested-ternary
return currentKey?.dataType
? QUERY_BUILDER_OPERATORS_BY_TYPES[
currentKey.dataType as keyof typeof QUERY_BUILDER_OPERATORS_BY_TYPES
]
: strippedKey.endsWith('[*]') && strippedKey.startsWith('body.')
? [OPERATORS.HAS, OPERATORS.NHAS]
: QUERY_BUILDER_OPERATORS_BY_TYPES.universal;
}, [keys, key]);

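The new ternary branch narrows the operator list to HAS / NHAS when the key has no known data type but addresses an array inside the log body. A rough sketch of that fallback path only; keys with a known data type still get their type-specific list, and the operator arrays below are stand-ins for the real constants:

```ts
// Stand-ins for the real constants; only the branching logic is illustrated here.
const OPERATORS = { HAS: 'HAS', NHAS: 'NHAS' } as const;
const UNIVERSAL_OPERATORS = ['=', '!=', 'IN', 'NOT_IN', 'EXISTS', 'NOT_EXISTS'];

// For keys addressing an array inside the log body (body.<path>[*]) only HAS / NHAS apply.
function operatorsForUnknownKey(key: string): string[] {
	const strippedKey = key.split(' ')[0];
	if (strippedKey.endsWith('[*]') && strippedKey.startsWith('body.')) {
		return [OPERATORS.HAS, OPERATORS.NHAS];
	}
	return UNIVERSAL_OPERATORS;
}

operatorsForUnknownKey('body.requests[*]'); // -> ['HAS', 'NHAS']
operatorsForUnknownKey('http.method');      // -> the universal list
```
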
@ -74,7 +74,14 @@ export const useTag = (
const handleAddTag = useCallback(
(value: string): void => {
const { tagKey } = getTagToken(value);
const [key, id] = tagKey.split('-');
const parts = tagKey.split('-');
// this is done to ensure that `hello-world` also gets converted to `body CONTAINS hello-world`
let id = parts[parts.length - 1];
let key = parts.slice(0, -1).join('-');
if (parts.length === 1) {
id = '';
[key] = parts;
}

if (id === 'custom') {
const customValue = whereClauseConfig

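The hyphen handling now keeps everything before the last `-` as the key and treats only the final segment as a candidate suggestion id, so hyphenated free text is no longer truncated at the first `-`. A minimal sketch of the split rule with made-up inputs:

```ts
// Splits a tag key of the form "<key>-<id>" while leaving single-segment text alone.
function splitTagKey(tagKey: string): { key: string; id: string } {
	const parts = tagKey.split('-');
	if (parts.length === 1) {
		return { key: parts[0], id: '' };
	}
	return {
		key: parts.slice(0, -1).join('-'),
		id: parts[parts.length - 1],
	};
}

splitTagKey('helloworld');            // -> { key: 'helloworld', id: '' }
splitTagKey('hello-world');           // -> { key: 'hello', id: 'world' }
splitTagKey('my-hyphenated-key-ab12');// -> { key: 'my-hyphenated-key', id: 'ab12' }
// Previously the last case would have produced key 'my' and id 'hyphenated', dropping the rest.
```
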
@ -52,6 +52,7 @@ function ResourceProvider({ children }: Props): JSX.Element {
? `?resourceAttribute=${encode(JSON.stringify(queries))}`
: '',
});

setQueries(queries);
},
[pathname],
@ -62,12 +63,14 @@ function ResourceProvider({ children }: Props): JSX.Element {
onSelectTagKey: () => {
handleLoading(true);
GetTagKeys()
.then((tagKeys) =>
.then((tagKeys) => {
const options = mappingWithRoutesAndKeys(pathname, tagKeys);

setOptionsData({
options: mappingWithRoutesAndKeys(pathname, tagKeys),
options,
mode: undefined,
}),
)
});
})
.finally(() => {
handleLoading(false);
});
@ -96,6 +99,7 @@ function ResourceProvider({ children }: Props): JSX.Element {
}

const generatedQuery = createQuery([...staging, selectedQuery]);

if (generatedQuery) {
dispatchQueries([...queries, generatedQuery]);
}
@ -127,6 +131,29 @@ function ResourceProvider({ children }: Props): JSX.Element {
[optionsData.mode, send],
);

const handleEnvironmentChange = useCallback(
(environments: string[]): void => {
const staging = ['resource_deployment_environment', 'IN'];

const queriesCopy = queries.filter(
(query) => query.tagKey !== 'resource_deployment_environment',
);

if (environments && Array.isArray(environments) && environments.length > 0) {
const generatedQuery = createQuery([...staging, environments]);

if (generatedQuery) {
dispatchQueries([...queriesCopy, generatedQuery]);
}
} else {
dispatchQueries([...queriesCopy]);
}

send('RESET');
},
[dispatchQueries, queries, send],
);

const handleClose = useCallback(
(id: string): void => {
dispatchQueries(queries.filter((queryData) => queryData.id !== id));
@ -159,12 +186,14 @@ function ResourceProvider({ children }: Props): JSX.Element {
handleFocus,
loading,
handleChange,
handleEnvironmentChange,
selectedQuery,
optionsData,
}),
[
handleBlur,
handleChange,
handleEnvironmentChange,
handleClearAll,
handleClose,
handleFocus,

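handleEnvironmentChange pushes the environment multi-select through the same createQuery path as any other resource attribute, replacing a previous environment entry rather than stacking a second one. A hedged sketch of the equivalent pure function; the IResourceAttribute shape is taken from the surrounding code and the id generation is a stand-in:

```ts
interface IResourceAttribute {
	id: string;
	tagKey: string;
	operator: string;
	tagValue: string[];
}

// Mirrors the handler's behaviour: replace any existing environment filter,
// or drop it entirely when the selection is cleared.
function applyEnvironmentSelection(
	queries: IResourceAttribute[],
	environments: string[],
): IResourceAttribute[] {
	const withoutEnvironment = queries.filter(
		(query) => query.tagKey !== 'resource_deployment_environment',
	);
	if (environments.length === 0) {
		return withoutEnvironment;
	}
	return [
		...withoutEnvironment,
		{
			id: Math.random().toString(16).slice(2, 10), // stands in for uuid().slice(0, 8)
			tagKey: 'resource_deployment_environment',
			operator: 'IN',
			tagValue: environments,
		},
	];
}
```
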
@ -28,4 +28,5 @@ export interface IResourceAttributeProps {
handleChange: (value: string) => void;
selectedQuery: string[];
optionsData: OptionsData;
handleEnvironmentChange: (environments: string[]) => void;
}

@ -109,12 +109,43 @@ export const GetTagKeys = async (): Promise<IOption[]> => {
if (!payload || !payload?.data) {
return [];
}
return payload.data
.filter((tagKey: string) => tagKey !== 'resource_deployment_environment')
.map((tagKey: string) => ({
label: convertMetricKeyToTrace(tagKey),
value: tagKey,
}));
};

export const getEnvironmentTagKeys = async (): Promise<IOption[]> => {
const { payload } = await getResourceAttributesTagKeys({
metricName: 'signoz_calls_total',
match: 'resource_deployment_environment',
});
if (!payload || !payload?.data) {
return [];
}
return payload.data.map((tagKey: string) => ({
label: convertMetricKeyToTrace(tagKey),
value: tagKey,
}));
};

export const getEnvironmentTagValues = async (): Promise<IOption[]> => {
const { payload } = await getResourceAttributesTagValues({
tagKey: 'resource_deployment_environment',
metricName: 'signoz_calls_total',
});

if (!payload || !payload?.data) {
return [];
}
return payload.data.map((tagValue: string) => ({
label: tagValue,
value: tagValue,
}));
};

export const GetTagValues = async (tagKey: string): Promise<IOption[]> => {
const { payload } = await getResourceAttributesTagValues({
tagKey,
@ -132,6 +163,23 @@ export const GetTagValues = async (tagKey: string): Promise<IOption[]> => {

export const createQuery = (
selectedItems: Array<string | string[]> = [],
): IResourceAttribute | null => {
console.log('selectedItems', selectedItems);

if (selectedItems.length === 3) {
return {
id: uuid().slice(0, 8),
tagKey: selectedItems[0] as string,
operator: selectedItems[1] as string,
tagValue: selectedItems[2] as string[],
};
}
return null;
};

export const updateQuery = (
queryKey: string,
selectedItems: Array<string | string[]> = [],
): IResourceAttribute | null => {
if (selectedItems.length === 3) {
return {

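getEnvironmentTagKeys and getEnvironmentTagValues both query the signoz_calls_total attribute metadata, while GetTagKeys now filters the deployment-environment key out so it only surfaces in the dedicated dropdown. A small wiring sketch, with the fetchers passed in so the snippet stays self-contained:

```ts
// Hypothetical wiring for the environment dropdown: only query values when the key exists.
type IOption = { label: string; value: string };

async function loadEnvironmentOptions(
	fetchKeys: () => Promise<IOption[]>,   // e.g. getEnvironmentTagKeys from this file
	fetchValues: () => Promise<IOption[]>, // e.g. getEnvironmentTagValues from this file
): Promise<IOption[]> {
	const tagKeys = await fetchKeys();
	if (tagKeys.length === 0) {
		return []; // no resource_deployment_environment attribute reported yet
	}
	return fetchValues();
}
```
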
@ -3,6 +3,7 @@ import 'styles.scss';

import * as Sentry from '@sentry/react';
import AppRoutes from 'AppRoutes';
import { AxiosError } from 'axios';
import { ThemeProvider } from 'hooks/useDarkMode';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { createRoot } from 'react-dom/client';
@ -16,6 +17,17 @@ const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchOnWindowFocus: false,
retry(failureCount, error): boolean {
if (
// in case of manually throwing errors please make sure to send error.response.status
error instanceof AxiosError &&
error.response?.status &&
(error.response?.status >= 400 && error.response?.status <= 499)
) {
return false;
}
return failureCount < 2;
},
},
},
});

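With this default, 4xx responses are treated as permanent failures and everything else is retried while the failure count stays below 2. A standalone sketch of the same predicate, assuming the errors reaching it are axios errors carrying response.status:

```ts
import { AxiosError } from 'axios';

// Standalone version of the default retry predicate configured above.
// Client errors (400-499) are considered permanent; other failures are
// retried while the failure count stays below 2.
function shouldRetry(failureCount: number, error: unknown): boolean {
	if (
		error instanceof AxiosError &&
		error.response?.status &&
		error.response.status >= 400 &&
		error.response.status <= 499
	) {
		return false;
	}
	return failureCount < 2;
}
```
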
@ -12,6 +12,7 @@ import { Dimensions } from 'hooks/useDimensions';
import { convertValue } from 'lib/getConvertedValue';
import _noop from 'lodash-es/noop';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import uPlot from 'uplot';

import onClickPlugin, { OnClickPluginOpts } from './plugins/onClickPlugin';
@ -40,6 +41,7 @@ export interface GetUPlotChartOptions {
maxTimeScale?: number;
softMin: number | null;
softMax: number | null;
currentQuery?: Query;
}

export const getUPlotChartOptions = ({
@ -59,6 +61,7 @@ export const getUPlotChartOptions = ({
softMax,
softMin,
panelType,
currentQuery,
}: GetUPlotChartOptions): uPlot.Options => {
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);

@ -223,6 +226,7 @@ export const getUPlotChartOptions = ({
widgetMetaData: apiResponse?.data.result,
graphsVisibilityStates,
panelType,
currentQuery,
}),
axes: getAxes(isDarkMode, yAxisUnit),
};

@ -3,6 +3,7 @@ import { themeColors } from 'constants/theme';
import dayjs from 'dayjs';
import customParseFormat from 'dayjs/plugin/customParseFormat';
import getLabelName from 'lib/getLabelName';
import { get } from 'lodash-es';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

import { placement } from '../placement';
@ -68,7 +69,18 @@ const generateTooltipContent = (
const dataIngested = quantity[idx];
const label = getLabelName(metric, queryName || '', legend || '');

const color = generateColor(label, themeColors.chartcolors);
let color = generateColor(label, themeColors.chartcolors);

// in case of billing graph pick colors from the series options
if (isBillingUsageGraphs) {
let clr;
series.forEach((item) => {
if (item.label === label) {
clr = get(item, '_fill');
}
});
color = clr ?? color;
}

let tooltipItemLabel = label;

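For billing usage graphs the tooltip now reuses the fill colour already attached to the uPlot series instead of re-deriving one from the label. A hedged sketch of that lookup, reading the private `_fill` field via lodash get as the code above does:

```ts
import { get } from 'lodash-es';

// Pick the fill colour of the series matching a label, falling back to a generated colour.
function tooltipColor(
	label: string,
	series: { label?: string }[],
	fallback: string,
): string {
	let fill: string | undefined;
	series.forEach((item) => {
		if (item.label === label) {
			fill = get(item, '_fill'); // uPlot stores the resolved fill here at draw time
		}
	});
	return fill ?? fallback;
}
```
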
@ -3,6 +3,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import getLabelName from 'lib/getLabelName';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';

import { drawStyles, lineInterpolations } from './constants';
@ -31,6 +32,7 @@ const getSeries = ({
widgetMetaData,
graphsVisibilityStates,
panelType,
currentQuery,
}: GetSeriesProps): uPlot.Options['series'] => {
const configurations: uPlot.Series[] = [
{ label: 'Timestamp', stroke: 'purple' },
@ -40,13 +42,15 @@ const getSeries = ({
const newGraphVisibilityStates = graphsVisibilityStates?.slice(1);

for (let i = 0; i < seriesList?.length; i += 1) {
const { metric = {}, queryName = '', legend = '' } = widgetMetaData[i] || {};
const { metric = {}, queryName = '', legend: lgd } = widgetMetaData[i] || {};

const label = getLabelName(
metric,
queryName || '', // query
legend || '',
);
const newLegend =
currentQuery?.builder.queryData.find((item) => item.queryName === queryName)
?.legend || '';

const legend = newLegend || lgd || '';

const label = getLabelName(metric, queryName || '', legend);

const color = generateColor(label, themeColors.chartcolors);

@ -87,6 +91,7 @@ export type GetSeriesProps = {
widgetMetaData: QueryData[];
graphsVisibilityStates?: boolean[];
panelType?: PANEL_TYPES;
currentQuery?: Query;
};

export default getSeries;

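Threading currentQuery into getSeries lets the legend prefer whatever the user typed in the query builder, then the widget metadata, then an empty string. A compact sketch of that resolution order with illustrative data:

```ts
// Resolve the legend for one series: query-builder legend wins, then widget metadata, then ''.
function resolveLegend(
	queryName: string,
	metaLegend: string | undefined,
	builderLegends: { queryName: string; legend?: string }[],
): string {
	const fromBuilder = builderLegends.find((item) => item.queryName === queryName)?.legend;
	return fromBuilder || metaLegend || '';
}

resolveLegend('A', 'p99 latency', [{ queryName: 'A', legend: '{{service_name}}' }]);
// -> '{{service_name}}'
resolveLegend('B', 'p99 latency', []);
// -> 'p99 latency'
```
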
@ -12,6 +12,11 @@ function AllAlertList(): JSX.Element {
children: <TriggeredAlerts />,
},
// {
// label: 'Planned Downtime',
// key: 'Planned Downtime',
// // children: <PlannedDowntime />,
// },
// {
// label: 'Map Alert Channels',
// key = 'Map Alert Channels',
// children: <MapAlertChannels />,

@ -1,7 +1,8 @@
import './IntegrationDetailPage.styles.scss';

import { Button, Tabs, TabsProps, Typography } from 'antd';
import { Drum, Hammer, Table2 } from 'lucide-react';
import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
import { CableCar, Group } from 'lucide-react';
import { IntegrationDetailedProps } from 'types/api/integrations/types';

import Configure from './IntegrationDetailContentTabs/Configure';
@ -24,7 +25,7 @@ function IntegrationDetailContent(
<Button
type="text"
className="integration-tab-btns"
icon={<Drum size={14} />}
icon={<CableCar size={14} />}
>
<Typography.Text className="typography">Overview</Typography.Text>
</Button>
@ -43,7 +44,7 @@ function IntegrationDetailContent(
<Button
type="text"
className="integration-tab-btns"
icon={<Hammer size={14} />}
icon={<ConfigureIcon />}
>
<Typography.Text className="typography">Configure</Typography.Text>
</Button>
@ -56,7 +57,7 @@ function IntegrationDetailContent(
<Button
type="text"
className="integration-tab-btns"
icon={<Table2 size={14} />}
icon={<Group size={14} />}
>
<Typography.Text className="typography">Data Collected</Typography.Text>
</Button>

Some files were not shown because too many files have changed in this diff.