From 7460e650af82606825358fa987be40b533141d3e Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Fri, 10 May 2024 23:23:31 +0530 Subject: [PATCH 01/23] feat(workflow): integrate with workflow identity pool (#4945) * feat(workflows): add wif workflow * feat(workflows): add name of compute instance * feat(workflows): fix permissions * feat(workflows): add an OR true since github runs with -e * ci(testing-deployment): include GITHUB envs * ci(testing-deployment): move GCP information to secrets * ci(staging-deployment): wif workflow --------- Co-authored-by: Prashant Shahi --- .github/workflows/staging-deployment.yaml | 70 +++++++++++++---------- .github/workflows/testing-deployment.yaml | 68 +++++++++++++--------- 2 files changed, 81 insertions(+), 57 deletions(-) diff --git a/.github/workflows/staging-deployment.yaml b/.github/workflows/staging-deployment.yaml index 9b7a5121b2..718eda47db 100644 --- a/.github/workflows/staging-deployment.yaml +++ b/.github/workflows/staging-deployment.yaml @@ -9,34 +9,46 @@ jobs: name: Deploy latest develop branch to staging runs-on: ubuntu-latest environment: staging + permissions: + contents: 'read' + id-token: 'write' steps: - - name: Executing remote ssh commands using ssh key - uses: appleboy/ssh-action@v1.0.3 - env: - GITHUB_BRANCH: develop - GITHUB_SHA: ${{ github.sha }} + - id: 'auth' + uses: 'google-github-actions/auth@v2' with: - host: ${{ secrets.HOST_DNS }} - username: ${{ secrets.USERNAME }} - key: ${{ secrets.SSH_KEY }} - envs: GITHUB_BRANCH,GITHUB_SHA - command_timeout: 60m - script: | - echo "GITHUB_BRANCH: ${GITHUB_BRANCH}" - echo "GITHUB_SHA: ${GITHUB_SHA}" - export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it - export OTELCOL_TAG="main" - export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work - docker system prune --force - docker pull signoz/signoz-otel-collector:main - docker pull signoz/signoz-schema-migrator:main - cd ~/signoz - git status - git add . 
- git stash push -m "stashed on $(date --iso-8601=seconds)" - git fetch origin - git checkout ${GITHUB_BRANCH} - git pull - make build-ee-query-service-amd64 - make build-frontend-amd64 - make run-signoz \ No newline at end of file + workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }} + + - name: 'sdk' + uses: 'google-github-actions/setup-gcloud@v2' + + - name: 'ssh' + shell: bash + env: + GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }} + GITHUB_SHA: ${{ github.sha }} + GCP_PROJECT: ${{ secrets.GCP_PROJECT }} + GCP_ZONE: ${{ secrets.GCP_ZONE }} + GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }} + run: | + read -r -d '' COMMAND < Date: Sun, 12 May 2024 16:45:16 +0530 Subject: [PATCH 02/23] fix: added right padding to the notifications bar to show cancel button (#4969) --- frontend/src/styles.scss | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss index 05f7c400ab..97be4ebd5b 100644 --- a/frontend/src/styles.scss +++ b/frontend/src/styles.scss @@ -242,3 +242,7 @@ body { } } } + +.ant-notification-notice-message { + padding-right: 20px; +} From 9f1c45bc3290abb2a4bbe9d4c377970a5983f2f4 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Tue, 14 May 2024 10:34:43 +0530 Subject: [PATCH 03/23] chore: add toUnixTimestamp to supported functions (#4877) --- pkg/query-service/app/formula.go | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/pkg/query-service/app/formula.go b/pkg/query-service/app/formula.go index 619ae15cb3..8fa6010dfc 100644 --- a/pkg/query-service/app/formula.go +++ b/pkg/query-service/app/formula.go @@ -4,6 +4,7 @@ import ( "fmt" "math" "sort" + "time" "github.com/SigNoz/govaluate" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" @@ -158,7 +159,7 @@ func processResults(results []*v3.Result, expression *govaluate.EvaluableExpress }, nil } -var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"} +var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians", "now", "toUnixTimestamp"} func evalFuncs() map[string]govaluate.ExpressionFunction { GoValuateFuncs := make(map[string]govaluate.ExpressionFunction) @@ -247,5 +248,21 @@ func evalFuncs() map[string]govaluate.ExpressionFunction { GoValuateFuncs["radians"] = func(args ...interface{}) (interface{}, error) { return args[0].(float64) * math.Pi / 180, nil } + + GoValuateFuncs["now"] = func(args ...interface{}) (interface{}, error) { + return time.Now().Unix(), nil + } + + GoValuateFuncs["toUnixTimestamp"] = func(args ...interface{}) (interface{}, error) { + if len(args) != 1 { + return nil, fmt.Errorf("toUnixTimestamp requires exactly one argument") + } + t, err := time.Parse(time.RFC3339, args[0].(string)) + if err != nil { + return nil, err + } + return t.Unix(), nil + } + return GoValuateFuncs } From 30bfad527f2ae301271b8d415a31c55701da4456 Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Tue, 14 May 2024 17:03:29 +0530 Subject: [PATCH 04/23] chore: enable limits for trace queries (#4997) --- pkg/query-service/app/clickhouseReader/wrapper.go | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pkg/query-service/app/clickhouseReader/wrapper.go 
b/pkg/query-service/app/clickhouseReader/wrapper.go index 2f08167534..c21fde0ceb 100644 --- a/pkg/query-service/app/clickhouseReader/wrapper.go +++ b/pkg/query-service/app/clickhouseReader/wrapper.go @@ -4,7 +4,6 @@ import ( "context" "encoding/json" "regexp" - "strings" "github.com/ClickHouse/clickhouse-go/v2" "github.com/ClickHouse/clickhouse-go/v2/lib/driver" @@ -43,12 +42,6 @@ func (c clickhouseConnWrapper) addClickHouseSettings(ctx context.Context, query settings["log_comment"] = logComment } - // don't add resource restrictions traces - if strings.Contains(query, "signoz_traces") { - ctx = clickhouse.Context(ctx, clickhouse.WithSettings(settings)) - return ctx - } - if c.settings.MaxBytesToRead != "" { settings["max_bytes_to_read"] = c.settings.MaxBytesToRead } From 0cbaa17d9f69990cd1f8f6b67d879040380274bc Mon Sep 17 00:00:00 2001 From: Vishal Sharma Date: Tue, 14 May 2024 18:05:59 +0530 Subject: [PATCH 05/23] chore: allow unlimited dashboards and alerts in community version (#4989) * chore: allow unlimited dashboards and alerts in community version * chore: update ee plan --- ee/query-service/model/plans.go | 4 ++-- pkg/query-service/model/featureSet.go | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ee/query-service/model/plans.go b/ee/query-service/model/plans.go index 09a88bbf9f..41bca047d5 100644 --- a/ee/query-service/model/plans.go +++ b/ee/query-service/model/plans.go @@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{ Name: basemodel.QueryBuilderPanels, Active: true, Usage: 0, - UsageLimit: 20, + UsageLimit: -1, Route: "", }, basemodel.Feature{ Name: basemodel.QueryBuilderAlerts, Active: true, Usage: 0, - UsageLimit: 10, + UsageLimit: -1, Route: "", }, basemodel.Feature{ diff --git a/pkg/query-service/model/featureSet.go b/pkg/query-service/model/featureSet.go index 2d0f4a55be..0e7a1c0278 100644 --- a/pkg/query-service/model/featureSet.go +++ b/pkg/query-service/model/featureSet.go @@ -56,14 +56,14 @@ var BasicPlan = FeatureSet{ Name: QueryBuilderPanels, Active: true, Usage: 0, - UsageLimit: 20, + UsageLimit: -1, Route: "", }, Feature{ Name: QueryBuilderAlerts, Active: true, Usage: 0, - UsageLimit: 10, + UsageLimit: -1, Route: "", }, Feature{ From 3efd9801a1f94460f468ce850cd63cf4cb6f25df Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com> Date: Wed, 15 May 2024 14:36:52 +0530 Subject: [PATCH 06/23] Chore: restrict logs connection test for integrations to use log attributes for identifying logs (#4977) * chore: change logs connection test spec to be based on an attrib value * chore: disallow unknown fields while unmarshalling JSON for an integration * chore: add description field to collected metric spec * chore: update logs connection test for builtin integrations * chore: update logic for calculating logs connection status --- pkg/query-service/app/http_handler.go | 19 +++++++++-- pkg/query-service/app/integrations/builtin.go | 5 ++- .../clickhouse/integration.json | 14 ++------ .../mongo/integration.json | 14 ++------ .../nginx/integration.json | 14 ++------ .../postgres/integration.json | 14 ++------ .../redis/integration.json | 14 ++------ pkg/query-service/app/integrations/manager.go | 17 ++++++---- .../app/integrations/test_utils.go | 32 ++++--------------- 9 files changed, 48 insertions(+), 95 deletions(-) diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index c7adc95416..7f8916fa9f 100644 --- a/pkg/query-service/app/http_handler.go +++ 
b/pkg/query-service/app/http_handler.go @@ -2347,13 +2347,28 @@ func (ah *APIHandler) calculateConnectionStatus( func (ah *APIHandler) calculateLogsConnectionStatus( ctx context.Context, - logsConnectionTest *v3.FilterSet, + logsConnectionTest *integrations.LogsConnectionTest, lookbackSeconds int64, ) (*integrations.SignalConnectionStatus, *model.ApiError) { if logsConnectionTest == nil { return nil, nil } + logsConnTestFilter := &v3.FilterSet{ + Operator: "AND", + Items: []v3.FilterItem{ + { + Key: v3.AttributeKey{ + Key: logsConnectionTest.AttributeKey, + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + }, + Operator: "=", + Value: logsConnectionTest.AttributeValue, + }, + }, + } + qrParams := &v3.QueryRangeParamsV3{ Start: time.Now().UnixMilli() - (lookbackSeconds * 1000), End: time.Now().UnixMilli(), @@ -2363,7 +2378,7 @@ func (ah *APIHandler) calculateLogsConnectionStatus( BuilderQueries: map[string]*v3.BuilderQuery{ "A": { PageSize: 1, - Filters: logsConnectionTest, + Filters: logsConnTestFilter, QueryName: "A", DataSource: v3.DataSourceLogs, Expression: "A", diff --git a/pkg/query-service/app/integrations/builtin.go b/pkg/query-service/app/integrations/builtin.go index cf98b3ff9d..00810cacc1 100644 --- a/pkg/query-service/app/integrations/builtin.go +++ b/pkg/query-service/app/integrations/builtin.go @@ -1,6 +1,7 @@ package integrations import ( + "bytes" "context" "embed" "strings" @@ -120,7 +121,9 @@ func readBuiltInIntegration(dirpath string) ( } var integration IntegrationDetails - err = json.Unmarshal(hydratedSpecJson, &integration) + decoder := json.NewDecoder(bytes.NewReader(hydratedSpecJson)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&integration) if err != nil { return nil, fmt.Errorf( "couldn't parse hydrated JSON spec read from %s: %w", diff --git a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/integration.json b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/integration.json index 99bf16c72d..3135ce402e 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/integration.json @@ -41,18 +41,8 @@ }, "connection_tests": { "logs": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "clickhouse" - } - ] + "attribute_key": "source", + "attribute_value": "clickhouse" } }, "data_collected": "file://data-collected.json" diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json b/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json index b9543e0757..d5e24eb4fc 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json @@ -37,18 +37,8 @@ }, "connection_tests": { "logs": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "mongo" - } - ] + "attribute_key": "source", + "attribute_value": "mongodb" } }, "data_collected": { diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json index 16f03bbed3..7789a5ae90 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json +++ 
b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json @@ -32,18 +32,8 @@ }, "connection_tests": { "logs": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "nginx" - } - ] + "attribute_key": "source", + "attribute_value": "nginx" } }, "data_collected": { diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json index 823ba61223..2040a5c946 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json @@ -37,18 +37,8 @@ }, "connection_tests": { "logs": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "postgres" - } - ] + "attribute_key": "source", + "attribute_value": "postgres" } }, "data_collected": { diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json index e3f5ef2e3c..c0da666181 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json @@ -37,18 +37,8 @@ }, "connection_tests": { "logs": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "redis" - } - ] + "attribute_key": "source", + "attribute_value": "redis" } }, "data_collected": { diff --git a/pkg/query-service/app/integrations/manager.go b/pkg/query-service/app/integrations/manager.go index c3ebd21cc2..56a57ee026 100644 --- a/pkg/query-service/app/integrations/manager.go +++ b/pkg/query-service/app/integrations/manager.go @@ -12,7 +12,6 @@ import ( "go.signoz.io/signoz/pkg/query-service/app/dashboards" "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" "go.signoz.io/signoz/pkg/query-service/model" - v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/rules" "go.signoz.io/signoz/pkg/query-service/utils" ) @@ -60,9 +59,10 @@ type CollectedLogAttribute struct { } type CollectedMetric struct { - Name string `json:"name"` - Type string `json:"type"` - Unit string `json:"unit"` + Name string `json:"name"` + Type string `json:"type"` + Unit string `json:"unit"` + Description string `json:"description"` } type SignalConnectionStatus struct { @@ -75,9 +75,14 @@ type IntegrationConnectionStatus struct { Metrics *SignalConnectionStatus `json:"metrics"` } +// log attribute value to use for finding logs for the integration. +type LogsConnectionTest struct { + AttributeKey string `json:"attribute_key"` + AttributeValue string `json:"attribute_value"` +} + type IntegrationConnectionTests struct { - // Filter to use for finding logs for the integration. - Logs *v3.FilterSet `json:"logs"` + Logs *LogsConnectionTest `json:"logs"` // Metric names expected to have been received for the integration. 
Metrics []string `json:"metrics"` diff --git a/pkg/query-service/app/integrations/test_utils.go b/pkg/query-service/app/integrations/test_utils.go index 1ff964b3e6..adb667b96e 100644 --- a/pkg/query-service/app/integrations/test_utils.go +++ b/pkg/query-service/app/integrations/test_utils.go @@ -96,19 +96,9 @@ func (t *TestAvailableIntegrationsRepo) list( Alerts: []rules.PostableRule{}, }, ConnectionTests: &IntegrationConnectionTests{ - Logs: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "source", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "nginx", - }, - }, + Logs: &LogsConnectionTest{ + AttributeKey: "source", + AttributeValue: "nginx", }, }, }, { @@ -174,19 +164,9 @@ func (t *TestAvailableIntegrationsRepo) list( Alerts: []rules.PostableRule{}, }, ConnectionTests: &IntegrationConnectionTests{ - Logs: &v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - { - Key: v3.AttributeKey{ - Key: "source", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - }, - Operator: "=", - Value: "nginx", - }, - }, + Logs: &LogsConnectionTest{ + AttributeKey: "source", + AttributeValue: "nginx", }, }, }, From 7e31b4ca01b2bbca2c4e82906182945ac4722b7d Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Wed, 15 May 2024 18:52:01 +0530 Subject: [PATCH 07/23] fix: several issues (#5001) --- .../container/CreateAlertChannels/index.tsx | 4 +- .../app/clickhouseReader/reader.go | 10 +- pkg/query-service/app/http_handler.go | 22 +- .../app/metrics/v3/cumulative_table.go | 27 --- .../app/metrics/v3/cumulative_table_test.go | 8 + pkg/query-service/app/metrics/v3/delta.go | 41 +--- .../app/metrics/v3/query_builder.go | 27 --- .../app/metrics/v4/query_builder.go | 3 + pkg/query-service/app/parser.go | 83 ++++++- pkg/query-service/app/parser_test.go | 225 +++++++++++++++++- pkg/query-service/app/querier/querier.go | 36 +-- pkg/query-service/app/querier/v2/querier.go | 36 +-- .../app/queryBuilder/query_builder_test.go | 1 + pkg/query-service/errors/clickhouse.go | 42 ++++ pkg/query-service/interfaces/interface.go | 2 +- pkg/query-service/model/v3/v3.go | 33 ++- pkg/query-service/utils/format.go | 4 +- pkg/query-service/utils/queryTemplate/vars.go | 18 +- 18 files changed, 469 insertions(+), 153 deletions(-) create mode 100644 pkg/query-service/errors/clickhouse.go diff --git a/frontend/src/container/CreateAlertChannels/index.tsx b/frontend/src/container/CreateAlertChannels/index.tsx index c0eec3ecdd..d10b6fb225 100644 --- a/frontend/src/container/CreateAlertChannels/index.tsx +++ b/frontend/src/container/CreateAlertChannels/index.tsx @@ -59,8 +59,8 @@ function CreateAlertChannels({ *Summary:* {{ .Annotations.summary }} *Description:* {{ .Annotations.description }} - *RelatedLogs:* {{ .Annotations.related_logs }} - *RelatedTraces:* {{ .Annotations.related_traces }} + *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}} + *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}} *Details:* {{ range .Labels.SortedPairs }} • *{{ .Name }}:* {{ .Value }} diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index efad720f74..fcc2efeb15 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -51,6 +51,7 @@ import ( 
"go.signoz.io/signoz/pkg/query-service/common" "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/dao" + chErrors "go.signoz.io/signoz/pkg/query-service/errors" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" "go.signoz.io/signoz/pkg/query-service/interfaces" "go.signoz.io/signoz/pkg/query-service/model" @@ -4570,6 +4571,11 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam return nil, err } groupBy, groupAttributes, groupAttributesArray, metricPoint := readRow(vars, columnNames) + // skip the point if the value is NaN or Inf + // are they ever useful enough to be returned? + if math.IsNaN(metricPoint.Value) || math.IsInf(metricPoint.Value, 0) { + continue + } sort.Strings(groupBy) key := strings.Join(groupBy, "") if _, exists := seriesToAttrs[key]; !exists { @@ -4700,11 +4706,11 @@ func getPersonalisedError(err error) error { } zap.L().Error("error while reading result", zap.Error(err)) if strings.Contains(err.Error(), "code: 307") { - return errors.New("query is consuming too much resources, please reach out to the team") + return chErrors.ErrResourceBytesLimitExceeded } if strings.Contains(err.Error(), "code: 159") { - return errors.New("Query is taking too long to run, please reach out to the team") + return chErrors.ErrResourceTimeLimitExceeded } return err } diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 7f8916fa9f..3a9ea9d420 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -2907,7 +2907,7 @@ func (aH *APIHandler) autoCompleteAttributeValues(w http.ResponseWriter, r *http aH.Respond(w, response) } -func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]string) { +func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]error) { type channelResult struct { Series []*v3.Series Err error @@ -2937,13 +2937,13 @@ func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries ma close(ch) var errs []error - errQuriesByName := make(map[string]string) + errQuriesByName := make(map[string]error) res := make([]*v3.Result, 0) // read values from the channel for r := range ch { if r.Err != nil { errs = append(errs, r.Err) - errQuriesByName[r.Name] = r.Query + errQuriesByName[r.Name] = r.Err continue } res = append(res, &v3.Result{ @@ -2957,7 +2957,7 @@ func (aH *APIHandler) execClickHouseGraphQueries(ctx context.Context, queries ma return res, nil, nil } -func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]string) { +func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map[string]string) ([]*v3.Result, error, map[string]error) { type channelResult struct { List []*v3.Row Err error @@ -2986,13 +2986,13 @@ func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map close(ch) var errs []error - errQuriesByName := make(map[string]string) + errQuriesByName := make(map[string]error) res := make([]*v3.Result, 0) // read values from the channel for r := range ch { if r.Err != nil { errs = append(errs, r.Err) - errQuriesByName[r.Name] = r.Query + errQuriesByName[r.Name] = r.Err continue } res = append(res, &v3.Result{ @@ -3006,7 +3006,7 @@ func (aH *APIHandler) execClickHouseListQueries(ctx context.Context, queries map return res, nil, 
nil } -func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangeParams *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) { +func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangeParams *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) { type channelResult struct { Series []*v3.Series Err error @@ -3066,13 +3066,13 @@ func (aH *APIHandler) execPromQueries(ctx context.Context, metricsQueryRangePara close(ch) var errs []error - errQuriesByName := make(map[string]string) + errQuriesByName := make(map[string]error) res := make([]*v3.Result, 0) // read values from the channel for r := range ch { if r.Err != nil { errs = append(errs, r.Err) - errQuriesByName[r.Name] = r.Query + errQuriesByName[r.Name] = r.Err continue } res = append(res, &v3.Result{ @@ -3170,7 +3170,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que var result []*v3.Result var err error - var errQuriesByName map[string]string + var errQuriesByName map[string]error var spanKeys map[string]v3.AttributeKey if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { // check if any enrichment is required for logs if yes then enrich them @@ -3427,7 +3427,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que var result []*v3.Result var err error - var errQuriesByName map[string]string + var errQuriesByName map[string]error var spanKeys map[string]v3.AttributeKey if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { // check if any enrichment is required for logs if yes then enrich them diff --git a/pkg/query-service/app/metrics/v3/cumulative_table.go b/pkg/query-service/app/metrics/v3/cumulative_table.go index 157216bb23..db9c909abf 100644 --- a/pkg/query-service/app/metrics/v3/cumulative_table.go +++ b/pkg/query-service/app/metrics/v3/cumulative_table.go @@ -36,33 +36,6 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam metricQueryGroupBy := mq.GroupBy - // if the aggregate operator is a histogram quantile, and user has not forgotten - // the le tag in the group by then add the le tag to the group by - if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant75 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant90 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant95 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant99 { - found := false - for _, tag := range mq.GroupBy { - if tag.Key == "le" { - found = true - break - } - } - if !found { - metricQueryGroupBy = append( - metricQueryGroupBy, - v3.AttributeKey{ - Key: "le", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - IsColumn: false, - }, - ) - } - } - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) if err != nil { return "", err diff --git a/pkg/query-service/app/metrics/v3/cumulative_table_test.go b/pkg/query-service/app/metrics/v3/cumulative_table_test.go index 2da3421da2..a6b489e5ec 100644 --- a/pkg/query-service/app/metrics/v3/cumulative_table_test.go +++ b/pkg/query-service/app/metrics/v3/cumulative_table_test.go @@ -60,6 +60,11 @@ func TestPanelTableForCumulative(t *testing.T) { }, }, Expression: "A", + GroupBy: []v3.AttributeKey{ + { + Key: "le", + }, + }, }, expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, 
toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", }, @@ -77,6 +82,9 @@ func TestPanelTableForCumulative(t *testing.T) { { Key: "service_name", }, + { + Key: "le", + }, }, Expression: "A", }, diff --git a/pkg/query-service/app/metrics/v3/delta.go b/pkg/query-service/app/metrics/v3/delta.go index c2fa38507a..f82e086bea 100644 --- a/pkg/query-service/app/metrics/v3/delta.go +++ b/pkg/query-service/app/metrics/v3/delta.go @@ -12,39 +12,22 @@ func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableNam metricQueryGroupBy := mq.GroupBy - // if the aggregate operator is a histogram quantile, and user has not forgotten - // the le tag in the group by then add the le tag to the group by - if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant75 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant90 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant95 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant99 { - found := false - for _, tag := range mq.GroupBy { - if tag.Key == "le" { - found = true + if mq.Filters != nil { + temporalityFound := false + for _, filter := range mq.Filters.Items { + if filter.Key.Key == "__temporality__" { + temporalityFound = true break } } - if !found { - metricQueryGroupBy = append( - metricQueryGroupBy, - v3.AttributeKey{ - Key: "le", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - IsColumn: false, - }, - ) - } - } - if mq.Filters != nil { - mq.Filters.Items = append(mq.Filters.Items, v3.FilterItem{ - Key: v3.AttributeKey{Key: "__temporality__"}, - Operator: v3.FilterOperatorEqual, - Value: "Delta", - }) + if !temporalityFound { + mq.Filters.Items = append(mq.Filters.Items, v3.FilterItem{ + Key: v3.AttributeKey{Key: "__temporality__"}, + Operator: v3.FilterOperatorEqual, + Value: "Delta", + }) + } } filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) diff --git a/pkg/query-service/app/metrics/v3/query_builder.go b/pkg/query-service/app/metrics/v3/query_builder.go index 01b860c968..1e0a49198b 100644 --- a/pkg/query-service/app/metrics/v3/query_builder.go +++ b/pkg/query-service/app/metrics/v3/query_builder.go @@ -141,33 +141,6 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str metricQueryGroupBy := mq.GroupBy - // if the aggregate operator is a histogram 
quantile, and user has not forgotten - // the le tag in the group by then add the le tag to the group by - if mq.AggregateOperator == v3.AggregateOperatorHistQuant50 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant75 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant90 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant95 || - mq.AggregateOperator == v3.AggregateOperatorHistQuant99 { - found := false - for _, tag := range mq.GroupBy { - if tag.Key == "le" { - found = true - break - } - } - if !found { - metricQueryGroupBy = append( - metricQueryGroupBy, - v3.AttributeKey{ - Key: "le", - DataType: v3.AttributeKeyDataTypeString, - Type: v3.AttributeKeyTypeTag, - IsColumn: false, - }, - ) - } - } - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) if err != nil { return "", err diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go index c58c98c93f..bd8813dd3a 100644 --- a/pkg/query-service/app/metrics/v4/query_builder.go +++ b/pkg/query-service/app/metrics/v4/query_builder.go @@ -23,6 +23,8 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P var quantile float64 + percentileOperator := mq.SpaceAggregation + if v3.IsPercentileOperator(mq.SpaceAggregation) && mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) { quantile = v3.GetPercentileFromOperator(mq.SpaceAggregation) @@ -80,6 +82,7 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P // fixed-bucket histogram quantiles are calculated with UDF if quantile != 0 && mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) { query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, quantile, query, groupBy, orderBy) + mq.SpaceAggregation = percentileOperator } return query, nil diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go index f7f622e250..2a70f96250 100644 --- a/pkg/query-service/app/parser.go +++ b/pkg/query-service/app/parser.go @@ -1005,6 +1005,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { for _, query := range queryRangeParams.CompositeQuery.BuilderQueries { // Formula query + // Check if the queries used in the expression can be joined if query.QueryName != query.Expression { expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, evalFuncs()) if err != nil { @@ -1039,10 +1040,12 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE } } + // If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval if minStep := common.MinAllowedStepInterval(queryRangeParams.Start, queryRangeParams.End); query.StepInterval < minStep { query.StepInterval = minStep } + // Remove the time shift function from the list of functions and set the shift by value var timeShiftBy int64 if len(query.Functions) > 0 { for idx := range query.Functions { @@ -1065,13 +1068,14 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if query.Filters == nil || len(query.Filters.Items) == 0 { continue } + for idx := range query.Filters.Items { item := &query.Filters.Items[idx] value := item.Value if value != nil { switch x := 
value.(type) { case string: - variableName := strings.Trim(x, "{{ . }}") + variableName := strings.Trim(x, "{[.$]}") if _, ok := queryRangeParams.Variables[variableName]; ok { item.Value = queryRangeParams.Variables[variableName] } @@ -1079,7 +1083,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if len(x) > 0 { switch x[0].(type) { case string: - variableName := strings.Trim(x[0].(string), "{{ . }}") + variableName := strings.Trim(x[0].(string), "{[.$]}") if _, ok := queryRangeParams.Variables[variableName]; ok { item.Value = queryRangeParams.Variables[variableName] } @@ -1087,6 +1091,67 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE } } } + + if item.Operator != v3.FilterOperatorIn && item.Operator != v3.FilterOperatorNotIn { + // the value type should not be multiple values + if _, ok := item.Value.([]interface{}); ok { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("multiple values %s are not allowed for operator `%s` for key `%s`", item.Value, item.Operator, item.Key.Key)} + } + } + } + + // for metrics v3 + + // if the aggregate operator is a histogram quantile, and user has not forgotten + // the le tag in the group by then add the le tag to the group by + if query.AggregateOperator == v3.AggregateOperatorHistQuant50 || + query.AggregateOperator == v3.AggregateOperatorHistQuant75 || + query.AggregateOperator == v3.AggregateOperatorHistQuant90 || + query.AggregateOperator == v3.AggregateOperatorHistQuant95 || + query.AggregateOperator == v3.AggregateOperatorHistQuant99 { + found := false + for _, tag := range query.GroupBy { + if tag.Key == "le" { + found = true + break + } + } + if !found { + query.GroupBy = append( + query.GroupBy, + v3.AttributeKey{ + Key: "le", + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + IsColumn: false, + }, + ) + } + } + + // for metrics v4 + if v3.IsPercentileOperator(query.SpaceAggregation) && + query.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) { + // If quantile is set, we need to group by le + // and set the space aggregation to sum + // and time aggregation to rate + query.TimeAggregation = v3.TimeAggregationRate + query.SpaceAggregation = v3.SpaceAggregationSum + // If le is not present in group by for quantile, add it + leFound := false + for _, groupBy := range query.GroupBy { + if groupBy.Key == "le" { + leFound = true + break + } + } + if !leFound { + query.GroupBy = append(query.GroupBy, v3.AttributeKey{ + Key: "le", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + }) + } } } } @@ -1104,6 +1169,13 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if chQuery.Disabled { continue } + + for name, value := range queryRangeParams.Variables { + chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1) + chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1) + chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("$%s", name), fmt.Sprint(value), -1) + } + tmpl := template.New("clickhouse-query") tmpl, err := tmpl.Parse(chQuery.Query) if err != nil { @@ -1128,6 +1200,13 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if promQuery.Disabled { continue } + + for name, value := range queryRangeParams.Variables { + promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), 
-1) + promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1) + promQuery.Query = strings.Replace(promQuery.Query, fmt.Sprintf("$%s", name), fmt.Sprint(value), -1) + } + tmpl := template.New("prometheus-query") tmpl, err := tmpl.Parse(promQuery.Query) if err != nil { diff --git a/pkg/query-service/app/parser_test.go b/pkg/query-service/app/parser_test.go index be113d08b3..9d58a190f7 100644 --- a/pkg/query-service/app/parser_test.go +++ b/pkg/query-service/app/parser_test.go @@ -652,12 +652,12 @@ func TestParseQueryRangeParamsDashboardVarsSubstitution(t *testing.T) { Items: []v3.FilterItem{ { Key: v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, - Operator: "EQ", + Operator: v3.FilterOperatorEqual, Value: "{{.service_name}}", }, { Key: v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, - Operator: "IN", + Operator: v3.FilterOperatorIn, Value: "{{.operation_name}}", }, }, @@ -675,6 +675,161 @@ func TestParseQueryRangeParamsDashboardVarsSubstitution(t *testing.T) { expectErr: false, expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}}, }, + { + desc: "valid builder query with dashboard variables {{service_name}} and {{operation_name}}", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"}, + Expression: "A", + Filters: &v3.FilterSet{ + Operator: "AND", + Items: []v3.FilterItem{ + { + Key: v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorEqual, + Value: "{{service_name}}", + }, + { + Key: v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorIn, + Value: "{{operation_name}}", + }, + }, + }, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "operation_name": []interface{}{ + "GET /route", + "POST /route", + }, + }, + expectErr: false, + expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}}, + }, + { + desc: "valid builder query with dashboard variables [[service_name]] and [[operation_name]]", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"}, + Expression: "A", + Filters: &v3.FilterSet{ + Operator: "AND", + Items: []v3.FilterItem{ + { + Key: v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorEqual, + Value: "[[service_name]]", + }, + { + Key: v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorIn, + Value: "[[operation_name]]", + }, + }, + }, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "operation_name": []interface{}{ + "GET /route", + "POST /route", + }, + }, + expectErr: false, + expectedValue: []interface{}{"route", 
[]interface{}{"GET /route", "POST /route"}}, + }, + { + desc: "valid builder query with dashboard variables $service_name and $operation_name", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"}, + Expression: "A", + Filters: &v3.FilterSet{ + Operator: "AND", + Items: []v3.FilterItem{ + { + Key: v3.AttributeKey{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorEqual, + Value: "$service_name", + }, + { + Key: v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorIn, + Value: "$operation_name", + }, + }, + }, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "operation_name": []interface{}{ + "GET /route", + "POST /route", + }, + }, + expectErr: false, + expectedValue: []interface{}{"route", []interface{}{"GET /route", "POST /route"}}, + }, + { + desc: "multiple values for single select operator", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "attribute_metrics"}, + Expression: "A", + Filters: &v3.FilterSet{ + Operator: "AND", + Items: []v3.FilterItem{ + { + Key: v3.AttributeKey{Key: "operation_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + Operator: v3.FilterOperatorEqual, + Value: "{{.operation_name}}", + }, + }, + }, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "operation_name": []interface{}{ + "GET /route", + "POST /route", + }, + }, + expectErr: true, + errMsg: "multiple values [GET /route POST /route] are not allowed for operator `=` for key `operation_name`", + }, } for _, tc := range reqCases { @@ -759,6 +914,72 @@ func TestParseQueryRangeParamsPromQLVars(t *testing.T) { expectErr: false, expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}", }, + { + desc: "valid prom query with dashboard variables {{service_name}} and {{status_code}}", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypePromQL, + PromQueries: map[string]*v3.PromQuery{ + "A": { + Query: "http_calls_total{service_name=\"{{service_name}}\", status_code=~\"{{status_code}}\"}", + Disabled: false, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "status_code": []interface{}{ + 200, + 505, + }, + }, + expectErr: false, + expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}", + }, + { + desc: "valid prom query with dashboard variables [[service_name]] and [[status_code]]", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypePromQL, + PromQueries: map[string]*v3.PromQuery{ + "A": { + Query: "http_calls_total{service_name=\"[[service_name]]\", status_code=~\"[[status_code]]\"}", + Disabled: false, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "status_code": []interface{}{ + 200, + 505, + }, + }, + expectErr: false, + expectedQuery: 
"http_calls_total{service_name=\"route\", status_code=~\"200|505\"}", + }, + { + desc: "valid prom query with dashboard variables $service_name and $status_code", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypePromQL, + PromQueries: map[string]*v3.PromQuery{ + "A": { + Query: "http_calls_total{service_name=\"$service_name\", status_code=~\"$status_code\"}", + Disabled: false, + }, + }, + }, + variables: map[string]interface{}{ + "service_name": "route", + "status_code": []interface{}{ + 200, + 505, + }, + }, + expectErr: false, + expectedQuery: "http_calls_total{service_name=\"route\", status_code=~\"200|505\"}", + }, { desc: "valid prom query with dashboard variables", compositeQuery: v3.CompositeQuery{ diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go index 84677480b2..1f68879d0c 100644 --- a/pkg/query-service/app/querier/querier.go +++ b/pkg/query-service/app/querier/querier.go @@ -14,6 +14,7 @@ import ( metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3" "go.signoz.io/signoz/pkg/query-service/app/queryBuilder" tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3" + chErrors "go.signoz.io/signoz/pkg/query-service/errors" "go.signoz.io/signoz/pkg/query-service/cache" "go.signoz.io/signoz/pkg/query-service/interfaces" @@ -283,7 +284,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series { return mergedSeries } -func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { cacheKeys := q.keyGenerator.GenerateKeys(params) @@ -306,13 +307,13 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa close(ch) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range ch { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -329,7 +330,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa return results, err, errQueriesByName } -func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) { +func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) { channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries)) var wg sync.WaitGroup cacheKeys := q.keyGenerator.GenerateKeys(params) @@ -390,13 +391,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam close(channelResults) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range channelResults { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -413,7 +414,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam return results, err, errQueriesByName } -func (q *querier) 
runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) { +func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) { channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries)) var wg sync.WaitGroup for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries { @@ -431,13 +432,13 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang close(channelResults) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range channelResults { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -453,7 +454,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang return results, err, errQueriesByName } -func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { queries, err := q.builder.PrepareQueries(params, keys) @@ -482,13 +483,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan close(ch) var errs []error - errQuriesByName := make(map[string]string) + errQuriesByName := make(map[string]error) res := make([]*v3.Result, 0) // read values from the channel for r := range ch { if r.Err != nil { errs = append(errs, r.Err) - errQuriesByName[r.Name] = r.Query + errQuriesByName[r.Name] = r.Err continue } res = append(res, &v3.Result{ @@ -502,10 +503,10 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan return res, nil, nil } -func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { var results []*v3.Result var err error - var errQueriesByName map[string]string + var errQueriesByName map[string]error if params.CompositeQuery != nil { switch params.CompositeQuery.QueryType { case v3.QueryTypeBuilder: @@ -514,6 +515,13 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, } else { results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys) } + // in builder query, the only errors we expose are the ones that exceed the resource limits + // everything else is internal error as they are not actionable by the user + for name, err := range errQueriesByName { + if !chErrors.IsResourceLimitError(err) { + delete(errQueriesByName, name) + } + } case v3.QueryTypePromQL: results, err, errQueriesByName = q.runPromQueries(ctx, params) case v3.QueryTypeClickHouseSQL: diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go index 359546ea03..b3bf2c66f8 100644 --- a/pkg/query-service/app/querier/v2/querier.go +++ b/pkg/query-service/app/querier/v2/querier.go @@ -14,6 +14,7 @@ import ( metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4" 
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder" tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3" + chErrors "go.signoz.io/signoz/pkg/query-service/errors" "go.signoz.io/signoz/pkg/query-service/cache" "go.signoz.io/signoz/pkg/query-service/interfaces" @@ -281,7 +282,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series { return mergedSeries } -func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { cacheKeys := q.keyGenerator.GenerateKeys(params) @@ -299,13 +300,13 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa close(ch) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range ch { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -322,7 +323,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa return results, err, errQueriesByName } -func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) { +func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) { channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries)) var wg sync.WaitGroup cacheKeys := q.keyGenerator.GenerateKeys(params) @@ -383,13 +384,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam close(channelResults) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range channelResults { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -406,7 +407,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam return results, err, errQueriesByName } -func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) { +func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]error) { channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries)) var wg sync.WaitGroup for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries { @@ -424,13 +425,13 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang close(channelResults) results := make([]*v3.Result, 0) - errQueriesByName := make(map[string]string) + errQueriesByName := make(map[string]error) var errs []error for result := range channelResults { if result.Err != nil { errs = append(errs, result.Err) - errQueriesByName[result.Name] = result.Err.Error() + errQueriesByName[result.Name] = result.Err continue } results = append(results, &v3.Result{ @@ -446,7 +447,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang return results, err, errQueriesByName 
} -func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { queries, err := q.builder.PrepareQueries(params, keys) @@ -475,13 +476,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan close(ch) var errs []error - errQuriesByName := make(map[string]string) + errQuriesByName := make(map[string]error) res := make([]*v3.Result, 0) // read values from the channel for r := range ch { if r.Err != nil { errs = append(errs, r.Err) - errQuriesByName[r.Name] = r.Query + errQuriesByName[r.Name] = r.Err continue } res = append(res, &v3.Result{ @@ -495,10 +496,10 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan return res, nil, nil } -func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) { +func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) { var results []*v3.Result var err error - var errQueriesByName map[string]string + var errQueriesByName map[string]error if params.CompositeQuery != nil { switch params.CompositeQuery.QueryType { case v3.QueryTypeBuilder: @@ -507,6 +508,13 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, } else { results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys) } + // in builder query, the only errors we expose are the ones that exceed the resource limits + // everything else is internal error as they are not actionable by the user + for name, err := range errQueriesByName { + if !chErrors.IsResourceLimitError(err) { + delete(errQueriesByName, name) + } + } case v3.QueryTypePromQL: results, err, errQueriesByName = q.runPromQueries(ctx, params) case v3.QueryTypeClickHouseSQL: diff --git a/pkg/query-service/app/queryBuilder/query_builder_test.go b/pkg/query-service/app/queryBuilder/query_builder_test.go index 65fe21e1d7..4291bf2407 100644 --- a/pkg/query-service/app/queryBuilder/query_builder_test.go +++ b/pkg/query-service/app/queryBuilder/query_builder_test.go @@ -348,6 +348,7 @@ func TestDeltaQueryBuilder(t *testing.T) { Temporality: v3.Delta, GroupBy: []v3.AttributeKey{ {Key: "service_name"}, + {Key: "le"}, }, }, }, diff --git a/pkg/query-service/errors/clickhouse.go b/pkg/query-service/errors/clickhouse.go new file mode 100644 index 0000000000..573f1fae8c --- /dev/null +++ b/pkg/query-service/errors/clickhouse.go @@ -0,0 +1,42 @@ +package errors + +import "errors" + +var ( + // ErrResourceBytesLimitExceeded is returned when the resource bytes limit is exceeded + ErrResourceBytesLimitExceeded = NewResourceLimitError(errors.New("resource bytes limit exceeded, try applying filters such as service.name, etc. to reduce the data size")) + // ErrResourceTimeLimitExceeded is returned when the resource time limit is exceeded + ErrResourceTimeLimitExceeded = NewResourceLimitError(errors.New("resource time limit exceeded, try applying filters such as service.name, etc. 
to reduce the data size")) +) + +type ResourceLimitError struct { + err error +} + +func NewResourceLimitError(err error) error { + return &ResourceLimitError{err: err} +} + +func (e *ResourceLimitError) Error() string { + return e.err.Error() +} + +func (e *ResourceLimitError) Unwrap() error { + return e.err +} + +func IsResourceLimitError(err error) bool { + if err == nil { + return false + } + var target *ResourceLimitError + return errors.As(err, &target) +} + +func (e *ResourceLimitError) MarshalJSON() ([]byte, error) { + return []byte(`"` + e.Error() + `"`), nil +} + +func (e *ResourceLimitError) UnmarshalJSON([]byte) error { + return nil +} diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go index 814f7b9071..4c89f6f793 100644 --- a/pkg/query-service/interfaces/interface.go +++ b/pkg/query-service/interfaces/interface.go @@ -107,7 +107,7 @@ type Reader interface { } type Querier interface { - QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) + QueryRange(context.Context, *v3.QueryRangeParamsV3, map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]error) // test helpers QueriesExecuted() []string diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go index c979a94786..d1f55d7bb0 100644 --- a/pkg/query-service/model/v3/v3.go +++ b/pkg/query-service/model/v3/v3.go @@ -433,24 +433,30 @@ func (c *CompositeQuery) Validate() error { } if c.BuilderQueries == nil && c.ClickHouseQueries == nil && c.PromQueries == nil { - return fmt.Errorf("composite query must contain at least one query") + return fmt.Errorf("composite query must contain at least one query type") } - for name, query := range c.BuilderQueries { - if err := query.Validate(); err != nil { - return fmt.Errorf("builder query %s is invalid: %w", name, err) + if c.QueryType == QueryTypeBuilder { + for name, query := range c.BuilderQueries { + if err := query.Validate(c.PanelType); err != nil { + return fmt.Errorf("builder query %s is invalid: %w", name, err) + } } } - for name, query := range c.ClickHouseQueries { - if err := query.Validate(); err != nil { - return fmt.Errorf("clickhouse query %s is invalid: %w", name, err) + if c.QueryType == QueryTypeClickHouseSQL { + for name, query := range c.ClickHouseQueries { + if err := query.Validate(); err != nil { + return fmt.Errorf("clickhouse query %s is invalid: %w", name, err) + } } } - for name, query := range c.PromQueries { - if err := query.Validate(); err != nil { - return fmt.Errorf("prom query %s is invalid: %w", name, err) + if c.QueryType == QueryTypePromQL { + for name, query := range c.PromQueries { + if err := query.Validate(); err != nil { + return fmt.Errorf("prom query %s is invalid: %w", name, err) + } } } @@ -663,10 +669,11 @@ type BuilderQuery struct { ShiftBy int64 } -func (b *BuilderQuery) Validate() error { +func (b *BuilderQuery) Validate(panelType PanelType) error { if b == nil { return nil } + if b.QueryName == "" { return fmt.Errorf("query name is required") } @@ -711,6 +718,10 @@ func (b *BuilderQuery) Validate() error { } } if b.GroupBy != nil { + if len(b.GroupBy) > 0 && panelType == PanelTypeList { + return fmt.Errorf("group by is not supported for list panel type") + } + for _, groupBy := range b.GroupBy { if err := groupBy.Validate(); err != nil { return fmt.Errorf("group by is invalid %w", err) diff --git a/pkg/query-service/utils/format.go b/pkg/query-service/utils/format.go index 
aa9fc59720..0adaebff4a 100644 --- a/pkg/query-service/utils/format.go +++ b/pkg/query-service/utils/format.go @@ -167,7 +167,7 @@ func ClickHouseFormattedValue(v interface{}) string { case []interface{}: if len(x) == 0 { - return "" + return "[]" } switch x[0].(type) { case string: @@ -184,7 +184,7 @@ func ClickHouseFormattedValue(v interface{}) string { return strings.Join(strings.Fields(fmt.Sprint(x)), ",") default: zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0]))) - return "" + return "[]" } default: zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x))) diff --git a/pkg/query-service/utils/queryTemplate/vars.go b/pkg/query-service/utils/queryTemplate/vars.go index 7297af6a24..677d3aa773 100644 --- a/pkg/query-service/utils/queryTemplate/vars.go +++ b/pkg/query-service/utils/queryTemplate/vars.go @@ -8,17 +8,17 @@ import ( // AssignReservedVars assigns values for go template vars. assumes that // model.QueryRangeParamsV3.Start and End are Unix Nano timestamps -func AssignReservedVarsV3(metricsQueryRangeParams *v3.QueryRangeParamsV3) { - metricsQueryRangeParams.Variables["start_timestamp"] = metricsQueryRangeParams.Start / 1000 - metricsQueryRangeParams.Variables["end_timestamp"] = metricsQueryRangeParams.End / 1000 +func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) { + queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000 + queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000 - metricsQueryRangeParams.Variables["start_timestamp_ms"] = metricsQueryRangeParams.Start - metricsQueryRangeParams.Variables["end_timestamp_ms"] = metricsQueryRangeParams.End + queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start + queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End - metricsQueryRangeParams.Variables["start_timestamp_nano"] = metricsQueryRangeParams.Start * 1e6 - metricsQueryRangeParams.Variables["end_timestamp_nano"] = metricsQueryRangeParams.End * 1e6 + queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6 + queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6 - metricsQueryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.Start/1000) - metricsQueryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.End/1000) + queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000) + queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000) } From 5a778dcb18ec535f1ecffe53c893fd0a1d20dd5b Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com> Date: Thu, 16 May 2024 21:44:46 +0530 Subject: [PATCH 08/23] Chore: integrations: populate updatedAt for integration dashboards (#5019) * chore: add test for updatedAt value being populated in integration dashboards and get it passing * chore: also populate createdAt, createBy and updateBy for instaled integration dashboards * chore: update clickhouse integration config instructions --- .../clickhouse/config/collect-query-logs.md | 2 + .../clickhouse/config/prerequisites.md | 2 +- pkg/query-service/app/integrations/manager.go | 51 +++++++++++++++---- .../integration/signoz_integrations_test.go | 5 ++ 4 files changed, 48 insertions(+), 12 deletions(-) diff --git 
a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/collect-query-logs.md b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/collect-query-logs.md index 373b1016d6..24e20a7815 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/collect-query-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/collect-query-logs.md @@ -78,3 +78,5 @@ Make the collector config file available to your otel collector and use it by ad ``` Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. +Also note that only 1 collector instance should be configured to collect query_logs. +Using multiple collector instances or replicas with this config will lead to duplicate logs. diff --git a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/prerequisites.md index 7f1019666d..f2c667edbc 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/prerequisites.md +++ b/pkg/query-service/app/integrations/builtin_integrations/clickhouse/config/prerequisites.md @@ -30,7 +30,7 @@ To configure metrics and logs collection for a Clickhouse server, you need the f - **Ensure that an OTEL collector is running in your deployment environment** If needed, please [install SigNoz OTEL Collector](https://signoz.io/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/) If already installed, ensure that the collector version is v0.88.0 or newer. - If collecting logs from system.query_log table, ensure that the collector version is v0.88.22 or newer. + If collecting logs from system.query_log table, ensure that the collector version is v0.88.23 or newer. Also ensure that you can provide config files to the collector and that you can set environment variables and command line flags used for running it. 
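For example, a single collector instance can load both the base collector config and the query-log config by repeating the --config flag, as the instructions above describe; the binary name and file paths below are only illustrative and should be adjusted to your deployment:

```bash
# One collector instance loads both files; --config may be passed once per config file.
/usr/local/bin/signoz-otel-collector \
  --config /etc/otel/collector-config.yaml \
  --config /etc/otel/clickhouse-query-logs.yaml
```

Keeping the query-log config on a single instance, rather than on multiple replicas, avoids the duplicate logs called out in the note above.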
diff --git a/pkg/query-service/app/integrations/manager.go b/pkg/query-service/app/integrations/manager.go index 56a57ee026..6cd5a0c853 100644 --- a/pkg/query-service/app/integrations/manager.go +++ b/pkg/query-service/app/integrations/manager.go @@ -258,7 +258,7 @@ func (m *Manager) UninstallIntegration( func (m *Manager) GetPipelinesForInstalledIntegrations( ctx context.Context, ) ([]logparsingpipeline.Pipeline, *model.ApiError) { - installedIntegrations, apiErr := m.getDetailsForInstalledIntegrations(ctx) + installedIntegrations, apiErr := m.getInstalledIntegrations(ctx) if apiErr != nil { return nil, apiErr } @@ -327,10 +327,15 @@ func (m *Manager) GetInstalledIntegrationDashboardById( if dId, exists := dd["id"]; exists { if id, ok := dId.(string); ok && id == dashboardId { isLocked := 1 + author := "integration" return &dashboards.Dashboard{ - Uuid: m.dashboardUuid(integrationId, string(dashboardId)), - Locked: &isLocked, - Data: dd, + Uuid: m.dashboardUuid(integrationId, string(dashboardId)), + Locked: &isLocked, + Data: dd, + CreatedAt: integration.Installation.InstalledAt, + CreateBy: &author, + UpdatedAt: integration.Installation.InstalledAt, + UpdateBy: &author, }, nil } } @@ -344,7 +349,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById( func (m *Manager) GetDashboardsForInstalledIntegrations( ctx context.Context, ) ([]dashboards.Dashboard, *model.ApiError) { - installedIntegrations, apiErr := m.getDetailsForInstalledIntegrations(ctx) + installedIntegrations, apiErr := m.getInstalledIntegrations(ctx) if apiErr != nil { return nil, apiErr } @@ -356,10 +361,15 @@ func (m *Manager) GetDashboardsForInstalledIntegrations( if dId, exists := dd["id"]; exists { if dashboardId, ok := dId.(string); ok { isLocked := 1 + author := "integration" result = append(result, dashboards.Dashboard{ - Uuid: m.dashboardUuid(ii.IntegrationSummary.Id, dashboardId), - Locked: &isLocked, - Data: dd, + Uuid: m.dashboardUuid(ii.IntegrationSummary.Id, dashboardId), + Locked: &isLocked, + Data: dd, + CreatedAt: ii.Installation.InstalledAt, + CreateBy: &author, + UpdatedAt: ii.Installation.InstalledAt, + UpdateBy: &author, }) } } @@ -418,10 +428,10 @@ func (m *Manager) getInstalledIntegration( return &installation, nil } -func (m *Manager) getDetailsForInstalledIntegrations( +func (m *Manager) getInstalledIntegrations( ctx context.Context, ) ( - map[string]IntegrationDetails, *model.ApiError, + map[string]Integration, *model.ApiError, ) { installations, apiErr := m.installedIntegrationsRepo.list(ctx) if apiErr != nil { @@ -431,5 +441,24 @@ func (m *Manager) getDetailsForInstalledIntegrations( installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string { return i.IntegrationId }) - return m.availableIntegrationsRepo.get(ctx, installedIds) + integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds) + if apiErr != nil { + return nil, apiErr + } + + result := map[string]Integration{} + for _, ii := range installations { + iDetails, exists := integrationDetails[ii.IntegrationId] + if !exists { + return nil, model.InternalError(fmt.Errorf( + "couldn't find integration details for %s", ii.IntegrationId, + )) + } + + result[ii.IntegrationId] = Integration{ + Installation: &ii, + IntegrationDetails: iDetails, + } + } + return result, nil } diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go index d58ccaf51a..eae9603888 100644 --- 
a/pkg/query-service/tests/integration/signoz_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_integrations_test.go @@ -327,6 +327,7 @@ func TestDashboardsForInstalledIntegrationDashboards(t *testing.T) { // Installing an integration should make its dashboards appear in the dashboard list require.False(testAvailableIntegration.IsInstalled) + tsBeforeInstallation := time.Now().Unix() integrationsTB.RequestQSToInstallIntegration( testAvailableIntegration.Id, map[string]interface{}{}, ) @@ -344,9 +345,13 @@ func TestDashboardsForInstalledIntegrationDashboards(t *testing.T) { len(testIntegrationDashboards), len(dashboards), "dashboards for installed integrations should appear in dashboards list", ) + require.GreaterOrEqual(dashboards[0].CreatedAt.Unix(), tsBeforeInstallation) + require.GreaterOrEqual(dashboards[0].UpdatedAt.Unix(), tsBeforeInstallation) // Should be able to get installed integrations dashboard by id dd := integrationsTB.GetDashboardByIdFromQS(dashboards[0].Uuid) + require.GreaterOrEqual(dd.CreatedAt.Unix(), tsBeforeInstallation) + require.GreaterOrEqual(dd.UpdatedAt.Unix(), tsBeforeInstallation) require.Equal(*dd, dashboards[0]) // Integration dashboards should not longer appear in dashboard list after uninstallation From d313f44556c52ff56b1c34279e3c48f4a41b1a0f Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 17 May 2024 07:44:33 +0530 Subject: [PATCH 09/23] fix: multiple widgets getting created and hence blocking the delete (#5015) * fix: multiple widgets getting created and hence blocking the delete * fix: allow multiple deletes when multiple widgets present with same id * chore: use the avg for limit --------- Co-authored-by: Srikanth Chekuri --- frontend/src/container/NewWidget/index.tsx | 66 ++++++++++++++-------- pkg/query-service/app/dashboards/model.go | 59 ++++++++++++++++++- pkg/query-service/app/limit.go | 8 ++- 3 files changed, 107 insertions(+), 26 deletions(-) diff --git a/frontend/src/container/NewWidget/index.tsx b/frontend/src/container/NewWidget/index.tsx index fde9adf763..38fdf03aec 100644 --- a/frontend/src/container/NewWidget/index.tsx +++ b/frontend/src/container/NewWidget/index.tsx @@ -271,28 +271,50 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { uuid: selectedDashboard.uuid, data: { ...selectedDashboard.data, - widgets: [ - ...preWidgets, - { - ...(selectedWidget || ({} as Widgets)), - description: selectedWidget?.description || '', - timePreferance: selectedTime.enum, - isStacked: selectedWidget?.isStacked || false, - opacity: selectedWidget?.opacity || '1', - nullZeroValues: selectedWidget?.nullZeroValues || 'zero', - title: selectedWidget?.title, - yAxisUnit: selectedWidget?.yAxisUnit, - panelTypes: graphType, - query: currentQuery, - thresholds: selectedWidget?.thresholds, - softMin: selectedWidget?.softMin || 0, - softMax: selectedWidget?.softMax || 0, - fillSpans: selectedWidget?.fillSpans, - selectedLogFields: selectedWidget?.selectedLogFields || [], - selectedTracesFields: selectedWidget?.selectedTracesFields || [], - }, - ...afterWidgets, - ], + widgets: isNewDashboard + ? 
[ + ...afterWidgets, + { + ...(selectedWidget || ({} as Widgets)), + description: selectedWidget?.description || '', + timePreferance: selectedTime.enum, + isStacked: selectedWidget?.isStacked || false, + opacity: selectedWidget?.opacity || '1', + nullZeroValues: selectedWidget?.nullZeroValues || 'zero', + title: selectedWidget?.title, + yAxisUnit: selectedWidget?.yAxisUnit, + panelTypes: graphType, + query: currentQuery, + thresholds: selectedWidget?.thresholds, + softMin: selectedWidget?.softMin || 0, + softMax: selectedWidget?.softMax || 0, + fillSpans: selectedWidget?.fillSpans, + selectedLogFields: selectedWidget?.selectedLogFields || [], + selectedTracesFields: selectedWidget?.selectedTracesFields || [], + }, + ] + : [ + ...preWidgets, + { + ...(selectedWidget || ({} as Widgets)), + description: selectedWidget?.description || '', + timePreferance: selectedTime.enum, + isStacked: selectedWidget?.isStacked || false, + opacity: selectedWidget?.opacity || '1', + nullZeroValues: selectedWidget?.nullZeroValues || 'zero', + title: selectedWidget?.title, + yAxisUnit: selectedWidget?.yAxisUnit, + panelTypes: graphType, + query: currentQuery, + thresholds: selectedWidget?.thresholds, + softMin: selectedWidget?.softMin || 0, + softMax: selectedWidget?.softMax || 0, + fillSpans: selectedWidget?.fillSpans, + selectedLogFields: selectedWidget?.selectedLogFields || [], + selectedTracesFields: selectedWidget?.selectedTracesFields || [], + }, + ...afterWidgets, + ], layout: [...updatedLayout], }, }; diff --git a/pkg/query-service/app/dashboards/model.go b/pkg/query-service/app/dashboards/model.go index c69f30a6bd..e7f48f8f87 100644 --- a/pkg/query-service/app/dashboards/model.go +++ b/pkg/query-service/app/dashboards/model.go @@ -326,7 +326,15 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface if existingTotal > newTotal && existingTotal-newTotal > 1 { // if the total count of panels has reduced by more than 1, // return error - return nil, model.BadRequest(fmt.Errorf("deleting more than one panel is not supported")) + existingIds := getWidgetIds(dashboard.Data) + newIds := getWidgetIds(data) + + differenceIds := getIdDifference(existingIds, newIds) + + if len(differenceIds) > 1 { + return nil, model.BadRequest(fmt.Errorf("deleting more than one panel is not supported")) + } + } dashboard.UpdatedAt = time.Now() @@ -714,3 +722,52 @@ func countTraceAndLogsPanel(data map[string]interface{}) (int64, int64) { } return count, totalPanels } + +func getWidgetIds(data map[string]interface{}) []string { + widgetIds := []string{} + if data != nil && data["widgets"] != nil { + widgets, ok := data["widgets"].(interface{}) + if ok { + data, ok := widgets.([]interface{}) + if ok { + for _, widget := range data { + sData, ok := widget.(map[string]interface{}) + if ok && sData["query"] != nil && sData["id"] != nil { + id, ok := sData["id"].(string) + + if ok { + widgetIds = append(widgetIds, id) + } + + } + } + } + } + } + return widgetIds +} + +func getIdDifference(existingIds []string, newIds []string) []string { + // Convert newIds array to a map for faster lookups + newIdsMap := make(map[string]bool) + for _, id := range newIds { + newIdsMap[id] = true + } + + // Initialize a map to keep track of elements in the difference array + differenceMap := make(map[string]bool) + + // Initialize the difference array + difference := []string{} + + // Iterate through existingIds + for _, id := range existingIds { + // If the id is not found in newIds, and it's not already in the difference 
array + if _, found := newIdsMap[id]; !found && !differenceMap[id] { + difference = append(difference, id) + differenceMap[id] = true // Mark the id as seen in the difference array + } + } + + return difference +} diff --git a/pkg/query-service/app/limit.go b/pkg/query-service/app/limit.go index 6b8faecea2..55dd56a31c 100644 --- a/pkg/query-service/app/limit.go +++ b/pkg/query-service/app/limit.go @@ -40,12 +40,13 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam } } - ithSum, jthSum := 0.0, 0.0 + ithSum, jthSum, ithCount, jthCount := 0.0, 0.0, 1.0, 1.0 for _, point := range result.Series[i].Points { if math.IsNaN(point.Value) || math.IsInf(point.Value, 0) { continue } ithSum += point.Value + ithCount++ } for _, point := range result.Series[j].Points { @@ -53,12 +54,13 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam continue } jthSum += point.Value + jthCount++ } if orderBy.Order == "asc" { - return ithSum < jthSum + return ithSum/ithCount < jthSum/jthCount } else if orderBy.Order == "desc" { - return ithSum > jthSum + return ithSum/ithCount > jthSum/jthCount } } else { // Sort based on Labels map From 9ff0e340388e4eb07e60fb4ddc9f6239c168b623 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Fri, 17 May 2024 07:45:03 +0530 Subject: [PATCH 10/23] chore: migrate alerts to v4 for supported operators (#5010) --- ee/query-service/main.go | 8 + pkg/query-service/app/formula.go | 1 + pkg/query-service/app/parser.go | 86 ++++------ pkg/query-service/main.go | 7 + .../migrate/0_45_alerts_to_v4/run.go | 153 ++++++++++++++++++ pkg/query-service/migrate/migate.go | 67 ++++++++ pkg/query-service/rules/db.go | 2 +- pkg/query-service/rules/manager.go | 2 +- 8 files changed, 268 insertions(+), 58 deletions(-) create mode 100644 pkg/query-service/migrate/0_45_alerts_to_v4/run.go create mode 100644 pkg/query-service/migrate/migate.go diff --git a/ee/query-service/main.go b/ee/query-service/main.go index 3323e5bdbd..4fad91008f 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -14,7 +14,9 @@ import ( semconv "go.opentelemetry.io/otel/semconv/v1.4.0" "go.signoz.io/signoz/ee/query-service/app" "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/constants" baseconst "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/migrate" "go.signoz.io/signoz/pkg/query-service/version" "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" @@ -143,6 +145,12 @@ func main() { zap.L().Info("JWT secret key set successfully.") } + if err := migrate.Migrate(constants.RELATIONAL_DATASOURCE_PATH); err != nil { + zap.L().Error("Failed to migrate", zap.Error(err)) + } else { + zap.L().Info("Migration successful") + } + server, err := app.NewServer(serverOptions) if err != nil { zap.L().Fatal("Failed to create server", zap.Error(err)) diff --git a/pkg/query-service/app/formula.go b/pkg/query-service/app/formula.go index 8fa6010dfc..f1f10e4499 100644 --- a/pkg/query-service/app/formula.go +++ b/pkg/query-service/app/formula.go @@ -90,6 +90,7 @@ func joinAndCalculate(results []*v3.Result, uniqueLabelSet map[string]string, ex resultSeries := &v3.Series{ Labels: uniqueLabelSet, + Points: make([]v3.Point, 0), } timestamps := make([]int64, 0) for timestamp := range uniqueTimestamps { diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go index 2a70f96250..773156cc0d 100644 --- a/pkg/query-service/app/parser.go +++ 
b/pkg/query-service/app/parser.go @@ -1065,43 +1065,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE } query.ShiftBy = timeShiftBy - if query.Filters == nil || len(query.Filters.Items) == 0 { - continue - } - - for idx := range query.Filters.Items { - item := &query.Filters.Items[idx] - value := item.Value - if value != nil { - switch x := value.(type) { - case string: - variableName := strings.Trim(x, "{[.$]}") - if _, ok := queryRangeParams.Variables[variableName]; ok { - item.Value = queryRangeParams.Variables[variableName] - } - case []interface{}: - if len(x) > 0 { - switch x[0].(type) { - case string: - variableName := strings.Trim(x[0].(string), "{[.$]}") - if _, ok := queryRangeParams.Variables[variableName]; ok { - item.Value = queryRangeParams.Variables[variableName] - } - } - } - } - } - - if item.Operator != v3.FilterOperatorIn && item.Operator != v3.FilterOperatorNotIn { - // the value type should not be multiple values - if _, ok := item.Value.([]interface{}); ok { - return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("multiple values %s are not allowed for operator `%s` for key `%s`", item.Value, item.Operator, item.Key.Key)} - } - } - } - // for metrics v3 - // if the aggregate operator is a histogram quantile, and user has not forgotten // the le tag in the group by then add the le tag to the group by if query.AggregateOperator == v3.AggregateOperatorHistQuant50 || @@ -1129,28 +1093,38 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE } } - // for metrics v4 - if v3.IsPercentileOperator(query.SpaceAggregation) && - query.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) { - // If quantile is set, we need to group by le - // and set the space aggregation to sum - // and time aggregation to rate - query.TimeAggregation = v3.TimeAggregationRate - query.SpaceAggregation = v3.SpaceAggregationSum - // If le is not present in group by for quantile, add it - leFound := false - for _, groupBy := range query.GroupBy { - if groupBy.Key == "le" { - leFound = true - break + if query.Filters == nil || len(query.Filters.Items) == 0 { + continue + } + + for idx := range query.Filters.Items { + item := &query.Filters.Items[idx] + value := item.Value + if value != nil { + switch x := value.(type) { + case string: + variableName := strings.Trim(x, "{[.$]}") + if _, ok := queryRangeParams.Variables[variableName]; ok { + item.Value = queryRangeParams.Variables[variableName] + } + case []interface{}: + if len(x) > 0 { + switch x[0].(type) { + case string: + variableName := strings.Trim(x[0].(string), "{[.$]}") + if _, ok := queryRangeParams.Variables[variableName]; ok { + item.Value = queryRangeParams.Variables[variableName] + } + } + } } } - if !leFound { - query.GroupBy = append(query.GroupBy, v3.AttributeKey{ - Key: "le", - Type: v3.AttributeKeyTypeTag, - DataType: v3.AttributeKeyDataTypeString, - }) + + if v3.FilterOperator(strings.ToLower((string(item.Operator)))) != v3.FilterOperatorIn && v3.FilterOperator(strings.ToLower((string(item.Operator)))) != v3.FilterOperatorNotIn { + // the value type should not be multiple values + if _, ok := item.Value.([]interface{}); ok { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("multiple values %s are not allowed for operator `%s` for key `%s`", item.Value, item.Operator, item.Key.Key)} + } } } } diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index b6f5e0281d..72962cfeef 100644 --- 
a/pkg/query-service/main.go +++ b/pkg/query-service/main.go @@ -11,6 +11,7 @@ import ( "go.signoz.io/signoz/pkg/query-service/app" "go.signoz.io/signoz/pkg/query-service/auth" "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/migrate" "go.signoz.io/signoz/pkg/query-service/version" "go.uber.org/zap" @@ -90,6 +91,12 @@ func main() { zap.L().Info("JWT secret key set successfully.") } + if err := migrate.Migrate(constants.RELATIONAL_DATASOURCE_PATH); err != nil { + zap.L().Error("Failed to migrate", zap.Error(err)) + } else { + zap.L().Info("Migration successful") + } + server, err := app.NewServer(serverOptions) if err != nil { logger.Fatal("Failed to create server", zap.Error(err)) diff --git a/pkg/query-service/migrate/0_45_alerts_to_v4/run.go b/pkg/query-service/migrate/0_45_alerts_to_v4/run.go new file mode 100644 index 0000000000..f68f4ca43b --- /dev/null +++ b/pkg/query-service/migrate/0_45_alerts_to_v4/run.go @@ -0,0 +1,153 @@ +package alertstov4 + +import ( + "context" + "encoding/json" + + "github.com/jmoiron/sqlx" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" + "go.signoz.io/signoz/pkg/query-service/rules" + "go.uber.org/multierr" + "go.uber.org/zap" +) + +var Version = "0.45-alerts-to-v4" + +var mapTimeAggregation = map[v3.AggregateOperator]v3.TimeAggregation{ + v3.AggregateOperatorSum: v3.TimeAggregationSum, + v3.AggregateOperatorMin: v3.TimeAggregationMin, + v3.AggregateOperatorMax: v3.TimeAggregationMax, + v3.AggregateOperatorSumRate: v3.TimeAggregationRate, + v3.AggregateOperatorAvgRate: v3.TimeAggregationRate, + v3.AggregateOperatorMinRate: v3.TimeAggregationRate, + v3.AggregateOperatorMaxRate: v3.TimeAggregationRate, + v3.AggregateOperatorHistQuant50: v3.TimeAggregationUnspecified, + v3.AggregateOperatorHistQuant75: v3.TimeAggregationUnspecified, + v3.AggregateOperatorHistQuant90: v3.TimeAggregationUnspecified, + v3.AggregateOperatorHistQuant95: v3.TimeAggregationUnspecified, + v3.AggregateOperatorHistQuant99: v3.TimeAggregationUnspecified, +} + +var mapSpaceAggregation = map[v3.AggregateOperator]v3.SpaceAggregation{ + v3.AggregateOperatorSum: v3.SpaceAggregationSum, + v3.AggregateOperatorMin: v3.SpaceAggregationMin, + v3.AggregateOperatorMax: v3.SpaceAggregationMax, + v3.AggregateOperatorSumRate: v3.SpaceAggregationSum, + v3.AggregateOperatorAvgRate: v3.SpaceAggregationAvg, + v3.AggregateOperatorMinRate: v3.SpaceAggregationMin, + v3.AggregateOperatorMaxRate: v3.SpaceAggregationMax, + v3.AggregateOperatorHistQuant50: v3.SpaceAggregationPercentile50, + v3.AggregateOperatorHistQuant75: v3.SpaceAggregationPercentile75, + v3.AggregateOperatorHistQuant90: v3.SpaceAggregationPercentile90, + v3.AggregateOperatorHistQuant95: v3.SpaceAggregationPercentile95, + v3.AggregateOperatorHistQuant99: v3.SpaceAggregationPercentile99, +} + +func canMigrateOperator(operator v3.AggregateOperator) bool { + switch operator { + case v3.AggregateOperatorSum, + v3.AggregateOperatorMin, + v3.AggregateOperatorMax, + v3.AggregateOperatorSumRate, + v3.AggregateOperatorAvgRate, + v3.AggregateOperatorMinRate, + v3.AggregateOperatorMaxRate, + v3.AggregateOperatorHistQuant50, + v3.AggregateOperatorHistQuant75, + v3.AggregateOperatorHistQuant90, + v3.AggregateOperatorHistQuant95, + v3.AggregateOperatorHistQuant99: + return true + } + return false +} + +func Migrate(conn *sqlx.DB) error { + ruleDB := rules.NewRuleDB(conn) + storedRules, err := ruleDB.GetStoredRules(context.Background()) + if err != nil { + return err + } + + for _, storedRule := range 
storedRules { + parsedRule, errs := rules.ParsePostableRule([]byte(storedRule.Data)) + if len(errs) > 0 { + // this should not happen but if it does, we should not stop the migration + zap.L().Error("Error parsing rule", zap.Error(multierr.Combine(errs...)), zap.Int("rule", storedRule.Id)) + continue + } + zap.L().Info("Rule parsed", zap.Int("rule", storedRule.Id)) + updated := false + if parsedRule.RuleCondition != nil && parsedRule.Version == "" { + if parsedRule.RuleCondition.QueryType() == v3.QueryTypeBuilder { + // check if all the queries can be converted to v4 + canMigrate := true + for _, query := range parsedRule.RuleCondition.CompositeQuery.BuilderQueries { + if query.DataSource == v3.DataSourceMetrics && query.Expression == query.QueryName { + if !canMigrateOperator(query.AggregateOperator) { + canMigrate = false + break + } + } + } + + if canMigrate { + parsedRule.Version = "v4" + for _, query := range parsedRule.RuleCondition.CompositeQuery.BuilderQueries { + if query.DataSource == v3.DataSourceMetrics && query.Expression == query.QueryName { + // update aggregate attribute + if query.AggregateOperator == v3.AggregateOperatorSum || + query.AggregateOperator == v3.AggregateOperatorMin || + query.AggregateOperator == v3.AggregateOperatorMax { + query.AggregateAttribute.Type = "Gauge" + } + if query.AggregateOperator == v3.AggregateOperatorSumRate || + query.AggregateOperator == v3.AggregateOperatorAvgRate || + query.AggregateOperator == v3.AggregateOperatorMinRate || + query.AggregateOperator == v3.AggregateOperatorMaxRate { + query.AggregateAttribute.Type = "Sum" + } + + if query.AggregateOperator == v3.AggregateOperatorHistQuant50 || + query.AggregateOperator == v3.AggregateOperatorHistQuant75 || + query.AggregateOperator == v3.AggregateOperatorHistQuant90 || + query.AggregateOperator == v3.AggregateOperatorHistQuant95 || + query.AggregateOperator == v3.AggregateOperatorHistQuant99 { + query.AggregateAttribute.Type = "Histogram" + } + query.AggregateAttribute.DataType = v3.AttributeKeyDataTypeFloat64 + query.AggregateAttribute.IsColumn = true + query.TimeAggregation = mapTimeAggregation[query.AggregateOperator] + query.SpaceAggregation = mapSpaceAggregation[query.AggregateOperator] + query.AggregateOperator = v3.AggregateOperator(query.TimeAggregation) + updated = true + } + } + } + } + } + + if !updated { + zap.L().Info("Rule not updated", zap.Int("rule", storedRule.Id)) + continue + } + + ruleJSON, jsonErr := json.Marshal(parsedRule) + if jsonErr != nil { + zap.L().Error("Error marshalling rule; skipping rule migration", zap.Error(jsonErr), zap.Int("rule", storedRule.Id)) + continue + } + + stmt, prepareError := conn.PrepareContext(context.Background(), `UPDATE rules SET data=$3 WHERE id=$4;`) + if prepareError != nil { + zap.L().Error("Error in preparing statement for UPDATE to rules", zap.Error(prepareError)) + continue + } + defer stmt.Close() + + if _, err := stmt.Exec(ruleJSON, storedRule.Id); err != nil { + zap.L().Error("Error in Executing prepared statement for UPDATE to rules", zap.Error(err)) + } + } + return nil +} diff --git a/pkg/query-service/migrate/migate.go b/pkg/query-service/migrate/migate.go new file mode 100644 index 0000000000..f9d15a1567 --- /dev/null +++ b/pkg/query-service/migrate/migate.go @@ -0,0 +1,67 @@ +package migrate + +import ( + "database/sql" + + "github.com/jmoiron/sqlx" + alertstov4 "go.signoz.io/signoz/pkg/query-service/migrate/0_45_alerts_to_v4" + "go.uber.org/zap" +) + +type DataMigration struct { + ID int `db:"id"` + Version string 
`db:"version"` + CreatedAt string `db:"created_at"` + Succeeded bool `db:"succeeded"` +} + +func initSchema(conn *sqlx.DB) error { + tableSchema := ` + CREATE TABLE IF NOT EXISTS data_migrations ( + id SERIAL PRIMARY KEY, + version VARCHAR(255) NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + succeeded BOOLEAN NOT NULL DEFAULT FALSE + ); + ` + _, err := conn.Exec(tableSchema) + if err != nil { + return err + } + return nil +} + +func getMigrationVersion(conn *sqlx.DB, version string) (*DataMigration, error) { + var migration DataMigration + err := conn.Get(&migration, "SELECT * FROM data_migrations WHERE version = $1", version) + if err != nil { + if err == sql.ErrNoRows { + return nil, nil + } + return nil, err + } + return &migration, nil +} + +func Migrate(dsn string) error { + conn, err := sqlx.Connect("sqlite3", dsn) + if err != nil { + return err + } + if err := initSchema(conn); err != nil { + return err + } + + if m, err := getMigrationVersion(conn, "0.45_alerts_to_v4"); err == nil && m == nil { + if err := alertstov4.Migrate(conn); err != nil { + zap.L().Error("failed to migrate 0.45_alerts_to_v4", zap.Error(err)) + } else { + _, err := conn.Exec("INSERT INTO data_migrations (version, succeeded) VALUES ('0.45_alerts_to_v4', true)") + if err != nil { + return err + } + } + } + + return nil +} diff --git a/pkg/query-service/rules/db.go b/pkg/query-service/rules/db.go index cf903884fd..23372ce911 100644 --- a/pkg/query-service/rules/db.go +++ b/pkg/query-service/rules/db.go @@ -49,7 +49,7 @@ type ruleDB struct { // todo: move init methods for creating tables -func newRuleDB(db *sqlx.DB) RuleDB { +func NewRuleDB(db *sqlx.DB) RuleDB { return &ruleDB{ db, } diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index cad02523d7..d649b565fd 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -108,7 +108,7 @@ func NewManager(o *ManagerOptions) (*Manager, error) { return nil, err } - db := newRuleDB(o.DBConn) + db := NewRuleDB(o.DBConn) m := &Manager{ tasks: map[string]Task{}, From cf64da2631f96caae16ee23cc7d163c9a5818502 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Mon, 20 May 2024 14:22:44 +0530 Subject: [PATCH 11/23] fix: metrics order by avg (#5029) --- pkg/query-service/app/limit.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pkg/query-service/app/limit.go b/pkg/query-service/app/limit.go index 55dd56a31c..3ace3c687c 100644 --- a/pkg/query-service/app/limit.go +++ b/pkg/query-service/app/limit.go @@ -40,7 +40,7 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam } } - ithSum, jthSum, ithCount, jthCount := 0.0, 0.0, 1.0, 1.0 + ithSum, jthSum, ithCount, jthCount := 0.0, 0.0, 0.0, 0.0 for _, point := range result.Series[i].Points { if math.IsNaN(point.Value) || math.IsInf(point.Value, 0) { continue @@ -57,6 +57,10 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam jthCount++ } + // avoid division by zero + ithCount = math.Max(ithCount, 1) + jthCount = math.Max(jthCount, 1) + if orderBy.Order == "asc" { return ithSum/ithCount < jthSum/jthCount } else if orderBy.Order == "desc" { From 2dbe598b2c1be525643446641420bb4b12defabe Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Mon, 20 May 2024 18:42:39 +0530 Subject: [PATCH 12/23] fix: retain the query name,expression and disabled property on changing panel type (#4976) * fix: retain the query name,expression and disabled property on changing 
panel type * fix: missing reset and init query for superset query --- frontend/src/container/NewWidget/utils.ts | 45 +++++++++++++++++++++++ frontend/src/providers/QueryBuilder.tsx | 4 ++ 2 files changed, 49 insertions(+) diff --git a/frontend/src/container/NewWidget/utils.ts b/frontend/src/container/NewWidget/utils.ts index 64d884de15..caf2604639 100644 --- a/frontend/src/container/NewWidget/utils.ts +++ b/frontend/src/container/NewWidget/utils.ts @@ -50,6 +50,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'having', 'orderBy', 'functions', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -65,6 +68,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'orderBy', 'functions', 'spaceAggregation', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -78,6 +84,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'limit', 'having', 'orderBy', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -94,6 +103,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'having', 'orderBy', 'functions', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -109,6 +121,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'orderBy', 'functions', 'spaceAggregation', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -122,6 +137,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'limit', 'having', 'orderBy', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -138,6 +156,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'having', 'orderBy', 'functions', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -153,6 +174,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'orderBy', 'functions', 'spaceAggregation', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -166,6 +190,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'limit', 'having', 'orderBy', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -182,6 +209,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'having', 'orderBy', 'functions', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -197,6 +227,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'orderBy', 'functions', 'spaceAggregation', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -210,6 +243,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'limit', 'having', 'orderBy', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -241,6 +277,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'reduceTo', 'having', 'functions', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -254,6 +293,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'reduceTo', 'functions', 'spaceAggregation', + 'queryName', + 'expression', + 'disabled', ], }, }, @@ -267,6 +309,9 @@ export const panelTypeDataSourceFormValuesMap: Record< 'limit', 'having', 'orderBy', + 'queryName', + 'expression', + 'disabled', ], }, }, diff --git a/frontend/src/providers/QueryBuilder.tsx b/frontend/src/providers/QueryBuilder.tsx index a2166a5bc8..f630b13f83 100644 --- a/frontend/src/providers/QueryBuilder.tsx +++ b/frontend/src/providers/QueryBuilder.tsx @@ -229,6 +229,9 @@ export function QueryBuilderProvider({ setCurrentQuery( timeUpdated ? merge(currentQuery, newQueryState) : newQueryState, ); + setSupersetQuery( + timeUpdated ? 
merge(currentQuery, newQueryState) : newQueryState, + ); setQueryType(type); }, [prepareQueryBuilderData, currentQuery], @@ -802,6 +805,7 @@ export function QueryBuilderProvider({ if (newCurrentQuery) { setCurrentQuery(newCurrentQuery); + setSupersetQuery(newCurrentQuery); } }; From 12be6ce02070f8670c8591e5219eb6e9d09ba11e Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Mon, 20 May 2024 19:41:44 +0530 Subject: [PATCH 13/23] feat: restrict the rendering of img and form tags in the logs content (#4905) * feat: restrict the rendering of img tags in the logs content * fix: forbidden tags code cleanup --- frontend/src/components/Logs/ListLogView/index.tsx | 7 ++++++- frontend/src/components/Logs/RawLogView/index.tsx | 5 ++++- frontend/src/components/Logs/TableView/useTableView.tsx | 7 ++++++- frontend/src/utils/app.ts | 3 +++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index 47b108e944..fa8a2fb608 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -16,6 +16,7 @@ import { useCallback, useMemo, useState } from 'react'; // interfaces import { IField } from 'types/api/logs/fields'; import { ILog } from 'types/api/logs/log'; +import { FORBID_DOM_PURIFY_TAGS } from 'utils/app'; // components import AddToQueryHOC, { AddToQueryHOCProps } from '../AddToQueryHOC'; @@ -50,7 +51,11 @@ function LogGeneralField({ }: LogFieldProps): JSX.Element { const html = useMemo( () => ({ - __html: convert.toHtml(dompurify.sanitize(fieldValue)), + __html: convert.toHtml( + dompurify.sanitize(fieldValue, { + FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS], + }), + ), }), [fieldValue], ); diff --git a/frontend/src/components/Logs/RawLogView/index.tsx b/frontend/src/components/Logs/RawLogView/index.tsx index 712b97723d..fcb8beeeec 100644 --- a/frontend/src/components/Logs/RawLogView/index.tsx +++ b/frontend/src/components/Logs/RawLogView/index.tsx @@ -21,6 +21,7 @@ import { useMemo, useState, } from 'react'; +import { FORBID_DOM_PURIFY_TAGS } from 'utils/app'; import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButtons'; import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; @@ -144,7 +145,9 @@ function RawLogView({ const html = useMemo( () => ({ - __html: convert.toHtml(dompurify.sanitize(text)), + __html: convert.toHtml( + dompurify.sanitize(text, { FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS] }), + ), }), [text], ); diff --git a/frontend/src/components/Logs/TableView/useTableView.tsx b/frontend/src/components/Logs/TableView/useTableView.tsx index be34e998ef..fd37132110 100644 --- a/frontend/src/components/Logs/TableView/useTableView.tsx +++ b/frontend/src/components/Logs/TableView/useTableView.tsx @@ -8,6 +8,7 @@ import dompurify from 'dompurify'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { FlatLogData } from 'lib/logs/flatLogData'; import { useMemo } from 'react'; +import { FORBID_DOM_PURIFY_TAGS } from 'utils/app'; import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; import { getLogIndicatorTypeForTable } from '../LogStateIndicator/utils'; @@ -107,7 +108,11 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => { children: ( Date: Tue, 21 May 2024 12:01:21 +0530 Subject: [PATCH 14/23] chore: query range v3 metrics use v4 tables (#5021) --- .../app/clickhouseReader/reader.go | 41 +++-- pkg/query-service/app/http_handler.go | 47 +---- 
.../app/metrics/v3/cumulative_table.go | 13 +- .../app/metrics/v3/cumulative_table_test.go | 8 +- pkg/query-service/app/metrics/v3/delta.go | 15 +- .../app/metrics/v3/delta_table.go | 9 +- .../app/metrics/v3/delta_table_test.go | 8 +- .../app/metrics/v3/query_builder.go | 109 ++---------- .../app/metrics/v3/query_builder_test.go | 166 ++---------------- .../app/metrics/v4/helpers/sub_query.go | 85 +++++++++ pkg/query-service/app/querier/querier_test.go | 6 +- .../app/queryBuilder/query_builder_test.go | 10 +- pkg/query-service/constants/constants.go | 3 - 13 files changed, 175 insertions(+), 345 deletions(-) diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index fcc2efeb15..b7e1ec6a67 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -72,11 +72,17 @@ const ( signozTraceTableName = "distributed_signoz_index_v2" signozTraceLocalTableName = "signoz_index_v2" signozMetricDBName = "signoz_metrics" - signozSampleLocalTableName = "samples_v2" - signozSampleTableName = "distributed_samples_v2" - signozTSTableName = "distributed_time_series_v2" - signozTSTableNameV4 = "distributed_time_series_v4" - signozTSTableNameV41Day = "distributed_time_series_v4_1day" + signozSampleLocalTableName = "samples_v4" + signozSampleTableName = "distributed_samples_v4" + + signozTSLocalTableNameV4 = "time_series_v4" + signozTSTableNameV4 = "distributed_time_series_v4" + + signozTSLocalTableNameV46Hrs = "time_series_v4_6hrs" + signozTSTableNameV46Hrs = "distributed_time_series_v4_6hrs" + + signozTSLocalTableNameV41Day = "time_series_v4_1day" + signozTSTableNameV41Day = "distributed_time_series_v4_1day" minTimespanForProgressiveSearch = time.Hour minTimespanForProgressiveSearchMargin = time.Minute @@ -2382,15 +2388,17 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, } case constants.MetricsTTL: - tableName := signozMetricDBName + "." + signozSampleLocalTableName - statusItem, err := r.checkTTLStatusItem(ctx, tableName) - if err != nil { - return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing ttl_status check sql query")} + tableNames := []string{signozMetricDBName + "." + signozSampleLocalTableName, signozMetricDBName + "." + signozTSLocalTableNameV4, signozMetricDBName + "." + signozTSLocalTableNameV46Hrs, signozMetricDBName + "." 
+ signozTSLocalTableNameV41Day} + for _, tableName := range tableNames { + statusItem, err := r.checkTTLStatusItem(ctx, tableName) + if err != nil { + return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")} + } + if statusItem.Status == constants.StatusPending { + return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")} + } } - if statusItem.Status == constants.StatusPending { - return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")} - } - go func(tableName string) { + metricTTL := func(tableName string) { _, dbErr := r.localDB.Exec("INSERT INTO ttl_status (transaction_id, created_at, updated_at, table_name, ttl, status, cold_storage_ttl) VALUES (?, ?, ?, ?, ?, ?, ?)", uuid, time.Now(), time.Now(), tableName, params.DelDuration, constants.StatusPending, coldStorageDuration) if dbErr != nil { zap.L().Error("Error in inserting to ttl_status table", zap.Error(dbErr)) @@ -2434,7 +2442,10 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } - }(tableName) + } + for _, tableName := range tableNames { + go metricTTL(tableName) + } case constants.LogsTTL: tableName := r.logsDB + "." + r.logsLocalTable statusItem, err := r.checkTTLStatusItem(ctx, tableName) @@ -3259,7 +3270,7 @@ func (r *ClickHouseReader) FetchTemporality(ctx context.Context, metricNames []s func (r *ClickHouseReader) GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error) { - queryStr := fmt.Sprintf("SELECT count() as count from %s.%s where metric_name not like 'signoz_%%' group by metric_name order by count desc;", signozMetricDBName, signozTSTableName) + queryStr := fmt.Sprintf("SELECT countDistinct(fingerprint) as count from %s.%s where metric_name not like 'signoz_%%' group by metric_name order by count desc;", signozMetricDBName, signozTSTableNameV41Day) rows, _ := r.db.Query(ctx, queryStr) diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 3a9ea9d420..e3f7c5a165 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -483,49 +483,7 @@ func (aH *APIHandler) getRule(w http.ResponseWriter, r *http.Request) { aH.Respond(w, ruleResponse) } -func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3) error { - - metricNames := make([]string, 0) - metricNameToTemporality := make(map[string]map[v3.Temporality]bool) - if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 { - for _, query := range qp.CompositeQuery.BuilderQueries { - if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" { - metricNames = append(metricNames, query.AggregateAttribute.Key) - if _, ok := metricNameToTemporality[query.AggregateAttribute.Key]; !ok { - metricNameToTemporality[query.AggregateAttribute.Key] = make(map[v3.Temporality]bool) - } - } - } - } - - var err error - - if aH.preferDelta { - zap.L().Debug("fetching metric temporality") - metricNameToTemporality, err = aH.reader.FetchTemporality(ctx, metricNames) - if err != nil { - return err - } - } - - if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 { - for name := range qp.CompositeQuery.BuilderQueries { - query := qp.CompositeQuery.BuilderQueries[name] - if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" { - if aH.preferDelta && 
metricNameToTemporality[query.AggregateAttribute.Key][v3.Delta] { - query.Temporality = v3.Delta - } else if metricNameToTemporality[query.AggregateAttribute.Key][v3.Cumulative] { - query.Temporality = v3.Cumulative - } else { - query.Temporality = v3.Unspecified - } - } - } - } - return nil -} - -// populateTemporality same as addTemporality but for v4 and better +// populateTemporality adds the temporality to the query if it is not present func (aH *APIHandler) populateTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3) error { missingTemporality := make([]string, 0) @@ -3320,8 +3278,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) { } // add temporality for each metric - - temporalityErr := aH.addTemporality(r.Context(), queryRangeParams) + temporalityErr := aH.populateTemporality(r.Context(), queryRangeParams) if temporalityErr != nil { zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr)) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil) diff --git a/pkg/query-service/app/metrics/v3/cumulative_table.go b/pkg/query-service/app/metrics/v3/cumulative_table.go index db9c909abf..d57743cb58 100644 --- a/pkg/query-service/app/metrics/v3/cumulative_table.go +++ b/pkg/query-service/app/metrics/v3/cumulative_table.go @@ -5,6 +5,7 @@ import ( "math" "strings" + "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers" "go.signoz.io/signoz/pkg/query-service/constants" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/utils" @@ -28,7 +29,7 @@ func stepForTableCumulative(start, end int64) int64 { return int64(step) } -func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableName string) (string, error) { +func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery) (string, error) { step := stepForTableCumulative(start, end) @@ -36,19 +37,19 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam metricQueryGroupBy := mq.GroupBy - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) + filterSubQuery, err := helpers.PrepareTimeseriesFilterQueryV3(start, end, mq) if err != nil { return "", err } - samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmplCounterInner := "SELECT %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + @@ -61,7 +62,7 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam "SELECT %s" + " toStartOfHour(now()) as ts," + // now() has no menaing & used as a placeholder for ts " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + diff --git a/pkg/query-service/app/metrics/v3/cumulative_table_test.go b/pkg/query-service/app/metrics/v3/cumulative_table_test.go index a6b489e5ec..26748b9f09 100644 --- a/pkg/query-service/app/metrics/v3/cumulative_table_test.go +++ b/pkg/query-service/app/metrics/v3/cumulative_table_test.go @@ -38,7 +38,7 @@ func TestPanelTableForCumulative(t *testing.T) { }, Expression: "A", }, - expected: "SELECT toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Cumulative' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts", }, { name: "latency p50", @@ -66,7 +66,7 @@ func TestPanelTableForCumulative(t *testing.T) { }, }, }, - expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, 
toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", }, { name: "latency p99 with group by", @@ -88,13 +88,13 @@ func TestPanelTableForCumulative(t *testing.T) { }, Expression: "A", }, - expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT service_name,le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, 
service_name ASC,le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, service_name,le ORDER BY fingerprint, service_name ASC,le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT service_name,le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, service_name ASC,le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, service_name,le ORDER BY fingerprint, service_name ASC,le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - query, err := buildMetricQueryForTable(1689255866000, 1689257640000, 1800, c.query, "distributed_time_series_v2") + query, err := buildMetricQueryForTable(1689255866000, 1689257640000, 1800, c.query) if err != nil { t.Fatalf("unexpected error: %v\n", err) } diff --git a/pkg/query-service/app/metrics/v3/delta.go b/pkg/query-service/app/metrics/v3/delta.go index f82e086bea..c2a1893af0 100644 --- a/pkg/query-service/app/metrics/v3/delta.go +++ b/pkg/query-service/app/metrics/v3/delta.go @@ -3,12 +3,13 @@ package v3 import ( "fmt" + "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers" "go.signoz.io/signoz/pkg/query-service/constants" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/utils" ) -func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName string) (string, error) { +func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) { metricQueryGroupBy := mq.GroupBy @@ -30,19 +31,19 @@ func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableNam } } - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) + filterSubQuery, err := helpers.PrepareTimeseriesFilterQueryV3(start, end, mq) if err != nil { return "", err } - samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli <= %d", 
utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmpl := "SELECT %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + @@ -140,9 +141,9 @@ func buildDeltaMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableNam case v3.AggregateOperatorNoOp: queryTmpl := "SELECT fingerprint, labels as fullLabels," + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " any(value) as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + diff --git a/pkg/query-service/app/metrics/v3/delta_table.go b/pkg/query-service/app/metrics/v3/delta_table.go index 7d98d27b1a..4fdf152d95 100644 --- a/pkg/query-service/app/metrics/v3/delta_table.go +++ b/pkg/query-service/app/metrics/v3/delta_table.go @@ -4,12 +4,13 @@ import ( "fmt" "math" + "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers" "go.signoz.io/signoz/pkg/query-service/constants" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/utils" ) -func buildDeltaMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableName string) (string, error) { +func buildDeltaMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery) (string, error) { // round up to the nearest multiple of 60 step := int64(math.Ceil(float64(end-start+1)/1000/60) * 60) @@ -43,17 +44,17 @@ func buildDeltaMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tab } } - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) + filterSubQuery, err := helpers.PrepareTimeseriesFilterQueryV3(start, end, mq) if err != nil { return "", err } - samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) queryTmpl := "SELECT %s toStartOfHour(now()) as ts," + // now() has no menaing & used as a placeholder for ts " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + diff --git a/pkg/query-service/app/metrics/v3/delta_table_test.go b/pkg/query-service/app/metrics/v3/delta_table_test.go index d22807f2c1..3cb0598cfa 100644 --- a/pkg/query-service/app/metrics/v3/delta_table_test.go +++ b/pkg/query-service/app/metrics/v3/delta_table_test.go @@ -38,7 +38,7 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY ts ORDER BY ts", }, { name: "latency p50", @@ -61,7 +61,7 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", + expected: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", }, { name: "latency p99 with group by", @@ -80,13 +80,13 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM 
signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' ) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - query, err := buildDeltaMetricQueryForTable(1689255866000, 1689257640000, 1800, c.query, "distributed_time_series_v2") + query, err := buildDeltaMetricQueryForTable(1689255866000, 1689257640000, 1800, c.query) if err != nil { t.Fatalf("unexpected error: %v", err) } diff --git a/pkg/query-service/app/metrics/v3/query_builder.go b/pkg/query-service/app/metrics/v3/query_builder.go index 1e0a49198b..b5453e97b4 100644 --- a/pkg/query-service/app/metrics/v3/query_builder.go +++ b/pkg/query-service/app/metrics/v3/query_builder.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers" "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/model" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" @@ -51,109 +52,23 @@ var aggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{ // See https://github.com/SigNoz/signoz/issues/2151#issuecomment-1467249056 var rateWithoutNegative = `If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) ` -// buildMetricsTimeSeriesFilterQuery builds the sub-query to be used for filtering -// timeseries based on search criteria -func buildMetricsTimeSeriesFilterQuery(fs *v3.FilterSet, groupTags []v3.AttributeKey, mq *v3.BuilderQuery) (string, error) { - metricName := mq.AggregateAttribute.Key - aggregateOperator := mq.AggregateOperator - var conditions []string - if mq.Temporality == v3.Delta { - conditions = append(conditions, fmt.Sprintf("metric_name = %s AND temporality = '%s' ", utils.ClickHouseFormattedValue(metricName), v3.Delta)) - } else { - conditions = append(conditions, fmt.Sprintf("metric_name = %s AND temporality IN ['%s', '%s']", utils.ClickHouseFormattedValue(metricName), v3.Cumulative, v3.Unspecified)) - } - - if fs != nil && len(fs.Items) != 0 { - for _, item := range fs.Items { - toFormat := item.Value - op := 
v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator)))) - // if the received value is an array for like/match op, just take the first value - // or should we throw an error? - if op == v3.FilterOperatorLike || op == v3.FilterOperatorRegex || op == v3.FilterOperatorNotLike || op == v3.FilterOperatorNotRegex { - x, ok := item.Value.([]interface{}) - if ok { - if len(x) == 0 { - continue - } - toFormat = x[0] - } - } - - if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains { - toFormat = fmt.Sprintf("%%%s%%", toFormat) - } - fmtVal := utils.ClickHouseFormattedValue(toFormat) - switch op { - case v3.FilterOperatorEqual: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorNotEqual: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorIn: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorNotIn: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorLike: - conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorNotLike: - conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorRegex: - conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorNotRegex: - conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorGreaterThan: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorGreaterThanOrEq: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorLessThan: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorLessThanOrEq: - conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal)) - case v3.FilterOperatorContains: - conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorNotContains: - conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) - case v3.FilterOperatorExists: - conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key)) - case v3.FilterOperatorNotExists: - conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key)) - default: - return "", fmt.Errorf("unsupported operation") - } - } - } - queryString := strings.Join(conditions, " AND ") - - var selectLabels string - if aggregateOperator == v3.AggregateOperatorNoOp || aggregateOperator == v3.AggregateOperatorRate { - selectLabels = "labels," - } else { - for _, tag := range groupTags { - selectLabels += fmt.Sprintf(" JSONExtractString(labels, '%s') as %s,", tag.Key, tag.Key) - } - } - - filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME, 
queryString) - - return filterSubQuery, nil -} - -func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName string) (string, error) { +func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) { metricQueryGroupBy := mq.GroupBy - filterSubQuery, err := buildMetricsTimeSeriesFilterQuery(mq.Filters, metricQueryGroupBy, mq) + filterSubQuery, err := helpers.PrepareTimeseriesFilterQueryV3(start, end, mq) if err != nil { return "", err } - samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmpl := "SELECT %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + @@ -282,9 +197,9 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str case v3.AggregateOperatorNoOp: queryTmpl := "SELECT fingerprint, labels as fullLabels," + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " any(value) as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + @@ -434,15 +349,15 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P var err error if mq.Temporality == v3.Delta { if panelType == v3.PanelTypeTable { - query, err = buildDeltaMetricQueryForTable(start, end, mq.StepInterval, mq, constants.SIGNOZ_TIMESERIES_TABLENAME) + query, err = buildDeltaMetricQueryForTable(start, end, mq.StepInterval, mq) } else { - query, err = buildDeltaMetricQuery(start, end, mq.StepInterval, mq, constants.SIGNOZ_TIMESERIES_TABLENAME) + query, err = buildDeltaMetricQuery(start, end, mq.StepInterval, mq) } } else { if panelType == v3.PanelTypeTable { - query, err = buildMetricQueryForTable(start, end, mq.StepInterval, mq, constants.SIGNOZ_TIMESERIES_TABLENAME) + query, err = buildMetricQueryForTable(start, end, mq.StepInterval, mq) } else { - query, err = buildMetricQuery(start, end, mq.StepInterval, mq, constants.SIGNOZ_TIMESERIES_TABLENAME) + query, err = buildMetricQuery(start, end, mq.StepInterval, mq) } } diff --git a/pkg/query-service/app/metrics/v3/query_builder_test.go b/pkg/query-service/app/metrics/v3/query_builder_test.go index 2ad6013de6..5b85036007 100644 --- a/pkg/query-service/app/metrics/v3/query_builder_test.go +++ b/pkg/query-service/app/metrics/v3/query_builder_test.go @@ -50,6 +50,7 @@ func TestBuildQueryWithFilters(t *testing.T) { }}, AggregateOperator: v3.AggregateOperatorRateMax, Expression: "A", + Temporality: v3.Cumulative, }, }, }, @@ -57,7 +58,7 @@ func TestBuildQueryWithFilters(t *testing.T) { query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false}) require.NoError(t, err) - require.Contains(t, query, "WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'a') != 'b'") + require.Contains(t, query, "WHERE metric_name = 'name' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'a') != 'b'") require.Contains(t, query, rateWithoutNegative) require.Contains(t, query, "not match(JSONExtractString(labels, 'code'), 'ERROR_*')") }) @@ -78,6 +79,7 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) { {Key: v3.AttributeKey{Key: "in"}, Value: []interface{}{"a", "b", "c"}, Operator: v3.FilterOperatorIn}, }}, AggregateOperator: v3.AggregateOperatorRateAvg, + Temporality: v3.Cumulative, Expression: "A", }, "B": { @@ -85,6 +87,7 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) { StepInterval: 60, AggregateAttribute: v3.AttributeKey{Key: "name2"}, AggregateOperator: v3.AggregateOperatorRateMax, + Temporality: v3.Cumulative, Expression: "B", }, }, @@ -94,158 +97,15 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) { query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{PreferRPM: false}) require.NoError(t, err) - require.Contains(t, query, "WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'in') IN ['a','b','c']") + require.Contains(t, query, "WHERE metric_name = 'name' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") require.Contains(t, query, rateWithoutNegative) }) } -func TestBuildQueryOperators(t 
*testing.T) { - testCases := []struct { - operator v3.FilterOperator - filterSet v3.FilterSet - expectedWhereClause string - }{ - { - operator: v3.FilterOperatorEqual, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "route", Operator: v3.FilterOperatorEqual}, - }, - }, - expectedWhereClause: "JSONExtractString(labels, 'service_name') = 'route'", - }, - { - operator: v3.FilterOperatorNotEqual, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "route", Operator: v3.FilterOperatorNotEqual}, - }, - }, - expectedWhereClause: "JSONExtractString(labels, 'service_name') != 'route'", - }, - { - operator: v3.FilterOperatorRegex, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "out", Operator: v3.FilterOperatorRegex}, - }, - }, - expectedWhereClause: "match(JSONExtractString(labels, 'service_name'), 'out')", - }, - { - operator: v3.FilterOperatorNotRegex, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "out", Operator: v3.FilterOperatorNotRegex}, - }, - }, - expectedWhereClause: "not match(JSONExtractString(labels, 'service_name'), 'out')", - }, - { - operator: v3.FilterOperatorIn, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: []interface{}{"route", "driver"}, Operator: v3.FilterOperatorIn}, - }, - }, - expectedWhereClause: "JSONExtractString(labels, 'service_name') IN ['route','driver']", - }, - { - operator: v3.FilterOperatorNotIn, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: []interface{}{"route", "driver"}, Operator: v3.FilterOperatorNotIn}, - }, - }, - expectedWhereClause: "JSONExtractString(labels, 'service_name') NOT IN ['route','driver']", - }, - { - operator: v3.FilterOperatorExists, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "horn"}, Operator: v3.FilterOperatorExists}, - }, - }, - expectedWhereClause: "has(JSONExtractKeys(labels), 'horn')", - }, - { - operator: v3.FilterOperatorNotExists, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "horn"}, Operator: v3.FilterOperatorNotExists}, - }, - }, - expectedWhereClause: "not has(JSONExtractKeys(labels), 'horn')", - }, - { - operator: v3.FilterOperatorContains, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "out", Operator: v3.FilterOperatorContains}, - }, - }, - expectedWhereClause: "like(JSONExtractString(labels, 'service_name'), '%out%')", - }, - { - operator: v3.FilterOperatorNotContains, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "serice_name"}, Value: "out", Operator: v3.FilterOperatorNotContains}, - }, - }, - expectedWhereClause: "notLike(JSONExtractString(labels, 'serice_name'), '%out%')", - }, - { - operator: v3.FilterOperatorLike, - filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "service_name"}, Value: "dri", Operator: v3.FilterOperatorLike}, - }, - }, - expectedWhereClause: "like(JSONExtractString(labels, 'service_name'), 'dri')", - }, - { - operator: v3.FilterOperatorNotLike, - 
filterSet: v3.FilterSet{ - Operator: "AND", - Items: []v3.FilterItem{ - {Key: v3.AttributeKey{Key: "serice_name"}, Value: "dri", Operator: v3.FilterOperatorNotLike}, - }, - }, - expectedWhereClause: "notLike(JSONExtractString(labels, 'serice_name'), 'dri')", - }, - } - - for i, tc := range testCases { - t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { - mq := v3.BuilderQuery{ - QueryName: "A", - StepInterval: 60, - AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, - AggregateOperator: v3.AggregateOperatorSum, - } - whereClause, err := buildMetricsTimeSeriesFilterQuery(&tc.filterSet, []v3.AttributeKey{}, &mq) - require.NoError(t, err) - require.Contains(t, whereClause, tc.expectedWhereClause) - }) - } -} - func TestBuildQueryXRate(t *testing.T) { t.Run("TestBuildQueryXRate", func(t *testing.T) { - tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts` + tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'name' AND temporality = '' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts` cases := []struct { aggregateOperator v3.AggregateOperator @@ -298,7 +158,7 @@ func TestBuildQueryXRate(t *testing.T) { func TestBuildQueryRPM(t *testing.T) { t.Run("TestBuildQueryXRate", func(t *testing.T) { - tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM 
signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)` + tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'name' AND temporality = '' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)` cases := []struct { aggregateOperator v3.AggregateOperator @@ -377,7 +237,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { }, }, // 20:10:00 - 20:41:00 - expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686082200000 AND unix_milli < 1686084060000", }, { name: "TestBuildQueryAdjustedTimes start close to 50 seconds", @@ -402,7 +262,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { }, }, // 20:10:00 - 20:41:00 - expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686082200000 AND unix_milli < 1686084060000", }, { name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds", @@ -427,7 +287,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { }, }, // 20:11:00 - 20:41:00 - expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686082260000 AND unix_milli < 1686084060000", }, { name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds and end close to 30 seconds", @@ -452,7 +312,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { }, }, // 20:11:00 - 20:41:00 - expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686082260000 AND unix_milli < 1686084060000", }, { name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 300 seconds and end close to 30 seconds", @@ -479,7 +339,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { // 20:05:00 - 20:41:00 // 20:10:00 is the nearest 5 minute interval, but we round down to 20:05:00 // as this is a rate query and we want to include the previous value for the first interval - expected: "timestamp_ms >= 1686081900000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686081900000 AND unix_milli < 1686084060000", }, { name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 180 seconds and end close to 30 seconds", @@ -506,7 
+366,7 @@ func TestBuildQueryAdjustedTimes(t *testing.T) { // 20:06:00 - 20:39:00 // 20:09:00 is the nearest 3 minute interval, but we round down to 20:06:00 // as this is a rate query and we want to include the previous value for the first interval - expected: "timestamp_ms >= 1686081960000 AND timestamp_ms < 1686084060000", + expected: "unix_milli >= 1686081960000 AND unix_milli < 1686084060000", }, } diff --git a/pkg/query-service/app/metrics/v4/helpers/sub_query.go b/pkg/query-service/app/metrics/v4/helpers/sub_query.go index d4cd103719..e1edc5a964 100644 --- a/pkg/query-service/app/metrics/v4/helpers/sub_query.go +++ b/pkg/query-service/app/metrics/v4/helpers/sub_query.go @@ -117,3 +117,88 @@ func PrepareTimeseriesFilterQuery(start, end int64, mq *v3.BuilderQuery) (string return filterSubQuery, nil } + +// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria +func PrepareTimeseriesFilterQueryV3(start, end int64, mq *v3.BuilderQuery) (string, error) { + var conditions []string + var fs *v3.FilterSet = mq.Filters + var groupTags []v3.AttributeKey = mq.GroupBy + + conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key))) + conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality)) + + start, end, tableName := which(start, end) + + conditions = append(conditions, fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", start, end)) + + if fs != nil && len(fs.Items) != 0 { + for _, item := range fs.Items { + toFormat := item.Value + op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator)))) + if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains { + toFormat = fmt.Sprintf("%%%s%%", toFormat) + } + fmtVal := utils.ClickHouseFormattedValue(toFormat) + switch op { + case v3.FilterOperatorEqual: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorNotEqual: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorIn: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorNotIn: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorLike: + conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorNotLike: + conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorRegex: + conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorNotRegex: + conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorGreaterThan: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorGreaterThanOrEq: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorLessThan: + conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorLessThanOrEq: + conditions = append(conditions, 
fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal)) + case v3.FilterOperatorContains: + conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorNotContains: + conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal)) + case v3.FilterOperatorExists: + conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key)) + case v3.FilterOperatorNotExists: + conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key)) + default: + return "", fmt.Errorf("unsupported filter operator") + } + } + } + whereClause := strings.Join(conditions, " AND ") + + var selectLabels string + + if mq.AggregateOperator == v3.AggregateOperatorNoOp || mq.AggregateOperator == v3.AggregateOperatorRate { + selectLabels += "labels, " + } else { + for _, tag := range groupTags { + selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key) + } + } + + // The table JOIN key always exists + selectLabels += "fingerprint" + + filterSubQuery := fmt.Sprintf( + "SELECT DISTINCT %s FROM %s.%s WHERE %s", + selectLabels, + constants.SIGNOZ_METRIC_DBNAME, + tableName, + whereClause, + ) + + return filterSubQuery, nil +} diff --git a/pkg/query-service/app/querier/querier_test.go b/pkg/query-service/app/querier/querier_test.go index 37514b6f23..80a3a07422 100644 --- a/pkg/query-service/app/querier/querier_test.go +++ b/pkg/query-service/app/querier/querier_test.go @@ -572,8 +572,8 @@ func TestQueryRange(t *testing.T) { } q := NewQuerier(opts) expectedTimeRangeInQueryString := []string{ - fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000), - fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000+120*60*1000, 1675115580000+180*60*1000), + fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000, 1675115580000+120*60*1000), + fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000+120*60*1000, 1675115580000+180*60*1000), fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", 1675115580000*1000000, (1675115580000+120*60*1000)*int64(1000000)), fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)), } @@ -683,7 +683,7 @@ func TestQueryRangeValueType(t *testing.T) { q := NewQuerier(opts) // No caching expectedTimeRangeInQueryString := []string{ - fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000), + fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000, 1675115580000+120*60*1000), fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)), } diff --git a/pkg/query-service/app/queryBuilder/query_builder_test.go b/pkg/query-service/app/queryBuilder/query_builder_test.go index 4291bf2407..3d170a7255 100644 --- a/pkg/query-service/app/queryBuilder/query_builder_test.go +++ b/pkg/query-service/app/queryBuilder/query_builder_test.go @@ -27,6 +27,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) { {Key: v3.AttributeKey{Key: "in"}, Value: []interface{}{"a", "b", "c"}, Operator: v3.FilterOperatorIn}, }}, AggregateOperator: v3.AggregateOperatorRateMax, + Temporality: v3.Cumulative, Expression: "A", }, "B": { @@ -35,6 +36,7 @@ func 
TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) { AggregateAttribute: v3.AttributeKey{Key: "name2"}, DataSource: v3.DataSourceMetrics, AggregateOperator: v3.AggregateOperatorRateAvg, + Temporality: v3.Cumulative, Expression: "B", }, "C": { @@ -55,7 +57,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) { require.NoError(t, err) require.Contains(t, queries["C"], "SELECT A.`ts` as `ts`, A.value / B.value") - require.Contains(t, queries["C"], "WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'in') IN ['a','b','c']") + require.Contains(t, queries["C"], "WHERE metric_name = 'name' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") require.Contains(t, queries["C"], "(value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))") }) } @@ -257,7 +259,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "A", - expected: "SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts", }, { name: "TestQueryWithExpression - Error rate", @@ -327,7 +329,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "C", - expected: "SELECT A.`ts` as `ts`, A.value * 100 / B.value as value FROM (SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, 'status_code') IN ['STATUS_CODE_ERROR'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts) as A INNER JOIN (SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 
as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts) as B ON A.`ts` = B.`ts`", + expected: "SELECT A.`ts` as `ts`, A.value * 100 / B.value as value FROM (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, 'status_code') IN ['STATUS_CODE_ERROR'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as A INNER JOIN (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_count' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as B ON A.`ts` = B.`ts`", }, { name: "TestQuery - Quantile", @@ -355,7 +357,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "A", - expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) as value FROM (SELECT service_name,le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' ) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) as value FROM (SELECT service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT 
JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index fe2ad8c86c..6e398a42e0 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -206,12 +206,9 @@ var GroupByColMap = map[string]struct{}{ const ( SIGNOZ_METRIC_DBNAME = "signoz_metrics" - SIGNOZ_SAMPLES_TABLENAME = "distributed_samples_v2" SIGNOZ_SAMPLES_V4_TABLENAME = "distributed_samples_v4" - SIGNOZ_TIMESERIES_TABLENAME = "distributed_time_series_v2" SIGNOZ_TRACE_DBNAME = "signoz_traces" SIGNOZ_SPAN_INDEX_TABLENAME = "distributed_signoz_index_v2" - SIGNOZ_TIMESERIES_LOCAL_TABLENAME = "time_series_v2" SIGNOZ_TIMESERIES_v4_LOCAL_TABLENAME = "time_series_v4" SIGNOZ_TIMESERIES_v4_6HRS_LOCAL_TABLENAME = "time_series_v4_6hrs" SIGNOZ_TIMESERIES_v4_1DAY_LOCAL_TABLENAME = "time_series_v4_1day" From 631c12259fa156fe480432ca456caae4cc5008ee Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Tue, 21 May 2024 22:54:18 +0530 Subject: [PATCH 15/23] Fix promql light theme (#5044) * fix: promQL icon not visible in light mode * fix: promql icon * fix: promQL icon not visible in light mode --- frontend/public/Icons/promQL.svg | 1 - frontend/src/assets/Dashboard/PromQl.tsx | 27 +++++++++++++++++++ .../container/FormAlertRules/QuerySection.tsx | 11 ++++++-- .../QuerySection/QuerySection.styles.scss | 5 ++++ .../LeftContainer/QuerySection/index.tsx | 9 ++++++- 5 files changed, 49 insertions(+), 4 deletions(-) delete mode 100644 frontend/public/Icons/promQL.svg create mode 100644 frontend/src/assets/Dashboard/PromQl.tsx diff --git a/frontend/public/Icons/promQL.svg b/frontend/public/Icons/promQL.svg deleted file mode 100644 index d02bcc2a4d..0000000000 --- a/frontend/public/Icons/promQL.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/frontend/src/assets/Dashboard/PromQl.tsx b/frontend/src/assets/Dashboard/PromQl.tsx new file mode 100644 index 0000000000..8a942ae74e --- /dev/null +++ b/frontend/src/assets/Dashboard/PromQl.tsx @@ -0,0 +1,27 @@ +import { CSSProperties } from 'react'; + +function PromQLIcon({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + ); +} + +export default PromQLIcon; diff --git a/frontend/src/container/FormAlertRules/QuerySection.tsx b/frontend/src/container/FormAlertRules/QuerySection.tsx index 406a757eda..a567288585 100644 --- a/frontend/src/container/FormAlertRules/QuerySection.tsx +++ b/frontend/src/container/FormAlertRules/QuerySection.tsx @@ -1,12 +1,15 @@ import './QuerySection.styles.scss'; +import { Color } from '@signozhq/design-tokens'; import { Button, Tabs, Tooltip } from 'antd'; +import PromQLIcon from 'assets/Dashboard/PromQl'; import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts'; import { ENTITY_VERSION_V4 } from 'constants/app'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { QBShortcuts } from 'constants/shortcuts/QBShortcuts'; import { QueryBuilder } from 
'container/QueryBuilder'; import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; +import { useIsDarkMode } from 'hooks/useDarkMode'; import { Atom, Play, Terminal } from 'lucide-react'; import { useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -48,6 +51,8 @@ function QuerySection({ const renderChQueryUI = (): JSX.Element => ; + const isDarkMode = useIsDarkMode(); + const renderMetricUI = (): JSX.Element => ( ), key: EQueryType.PROM, }, ], - [], + [isDarkMode], ); const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys(); diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QuerySection.styles.scss b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QuerySection.styles.scss index 968da1459a..d6ae43ac9a 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QuerySection.styles.scss +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QuerySection.styles.scss @@ -12,6 +12,7 @@ .prom-ql-icon { height: 14px; width: 14px; + color: var(--bg-vanilla-200); } } .ant-btn-default { @@ -54,6 +55,10 @@ .ant-tabs-tab-active { .nav-btns { background: var(--bg-vanilla-300) !important; + + .prom-ql-icon { + color: var(--bg-ink-400); + } } } } diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx index 8819cbf0f9..82ac79bc59 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx @@ -1,6 +1,8 @@ import './QuerySection.styles.scss'; +import { Color } from '@signozhq/design-tokens'; import { Button, Tabs, Tooltip, Typography } from 'antd'; +import PromQLIcon from 'assets/Dashboard/PromQl'; import TextToolTip from 'components/TextToolTip'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { QBShortcuts } from 'constants/shortcuts/QBShortcuts'; @@ -11,6 +13,7 @@ import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; +import { useIsDarkMode } from 'hooks/useDarkMode'; import useUrlQuery from 'hooks/useUrlQuery'; import { defaultTo } from 'lodash-es'; import { Atom, Play, Terminal } from 'lucide-react'; @@ -53,6 +56,8 @@ function QuerySection({ const { selectedDashboard, setSelectedDashboard } = useDashboard(); + const isDarkMode = useIsDarkMode(); + const { widgets } = selectedDashboard?.data || {}; const getWidget = useCallback(() => { @@ -196,7 +201,9 @@ function QuerySection({ label: ( ), From 4ab350e72115ee11eac8c3200a42e02eef7ed84c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 10:33:28 +0530 Subject: [PATCH 16/23] chore(deps): bump follow-redirects from 1.15.2 to 1.15.6 in /frontend (#4714) Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.2 to 1.15.6. - [Release notes](https://github.com/follow-redirects/follow-redirects/releases) - [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.2...v1.15.6) --- updated-dependencies: - dependency-name: follow-redirects dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/yarn.lock | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index b04ba34736..88465b8bed 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -9199,15 +9199,10 @@ flatten-vertex-data@^1.0.0: dependencies: dtype "^2.0.0" -follow-redirects@^1.0.0, follow-redirects@^1.14.0: - version "1.15.2" - resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== - -follow-redirects@^1.15.4: - version "1.15.4" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.4.tgz#cdc7d308bf6493126b17ea2191ea0ccf3e535adf" - integrity sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw== +follow-redirects@^1.0.0, follow-redirects@^1.14.0, follow-redirects@^1.15.0: + version "1.15.6" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" + integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== fontfaceobserver@2.3.0: version "2.3.0" From 611ec3e08d525293f769dfcc02495a584456787b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 10:33:36 +0530 Subject: [PATCH 17/23] chore(deps): bump axios from 0.21.4 to 1.6.2 in /frontend (#4839) Bumps [axios](https://github.com/axios/axios) from 0.21.4 to 1.6.2. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v0.21.4...v1.6.2) --- updated-dependencies: - dependency-name: axios dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/yarn.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 88465b8bed..c076f98699 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -5824,7 +5824,7 @@ axios@1.6.4: axios@^0.21.1: version "0.21.4" - resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== dependencies: follow-redirects "^1.14.0" From 6827d66ae9046b276df302f37eff5ea599550b63 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 10:46:03 +0530 Subject: [PATCH 18/23] --- (#5048) updated-dependencies: - dependency-name: axios dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/yarn.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index c076f98699..2ab5d08743 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -9199,7 +9199,7 @@ flatten-vertex-data@^1.0.0: dependencies: dtype "^2.0.0" -follow-redirects@^1.0.0, follow-redirects@^1.14.0, follow-redirects@^1.15.0: +follow-redirects@^1.0.0, follow-redirects@^1.14.0, follow-redirects@^1.15.4: version "1.15.6" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== From 10e44ce44071b4b14a30346f72aea30d66bb08ab Mon Sep 17 00:00:00 2001 From: Yunus M Date: Wed, 22 May 2024 11:38:51 +0530 Subject: [PATCH 19/23] feat: update packages (#5054) * feat: update packages * feat: remove jest preview --- frontend/jest.config.ts | 2 - frontend/package.json | 6 +- frontend/yarn.lock | 839 ++++------------------------------------ 3 files changed, 78 insertions(+), 769 deletions(-) diff --git a/frontend/jest.config.ts b/frontend/jest.config.ts index e7165ac8be..d7776b0034 100644 --- a/frontend/jest.config.ts +++ b/frontend/jest.config.ts @@ -21,8 +21,6 @@ const config: Config.InitialOptions = { transform: { '^.+\\.(ts|tsx)?$': 'ts-jest', '^.+\\.(js|jsx)$': 'babel-jest', - '^.+\\.(css|scss|sass|less)$': 'jest-preview/transforms/css', - '^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)': 'jest-preview/transforms/file', }, transformIgnorePatterns: [ 'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)', diff --git a/frontend/package.json b/frontend/package.json index 437203a490..a433b698e2 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -13,8 +13,6 @@ "jest": "jest", "jest:coverage": "jest --coverage", "jest:watch": "jest --watch", - "jest-preview": "jest-preview", - "test:debug": "npm-run-all -p test jest-preview", "postinstall": "is-ci || yarn husky:configure", "playwright": "npm run i18n:generate-hash && NODE_ENV=testing playwright test --config=./playwright.config.ts", "playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium", @@ -183,7 +181,7 @@ "@types/webpack-dev-server": "^4.7.2", "@typescript-eslint/eslint-plugin": "^4.33.0", "@typescript-eslint/parser": "^4.33.0", - "autoprefixer": "^9.0.0", + "autoprefixer": "10.4.19", "babel-plugin-styled-components": "^1.12.0", "compression-webpack-plugin": "9.0.0", "copy-webpack-plugin": "^8.1.0", @@ -206,12 +204,12 @@ "husky": "^7.0.4", "is-ci": "^3.0.1", "jest-playwright-preset": "^1.7.2", - "jest-preview": "0.3.1", "jest-styled-components": "^7.0.8", "lint-staged": "^12.5.0", "msw": "1.3.2", "npm-run-all": "latest", "portfinder-sync": "^0.0.2", + "postcss": "8.4.38", "prettier": "2.2.1", "raw-loader": "4.0.2", "react-hooks-testing-library": "0.6.0", diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 2ab5d08743..071498c6fb 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -232,27 +232,6 @@ json5 "^2.2.3" semver "^6.3.1" -"@babel/core@^7.19.6": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.9.tgz#b028820718000f267870822fec434820e9b1e4d1" - integrity 
sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw== - dependencies: - "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-compilation-targets" "^7.23.6" - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helpers" "^7.23.9" - "@babel/parser" "^7.23.9" - "@babel/template" "^7.23.9" - "@babel/traverse" "^7.23.9" - "@babel/types" "^7.23.9" - convert-source-map "^2.0.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.3" - semver "^6.3.1" - "@babel/core@^7.22.11": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.15.tgz#15d4fd03f478a459015a4b94cfbb3bd42c48d2f4" @@ -314,16 +293,6 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/generator@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e" - integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw== - dependencies: - "@babel/types" "^7.23.6" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" - jsesc "^2.5.1" - "@babel/generator@^7.24.1": version "7.24.1" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.24.1.tgz#e67e06f68568a4ebf194d1c6014235344f0476d0" @@ -825,15 +794,6 @@ "@babel/traverse" "^7.22.15" "@babel/types" "^7.22.15" -"@babel/helpers@^7.23.9": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.9.tgz#c3e20bbe7f7a7e10cb9b178384b4affdf5995c7d" - integrity sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ== - dependencies: - "@babel/template" "^7.23.9" - "@babel/traverse" "^7.23.9" - "@babel/types" "^7.23.9" - "@babel/helpers@^7.24.1": version "7.24.1" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.24.1.tgz#183e44714b9eba36c3038e442516587b1e0a1a94" @@ -905,11 +865,6 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== -"@babel/parser@^7.23.9": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.9.tgz#7b903b6149b0f8fa7ad564af646c4c38a77fc44b" - integrity sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA== - "@babel/parser@^7.24.0", "@babel/parser@^7.24.1": version "7.24.1" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.24.1.tgz#1e416d3627393fab1cb5b0f2f1796a100ae9133a" @@ -2284,15 +2239,6 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/template@^7.23.9": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.23.9.tgz#f881d0487cba2828d3259dcb9ef5005a9731011a" - integrity sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA== - dependencies: - "@babel/code-frame" "^7.23.5" - "@babel/parser" "^7.23.9" - "@babel/types" "^7.23.9" - "@babel/template@^7.24.0": version "7.24.0" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.24.0.tgz#c6a524aa93a4a05d66aaf31654258fae69d87d50" @@ -2318,22 +2264,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/traverse@^7.23.9": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.9.tgz#2f9d6aead6b564669394c5ce0f9302bb65b9d950" - integrity 
sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg== - dependencies: - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.23.9" - "@babel/types" "^7.23.9" - debug "^4.3.1" - globals "^11.1.0" - "@babel/traverse@^7.24.1": version "7.24.1" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.24.1.tgz#d65c36ac9dd17282175d1e4a3c49d5b7988f530c" @@ -2386,15 +2316,6 @@ "@babel/helper-validator-identifier" "^7.22.20" to-fast-properties "^2.0.0" -"@babel/types@^7.23.6", "@babel/types@^7.23.9": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.9.tgz#1dd7b59a9a2b5c87f8b41e52770b5ecbf492e002" - integrity sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q== - dependencies: - "@babel/helper-string-parser" "^7.23.4" - "@babel/helper-validator-identifier" "^7.22.20" - to-fast-properties "^2.0.0" - "@babel/types@^7.24.0": version "7.24.0" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.24.0.tgz#3b951f435a92e7333eba05b7566fd297960ea1bf" @@ -3308,11 +3229,6 @@ resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz" integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== -"@polka/url@^1.0.0-next.24": - version "1.0.0-next.24" - resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.24.tgz#58601079e11784d20f82d0585865bb42305c4df3" - integrity sha512-2LuNTFBIO0m7kKIQvvPHN6UE63VjpmL9rnEEaOOaiSPbZK+zUOYIzBAWcED+3XYzhYsd/0mD57VdxAEqqV52CQ== - "@radix-ui/primitive@1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.1.tgz#e46f9958b35d10e9f6dc71c497305c22e3e55dbd" @@ -3870,11 +3786,6 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== - "@sinonjs/commons@^1.7.0": version "1.8.6" resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.6.tgz" @@ -3889,96 +3800,6 @@ dependencies: "@sinonjs/commons" "^1.7.0" -"@svgr/babel-plugin-add-jsx-attribute@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz#74a5d648bd0347bda99d82409d87b8ca80b9a1ba" - integrity sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ== - -"@svgr/babel-plugin-remove-jsx-attribute@*": - version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186" - integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA== - -"@svgr/babel-plugin-remove-jsx-empty-expression@*": - version "8.0.0" - resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44" - integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA== - -"@svgr/babel-plugin-replace-jsx-attribute-value@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.5.1.tgz#fb9d22ea26d2bc5e0a44b763d4c46d5d3f596c60" - integrity sha512-8DPaVVE3fd5JKuIC29dqyMB54sA6mfgki2H2+swh+zNJoynC8pMPzOkidqHOSc6Wj032fhl8Z0TVn1GiPpAiJg== - -"@svgr/babel-plugin-svg-dynamic-title@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.5.1.tgz#01b2024a2b53ffaa5efceaa0bf3e1d5a4c520ce4" - integrity sha512-FwOEi0Il72iAzlkaHrlemVurgSQRDFbk0OC8dSvD5fSBPHltNh7JtLsxmZUhjYBZo2PpcU/RJvvi6Q0l7O7ogw== - -"@svgr/babel-plugin-svg-em-dimensions@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.5.1.tgz#dd3fa9f5b24eb4f93bcf121c3d40ff5facecb217" - integrity sha512-gWGsiwjb4tw+ITOJ86ndY/DZZ6cuXMNE/SjcDRg+HLuCmwpcjOktwRF9WgAiycTqJD/QXqL2f8IzE2Rzh7aVXA== - -"@svgr/babel-plugin-transform-react-native-svg@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.5.1.tgz#1d8e945a03df65b601551097d8f5e34351d3d305" - integrity sha512-2jT3nTayyYP7kI6aGutkyfJ7UMGtuguD72OjeGLwVNyfPRBD8zQthlvL+fAbAKk5n9ZNcvFkp/b1lZ7VsYqVJg== - -"@svgr/babel-plugin-transform-svg-component@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.5.1.tgz#48620b9e590e25ff95a80f811544218d27f8a250" - integrity sha512-a1p6LF5Jt33O3rZoVRBqdxL350oge54iZWHNI6LJB5tQ7EelvD/Mb1mfBiZNAan0dt4i3VArkFRjA4iObuNykQ== - -"@svgr/babel-preset@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-6.5.1.tgz#b90de7979c8843c5c580c7e2ec71f024b49eb828" - integrity sha512-6127fvO/FF2oi5EzSQOAjo1LE3OtNVh11R+/8FXa+mHx1ptAaS4cknIjnUA7e6j6fwGGJ17NzaTJFUwOV2zwCw== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" "^6.5.1" - "@svgr/babel-plugin-remove-jsx-attribute" "*" - "@svgr/babel-plugin-remove-jsx-empty-expression" "*" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^6.5.1" - "@svgr/babel-plugin-svg-dynamic-title" "^6.5.1" - "@svgr/babel-plugin-svg-em-dimensions" "^6.5.1" - "@svgr/babel-plugin-transform-react-native-svg" "^6.5.1" - "@svgr/babel-plugin-transform-svg-component" "^6.5.1" - -"@svgr/core@^6.2.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/core/-/core-6.5.1.tgz#d3e8aa9dbe3fbd747f9ee4282c1c77a27410488a" - integrity sha512-/xdLSWxK5QkqG524ONSjvg3V/FkNyCv538OIBdQqPNaAta3AsXj/Bd2FbvR87yMbXO2hFSWiAe/Q6IkVPDw+mw== - dependencies: - "@babel/core" "^7.19.6" - "@svgr/babel-preset" "^6.5.1" - "@svgr/plugin-jsx" "^6.5.1" - camelcase "^6.2.0" - cosmiconfig "^7.0.1" - -"@svgr/hast-util-to-babel-ast@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.5.1.tgz#81800bd09b5bcdb968bf6ee7c863d2288fdb80d2" - integrity sha512-1hnUxxjd83EAxbL4a0JDJoD3Dao3hmjvyvyEV8PzWmLK3B9m9NPlW7GKjFyoWE8nM7HnXzPcmmSyOW8yOddSXw== - dependencies: - "@babel/types" "^7.20.0" - 
entities "^4.4.0" - -"@svgr/plugin-jsx@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-6.5.1.tgz#0e30d1878e771ca753c94e69581c7971542a7072" - integrity sha512-+UdQxI3jgtSjCykNSlEMuy1jSRQlGC7pqBCPvkG/2dATdWo082zHTTK3uhnAju2/6XpE6B5mZ3z4Z8Ns01S8Gw== - dependencies: - "@babel/core" "^7.19.6" - "@svgr/babel-preset" "^6.5.1" - "@svgr/hast-util-to-babel-ast" "^6.5.1" - svg-parser "^2.0.4" - -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - "@testing-library/dom@^8.5.0": version "8.20.0" resolved "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz" @@ -5478,13 +5299,6 @@ an-array@^1.0.0: resolved "https://registry.npmjs.org/an-array/-/an-array-1.0.0.tgz" integrity sha512-M175GYI7RmsYu24Ok383yZQa3eveDfNnmhTe3OQ3bm70bEovz2gWenH+ST/n32M8lrwLWk74hcPds5CDRPe2wg== -ansi-align@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.1.tgz#0cdf12e111ace773a86e9a1fad1225c43cb19a59" - integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== - dependencies: - string-width "^4.1.0" - ansi-colors@^4.1.1: version "4.1.3" resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz" @@ -5790,18 +5604,17 @@ asynckit@^0.4.0: resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== -autoprefixer@^9.0.0: - version "9.8.8" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.8.tgz#fd4bd4595385fa6f06599de749a4d5f7a474957a" - integrity sha512-eM9d/swFopRt5gdJ7jrpCwgvEMIayITpojhkkSMRsFHYuH5bkSQ4p/9qTEHtmNudUZh22Tehu7I6CxAW0IXTKA== +autoprefixer@10.4.19: + version "10.4.19" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.19.tgz#ad25a856e82ee9d7898c59583c1afeb3fa65f89f" + integrity sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew== dependencies: - browserslist "^4.12.0" - caniuse-lite "^1.0.30001109" + browserslist "^4.23.0" + caniuse-lite "^1.0.30001599" + fraction.js "^4.3.7" normalize-range "^0.1.2" - num2fraction "^1.2.2" - picocolors "^0.2.1" - postcss "^7.0.32" - postcss-value-parser "^4.1.0" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" available-typed-arrays@^1.0.5: version "1.0.5" @@ -6366,20 +6179,6 @@ boolbase@^1.0.0: resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== -boxen@^5.0.0: - version "5.1.2" - resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50" - integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ== - dependencies: - ansi-align "^3.0.0" - camelcase "^6.2.0" - chalk "^4.1.0" - cli-boxes "^2.2.1" - string-width "^4.2.2" - type-fest "^0.20.2" - widest-line "^3.1.0" - wrap-ansi "^7.0.0" - brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" @@ -6421,7 +6220,7 @@ browser-process-hrtime@^1.0.0: resolved 
"https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: version "4.21.5" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz" integrity sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w== @@ -6451,6 +6250,16 @@ browserslist@^4.22.2: node-releases "^2.0.14" update-browserslist-db "^1.0.13" +browserslist@^4.23.0: + version "4.23.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.0.tgz#8f3acc2bbe73af7213399430890f86c63a5674ab" + integrity sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ== + dependencies: + caniuse-lite "^1.0.30001587" + electron-to-chromium "^1.4.668" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + bs-logger@0.x: version "0.2.6" resolved "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz" @@ -6506,19 +6315,6 @@ bytes@3.1.2: resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - caching-transform@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz" @@ -6564,7 +6360,7 @@ camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.1.0, camelcase@^6.2.0, camelcase@^6.3.0: +camelcase@^6.1.0, camelcase@^6.2.0: version "6.3.0" resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== @@ -6584,7 +6380,7 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001449: +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001449: version "1.0.30001481" resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001481.tgz" integrity sha512-KCqHwRnaa1InZBtqXzP98LPg0ajCVujMKjqKDhZEthIpAsJl/YEIa3YvXjGXPVqzZVguccuu7ga9KOE1J9rKPQ== @@ -6599,6 +6395,11 @@ caniuse-lite@^1.0.30001580: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz#a0bce920155fa56a1885a69c74e1163fc34b4881" integrity sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA== +caniuse-lite@^1.0.30001587, caniuse-lite@^1.0.30001599: + version "1.0.30001621" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001621.tgz#4adcb443c8b9c8303e04498318f987616b8fea2e" + integrity 
sha512-+NLXZiviFFKX0fk8Piwv3PfLPGtRqJeq2TiNoUff/qB5KJgwecJTvCXDpmlyP/eCI/GUEmp/h/y5j0yckiiZrA== + canvas-color-tracker@1: version "1.2.1" resolved "https://registry.npmjs.org/canvas-color-tracker/-/canvas-color-tracker-1.2.1.tgz" @@ -6646,7 +6447,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: version "4.1.2" resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6752,11 +6553,6 @@ chrome-trace-event@^1.0.2: resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== -ci-info@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - ci-info@^3.2.0: version "3.8.0" resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz" @@ -6784,11 +6580,6 @@ clean-stack@^2.0.0: resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -cli-boxes@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" - integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== - cli-cursor@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" @@ -6858,13 +6649,6 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" - integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== - dependencies: - mimic-response "^1.0.0" - clone@^1.0.2: version "1.0.4" resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" @@ -7001,7 +6785,7 @@ commander@^8.3.0: resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== -commander@^9.2.0, commander@^9.3.0: +commander@^9.3.0: version "9.5.0" resolved "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz" integrity sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ== @@ -7075,18 +6859,6 @@ concat-map@0.0.1: resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -configstore@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" - integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== - dependencies: - dot-prop "^5.2.0" - graceful-fs "^4.1.2" - make-dir "^3.0.0" - unique-string "^2.0.0" - write-file-atomic "^3.0.0" - xdg-basedir "^4.0.0" - confusing-browser-globals@^1.0.10: version "1.0.11" resolved 
"https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz" @@ -7097,16 +6869,6 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -connect@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" - integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== - dependencies: - debug "2.6.9" - finalhandler "1.1.2" - parseurl "~1.3.3" - utils-merge "1.0.1" - constant-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" @@ -7241,7 +7003,7 @@ cosmiconfig-typescript-loader@^2.0.0: cosmiconfig "^7" ts-node "^10.8.1" -cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: +cosmiconfig@^7, cosmiconfig@^7.0.0: version "7.1.0" resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz" integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== @@ -7313,11 +7075,6 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - css-box-model@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1" @@ -7846,11 +7603,6 @@ deep-equal@^2.0.5: which-collection "^1.0.1" which-typed-array "^1.1.9" -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deep-is@^0.1.3, deep-is@~0.1.3: version "0.1.4" resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" @@ -7882,11 +7634,6 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== - define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" @@ -7932,11 +7679,6 @@ destroy@1.2.0: resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== -detect-file@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" - integrity sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q== - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" @@ -8118,7 +7860,7 @@ dot-case@^3.0.4: no-case "^3.0.4" tslib "^2.0.3" -dot-prop@^5.1.0, dot-prop@^5.2.0: +dot-prop@^5.1.0: version "5.3.0" 
resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== @@ -8145,11 +7887,6 @@ dtype@^2.0.0: resolved "https://registry.npmjs.org/dtype/-/dtype-2.0.0.tgz" integrity sha512-s2YVcLKdFGS0hpFqJaTwscsyt0E8nNFdmo73Ocd81xNPj4URI4rj6D60A+vFMIw7BXWlb4yRkEwfBqcZzPGiZg== -duplexer3@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" - integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== - duplexer@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -8185,6 +7922,11 @@ electron-to-chromium@^1.4.648: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.667.tgz#2767d998548e5eeeaf8bdaffd67b56796bfbed3d" integrity sha512-66L3pLlWhTNVUhnmSA5+qDM3fwnXsM6KAqE36e2w4KN0g6pkEtlT5bs41FQtQwVwKnfhNBXiWRLPs30HSxd7Kw== +electron-to-chromium@^1.4.668: + version "1.4.777" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.777.tgz#f846fbba23fd11b3c6f97848cdda94896fdb8baf" + integrity sha512-n02NCwLJ3wexLfK/yQeqfywCblZqLcXphzmid5e8yVPdtEcida7li0A5WQKghHNG0FeOMCzeFOzEbtAh5riXFw== + emitter-component@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/emitter-component/-/emitter-component-1.1.1.tgz" @@ -8215,13 +7957,6 @@ encodeurl@~1.0.2: resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - enhanced-resolve@^5.13.0, enhanced-resolve@^5.7.0: version "5.13.0" resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.13.0.tgz" @@ -8421,11 +8156,6 @@ escalade@^3.1.1: resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== -escape-goat@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675" - integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== - escape-html@~1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" @@ -8868,13 +8598,6 @@ expand-tilde@^1.2.2: dependencies: os-homedir "^1.0.1" -expand-tilde@^2.0.0, expand-tilde@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" - integrity sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw== - dependencies: - homedir-polyfill "^1.0.1" - expect-playwright@^0.8.0: version "0.8.0" resolved "https://registry.npmjs.org/expect-playwright/-/expect-playwright-0.8.0.tgz" @@ -9070,19 +8793,6 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -finalhandler@1.1.2: - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.3" - statuses "~1.5.0" - unpipe "~1.0.0" - finalhandler@1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" @@ -9121,14 +8831,6 @@ find-file-up@^0.1.2: fs-exists-sync "^0.1.0" resolve-dir "^0.1.0" -find-node-modules@^2.1.3: - version "2.1.3" - resolved "https://registry.yarnpkg.com/find-node-modules/-/find-node-modules-2.1.3.tgz#3c976cff2ca29ee94b4f9eafc613987fc4c0ee44" - integrity sha512-UC2I2+nx1ZuOBclWVNdcnbDR5dlrOdVb7xNjmT/lHE+LsgztWks3dG7boJ37yTS/venXw84B/mAW9uHVoC5QRg== - dependencies: - findup-sync "^4.0.0" - merge "^2.1.1" - find-pkg@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/find-pkg/-/find-pkg-0.1.2.tgz" @@ -9169,16 +8871,6 @@ find-up@^6.3.0: locate-path "^7.1.0" path-exists "^5.0.0" -findup-sync@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-4.0.0.tgz#956c9cdde804052b881b428512905c4a5f2cdef0" - integrity sha512-6jvvn/12IC4quLBL1KNokxC7wWTvYncaVUYSoxWw7YykPLuRrnv4qdHcSOywOI5RpkOVGeQRtWM8/q+G6W6qfQ== - dependencies: - detect-file "^1.0.0" - is-glob "^4.0.0" - micromatch "^4.0.2" - resolve-dir "^1.0.1" - flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" @@ -9272,6 +8964,11 @@ forwarded@0.2.0: resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== +fraction.js@^4.3.7: + version "4.3.7" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" + integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== + fresh@0.5.2: version "0.5.2" resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" @@ -9402,20 +9099,6 @@ get-package-type@^0.1.0: resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - get-stream@^6.0.0: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" @@ -9491,13 +9174,6 @@ global-dirs@^0.1.1: dependencies: ini "^1.3.4" -global-dirs@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.1.tgz#0c488971f066baceda21447aecb1a8b911d22485" - integrity sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA== - dependencies: - ini "2.0.0" - global-modules@^0.2.3: version "0.2.3" resolved 
"https://registry.npmjs.org/global-modules/-/global-modules-0.2.3.tgz" @@ -9506,15 +9182,6 @@ global-modules@^0.2.3: global-prefix "^0.1.4" is-windows "^0.2.0" -global-modules@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" - integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== - dependencies: - global-prefix "^1.0.1" - is-windows "^1.0.1" - resolve-dir "^1.0.0" - global-prefix@^0.1.4: version "0.1.5" resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-0.1.5.tgz" @@ -9525,17 +9192,6 @@ global-prefix@^0.1.4: is-windows "^0.2.0" which "^1.2.12" -global-prefix@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" - integrity sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg== - dependencies: - expand-tilde "^2.0.2" - homedir-polyfill "^1.0.1" - ini "^1.3.4" - is-windows "^1.0.1" - which "^1.2.14" - global@^4.3.0, global@~4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/global/-/global-4.4.0.tgz" @@ -9582,23 +9238,6 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -got@^9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" @@ -9665,11 +9304,6 @@ has-tostringtag@^1.0.0: dependencies: has-symbols "^1.0.2" -has-yarn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" - integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== - has@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" @@ -9965,7 +9599,7 @@ hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react- dependencies: react-is "^16.7.0" -homedir-polyfill@^1.0.0, homedir-polyfill@^1.0.1: +homedir-polyfill@^1.0.0: version "1.0.3" resolved "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== @@ -10062,11 +9696,6 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" - integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== - http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" @@ -10228,11 +9857,6 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" 
-import-lazy@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" - integrity sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A== - import-lazy@~4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153" @@ -10279,12 +9903,7 @@ inherits@2.0.3: resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== -ini@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5" - integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== - -ini@^1.3.4, ini@~1.3.0: +ini@^1.3.4: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== @@ -10449,13 +10068,6 @@ is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-ci@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" - integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== - dependencies: - ci-info "^2.0.0" - is-ci@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz" @@ -10555,14 +10167,6 @@ is-hexadecimal@^2.0.0: resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027" integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg== -is-installed-globally@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520" - integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ== - dependencies: - global-dirs "^3.0.0" - is-path-inside "^3.0.2" - is-interactive@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" @@ -10583,11 +10187,6 @@ is-node-process@^1.2.0: resolved "https://registry.yarnpkg.com/is-node-process/-/is-node-process-1.2.0.tgz#ea02a1b90ddb3934a19aea414e88edef7e11d134" integrity sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw== -is-npm@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8" - integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== - is-number-object@^1.0.4: version "1.0.7" resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" @@ -10610,11 +10209,6 @@ is-obj@^2.0.0: resolved "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-path-inside@^3.0.2: - version "3.0.3" - resolved 
"https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" - integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== - is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz" @@ -10753,7 +10347,7 @@ is-windows@^0.2.0: resolved "https://registry.npmjs.org/is-windows/-/is-windows-0.2.0.tgz" integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q== -is-windows@^1.0.1, is-windows@^1.0.2: +is-windows@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== @@ -10765,11 +10359,6 @@ is-wsl@^2.2.0: dependencies: is-docker "^2.0.0" -is-yarn-global@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232" - integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== - isarray@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" @@ -11171,27 +10760,6 @@ jest-pnp-resolver@^1.2.2: resolved "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz" integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== -jest-preview@0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/jest-preview/-/jest-preview-0.3.1.tgz#5445ba977b06cafb30c9d8489b9cb549f258ccaa" - integrity sha512-gRR4shnXFSh8tdNaIncJC98d1zXD7w7LA52HQC0bu0DsPb+FXVEg+NQh9GTbO+n6/SCgcZNQAVt4MeCfsIkBPA== - dependencies: - "@svgr/core" "^6.2.1" - camelcase "^6.3.0" - chalk "^4.1.2" - chokidar "^3.5.3" - commander "^9.2.0" - connect "^3.7.0" - find-node-modules "^2.1.3" - open "^8.4.0" - postcss-import "^14.1.0" - postcss-load-config "^4.0.1" - sirv "^2.0.2" - slash "^3.0.0" - string-hash "^1.1.3" - update-notifier "^5.1.0" - ws "^8.5.0" - jest-process-manager@^0.3.1: version "0.3.1" resolved "https://registry.npmjs.org/jest-process-manager/-/jest-process-manager-0.3.1.tgz" @@ -11522,11 +11090,6 @@ jsesc@~0.5.0: resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== - json-parse-better-errors@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" @@ -11627,13 +11190,6 @@ kapsule@1, kapsule@^1.14: dependencies: lodash-es "4" -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" @@ -11671,13 +11227,6 @@ language-tags@=1.0.5: dependencies: language-subtag-registry "~0.3.2" -latest-version@^5.1.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== - dependencies: - package-json "^6.3.0" - launch-editor@^2.6.0: version "2.6.0" resolved "https://registry.npmjs.org/launch-editor/-/launch-editor-2.6.0.tgz" @@ -11779,11 +11328,6 @@ lilconfig@^2.0.5, lilconfig@^2.1.0: resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz" integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== -lilconfig@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.0.0.tgz#f8067feb033b5b74dab4602a5f5029420be749bc" - integrity sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g== - lines-and-columns@^1.1.6: version "1.2.4" resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" @@ -11984,16 +11528,6 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - lowlight@^1.17.0: version "1.20.0" resolved "https://registry.yarnpkg.com/lowlight/-/lowlight-1.20.0.tgz#ddb197d33462ad0d93bf19d17b6c301aa3941888" @@ -12383,11 +11917,6 @@ merge2@^1.3.0, merge2@^1.4.1: resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -merge@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/merge/-/merge-2.1.1.tgz#59ef4bf7e0b3e879186436e8481c06a6c162ca98" - integrity sha512-jz+Cfrg9GWOZbQAnDQ4hlVnQky+341Yk5ru8bZSe6sIDTCIg8n9i/u7hSQGSVOF3C7lH6mGtqjkiT9G4wFLL0w== - methods@~1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" @@ -12827,7 +12356,7 @@ mimic-fn@^2.1.0: resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^1.0.0, mimic-response@^1.0.1: +mimic-response@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== @@ -12937,11 +12466,6 @@ mrmime@^1.0.0: resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz" integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== -mrmime@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-2.0.0.tgz#151082a6e06e59a9a39b46b3e14d5cfe92b3abb4" - integrity sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw== - ms@2.1.2: version "2.1.2" resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" @@ -13026,6 +12550,11 @@ nanoid@^3.3.6: resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz" integrity 
sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== +nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== + natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" @@ -13185,11 +12714,6 @@ normalize-range@^0.1.2: resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - nosleep.js@^0.7.0: version "0.7.0" resolved "https://registry.npmjs.org/nosleep.js/-/nosleep.js-0.7.0.tgz" @@ -13229,11 +12753,6 @@ nth-check@^2.0.0, nth-check@^2.0.1: dependencies: boolbase "^1.0.0" -num2fraction@^1.2.2: - version "1.2.2" - resolved "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz" - integrity sha512-Y1wZESM7VUThYY+4W+X4ySH2maqcA+p7UR+w8VWNWVAd6lwuXXWz/w/Cz43J/dI2I+PS6wD5N+bJUF+gjWvIqg== - nwsapi@^2.2.0: version "2.2.4" resolved "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.4.tgz" @@ -13386,19 +12905,12 @@ on-finished@2.4.1: dependencies: ee-first "1.1.1" -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww== - dependencies: - ee-first "1.1.1" - on-headers@~1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== -once@^1.3.0, once@^1.3.1, once@^1.4.0: +once@^1.3.0, once@^1.3.1: version "1.4.0" resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== @@ -13419,7 +12931,7 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -open@^8.0.9, open@^8.4.0: +open@^8.0.9: version "8.4.2" resolved "https://registry.npmjs.org/open/-/open-8.4.2.tgz" integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== @@ -13487,11 +12999,6 @@ outvariant@^1.2.1, outvariant@^1.4.0: resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.0.tgz#e742e4bda77692da3eca698ef5bfac62d9fba06e" integrity sha512-AlWY719RF02ujitly7Kk/0QlV+pXGFDHrHf9O2OKqyqgBieaPOIeuSkL8sRK6j2WK+/ZAURq2kZsY0d8JapUiw== -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== - p-limit@^2.2.0: version "2.3.0" resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" @@ -13571,16 +13078,6 @@ package-hash@^4.0.0: lodash.flattendeep "^4.4.0" release-zalgo "^1.0.0" -package-json@^6.3.0: - version "6.5.0" - resolved 
"https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - pako@^2.0.4: version "2.1.0" resolved "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz" @@ -13832,11 +13329,6 @@ phin@^2.9.1: resolved "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz" integrity sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA== -picocolors@^0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - picocolors@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" @@ -13857,11 +13349,6 @@ pidtree@^0.5.0: resolved "https://registry.npmjs.org/pidtree/-/pidtree-0.5.0.tgz" integrity sha512-9nxspIM7OpZuhBxPg73Zvyq7j1QMPMPsGKTqRc2XOaFQauDvoNz9fM1Wdkjmeo7l9GXOZiRs97sPkuayl39wjA== -pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - pify@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" @@ -13966,15 +13453,6 @@ postcss-discard-overridden@^6.0.0: resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.0.tgz#49c5262db14e975e349692d9024442de7cd8e234" integrity sha512-4VELwssYXDFigPYAZ8vL4yX4mUepF/oCBeeIT4OXsJPYOtvJumyz9WflmJWTfDwCUcpDR+z0zvCWBXgTx35SVw== -postcss-import@^14.1.0: - version "14.1.0" - resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" - integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== - dependencies: - postcss-value-parser "^4.0.0" - read-cache "^1.0.0" - resolve "^1.1.7" - postcss-load-config@^3.1.4: version "3.1.4" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" @@ -13983,14 +13461,6 @@ postcss-load-config@^3.1.4: lilconfig "^2.0.5" yaml "^1.10.2" -postcss-load-config@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.2.tgz#7159dcf626118d33e299f485d6afe4aff7c4a3e3" - integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ== - dependencies: - lilconfig "^3.0.0" - yaml "^2.3.4" - postcss-merge-longhand@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.0.tgz#6f627b27db939bce316eaa97e22400267e798d69" @@ -14185,18 +13655,19 @@ postcss-unique-selectors@^6.0.0: dependencies: postcss-selector-parser "^6.0.5" -postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: +postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: version "4.2.0" resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== 
-postcss@^7.0.32: - version "7.0.39" - resolved "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== +postcss@8.4.38: + version "8.4.38" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.38.tgz#b387d533baf2054288e337066d81c6bee9db9e0e" + integrity sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A== dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" + nanoid "^3.3.7" + picocolors "^1.0.0" + source-map-js "^1.2.0" postcss@^8.0.0, postcss@^8.4.21, postcss@^8.4.24: version "8.4.29" @@ -14235,11 +13706,6 @@ prelude-ls@~1.1.2: resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz" integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== - present@0.0.6: version "0.0.6" resolved "https://registry.npmjs.org/present/-/present-0.0.6.tgz" @@ -14387,26 +13853,11 @@ psl@^1.1.33: resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - punycode@^2.1.0, punycode@^2.1.1: version "2.3.0" resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== -pupa@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" - integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== - dependencies: - escape-goat "^2.0.0" - q@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" @@ -14885,16 +14336,6 @@ rc-virtual-list@^3.11.1, rc-virtual-list@^3.5.1, rc-virtual-list@^3.5.2: rc-resize-observer "^1.0.0" rc-util "^5.36.0" -rc@1.2.8, rc@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-addons-update@15.6.3: version "15.6.3" resolved "https://registry.yarnpkg.com/react-addons-update/-/react-addons-update-15.6.3.tgz#c449c309154024d04087b206d0400e020547b313" @@ -15208,13 +14649,6 @@ react@18.2.0: dependencies: loose-envify "^1.1.0" -read-cache@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" - integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== - dependencies: - pify "^2.3.0" - read-pkg-up@^7.0.1: version "7.0.1" resolved 
"https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz" @@ -15392,20 +14826,6 @@ regexpu-core@^5.3.1: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" -registry-auth-token@^4.0.0: - version "4.2.2" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.2.tgz#f02d49c3668884612ca031419491a13539e21fac" - integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg== - dependencies: - rc "1.2.8" - -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== - dependencies: - rc "^1.2.8" - regjsparser@^0.9.1: version "0.9.1" resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz" @@ -15646,14 +15066,6 @@ resolve-dir@^0.1.0: expand-tilde "^1.2.2" global-modules "^0.2.3" -resolve-dir@^1.0.0, resolve-dir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" - integrity sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg== - dependencies: - expand-tilde "^2.0.0" - global-modules "^1.0.0" - resolve-from@5.0.0, resolve-from@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" @@ -15688,15 +15100,6 @@ resolve.exports@^1.1.0: resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.1.tgz" integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== -resolve@^1.1.7, resolve@~1.22.1: - version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" - integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - resolve@^1.10.0, resolve@^1.10.1, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.9.0: version "1.22.2" resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz" @@ -15723,12 +15126,14 @@ resolve@~1.19.0: is-core-module "^2.1.0" path-parse "^1.0.6" -responselike@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== +resolve@~1.22.1: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: - lowercase-keys "^1.0.0" + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" restore-cursor@^3.1.0: version "3.1.0" @@ -15957,14 +15362,7 @@ selfsigned@^2.1.1: dependencies: node-forge "^1" -semver-diff@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" - integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== - dependencies: - semver "^6.3.0" - -"semver@2 || 3 || 4 || 5", semver@7.3.7, 
semver@7.5.4, semver@7.x, semver@^5.5.0, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4, semver@~7.5.4: +"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.5.0, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4, semver@~7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== @@ -16151,15 +15549,6 @@ sirv@^1.0.7: mrmime "^1.0.0" totalist "^1.0.0" -sirv@^2.0.2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.4.tgz#5dd9a725c578e34e449f332703eb2a74e46a29b0" - integrity sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ== - dependencies: - "@polka/url" "^1.0.0-next.24" - mrmime "^2.0.0" - totalist "^3.0.0" - sisteransi@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz" @@ -16241,6 +15630,11 @@ source-list-map@^2.0.0: resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz" integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== +source-map-js@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af" + integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg== + source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" @@ -16408,7 +15802,7 @@ statuses@2.0.1: resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== -"statuses@>= 1.4.0 < 2", statuses@~1.5.0: +"statuses@>= 1.4.0 < 2": version "1.5.0" resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== @@ -16459,11 +15853,6 @@ string-convert@^0.2.0: resolved "https://registry.npmjs.org/string-convert/-/string-convert-0.2.1.tgz" integrity sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A== -string-hash@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/string-hash/-/string-hash-1.1.3.tgz#e8aafc0ac1855b4666929ed7dd1275df5d6c811b" - integrity sha512-kJUvRUFK49aub+a7T1nNE66EJbZBMnBgoC1UbCZ5n6bsZKBRga4KgBRTMn/pFkeCZSYtNeSyMxPDM0AXWELk2A== - string-length@^4.0.1: version "4.0.2" resolved "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz" @@ -16472,7 +15861,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -16603,11 +15992,6 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1, strip-json-comments@~3.1 resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== - style-dictionary@3.8.0: version "3.8.0" resolved "https://registry.yarnpkg.com/style-dictionary/-/style-dictionary-3.8.0.tgz#7cb8d64360c53431f768d44def665f61e971a73e" @@ -16727,11 +16111,6 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -svg-parser@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" - integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== - svgo@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.0.2.tgz#5e99eeea42c68ee0dc46aa16da093838c262fe0a" @@ -16951,11 +16330,6 @@ to-fast-properties@^2.0.0: resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" @@ -16978,11 +16352,6 @@ totalist@^1.0.0: resolved "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz" integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g== -totalist@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" - integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ== - tough-cookie@^4.0.0: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" @@ -17294,13 +16663,6 @@ unified@^10.0.0, unified@^10.1.2, unified@~10.1.1: trough "^2.0.0" vfile "^5.0.0" -unique-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - unist-util-filter@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/unist-util-filter/-/unist-util-filter-4.0.1.tgz#fd885dd48adaad345de5f5dc706ec4ff44a8d074" @@ -17460,26 +16822,6 @@ update-browserslist-db@^1.0.13: escalade "^3.1.1" picocolors "^1.0.0" -update-notifier@^5.1.0: - version 
"5.1.0" - resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9" - integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw== - dependencies: - boxen "^5.0.0" - chalk "^4.1.0" - configstore "^5.0.1" - has-yarn "^2.1.0" - import-lazy "^2.1.0" - is-ci "^2.0.0" - is-installed-globally "^0.4.0" - is-npm "^5.0.0" - is-yarn-global "^0.3.0" - latest-version "^5.1.0" - pupa "^2.1.1" - semver "^7.3.4" - semver-diff "^3.1.1" - xdg-basedir "^4.0.0" - uplot@1.6.24: version "1.6.24" resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.24.tgz#dfa213fa7da92763261920ea972ed1a5f9f6af12" @@ -17511,13 +16853,6 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== - dependencies: - prepend-http "^2.0.0" - url-parse@^1.5.3: version "1.5.10" resolved "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz" @@ -18144,7 +17479,7 @@ which-typed-array@^1.1.9: has-tostringtag "^1.0.0" is-typed-array "^1.1.10" -which@^1.2.12, which@^1.2.14, which@^1.2.9: +which@^1.2.12, which@^1.2.9: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== @@ -18158,13 +17493,6 @@ which@^2.0.1, which@^2.0.2: dependencies: isexe "^2.0.0" -widest-line@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" - integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== - dependencies: - string-width "^4.0.0" - wildcard@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz" @@ -18231,16 +17559,6 @@ ws@^8.13.0: resolved "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz" integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== -ws@^8.5.0: - version "8.16.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.16.0.tgz#d1cd774f36fbc07165066a60e40323eab6446fd4" - integrity sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ== - -xdg-basedir@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" - integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== - xhr-request@^1.0.1: version "1.1.0" resolved "https://registry.npmjs.org/xhr-request/-/xhr-request-1.1.0.tgz" @@ -18340,11 +17658,6 @@ yaml@^1.10.0, yaml@^1.10.2: resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yaml@^2.3.4: - version "2.3.4" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" - integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== - yargs-parser@20.x, yargs-parser@^20.2.2, yargs-parser@^20.2.3: version "20.2.9" resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz" From 
cbb9fd51f886069d9f6e339a29869ed34750889e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 12:15:47 +0530 Subject: [PATCH 20/23] --- (#5056) updated-dependencies: - dependency-name: postcss dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/yarn.lock | 34 +--------------------------------- 1 file changed, 1 insertion(+), 33 deletions(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 071498c6fb..af8d30616c 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -12545,11 +12545,6 @@ nanoid@^2.0.3: resolved "https://registry.npmjs.org/nanoid/-/nanoid-2.1.11.tgz" integrity sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA== -nanoid@^3.3.6: - version "3.3.6" - resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz" - integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== - nanoid@^3.3.7: version "3.3.7" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" @@ -13660,7 +13655,7 @@ postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^ resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== -postcss@8.4.38: +postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21, postcss@^8.4.24: version "8.4.38" resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.38.tgz#b387d533baf2054288e337066d81c6bee9db9e0e" integrity sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A== @@ -13669,33 +13664,6 @@ postcss@8.4.38: picocolors "^1.0.0" source-map-js "^1.2.0" -postcss@^8.0.0, postcss@^8.4.21, postcss@^8.4.24: - version "8.4.29" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.29.tgz#33bc121cf3b3688d4ddef50be869b2a54185a1dd" - integrity sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw== - dependencies: - nanoid "^3.3.6" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -postcss@^8.1.1: - version "8.4.31" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" - integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== - dependencies: - nanoid "^3.3.6" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -postcss@^8.3.7: - version "8.4.23" - resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.23.tgz" - integrity sha512-bQ3qMcpF6A/YjR55xtoTr0jGOlnPOKAIMdOWiv0EIT6HVPEaJiJB4NLljSbiHoC2RX7DN5Uvjtpbg1NPdwv1oA== - dependencies: - nanoid "^3.3.6" - picocolors "^1.0.0" - source-map-js "^1.0.2" - prelude-ls@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" From f2b0387a1b8386d7118c4e8b2b1ab5e0970a4f32 Mon Sep 17 00:00:00 2001 From: hulk Date: Wed, 22 May 2024 15:12:13 +0800 Subject: [PATCH 21/23] feat: allow using the consistent naming with the signoz collector (#4865) Co-authored-by: Srikanth Chekuri --- pkg/query-service/main.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index 72962cfeef..793ce25bf2 100644 --- a/pkg/query-service/main.go +++ b/pkg/query-service/main.go 
@@ -53,6 +53,8 @@ func main() { flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)") flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)") flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')") + // Allow using the consistent naming with the signoz collector + flag.StringVar(&cluster, "cluster-name", "cluster", "(cluster name - defaults to 'cluster')") flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") From 4887a1d8dd9f85f457e0a6176bd206adc8d2dfc2 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Wed, 22 May 2024 16:00:49 +0530 Subject: [PATCH 22/23] feat: added helper text, learn-more and doc-link (#5024) * feat: added helper text, learn-more and doc-link * feat: added collapse and uncollapse text on hover * feat: added learn-more doc link for trace & logs explorer * feat: added learn-more doc link * feat: added learn-more doc link * chore: added alert links and minor updates * feat: added learn-more doc link --------- Co-authored-by: makeavish --- .../ExplorerOptions.styles.scss | 8 ++ .../ExplorerOptions/ExplorerOptions.tsx | 23 ++++ .../FormAlertRules/UserGuide/index.tsx | 2 +- .../container/ListAlertRules/ListAlert.tsx | 3 +- .../ListOfDashboard/DashboardsList.tsx | 3 +- .../LeftContainer/QuerySection/index.tsx | 5 +- .../Layouts/Pipeline/CreatePipelineButton.tsx | 2 +- .../PipelineListsView/PipelineListsView.tsx | 2 +- .../PipelinePageLayout.test.tsx.snap | 2 +- .../container/QueryBuilder/QueryBuilder.tsx | 36 +++++- .../QBEntityOptions/QBEntityOptions.tsx | 34 ++--- .../QueryBuilder/components/Query/Query.tsx | 117 ++++++++++++++---- .../QueryFunctions/QueryFunctions.tsx | 21 +++- .../src/pages/SaveView/SaveView.styles.scss | 3 + frontend/src/pages/SaveView/index.tsx | 9 +- 15 files changed, 217 insertions(+), 53 deletions(-) diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss index 7f6ac6a4dd..8af1e4ad0a 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss @@ -64,6 +64,10 @@ .view-options, .actions { + .info-icon { + padding: 8px; + } + .hidden { display: none; } @@ -252,6 +256,10 @@ color: var(--bg-ink-200); background-color: var(--bg-vanilla-300); } + + .info-icon { + color: var(--bg-ink-200); + } } } diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx index 0e9fd3704b..7253b45b94 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx @@ -1,6 +1,7 @@ /* eslint-disable react/jsx-props-no-spreading */ import './ExplorerOptions.styles.scss'; +import { InfoCircleOutlined } from '@ant-design/icons'; import { Color } from '@signozhq/design-tokens'; import { Button, @@ -402,6 +403,28 @@ function ExplorerOptions({
+ + {sourcepage === DataSource.LOGS + ? 'Learn more about Logs explorer ' + : 'Learn more about Traces explorer '} + + {' '} + here + {' '} +
+ } + > + + - + + Add New Formula + + {' '} +
+ Learn more +
+ + } + > diff --git a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx index a2ec473921..652518f50d 100644 --- a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx +++ b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx @@ -66,21 +66,25 @@ export default function QBEntityOptions({
- - + + + + + + {entityType === 'query' && ( diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.tsx b/frontend/src/container/QueryBuilder/components/Query/Query.tsx index fb8b0e1561..074443364e 100644 --- a/frontend/src/container/QueryBuilder/components/Query/Query.tsx +++ b/frontend/src/container/QueryBuilder/components/Query/Query.tsx @@ -1,7 +1,7 @@ /* eslint-disable sonarjs/cognitive-complexity */ import './Query.styles.scss'; -import { Col, Input, Row } from 'antd'; +import { Col, Input, Row, Tooltip, Typography } from 'antd'; import { ENTITY_VERSION_V4 } from 'constants/app'; // ** Constants import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder'; @@ -367,11 +367,29 @@ export const Query = memo(function Query({ {version && version === 'v3' && ( - + + Select Aggregate Operator + + {' '} +
+ Learn more +
+
+ } + > + + )} @@ -388,12 +406,30 @@ export const Query = memo(function Query({ Array.isArray(operators) && operators.length > 0 && ( - + + Select Aggregate Operator + + {' '} +
+ Learn more +
+
+ } + > + +
)} @@ -422,11 +458,28 @@ export const Query = memo(function Query({ - + + Select Aggregate Operator + + {' '} +
+ Learn more +
+ + } + > + +
- + + Name of legend + + {' '} +
+ Learn more +
+ + } + > + +
)} diff --git a/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx index 7dfdd99c9c..017047b507 100644 --- a/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx +++ b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx @@ -1,6 +1,6 @@ import './QueryFunctions.styles.scss'; -import { Button, Tooltip } from 'antd'; +import { Button, Tooltip, Typography } from 'antd'; import cx from 'classnames'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { cloneDeep, pullAt } from 'lodash-es'; @@ -180,9 +180,22 @@ export default function QueryFunctions({ = 3 - ? 'Functions are in early access. You can add a maximum of 3 function as of now.' - : '' + functions && functions.length >= 3 ? ( + 'Functions are in early access. You can add a maximum of 3 function as of now.' + ) : ( +
+ Add new function + + {' '} +
+ Learn more +
+
+ ) } placement="right" > diff --git a/frontend/src/pages/SaveView/SaveView.styles.scss b/frontend/src/pages/SaveView/SaveView.styles.scss index 292a0b8d06..1e4b7bf0f6 100644 --- a/frontend/src/pages/SaveView/SaveView.styles.scss +++ b/frontend/src/pages/SaveView/SaveView.styles.scss @@ -25,6 +25,9 @@ line-height: 20px; /* 142.857% */ letter-spacing: -0.07px; } + .learn-more { + font-size: 14px; + } .ant-input-affix-wrapper { margin-top: 16px; diff --git a/frontend/src/pages/SaveView/index.tsx b/frontend/src/pages/SaveView/index.tsx index 2ba5535fc4..86a511291e 100644 --- a/frontend/src/pages/SaveView/index.tsx +++ b/frontend/src/pages/SaveView/index.tsx @@ -282,7 +282,14 @@ function SaveView(): JSX.Element {
Views - Manage your saved views for {ROUTES_VS_SOURCEPAGE[pathname]}. + Manage your saved views for {ROUTES_VS_SOURCEPAGE[pathname]}.{' '} + + Learn more + Date: Wed, 22 May 2024 21:45:38 +0530 Subject: [PATCH 23/23] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?= =?UTF-8?q?sions:=20SigNoz=200.46.0,=20SigNoz=20OtelCollector=200.88.24?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- .versions-golang | 8 ++++++++ deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++---- deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++-- deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++---- go.mod | 2 +- go.sum | 4 ++-- pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++-- 7 files changed, 23 insertions(+), 15 deletions(-) create mode 100644 .versions-golang diff --git a/.versions-golang b/.versions-golang new file mode 100644 index 0000000000..bc26b1c17f --- /dev/null +++ b/.versions-golang @@ -0,0 +1,8 @@ +#### Auto generated by make versions/golang. DO NOT EDIT! #### +amd64=128d7baad667abc0e41a85673026a2cf9449ef40f384baf424aee45bc13f9235 +arm=a5f77dc34ccae0d43269675508aab8fa9078ded6fa3e2dcee54f7c230018100d +arm64=1cdad16d01542a57caca4b0a6893a5b69d711d69dd6bb4483c77c1d092baec41 +386=0c82e5195d14caa5daa01ea06a70139e7ea1edbd366c83259227c7d9965d4c5a +mips64le=25967f27f76031f31cd3ae2173958e151d8d961ca186ab4328af7a1895139a66 +ppc64le=6fa49b4730622b79560a1fc2677b02a1ee7aac5b28490a2bda6134050108fb3a +s390x=4e2c0198c3db1c769e8e2e8a1e504dbb5e3eff0dad62f8f5c543b4823a89d81b diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 43b17d3f80..e6354bb35e 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.45.0 + image: signoz/query-service:0.46.0 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.45.0 + image: signoz/frontend:0.46.0 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.22 + image: signoz/signoz-otel-collector:0.88.24 command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.22 + image: signoz/signoz-schema-migrator:0.88.24 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 6c0b0c7b2c..cf1e5f1ed4 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.22 + image: signoz/signoz-otel-collector:0.88.24 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index fca1149704..12b91b6992 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.45.0} + image: signoz/query-service:${DOCKER_TAG:-0.46.0} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.45.0} + image: signoz/frontend:${DOCKER_TAG:-0.46.0} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.22} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24} container_name: signoz-otel-collector command: [ diff --git a/go.mod b/go.mod index 5ea9a547ae..51d72be2f8 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/ClickHouse/clickhouse-go/v2 v2.20.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.88.22 + github.com/SigNoz/signoz-otel-collector v0.88.24 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 diff --git a/go.sum b/go.sum index 513d62c409..e0a7e14e85 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.11.0 h1:toX7fU2wqY1TnzvPzDglIYx6OxpqrZ0NNlM/H5S5+u8= github.com/SigNoz/prometheus v1.11.0/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.22 h1:PW9TpdQ8b8vWnUKWVe/w1bX8/Rq2MUUHGDIsx+KA+o0= -github.com/SigNoz/signoz-otel-collector v0.88.22/go.mod h1:sT1EM9PFDaOJLbAz5npWpgXK6OhpWJ9PpSwyhHWs9rU= +github.com/SigNoz/signoz-otel-collector v0.88.24 h1:6ESLmQtYPHmik9ZZFSJSbfuj4VQ1/0IC3v1qV9hm5Nk= +github.com/SigNoz/signoz-otel-collector v0.88.24/go.mod h1:sT1EM9PFDaOJLbAz5npWpgXK6OhpWJ9PpSwyhHWs9rU= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml 
index c6af0bc058..396b059157 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: signoz/signoz-otel-collector:0.88.22 + image: signoz/signoz-otel-collector:0.88.24 container_name: signoz-otel-collector command: [