From fd9a5020120fd921ea5c25ae74dd60e2415952b0 Mon Sep 17 00:00:00 2001
From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com>
Date: Fri, 29 Dec 2023 21:55:38 +0530
Subject: [PATCH 01/39] fix: opamp server: do not panic if config generation
fails (#4307)
---
pkg/query-service/app/opamp/model/agent.go | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/pkg/query-service/app/opamp/model/agent.go b/pkg/query-service/app/opamp/model/agent.go
index 4bc5f2f3f0..1eef7bb4cf 100644
--- a/pkg/query-service/app/opamp/model/agent.go
+++ b/pkg/query-service/app/opamp/model/agent.go
@@ -4,7 +4,6 @@ import (
"bytes"
"context"
"crypto/sha256"
- "fmt"
"sync"
"time"
@@ -259,7 +258,7 @@ func (agent *Agent) processStatusUpdate(
// If remote config is changed and different from what the Agent has then
// send the new remote config to the Agent.
if configChanged ||
- (agent.Status.RemoteConfigStatus != nil &&
+ (agent.Status.RemoteConfigStatus != nil && agent.remoteConfig != nil &&
!bytes.Equal(agent.Status.RemoteConfigStatus.LastRemoteConfigHash, agent.remoteConfig.ConfigHash)) {
// The new status resulted in a change in the config of the Agent or the Agent
// does not have this config (hash is different). Send the new config the Agent.
@@ -277,8 +276,8 @@ func (agent *Agent) processStatusUpdate(
func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool {
recommendedConfig, confId, err := configProvider.RecommendAgentConfig([]byte(agent.EffectiveConfig))
if err != nil {
- // The server must always recommend a config.
- panic(fmt.Errorf("could not generate config recommendation for agent %s: %w", agent.ID, err))
+ zap.S().Error("could not generate config recommendation for agent:", agent.ID, err)
+ return false
}
cfg := protobufs.AgentRemoteConfig{
From 105216de3e7a29c6e06df3282723edf45847697c Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Sat, 30 Dec 2023 22:53:09 +0530
Subject: [PATCH 02/39] chore: add prepare query for delta/unspecified
timeseries (#4167)
* chore: update BuilderQuery struct and add PrepareTimeseriesFilterQuery
* chore: add prepare query for cumulative/unspecified timeseries
* chore: add prepare query for delta/unspecified timeseries
* chore: update group by to work with 23.11+
* chore: fix test
---------
Co-authored-by: Nityananda Gohain
---
.../app/metrics/v4/delta/helper.go | 61 +++++
.../app/metrics/v4/delta/time_series_test.go | 229 ++++++++++++++++++
.../app/metrics/v4/delta/timeseries.go | 120 +++++++++
3 files changed, 410 insertions(+)
create mode 100644 pkg/query-service/app/metrics/v4/delta/helper.go
create mode 100644 pkg/query-service/app/metrics/v4/delta/time_series_test.go
create mode 100644 pkg/query-service/app/metrics/v4/delta/timeseries.go
diff --git a/pkg/query-service/app/metrics/v4/delta/helper.go b/pkg/query-service/app/metrics/v4/delta/helper.go
new file mode 100644
index 0000000000..972120fc15
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/helper.go
@@ -0,0 +1,61 @@
+package delta
+
+import (
+ "fmt"
+ "strings"
+
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+// groupingSets returns a string of comma separated tags for group by clause
+// `ts` is always added to the group by clause
+func groupingSets(tags ...string) string {
+ withTs := append(tags, "ts")
+ if len(withTs) > 1 {
+ return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
+ } else {
+ return strings.Join(withTs, ", ")
+ }
+}
+
+// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
+func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
+ groupTags := []string{}
+ for _, tag := range tags {
+ groupTags = append(groupTags, tag.Key)
+ }
+ return groupingSets(groupTags...)
+}
+
+// groupBy returns a string of comma separated tags for group by clause
+func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
+ groupTags := []string{}
+ for _, tag := range tags {
+ groupTags = append(groupTags, tag.Key)
+ }
+ groupTags = append(groupTags, "ts")
+ return strings.Join(groupTags, ", ")
+}
+
+// orderBy returns a string of comma separated tags for order by clause
+// if the order is not specified, it defaults to ASC
+func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
+ var orderBy []string
+ for _, tag := range tags {
+ found := false
+ for _, item := range items {
+ if item.ColumnName == tag.Key {
+ found = true
+ orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order))
+ break
+ }
+ }
+ if !found {
+ orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag.Key))
+ }
+ }
+
+ orderBy = append(orderBy, "ts ASC")
+
+ return strings.Join(orderBy, ", ")
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
new file mode 100644
index 0000000000..d22aa12961
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
@@ -0,0 +1,229 @@
+package delta
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTimeAggregationSubQuery(t *testing.T) {
+ // The time aggregation is performed for each unique series - since the fingerprint represents the
+ // unique hash of label set, we always group by fingerprint regardless of the GroupBy
+ // This sub result is then aggregated on dimensions using the provided GroupBy clause keys
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "payment_service",
+ },
+ {
+ Key: v3.AttributeKey{
+ Key: "endpoint",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorIn,
+ Value: []interface{}{"/paycallback", "/payme", "/paypal"},
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
+ },
+ {
+ name: "test time aggregation = rate, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := prepareTimeAggregationSubQueryTimeSeries(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+func TestPrepareTimeseriesQuery(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := prepareMetricQueryDeltaTimeSeries(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/timeseries.go b/pkg/query-service/app/metrics/v4/delta/timeseries.go
new file mode 100644
index 0000000000..f9a9e265c3
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/timeseries.go
@@ -0,0 +1,120 @@
+package delta
+
+import (
+ "fmt"
+
+ v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+)
+
+// prepareTimeAggregationSubQueryTimeSeries builds the sub-query to be used for temporal aggregation
+func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ var subQuery string
+
+ timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
+ if err != nil {
+ return "", err
+ }
+
+ samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
+
+ // Select the aggregate value for interval
+ queryTmpl :=
+ "SELECT fingerprint, %s" +
+ " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
+ " %s as per_series_value" +
+ " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
+ " INNER JOIN" +
+ " (%s) as filtered_time_series" +
+ " USING fingerprint" +
+ " WHERE " + samplesTableFilter +
+ " GROUP BY fingerprint, ts" +
+ " ORDER BY fingerprint, ts"
+
+ var selectLabelsAny string
+ for _, tag := range mq.GroupBy {
+ selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
+ }
+
+ var selectLabels string
+ for _, tag := range mq.GroupBy {
+ selectLabels += tag.Key + ","
+ }
+
+ switch mq.TimeAggregation {
+ case v3.TimeAggregationAvg:
+ op := "avg(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationSum:
+ op := "sum(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationMin:
+ op := "min(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationMax:
+ op := "max(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationCount:
+ op := "count(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationCountDistinct:
+ op := "count(distinct(value))"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationAnyLast:
+ op := "anyLast(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationRate:
+ op := fmt.Sprintf("sum(value)/%d", step)
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationIncrease:
+ op := "sum(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ }
+ return subQuery, nil
+}
+
+// prepareMetricQueryDeltaTimeSeries builds the query to be used for fetching metrics
+func prepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
From bdd7778e585339b5188754727a484a218d66a088 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Tue, 2 Jan 2024 17:42:36 +0530
Subject: [PATCH 03/39] update readme.md (#3814)
* chore: update readme.md - fe maintainers
* chore: update code owner for frontend codebase
---
.github/CODEOWNERS | 2 +-
README.de-de.md | 19 ++++---------------
README.zh-cn.md | 16 +++++++++-------
3 files changed, 14 insertions(+), 23 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4008dd426a..573be5f290 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.
-/frontend/ @palashgdev @YounixM
+/frontend/ @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @prashant-shahi
diff --git a/README.de-de.md b/README.de-de.md
index 5c61097a15..1fdbcbda15 100644
--- a/README.de-de.md
+++ b/README.de-de.md
@@ -11,7 +11,6 @@
-
Dokumentation •
Readme auf Englisch •
@@ -40,12 +39,13 @@ SigNoz hilft Entwicklern, Anwendungen zu überwachen und Probleme in ihren berei
👉 Einfache Einrichtung von Benachrichtigungen mit dem selbst erstellbaren Abfrage-Builder.
##
+
### Anwendung Metriken

-
### Verteiltes Tracing
+
@@ -62,22 +62,18 @@ SigNoz hilft Entwicklern, Anwendungen zu überwachen und Probleme in ihren berei

-
### Alarme
-
-
## Werde Teil unserer Slack Community
Sag Hi zu uns auf [Slack](https://signoz.io/slack) 👋
-
## Funktionen:
- Einheitliche Benutzeroberfläche für Metriken, Traces und Logs. Keine Notwendigkeit, zwischen Prometheus und Jaeger zu wechseln, um Probleme zu debuggen oder ein separates Log-Tool wie Elastic neben Ihrer Metriken- und Traces-Stack zu verwenden.
@@ -93,7 +89,6 @@ Sag Hi zu uns auf [Slack](https://signoz.io/slack) 👋
-
## Wieso SigNoz?
Als Entwickler fanden wir es anstrengend, uns für jede kleine Funktion, die wir haben wollten, auf Closed Source SaaS Anbieter verlassen zu müssen. Closed Source Anbieter überraschen ihre Kunden zum Monatsende oft mit hohen Rechnungen, die keine Transparenz bzgl. der Kostenaufteilung bieten.
@@ -116,12 +111,10 @@ Wir unterstützen [OpenTelemetry](https://opentelemetry.io) als Bibliothek, mit
- Elixir
- Rust
-
Hier findest du die vollständige Liste von unterstützten Programmiersprachen - https://opentelemetry.io/docs/
-
## Erste Schritte mit SigNoz
### Bereitstellung mit Docker
@@ -138,7 +131,6 @@ Bitte folge den [hier](https://signoz.io/docs/deployment/helm_chart) aufgelistet
-
## Vergleiche mit bekannten Tools
### SigNoz vs Prometheus
@@ -179,7 +171,6 @@ Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen S
-
## Zum Projekt beitragen
Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md), durch, bevor du anfängst, Beiträge zu SigNoz zu machen.
@@ -197,6 +188,8 @@ Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #c
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
+- [Yunus M](https://github.com/YounixM)
+- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
@@ -204,16 +197,12 @@ Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #c
-
## Dokumentation
Du findest unsere Dokumentation unter https://signoz.io/docs/. Falls etwas unverständlich ist oder fehlt, öffne gerne ein Github Issue mit dem Label `documentation` oder schreib uns über den Community Slack Channel.
-
-
-
## Gemeinschaft
Werde Teil der [slack community](https://signoz.io/slack) um mehr über verteilte Einzelschritt-Fehlersuche, Messung von Systemzuständen oder SigNoz zu erfahren und sich mit anderen Nutzern und Mitwirkenden in Verbindung zu setzen.
diff --git a/README.zh-cn.md b/README.zh-cn.md
index 32b6328fcb..445474f6ba 100644
--- a/README.zh-cn.md
+++ b/README.zh-cn.md
@@ -19,7 +19,7 @@
Twitter
-##
+##
SigNoz 帮助开发人员监控应用并排查已部署应用的问题。你可以使用 SigNoz 实现如下能力:
@@ -67,7 +67,7 @@ SigNoz 帮助开发人员监控应用并排查已部署应用的问题。你可
## 加入我们 Slack 社区
-来 [Slack](https://signoz.io/slack) 和我们打招呼吧 👋
+来 [Slack](https://signoz.io/slack) 和我们打招呼吧 👋
@@ -83,7 +83,7 @@ SigNoz 帮助开发人员监控应用并排查已部署应用的问题。你可
- 通过 服务名、操作方式、延迟、错误、标签/注释 过滤 traces 数据
-- 通过聚合 trace 数据而获得业务相关的 metrics。 比如你可以通过 `customer_type: gold` 或者 `deployment_version: v2` 或者 `external_call: paypal` 获取错误率和 P99 延迟数据
+- 通过聚合 trace 数据而获得业务相关的 metrics。 比如你可以通过 `customer_type: gold` 或者 `deployment_version: v2` 或者 `external_call: paypal` 获取错误率和 P99 延迟数据
- 原生支持 OpenTelemetry 日志,高级日志查询,自动收集 k8s 相关日志
@@ -101,7 +101,7 @@ SigNoz 帮助开发人员监控应用并排查已部署应用的问题。你可
我们想做一个自托管并且可开源的工具,像 DataDog 和 NewRelic 那样, 为那些担心数据隐私和安全的公司提供第三方服务。
-作为开源的项目,你完全可以自己掌控你的配置、样本和更新。你同样可以基于 SigNoz 拓展特定的业务模块。
+作为开源的项目,你完全可以自己掌控你的配置、样本和更新。你同样可以基于 SigNoz 拓展特定的业务模块。
### 支持的编程语言:
@@ -153,9 +153,9 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric
而且, SigNoz 相较于 Jaeger 拥有更对的高级功能:
-- Jaegar UI 不能提供任何基于 traces 的 metrics 查询和过滤。
+- Jaegar UI 不能提供任何基于 traces 的 metrics 查询和过滤。
-- Jaeger 不能针对过滤的 traces 做聚合。 比如, p99 延迟的请求有个标签是 customer_type='premium'。 而这些在 SigNoz 可以轻松做到。
+- Jaeger 不能针对过滤的 traces 做聚合。 比如, p99 延迟的请求有个标签是 customer_type='premium'。 而这些在 SigNoz 可以轻松做到。
 
@@ -185,7 +185,7 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric
我们 ❤️ 你的贡献,无论大小。 请先阅读 [CONTRIBUTING.md](CONTRIBUTING.md) 再开始给 SigNoz 做贡献。
-如果你不知道如何开始? 只需要在 [slack 社区](https://signoz.io/slack) 通过 `#contributing` 频道联系我们。
+如果你不知道如何开始? 只需要在 [slack 社区](https://signoz.io/slack) 通过 `#contributing` 频道联系我们。
### 项目维护人员
@@ -199,6 +199,8 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric
#### 前端
- [Palash Gupta](https://github.com/palashgdev)
+- [Yunus M](https://github.com/YounixM)
+- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### 运维开发
From 7d960b79dd34ae8413b2dab0a10224eec64b34a1 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Fri, 5 Jan 2024 11:15:31 +0530
Subject: [PATCH 04/39] feat: update sidebar and base theme styles (#4272)
* feat: update sidebar and base theme styles
* feat: update sidebar items and styles
* feat: wire up logs navigation and update user settings page
* feat: update styles to handle light mode, add full view header
* feat: update onboarding header and styles
* feat: remove unused routes
* feat: handle sidebar collapse
* feat: show pointer on logo hover
* feat: fix logs module navigations
* feat: update logo click route
* feat: update entity name color to primary in application and dashboard tables
* feat: update sidebar item styles
* feat: update collapse icon and styles
* fix: name not updated in menu on change
* fix: show invite members nav item
* fix: open invite members modal on invite team member nav item click
---
frontend/package.json | 1 +
frontend/public/Logos/signoz-brand-logo.svg | 11 +
frontend/public/locales/en/titles.json | 1 +
frontend/src/AppRoutes/pageComponents.ts | 16 +-
frontend/src/AppRoutes/routes.ts | 8 +
.../src/components/Logs/RawLogView/styles.ts | 5 +-
frontend/src/constants/routes.ts | 12 +-
frontend/src/constants/theme.ts | 1 +
.../src/container/AllAlertChannels/styles.ts | 1 +
.../container/AppLayout/AppLayout.styles.scss | 53 +++
frontend/src/container/AppLayout/index.tsx | 89 ++++-
frontend/src/container/AppLayout/styles.ts | 1 +
.../FullViewHeader/FullViewHeader.styles.scss | 37 ++
.../FullViewHeader/FullViewHeader.tsx | 28 ++
.../ListOfDashboard/DashboardsList.tsx | 2 +-
.../ListOfDashboard/TableComponents/styles.ts | 3 +-
.../src/container/LiveLogsTopNav/styles.ts | 7 +-
frontend/src/container/LocalTopNav/index.tsx | 26 +-
frontend/src/container/LocalTopNav/styles.ts | 2 +-
.../src/container/LogDetailedView/index.tsx | 2 +-
frontend/src/container/LogsTopNav/index.tsx | 1 +
frontend/src/container/LogsTopNav/styles.ts | 7 +-
.../MySettings/MySettings.styles.scss | 5 +
.../container/MySettings/Password/index.tsx | 122 +++---
.../MySettings/UserInfo/UserInfo.styles.scss | 7 +
.../{UpdateName => UserInfo}/index.tsx | 71 ++--
frontend/src/container/MySettings/index.tsx | 26 +-
.../src/container/NewExplorerCTA/config.ts | 2 +-
.../src/container/NewExplorerCTA/index.tsx | 7 +-
.../Onboarding.styles.scss | 2 +-
.../OnboardingContainer.tsx | 7 +-
.../ModuleStepsContainer.styles.scss | 45 +++
.../ModuleStepsContainer.tsx | 17 +-
.../QueryBuilderSearch/OptionRenderer.tsx | 6 +-
.../filters/QueryBuilderSearch/style.ts | 11 +-
.../container/ServiceApplication/styles.ts | 3 +-
frontend/src/container/ServiceTable/styles.ts | 2 +-
.../SideNav/NavItem/NavItem.styles.scss | 112 ++++++
.../src/container/SideNav/NavItem/NavItem.tsx | 31 ++
.../src/container/SideNav/SideNav.styles.scss | 172 +++++++++
frontend/src/container/SideNav/SideNav.tsx | 357 +++++++++++++-----
frontend/src/container/SideNav/config.ts | 1 +
frontend/src/container/SideNav/menuItems.tsx | 113 +++---
.../src/container/SideNav/sideNav.types.ts | 3 +-
.../container/TopNav/Breadcrumbs/index.tsx | 1 +
.../TopNav/DateTimeSelection/config.ts | 3 +
frontend/src/container/TopNav/index.tsx | 28 +-
frontend/src/container/TopNav/styles.ts | 2 +-
frontend/src/hooks/logs/useActiveLog.ts | 6 +-
frontend/src/hooks/useDarkMode/index.tsx | 5 +
frontend/src/index.html.ejs | 2 +-
frontend/src/index.tsx | 2 -
frontend/src/pages/Logs/index.tsx | 4 +-
.../pages/LogsModulePage/LogsModulePage.tsx | 28 ++
frontend/src/pages/LogsModulePage/index.tsx | 3 +
frontend/src/pages/Pipelines/index.tsx | 2 +-
.../WorkspaceLocked.styles.scss | 2 +-
.../pages/WorkspaceLocked/WorkspaceLocked.tsx | 79 ++--
frontend/src/styles.scss | 2 +
frontend/src/utils/permission/index.ts | 2 +
frontend/yarn.lock | 114 +++++-
61 files changed, 1347 insertions(+), 374 deletions(-)
create mode 100644 frontend/public/Logos/signoz-brand-logo.svg
create mode 100644 frontend/src/container/AppLayout/AppLayout.styles.scss
create mode 100644 frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
create mode 100644 frontend/src/container/FullViewHeader/FullViewHeader.tsx
create mode 100644 frontend/src/container/MySettings/MySettings.styles.scss
create mode 100644 frontend/src/container/MySettings/UserInfo/UserInfo.styles.scss
rename frontend/src/container/MySettings/{UpdateName => UserInfo}/index.tsx (58%)
create mode 100644 frontend/src/container/SideNav/NavItem/NavItem.styles.scss
create mode 100644 frontend/src/container/SideNav/NavItem/NavItem.tsx
create mode 100644 frontend/src/container/SideNav/SideNav.styles.scss
create mode 100644 frontend/src/pages/LogsModulePage/LogsModulePage.tsx
create mode 100644 frontend/src/pages/LogsModulePage/index.tsx
diff --git a/frontend/package.json b/frontend/package.json
index f0edc5c959..4a57943602 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -36,6 +36,7 @@
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
"@monaco-editor/react": "^4.3.1",
+ "@signozhq/design-tokens": "0.0.6",
"@uiw/react-md-editor": "3.23.5",
"@xstate/react": "^3.0.0",
"ansi-to-html": "0.7.2",
diff --git a/frontend/public/Logos/signoz-brand-logo.svg b/frontend/public/Logos/signoz-brand-logo.svg
new file mode 100644
index 0000000000..aaa8a77669
--- /dev/null
+++ b/frontend/public/Logos/signoz-brand-logo.svg
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/public/locales/en/titles.json b/frontend/public/locales/en/titles.json
index 24b0f45269..71ec805100 100644
--- a/frontend/public/locales/en/titles.json
+++ b/frontend/public/locales/en/titles.json
@@ -31,6 +31,7 @@
"NOT_FOUND": "SigNoz | Page Not Found",
"LOGS": "SigNoz | Logs",
"LOGS_EXPLORER": "SigNoz | Logs Explorer",
+ "OLD_LOGS_EXPLORER": "SigNoz | Old Logs Explorer",
"LIVE_LOGS": "SigNoz | Live Logs",
"LOGS_PIPELINES": "SigNoz | Logs Pipelines",
"HOME_PAGE": "Open source Observability Platform | SigNoz",
diff --git a/frontend/src/AppRoutes/pageComponents.ts b/frontend/src/AppRoutes/pageComponents.ts
index 638c019506..aca7de9730 100644
--- a/frontend/src/AppRoutes/pageComponents.ts
+++ b/frontend/src/AppRoutes/pageComponents.ts
@@ -112,17 +112,25 @@ export const MySettings = Loadable(
);
export const Logs = Loadable(
- () => import(/* webpackChunkName: "Logs" */ 'pages/Logs'),
+ () => import(/* webpackChunkName: "Logs" */ 'pages/LogsModulePage'),
);
export const LogsExplorer = Loadable(
- () => import(/* webpackChunkName: "Logs Explorer" */ 'pages/LogsExplorer'),
+ () => import(/* webpackChunkName: "Logs Explorer" */ 'pages/LogsModulePage'),
+);
+
+export const OldLogsExplorer = Loadable(
+ () => import(/* webpackChunkName: "Logs Explorer" */ 'pages/Logs'),
);
export const LiveLogs = Loadable(
() => import(/* webpackChunkName: "Live Logs" */ 'pages/LiveLogs'),
);
+export const PipelinePage = Loadable(
+ () => import(/* webpackChunkName: "Pipelines" */ 'pages/LogsModulePage'),
+);
+
export const Login = Loadable(
() => import(/* webpackChunkName: "Login" */ 'pages/Login'),
);
@@ -151,10 +159,6 @@ export const LogsIndexToFields = Loadable(
import(/* webpackChunkName: "LogsIndexToFields Page" */ 'pages/LogsSettings'),
);
-export const PipelinePage = Loadable(
- () => import(/* webpackChunkName: "Pipelines" */ 'pages/Pipelines'),
-);
-
export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
);
diff --git a/frontend/src/AppRoutes/routes.ts b/frontend/src/AppRoutes/routes.ts
index 2f4142c809..6fa3accde0 100644
--- a/frontend/src/AppRoutes/routes.ts
+++ b/frontend/src/AppRoutes/routes.ts
@@ -23,6 +23,7 @@ import {
LogsIndexToFields,
MySettings,
NewDashboardPage,
+ OldLogsExplorer,
Onboarding,
OrganizationSettings,
PasswordReset,
@@ -246,6 +247,13 @@ const routes: AppRoutes[] = [
key: 'LOGS_EXPLORER',
isPrivate: true,
},
+ {
+ path: ROUTES.OLD_LOGS_EXPLORER,
+ exact: true,
+ component: OldLogsExplorer,
+ key: 'OLD_LOGS_EXPLORER',
+ isPrivate: true,
+ },
{
path: ROUTES.LIVE_LOGS,
exact: true,
diff --git a/frontend/src/components/Logs/RawLogView/styles.ts b/frontend/src/components/Logs/RawLogView/styles.ts
index 4944d05f74..a3df1c3dca 100644
--- a/frontend/src/components/Logs/RawLogView/styles.ts
+++ b/frontend/src/components/Logs/RawLogView/styles.ts
@@ -48,8 +48,9 @@ export const RawLogContent = styled.div`
line-clamp: ${linesPerRow};
-webkit-box-orient: vertical;`};
- font-size: 1rem;
- line-height: 2rem;
+ font-size: 12px;
+ line-height: 24px;
+ padding: 4px;
cursor: ${({ $isActiveLog, $isReadOnly }): string =>
$isActiveLog || $isReadOnly ? 'initial' : 'pointer'};
diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts
index 208e83e525..39456318a7 100644
--- a/frontend/src/constants/routes.ts
+++ b/frontend/src/constants/routes.ts
@@ -6,7 +6,6 @@ const ROUTES = {
TRACE: '/trace',
TRACE_DETAIL: '/trace/:id',
TRACES_EXPLORER: '/traces-explorer',
- SETTINGS: '/settings',
GET_STARTED: '/get-started',
USAGE_EXPLORER: '/usage-explorer',
APPLICATION: '/services',
@@ -23,15 +22,18 @@ const ROUTES = {
ERROR_DETAIL: '/error-detail',
VERSION: '/status',
MY_SETTINGS: '/my-settings',
+ SETTINGS: '/settings',
ORG_SETTINGS: '/settings/org-settings',
INGESTION_SETTINGS: '/settings/ingestion-settings',
SOMETHING_WENT_WRONG: '/something-went-wrong',
UN_AUTHORIZED: '/un-authorized',
NOT_FOUND: '/not-found',
- LOGS: '/logs',
- LOGS_EXPLORER: '/logs-explorer',
- LIVE_LOGS: '/logs-explorer/live',
- LOGS_PIPELINES: '/pipelines',
+ LOGS_BASE: '/logs',
+ LOGS: '/logs/logs-explorer',
+ OLD_LOGS_EXPLORER: '/logs/old-logs-explorer',
+ LOGS_EXPLORER: '/logs/logs-explorer',
+ LIVE_LOGS: '/logs/logs-explorer/live',
+ LOGS_PIPELINES: '/logs/pipelines',
HOME_PAGE: '/',
PASSWORD_RESET: '/password-reset',
LIST_LICENSES: '/licenses',
diff --git a/frontend/src/constants/theme.ts b/frontend/src/constants/theme.ts
index 757926c0fe..427a13efe8 100644
--- a/frontend/src/constants/theme.ts
+++ b/frontend/src/constants/theme.ts
@@ -1,5 +1,6 @@
const themeColors = {
chartcolors: {
+ robin: '#3F5ECC',
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
diff --git a/frontend/src/container/AllAlertChannels/styles.ts b/frontend/src/container/AllAlertChannels/styles.ts
index 209860b867..454e48aeaf 100644
--- a/frontend/src/container/AllAlertChannels/styles.ts
+++ b/frontend/src/container/AllAlertChannels/styles.ts
@@ -15,6 +15,7 @@ export const ButtonContainer = styled.div`
align-items: center;
margin-top: 1rem;
margin-bottom: 1rem;
+ padding-right: 1rem;
}
`;
diff --git a/frontend/src/container/AppLayout/AppLayout.styles.scss b/frontend/src/container/AppLayout/AppLayout.styles.scss
new file mode 100644
index 0000000000..b62cab0a0d
--- /dev/null
+++ b/frontend/src/container/AppLayout/AppLayout.styles.scss
@@ -0,0 +1,53 @@
+@import '@signozhq/design-tokens';
+
+.app-layout {
+ height: 100%;
+ width: 100%;
+
+ .app-content {
+ width: 100%;
+ overflow: auto;
+ }
+}
+
+.isDarkMode {
+ .app-layout {
+ .app-content {
+ background: #0b0c0e;
+ }
+ }
+}
+
+.isLightMode {
+ .app-layout {
+ .app-content {
+ background: #ffffff;
+ }
+ }
+}
+
+.trial-expiry-banner {
+ padding: 8px;
+ background-color: #f25733;
+ color: white;
+ text-align: center;
+}
+
+.upgrade-link {
+ padding: 0px;
+ padding-right: 4px;
+ display: inline !important;
+ color: white;
+ text-decoration: underline;
+ text-decoration-color: white;
+ text-decoration-thickness: 2px;
+ text-underline-offset: 2px;
+
+ &:hover {
+ color: white;
+ text-decoration: underline;
+ text-decoration-color: white;
+ text-decoration-thickness: 2px;
+ text-underline-offset: 2px;
+ }
+}
diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx
index 15f71fc692..28b48223b4 100644
--- a/frontend/src/container/AppLayout/index.tsx
+++ b/frontend/src/container/AppLayout/index.tsx
@@ -1,13 +1,22 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+/* eslint-disable jsx-a11y/anchor-is-valid */
+import './AppLayout.styles.scss';
+
+import { Flex } from 'antd';
import getDynamicConfigs from 'api/dynamicConfigs/getDynamicConfigs';
import getUserLatestVersion from 'api/user/getLatestVersion';
import getUserVersion from 'api/user/getVersion';
+import cx from 'classnames';
import ROUTES from 'constants/routes';
-import Header from 'container/Header';
import SideNav from 'container/SideNav';
import TopNav from 'container/TopNav';
+import { useIsDarkMode } from 'hooks/useDarkMode';
+import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
+import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
-import { ReactNode, useEffect, useMemo, useRef } from 'react';
+import { ReactNode, useEffect, useMemo, useRef, useState } from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { Helmet } from 'react-helmet-async';
import { useTranslation } from 'react-i18next';
@@ -25,15 +34,20 @@ import {
UPDATE_LATEST_VERSION_ERROR,
} from 'types/actions/app';
import AppReducer from 'types/reducer/app';
+import { getFormattedDate, getRemainingDays } from 'utils/timeUtils';
import { ChildrenContainer, Layout, LayoutContent } from './styles';
import { getRouteKey } from './utils';
function AppLayout(props: AppLayoutProps): JSX.Element {
- const { isLoggedIn, user } = useSelector(
+ const { isLoggedIn, user, role } = useSelector(
(state) => state.app,
);
+ const isDarkMode = useIsDarkMode();
+
+ const { data: licenseData, isFetching } = useLicense();
+
const { pathname } = useLocation();
const { t } = useTranslation(['titles']);
@@ -196,25 +210,68 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
const renderFullScreen =
pathname === ROUTES.GET_STARTED || pathname === ROUTES.WORKSPACE_LOCKED;
+ const [showTrialExpiryBanner, setShowTrialExpiryBanner] = useState(false);
+
+ useEffect(() => {
+ if (
+ !isFetching &&
+ licenseData?.payload?.onTrial &&
+ !licenseData?.payload?.trialConvertedToSubscription &&
+ !licenseData?.payload?.workSpaceBlock &&
+ getRemainingDays(licenseData?.payload.trialEnd) < 7
+ ) {
+ setShowTrialExpiryBanner(true);
+ }
+ }, [licenseData, isFetching]);
+
+ const handleUpgrade = (): void => {
+ if (role === 'ADMIN') {
+ history.push(ROUTES.BILLING);
+ }
+ };
+
return (
-
+
{pageTitle}
- {isToDisplayLayout && }
-
- {isToDisplayLayout && !renderFullScreen && }
+ {showTrialExpiryBanner && (
+
+ You are in free trial period. Your free trial will end on{' '}
+
+ {getFormattedDate(licenseData?.payload?.trialEnd || Date.now())}.
+
+ {role === 'ADMIN' ? (
+
+ {' '}
+ Please{' '}
+
+ upgrade
+
+ to continue using SigNoz features.
+
+ ) : (
+ 'Please contact your administrator for upgrading to a paid plan.'
+ )}
+
+ )}
-
-
-
- {isToDisplayLayout && !renderFullScreen && }
- {children}
-
-
-
-
+
+ {isToDisplayLayout && !renderFullScreen && (
+
+ )}
+
+
+
+
+ {isToDisplayLayout && !renderFullScreen && }
+ {children}
+
+
+
+
+
);
}
diff --git a/frontend/src/container/AppLayout/styles.ts b/frontend/src/container/AppLayout/styles.ts
index 8bed914d66..f266087895 100644
--- a/frontend/src/container/AppLayout/styles.ts
+++ b/frontend/src/container/AppLayout/styles.ts
@@ -13,6 +13,7 @@ export const Layout = styled(LayoutComponent)`
export const LayoutContent = styled(LayoutComponent.Content)`
overflow-y: auto;
+ height: 100%;
`;
export const ChildrenContainer = styled.div`
diff --git a/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss b/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
new file mode 100644
index 0000000000..b894ea4f12
--- /dev/null
+++ b/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
@@ -0,0 +1,37 @@
+.full-view-header-container {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ padding: 24px 0;
+
+ .brand-logo {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ gap: 16px;
+ cursor: pointer;
+
+ img {
+ height: 32px;
+ width: 32px;
+ }
+
+ .brand-logo-name {
+ font-family: 'Work Sans', sans-serif;
+ font-size: 24px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 18px;
+
+ color: #fff;
+ }
+ }
+}
+
+.lightMode {
+ .brand-logo {
+ .brand-logo-name {
+ color: black;
+ }
+ }
+}
diff --git a/frontend/src/container/FullViewHeader/FullViewHeader.tsx b/frontend/src/container/FullViewHeader/FullViewHeader.tsx
new file mode 100644
index 0000000000..8fa19b8ee4
--- /dev/null
+++ b/frontend/src/container/FullViewHeader/FullViewHeader.tsx
@@ -0,0 +1,28 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+import './FullViewHeader.styles.scss';
+
+import history from 'lib/history';
+
+export default function FullViewHeader({
+ overrideRoute,
+}: {
+ overrideRoute?: string;
+}): React.ReactElement {
+ const handleLogoClick = (): void => {
+ history.push(overrideRoute || '/');
+ };
+ return (
+
+
+
+
+
SigNoz
+
+
+ );
+}
+
+FullViewHeader.defaultProps = {
+ overrideRoute: '/',
+};
diff --git a/frontend/src/container/ListOfDashboard/DashboardsList.tsx b/frontend/src/container/ListOfDashboard/DashboardsList.tsx
index 8bb56490e8..9b416c65f4 100644
--- a/frontend/src/container/ListOfDashboard/DashboardsList.tsx
+++ b/frontend/src/container/ListOfDashboard/DashboardsList.tsx
@@ -332,7 +332,7 @@ function DashboardsList(): JSX.Element {
);
return (
-
+
{GetHeader}
diff --git a/frontend/src/container/ListOfDashboard/TableComponents/styles.ts b/frontend/src/container/ListOfDashboard/TableComponents/styles.ts
index 78c382700b..477da90004 100644
--- a/frontend/src/container/ListOfDashboard/TableComponents/styles.ts
+++ b/frontend/src/container/ListOfDashboard/TableComponents/styles.ts
@@ -1,8 +1,7 @@
-import { blue } from '@ant-design/colors';
import { Typography } from 'antd';
import styled from 'styled-components';
export const TableLinkText = styled(Typography.Text)`
- color: ${blue.primary} !important;
+ color: #4e74f8 !important;
cursor: pointer;
`;
diff --git a/frontend/src/container/LiveLogsTopNav/styles.ts b/frontend/src/container/LiveLogsTopNav/styles.ts
index f6c58b9415..1fb150e1e0 100644
--- a/frontend/src/container/LiveLogsTopNav/styles.ts
+++ b/frontend/src/container/LiveLogsTopNav/styles.ts
@@ -1,19 +1,18 @@
import { Button, ButtonProps } from 'antd';
-import { themeColors } from 'constants/theme';
import styled, { css, FlattenSimpleInterpolation } from 'styled-components';
export const LiveButtonStyled = styled(Button)`
- background-color: rgba(${themeColors.buttonSuccessRgb}, 0.9);
+ background-color: #1eb475;
${({ danger }): FlattenSimpleInterpolation =>
!danger
? css`
&:hover {
- background-color: rgba(${themeColors.buttonSuccessRgb}, 1) !important;
+ background-color: #1eb475 !important;
}
&:active {
- background-color: rgba(${themeColors.buttonSuccessRgb}, 0.7) !important;
+ background-color: #1eb475 !important;
}
`
: css``}
diff --git a/frontend/src/container/LocalTopNav/index.tsx b/frontend/src/container/LocalTopNav/index.tsx
index 3de2f823ef..ff864e94c7 100644
--- a/frontend/src/container/LocalTopNav/index.tsx
+++ b/frontend/src/container/LocalTopNav/index.tsx
@@ -1,7 +1,9 @@
-import { Col, Row, Space } from 'antd';
+import { Col, Row, Space, Typography } from 'antd';
+import ROUTES from 'constants/routes';
import NewExplorerCTA from 'container/NewExplorerCTA';
+import { FileText } from 'lucide-react';
+import { useLocation } from 'react-use';
-import ShowBreadcrumbs from '../TopNav/Breadcrumbs';
import DateTimeSelector from '../TopNav/DateTimeSelection';
import { Container } from './styles';
import { LocalTopNavProps } from './types';
@@ -10,13 +12,25 @@ function LocalTopNav({
actions,
renderPermissions,
}: LocalTopNavProps): JSX.Element | null {
+ const { pathname } = useLocation();
+
+ const isLiveLogsPage = pathname === ROUTES.LIVE_LOGS;
+
return (
-
-
-
+ {isLiveLogsPage && (
+
+
+
-
+
+ Live Logs
+
+
+
+ )}
+
+
diff --git a/frontend/src/container/LocalTopNav/styles.ts b/frontend/src/container/LocalTopNav/styles.ts
index feda027d24..9c936b664b 100644
--- a/frontend/src/container/LocalTopNav/styles.ts
+++ b/frontend/src/container/LocalTopNav/styles.ts
@@ -3,7 +3,7 @@ import styled from 'styled-components';
export const Container = styled(Row)`
&&& {
- margin-top: 2rem;
+ margin-top: 1rem;
min-height: 8vh;
}
`;
diff --git a/frontend/src/container/LogDetailedView/index.tsx b/frontend/src/container/LogDetailedView/index.tsx
index fe5b2cd3af..588cc7e240 100644
--- a/frontend/src/container/LogDetailedView/index.tsx
+++ b/frontend/src/container/LogDetailedView/index.tsx
@@ -63,7 +63,7 @@ function LogDetailedView({
queryString,
);
- history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`);
+ history.replace(`${ROUTES.OLD_LOGS_EXPLORER}?q=${updatedQueryString}`);
},
[history, queryString],
);
diff --git a/frontend/src/container/LogsTopNav/index.tsx b/frontend/src/container/LogsTopNav/index.tsx
index 6bb8f93530..40ce480e30 100644
--- a/frontend/src/container/LogsTopNav/index.tsx
+++ b/frontend/src/container/LogsTopNav/index.tsx
@@ -74,6 +74,7 @@ function LogsTopNav(): JSX.Element {
icon={ }
onClick={handleGoLive}
type="primary"
+ size="small"
>
Go Live
diff --git a/frontend/src/container/LogsTopNav/styles.ts b/frontend/src/container/LogsTopNav/styles.ts
index f6c58b9415..1fb150e1e0 100644
--- a/frontend/src/container/LogsTopNav/styles.ts
+++ b/frontend/src/container/LogsTopNav/styles.ts
@@ -1,19 +1,18 @@
import { Button, ButtonProps } from 'antd';
-import { themeColors } from 'constants/theme';
import styled, { css, FlattenSimpleInterpolation } from 'styled-components';
export const LiveButtonStyled = styled(Button)`
- background-color: rgba(${themeColors.buttonSuccessRgb}, 0.9);
+ background-color: #1eb475;
${({ danger }): FlattenSimpleInterpolation =>
!danger
? css`
&:hover {
- background-color: rgba(${themeColors.buttonSuccessRgb}, 1) !important;
+ background-color: #1eb475 !important;
}
&:active {
- background-color: rgba(${themeColors.buttonSuccessRgb}, 0.7) !important;
+ background-color: #1eb475 !important;
}
`
: css``}
diff --git a/frontend/src/container/MySettings/MySettings.styles.scss b/frontend/src/container/MySettings/MySettings.styles.scss
new file mode 100644
index 0000000000..c936bcb20a
--- /dev/null
+++ b/frontend/src/container/MySettings/MySettings.styles.scss
@@ -0,0 +1,5 @@
+.flexBtn {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
diff --git a/frontend/src/container/MySettings/Password/index.tsx b/frontend/src/container/MySettings/Password/index.tsx
index 493f5662b7..0bc9513c4e 100644
--- a/frontend/src/container/MySettings/Password/index.tsx
+++ b/frontend/src/container/MySettings/Password/index.tsx
@@ -1,6 +1,7 @@
-import { Button, Space, Typography } from 'antd';
+import { Button, Card, Space, Typography } from 'antd';
import changeMyPassword from 'api/user/changeMyPassword';
import { useNotifications } from 'hooks/useNotifications';
+import { Save } from 'lucide-react';
import { isPasswordNotValidMessage, isPasswordValid } from 'pages/SignUp/utils';
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
@@ -20,9 +21,7 @@ function PasswordContainer(): JSX.Element {
false,
);
- const defaultPlaceHolder = t('input_password', {
- ns: 'settings',
- });
+ const defaultPlaceHolder = '*************';
const { notifications } = useNotifications();
@@ -89,66 +88,69 @@ function PasswordContainer(): JSX.Element {
currentPassword === updatePassword;
return (
-
-
- {t('change_password', {
- ns: 'settings',
- })}
-
-
-
- {t('current_password', {
+
+
+
+ {t('change_password', {
ns: 'settings',
})}
-
- {
- setCurrentPassword(event.target.value);
- }}
- value={currentPassword}
- />
-
-
-
- {t('new_password', {
- ns: 'settings',
- })}
-
- {
- const updatedValue = event.target.value;
- setUpdatePassword(updatedValue);
- }}
- value={updatePassword}
- />
-
-
- {isPasswordPolicyError && (
-
+
+
+ {t('current_password', {
+ ns: 'settings',
+ })}
+
+ {
+ setCurrentPassword(event.target.value);
}}
- >
- {isPasswordNotValidMessage}
-
- )}
+ value={currentPassword}
+ />
+
+
+
+ {t('new_password', {
+ ns: 'settings',
+ })}
+
+ {
+ const updatedValue = event.target.value;
+ setUpdatePassword(updatedValue);
+ }}
+ value={updatePassword}
+ />
+
+
+ {isPasswordPolicyError && (
+
+ {isPasswordNotValidMessage}
+
+ )}
+
+
+ {' '}
+ {t('change_password', {
+ ns: 'settings',
+ })}
+
-
- {t('change_password', {
- ns: 'settings',
- })}
-
-
+
);
}
diff --git a/frontend/src/container/MySettings/UserInfo/UserInfo.styles.scss b/frontend/src/container/MySettings/UserInfo/UserInfo.styles.scss
new file mode 100644
index 0000000000..d1cfae649c
--- /dev/null
+++ b/frontend/src/container/MySettings/UserInfo/UserInfo.styles.scss
@@ -0,0 +1,7 @@
+.userInfo-label {
+ min-width: 150px;
+}
+
+.userInfo-value {
+ min-width: 20rem;
+}
diff --git a/frontend/src/container/MySettings/UpdateName/index.tsx b/frontend/src/container/MySettings/UserInfo/index.tsx
similarity index 58%
rename from frontend/src/container/MySettings/UpdateName/index.tsx
rename to frontend/src/container/MySettings/UserInfo/index.tsx
index 6da15a237a..23187a9bf4 100644
--- a/frontend/src/container/MySettings/UpdateName/index.tsx
+++ b/frontend/src/container/MySettings/UserInfo/index.tsx
@@ -1,6 +1,10 @@
-import { Button, Space, Typography } from 'antd';
+import '../MySettings.styles.scss';
+import './UserInfo.styles.scss';
+
+import { Button, Card, Flex, Input, Space, Typography } from 'antd';
import editUser from 'api/user/editUser';
import { useNotifications } from 'hooks/useNotifications';
+import { PencilIcon, UserSquare } from 'lucide-react';
import { useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
@@ -12,7 +16,7 @@ import AppReducer from 'types/reducer/app';
import { NameInput } from '../styles';
-function UpdateName(): JSX.Element {
+function UserInfo(): JSX.Element {
const { user, role, org, userFlags } = useSelector(
(state) => state.app,
);
@@ -72,28 +76,51 @@ function UpdateName(): JSX.Element {
};
return (
-
+
);
}
-export default UpdateName;
+export default UserInfo;
diff --git a/frontend/src/container/MySettings/index.tsx b/frontend/src/container/MySettings/index.tsx
index f0938e6440..e3945c4d12 100644
--- a/frontend/src/container/MySettings/index.tsx
+++ b/frontend/src/container/MySettings/index.tsx
@@ -1,16 +1,28 @@
-import { Space, Typography } from 'antd';
-import { useTranslation } from 'react-i18next';
+import './MySettings.styles.scss';
+
+import { Button, Space } from 'antd';
+import { Logout } from 'api/utils';
+import { LogOut } from 'lucide-react';
import Password from './Password';
-import UpdateName from './UpdateName';
+import UserInfo from './UserInfo';
function MySettings(): JSX.Element {
- const { t } = useTranslation(['routes']);
return (
-
- {t('my_settings')}
-
+
+
+
+
+ Logout()} type="primary">
+ Logout
+
);
}
diff --git a/frontend/src/container/NewExplorerCTA/config.ts b/frontend/src/container/NewExplorerCTA/config.ts
index b2feeff572..886f044e57 100644
--- a/frontend/src/container/NewExplorerCTA/config.ts
+++ b/frontend/src/container/NewExplorerCTA/config.ts
@@ -7,5 +7,5 @@ export const RIBBON_STYLES = {
export const buttonText = {
[ROUTES.LOGS_EXPLORER]: 'Switch to Old Logs Explorer',
[ROUTES.TRACE]: 'Try new Traces Explorer',
- [ROUTES.LOGS]: 'Switch to New Logs Explorer',
+ [ROUTES.OLD_LOGS_EXPLORER]: 'Switch to New Logs Explorer',
};
diff --git a/frontend/src/container/NewExplorerCTA/index.tsx b/frontend/src/container/NewExplorerCTA/index.tsx
index 6b1a5b577c..5b6d4532e2 100644
--- a/frontend/src/container/NewExplorerCTA/index.tsx
+++ b/frontend/src/container/NewExplorerCTA/index.tsx
@@ -14,16 +14,16 @@ function NewExplorerCTA(): JSX.Element | null {
() =>
location.pathname === ROUTES.LOGS_EXPLORER ||
location.pathname === ROUTES.TRACE ||
- location.pathname === ROUTES.LOGS,
+ location.pathname === ROUTES.OLD_LOGS_EXPLORER,
[location.pathname],
);
const onClickHandler = useCallback((): void => {
if (location.pathname === ROUTES.LOGS_EXPLORER) {
- history.push(ROUTES.LOGS);
+ history.push(ROUTES.OLD_LOGS_EXPLORER);
} else if (location.pathname === ROUTES.TRACE) {
history.push(ROUTES.TRACES_EXPLORER);
- } else if (location.pathname === ROUTES.LOGS) {
+ } else if (location.pathname === ROUTES.OLD_LOGS_EXPLORER) {
history.push(ROUTES.LOGS_EXPLORER);
}
}, [location.pathname]);
@@ -36,6 +36,7 @@ function NewExplorerCTA(): JSX.Element | null {
danger
data-testid="newExplorerCTA"
type="primary"
+ size="small"
>
{buttonText[location.pathname]}
diff --git a/frontend/src/container/OnboardingContainer/Onboarding.styles.scss b/frontend/src/container/OnboardingContainer/Onboarding.styles.scss
index b5498f5074..4e00b629c8 100644
--- a/frontend/src/container/OnboardingContainer/Onboarding.styles.scss
+++ b/frontend/src/container/OnboardingContainer/Onboarding.styles.scss
@@ -32,12 +32,12 @@
.onboardingHeader {
text-align: center;
margin-top: 48px;
- margin-bottom: 24px;
}
.onboardingHeader h1 {
font-size: 24px;
font-weight: 500;
+ margin: 0;
}
.modulesContainer {
diff --git a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
index 82aa14cadc..4354349304 100644
--- a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
@@ -6,6 +6,7 @@ import { ArrowRightOutlined } from '@ant-design/icons';
import { Button, Card, Typography } from 'antd';
import getIngestionData from 'api/settings/getIngestionData';
import cx from 'classnames';
+import FullViewHeader from 'container/FullViewHeader/FullViewHeader';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useEffect, useState } from 'react';
@@ -218,11 +219,10 @@ export default function Onboarding(): JSX.Element {
{activeStep === 1 && (
<>
+
-
Get Started with SigNoz
-
Select a use-case to get started
+
Select a use-case to get started
-
{Object.keys(ModulesMap).map((module) => {
@@ -261,7 +261,6 @@ export default function Onboarding(): JSX.Element {
})}
-
} onClick={handleNext}>
Get Started
diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
index 3592d58798..02972209dd 100644
--- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
+++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
@@ -39,6 +39,36 @@
.steps-container {
width: 20%;
height: 100%;
+
+ .steps-container-header {
+ display: flex;
+ align-items: center;
+ padding: 16px 0;
+ margin-bottom: 24px;
+
+ .brand-logo {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ gap: 16px;
+ cursor: pointer;
+
+ img {
+ height: 24px;
+ width: 24px;
+ }
+
+ .brand-logo-name {
+ font-family: 'Work Sans', sans-serif;
+ font-size: 18px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 18px;
+
+ color: #fff;
+ }
+ }
+ }
}
.selected-step-content {
@@ -153,3 +183,18 @@
.error-container {
margin: 8px 0;
}
+
+.lightMode {
+ .steps-container {
+ width: 20%;
+ height: 100%;
+
+ .steps-container-header {
+ .brand-logo {
+ .brand-logo-name {
+ color: black;
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
index b1712ef263..3d3349b76e 100644
--- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
@@ -1,3 +1,6 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+/* eslint-disable react/jsx-no-comment-textnodes */
/* eslint-disable sonarjs/prefer-single-boolean-return */
import './ModuleStepsContainer.styles.scss';
@@ -135,7 +138,7 @@ export default function ModuleStepsContainer({
if (selectedModule.id === ModulesMap.APM) {
history.push(ROUTES.APPLICATION);
} else if (selectedModule.id === ModulesMap.LogsManagement) {
- history.push(ROUTES.LOGS);
+ history.push(ROUTES.LOGS_EXPLORER);
} else if (selectedModule.id === ModulesMap.InfrastructureMonitoring) {
history.push(ROUTES.APPLICATION);
}
@@ -197,9 +200,21 @@ export default function ModuleStepsContainer({
}
};
+ const handleLogoClick = (): void => {
+ history.push('/');
+ };
+
return (
+
+
+
+
+
SigNoz
+
+
+
+
{optionType ? (
- {value}
-
+
{value}
+
Type:
{optionType}
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
index c9c2e0e221..fd6d5f209e 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
@@ -18,17 +18,18 @@ export const StyledCheckOutlined = styled(CheckOutlined)`
export const SelectOptionContainer = styled.div`
display: flex;
+ gap: 8px;
justify-content: space-between;
align-items: center;
+ overflow-x: auto;
`;
export const TagContainer = styled(Tag)`
&&& {
- border-radius: 0.25rem;
- padding: 0.063rem 0.5rem;
- font-weight: 600;
- font-size: 0.75rem;
- line-height: 1.25rem;
+ border-radius: 3px;
+ padding: 0.3rem 0.3rem;
+ font-weight: 400;
+ font-size: 0.6rem;
}
`;
diff --git a/frontend/src/container/ServiceApplication/styles.ts b/frontend/src/container/ServiceApplication/styles.ts
index 0bcca1738b..0bf860bb9a 100644
--- a/frontend/src/container/ServiceApplication/styles.ts
+++ b/frontend/src/container/ServiceApplication/styles.ts
@@ -1,5 +1,4 @@
import { Typography } from 'antd';
-import { themeColors } from 'constants/theme';
import styled from 'styled-components';
export const Container = styled.div`
@@ -9,7 +8,7 @@ export const Container = styled.div`
export const Name = styled(Typography)`
&&& {
font-weight: 600;
- color: ${themeColors.lightBlue};
+ color: #4e74f8;
cursor: pointer;
}
`;
diff --git a/frontend/src/container/ServiceTable/styles.ts b/frontend/src/container/ServiceTable/styles.ts
index 3050081a56..0bf860bb9a 100644
--- a/frontend/src/container/ServiceTable/styles.ts
+++ b/frontend/src/container/ServiceTable/styles.ts
@@ -8,7 +8,7 @@ export const Container = styled.div`
export const Name = styled(Typography)`
&&& {
font-weight: 600;
- color: #177ddc;
+ color: #4e74f8;
cursor: pointer;
}
`;
diff --git a/frontend/src/container/SideNav/NavItem/NavItem.styles.scss b/frontend/src/container/SideNav/NavItem/NavItem.styles.scss
new file mode 100644
index 0000000000..f182a1df6d
--- /dev/null
+++ b/frontend/src/container/SideNav/NavItem/NavItem.styles.scss
@@ -0,0 +1,112 @@
+.nav-item {
+ border-radius: 2px;
+
+ display: flex;
+ flex-direction: row;
+ align-items: center;
+
+ height: 36px;
+ margin-bottom: 4px;
+
+ &.active {
+ .nav-item-active-marker {
+ background: #3f5ecc;
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+
+ .nav-item-data {
+ color: white;
+ background: #121317;
+ }
+ }
+
+ &.active {
+ .nav-item-data {
+ color: white;
+ background: #121317;
+ // color: #3f5ecc;
+ }
+ }
+
+ .nav-item-active-marker {
+ margin: 8px 0;
+ width: 8px;
+ height: 24px;
+ background: transparent;
+ border-radius: 3px;
+ margin-left: -5px;
+ }
+
+ .nav-item-data {
+ flex-grow: 1;
+
+ max-width: calc(100% - 24px);
+
+ display: flex;
+ margin: 0px 8px;
+ padding: 4px 12px;
+ flex-direction: row;
+ align-items: center;
+ gap: 8px;
+ align-self: stretch;
+ color: #c0c1c3;
+
+ border-radius: 3px;
+ font-family: Inter;
+ font-size: 13px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px;
+
+ background: transparent;
+ border-left: 2px solid transparent;
+
+ transition: 0.2s all linear;
+
+ .nav-item-icon {
+ height: 16px;
+ }
+
+ .nav-item-label {
+ // width: 220px;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ }
+ }
+}
+
+.lightMode {
+ .nav-item {
+ &.active {
+ .nav-item-active-marker {
+ background: #3f5ecc;
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+
+ .nav-item-data {
+ color: #121317;
+
+ background: white;
+ }
+ }
+
+ &.active {
+ .nav-item-data {
+ color: #121317;
+ background: white;
+ // color: #4e74f8;
+ }
+ }
+
+ .nav-item-data {
+ color: #121317;
+ }
+ }
+}
diff --git a/frontend/src/container/SideNav/NavItem/NavItem.tsx b/frontend/src/container/SideNav/NavItem/NavItem.tsx
new file mode 100644
index 0000000000..301acc402e
--- /dev/null
+++ b/frontend/src/container/SideNav/NavItem/NavItem.tsx
@@ -0,0 +1,31 @@
+import './NavItem.styles.scss';
+
+import cx from 'classnames';
+
+import { SidebarItem } from '../sideNav.types';
+
+export default function NavItem({
+ isCollapsed,
+ item,
+ isActive,
+ onClick,
+}: {
+ isCollapsed: boolean;
+ item: SidebarItem;
+ isActive: boolean;
+ onClick: () => void;
+}): JSX.Element {
+ const { label, icon } = item;
+
+ return (
+ // eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
+
+
+
+
{icon}
+
+ {!isCollapsed &&
{label}
}
+
+
+ );
+}
diff --git a/frontend/src/container/SideNav/SideNav.styles.scss b/frontend/src/container/SideNav/SideNav.styles.scss
new file mode 100644
index 0000000000..379a7bba93
--- /dev/null
+++ b/frontend/src/container/SideNav/SideNav.styles.scss
@@ -0,0 +1,172 @@
+@import '@signozhq/design-tokens';
+
+.sideNav {
+ flex: 0 0 240px;
+ max-width: 240px;
+ min-width: 240px;
+ width: 240px;
+ border-right: 1px solid $bg-slate-400;
+ padding-bottom: 48px;
+ transition: all 0.3s, background 0s, border 0s;
+ position: relative;
+
+ .brand {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ padding: $padding-4;
+
+ .brand-logo {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+
+ cursor: pointer;
+
+ img {
+ height: $font-size-xl;
+ }
+
+ .brand-logo-name {
+ font-family: 'Work Sans', sans-serif;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 18px;
+
+ color: #fff;
+ }
+ }
+
+ .license {
+ &.tag {
+ box-sizing: border-box;
+ margin: 0;
+ padding: 0;
+ color: rgba(255, 255, 255, 0.85);
+ font-size: 8px;
+ font-weight: $font-weight-medium;
+ letter-spacing: 0.6px;
+ padding: 4px 8px;
+ text-transform: uppercase;
+ white-space: nowrap;
+ background: $bg-slate-400;
+ border: 1px solid $bg-slate-400;
+ border-radius: 20px;
+ opacity: 1;
+ transition: all 0.2s;
+ }
+ }
+ }
+
+ .get-started-nav-items {
+ display: flex;
+ margin: 4px 13px 4px 10px;
+
+ .get-started-btn {
+ display: flex;
+ align-items: center;
+ padding: 8px;
+ margin-left: 2px;
+ gap: 8px;
+
+ width: 100%;
+ height: 36px;
+
+ border: 1px solid $bg-slate-400;
+
+ border-radius: 2px;
+ box-shadow: none !important;
+ }
+ }
+
+ .secondary-nav-items {
+ border-top: 1px solid $bg-slate-400;
+ padding: 8px 0;
+ max-width: 100%;
+ position: fixed;
+ bottom: 0;
+ left: 0;
+ width: 240px;
+
+ transition: all 0.3s, background 0s, border 0s;
+
+ // position: relative;
+
+ .collapse-expand-handlers {
+ position: absolute;
+
+ top: -9px;
+ right: -9px;
+ cursor: pointer;
+
+ display: none;
+
+ transition: display 0.3s;
+
+ svg {
+ fill: $bg-vanilla-300;
+ color: $bg-slate-300;
+ }
+ }
+ }
+
+ &.collapsed {
+ flex: 0 0 64px;
+ max-width: 64px;
+ min-width: 64px;
+ width: 64px;
+
+ .secondary-nav-items {
+ width: 64px;
+ }
+
+ .brand {
+ justify-content: center;
+ }
+
+ .get-started-nav-items {
+ .get-started-btn {
+ justify-content: center;
+ }
+ }
+ }
+
+ &:hover {
+ .collapse-expand-handlers {
+ display: block;
+ }
+ }
+}
+
+.lightMode {
+ .sideNav {
+ background: $bg-vanilla-300;
+ border-right: 1px solid $bg-vanilla-400;
+
+ .get-started-nav-items {
+ .get-started-btn {
+ border: 1px solid $bg-vanilla-400;
+ }
+ }
+
+ .brand {
+ .brand-logo {
+ .brand-logo-name {
+ color: $bg-slate-400;
+ }
+ }
+ }
+
+ .secondary-nav-items {
+ border-top: 1px solid $bg-vanilla-400;
+
+ .collapse-expand-handlers {
+ svg {
+ color: $bg-slate-300;
+ fill: $bg-vanilla-300;
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx
index 911913aef9..3fd937a52f 100644
--- a/frontend/src/container/SideNav/SideNav.tsx
+++ b/frontend/src/container/SideNav/SideNav.tsx
@@ -1,12 +1,26 @@
-import { CheckCircleTwoTone, WarningOutlined } from '@ant-design/icons';
-import { MenuProps } from 'antd';
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+import './SideNav.styles.scss';
+
+import { Button } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
+import cx from 'classnames';
import { IS_SIDEBAR_COLLAPSED } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
-import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
+import { ToggleButton } from 'container/Header/styles';
+import useComponentPermission from 'hooks/useComponentPermission';
+import useThemeMode, { useIsDarkMode } from 'hooks/useDarkMode';
+import { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS } from 'hooks/useLicense';
import history from 'lib/history';
-import { LifeBuoy } from 'lucide-react';
+import {
+ AlertTriangle,
+ CheckSquare,
+ ChevronLeftCircle,
+ ChevronRightCircle,
+ RocketIcon,
+ UserCircle,
+} from 'lucide-react';
import {
useCallback,
useEffect,
@@ -17,44 +31,82 @@ import {
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
-import { sideBarCollapse } from 'store/actions/app';
+import { sideBarCollapse } from 'store/actions';
import { AppState } from 'store/reducers';
+import { License } from 'types/api/licenses/def';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { checkVersionState, isCloudUser, isEECloudUser } from 'utils/app';
-import { routeConfig, styles } from './config';
+import { routeConfig } from './config';
import { getQueryString } from './helper';
-import defaultMenuItems from './menuItems';
-import { MenuItem, SecondaryMenuItemKey } from './sideNav.types';
+import defaultMenuItems, {
+ helpSupportMenuItem,
+ inviteMemberMenuItem,
+ manageLicenseMenuItem,
+ slackSupportMenuItem,
+ trySignozCloudMenuItem,
+} from './menuItems';
+import NavItem from './NavItem/NavItem';
+import { SecondaryMenuItemKey } from './sideNav.types';
import { getActiveMenuKeyFromPath } from './sideNav.utils';
-import Slack from './Slack';
-import {
- MenuLabelContainer,
- RedDot,
- Sider,
- StyledPrimaryMenu,
- StyledSecondaryMenu,
- StyledText,
-} from './styles';
-function SideNav(): JSX.Element {
+function SideNav({
+ licenseData,
+ isFetching,
+}: {
+ licenseData: any;
+ isFetching: boolean;
+}): JSX.Element {
const dispatch = useDispatch();
const [menuItems, setMenuItems] = useState(defaultMenuItems);
const [collapsed, setCollapsed] = useState(
getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true',
);
+
+ const { pathname, search } = useLocation();
const {
+ user,
role,
+ featureResponse,
currentVersion,
latestVersion,
isCurrentVersionError,
- featureResponse,
} = useSelector((state) => state.app);
- const { data, isFetching } = useLicense();
+ const userSettingsMenuItem = {
+ key: ROUTES.MY_SETTINGS,
+ label: user?.name || 'User',
+ icon: ,
+ };
- let secondaryMenuItems: MenuItem[] = [];
+ const [userManagementMenuItems, setUserManagementMenuItems] = useState([
+ manageLicenseMenuItem,
+ ]);
+
+ const onClickSlackHandler = (): void => {
+ window.open('https://signoz.io/slack', '_blank');
+ };
+
+ const onClickVersionHandler = (): void => {
+ history.push(ROUTES.VERSION);
+ };
+
+ const isLatestVersion = checkVersionState(currentVersion, latestVersion);
+
+ const [inviteMembers] = useComponentPermission(['invite_members'], role);
+
+ useEffect(() => {
+ if (inviteMembers) {
+ const updatedUserManagementMenuItems = [
+ inviteMemberMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [inviteMembers]);
useEffect((): void => {
const isOnboardingEnabled =
@@ -78,10 +130,10 @@ function SideNav(): JSX.Element {
let items = [...menuItems];
const isOnBasicPlan =
- data?.payload?.licenses?.some(
- (license) =>
+ licenseData?.payload?.licenses?.some(
+ (license: License) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
- ) || data?.payload?.licenses === null;
+ ) || licenseData?.payload?.licenses === null;
if (role !== USER_ROLES.ADMIN || isOnBasicPlan) {
items = items.filter((item) => item.key !== ROUTES.BILLING);
@@ -90,9 +142,7 @@ function SideNav(): JSX.Element {
setMenuItems(items);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
- }, [data?.payload?.licenses, isFetching, role]);
-
- const { pathname, search } = useLocation();
+ }, [licenseData?.payload?.licenses, isFetching, role]);
const { t } = useTranslation('');
@@ -104,6 +154,26 @@ function SideNav(): JSX.Element {
dispatch(sideBarCollapse(collapsed));
}, [collapsed, dispatch]);
+ const isLicenseActive =
+ licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ===
+ LICENSE_PLAN_STATUS.VALID;
+
+ const isEnterprise = licenseData?.payload?.licenses?.some(
+ (license: License) =>
+ license.isCurrent && license.planKey === LICENSE_PLAN_KEY.ENTERPRISE_PLAN,
+ );
+
+ const onClickSignozCloud = (): void => {
+ window.open(
+ 'https://signoz.io/oss-to-cloud/?utm_source=product_navbar&utm_medium=frontend&utm_campaign=oss_users',
+ '_blank',
+ );
+ };
+
+ const onClickGetStarted = (): void => {
+ history.push(`/get-started`);
+ };
+
const onClickHandler = useCallback(
(key: string) => {
const params = new URLSearchParams(search);
@@ -118,80 +188,175 @@ function SideNav(): JSX.Element {
[pathname, search],
);
- const onClickMenuHandler: MenuProps['onClick'] = (e) => {
- onClickHandler(e.key);
- };
-
- const onClickSlackHandler = (): void => {
- window.open('https://signoz.io/slack', '_blank');
- };
-
- const onClickVersionHandler = (): void => {
- history.push(ROUTES.VERSION);
- };
-
- const isLatestVersion = checkVersionState(currentVersion, latestVersion);
-
- if (isCloudUser() || isEECloudUser()) {
- secondaryMenuItems = [
- {
- key: SecondaryMenuItemKey.Support,
- label: 'Support',
- icon: ,
- onClick: onClickMenuHandler,
- },
- ];
- } else {
- secondaryMenuItems = [
- {
- key: SecondaryMenuItemKey.Version,
- icon: !isLatestVersion ? (
-
- ) : (
-
- ),
- label: (
-
-
- {!isCurrentVersionError ? currentVersion : t('n_a')}
-
- {!isLatestVersion && }
-
- ),
- onClick: onClickVersionHandler,
- },
- {
- key: SecondaryMenuItemKey.Slack,
- icon: ,
- label: Support ,
- onClick: onClickSlackHandler,
- },
- ];
- }
-
const activeMenuKey = useMemo(() => getActiveMenuKeyFromPath(pathname), [
pathname,
]);
+ const isDarkMode = useIsDarkMode();
+ const { toggleTheme } = useThemeMode();
+
+ const isCloudUserVal = isCloudUser();
+
+ useEffect(() => {
+ if (isCloudUser() || isEECloudUser()) {
+ const updatedUserManagementMenuItems = [
+ helpSupportMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ } else if (currentVersion && latestVersion) {
+ const versionMenuItem = {
+ key: SecondaryMenuItemKey.Version,
+ label: !isCurrentVersionError ? currentVersion : t('n_a'),
+ icon: !isLatestVersion ? (
+
+ ) : (
+
+ ),
+ onClick: onClickVersionHandler,
+ };
+
+ const updatedUserManagementMenuItems = [
+ versionMenuItem,
+ slackSupportMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [currentVersion, latestVersion]);
+
+ const handleUserManagentMenuItemClick = (key: string): void => {
+ switch (key) {
+ case SecondaryMenuItemKey.Slack:
+ onClickSlackHandler();
+ break;
+ case SecondaryMenuItemKey.Version:
+ onClickVersionHandler();
+ break;
+ default:
+ onClickHandler(key);
+ break;
+ }
+ };
+
return (
-
-
-
-
+
+
+
{
+ // Current home page
+ onClickHandler(ROUTES.APPLICATION);
+ }}
+ >
+
+
+ {!collapsed &&
SigNoz }
+
+
+ {!collapsed && (
+ <>
+
{!isEnterprise ? 'Free' : 'Enterprise'}
+
+
+ >
+ )}
+
+
+ {isCloudUserVal && (
+
+
+
+
+ {!collapsed && <> Get Started >}
+
+
+ )}
+
+
+ {menuItems.map((item, index) => (
+ {
+ if (item) {
+ onClickHandler(item?.key as string);
+ }
+ }}
+ />
+ ))}
+
+
+
+ {licenseData && !isLicenseActive && (
+
+ )}
+
+ {userManagementMenuItems.map(
+ (item, index): JSX.Element => (
+
{
+ handleUserManagentMenuItemClick(item?.key as string);
+ }}
+ />
+ ),
+ )}
+
+ {inviteMembers && (
+ {
+ history.push(`${inviteMemberMenuItem.key}`);
+ }}
+ />
+ )}
+
+ {user && (
+ {
+ handleUserManagentMenuItemClick(userSettingsMenuItem?.key as string);
+ }}
+ />
+ )}
+
+
+ {collapsed ? (
+
+ ) : (
+
+ )}
+
+
+
);
}
diff --git a/frontend/src/container/SideNav/config.ts b/frontend/src/container/SideNav/config.ts
index 2fbfa1e244..95028b0a05 100644
--- a/frontend/src/container/SideNav/config.ts
+++ b/frontend/src/container/SideNav/config.ts
@@ -31,6 +31,7 @@ export const routeConfig: Record = {
[ROUTES.LIST_LICENSES]: [QueryParams.resourceAttributes],
[ROUTES.LOGIN]: [QueryParams.resourceAttributes],
[ROUTES.LOGS]: [QueryParams.resourceAttributes],
+ [ROUTES.LOGS_BASE]: [QueryParams.resourceAttributes],
[ROUTES.MY_SETTINGS]: [QueryParams.resourceAttributes],
[ROUTES.NOT_FOUND]: [QueryParams.resourceAttributes],
[ROUTES.ORG_SETTINGS]: [QueryParams.resourceAttributes],
diff --git a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx
index ae0acdd8c6..00ac98d259 100644
--- a/frontend/src/container/SideNav/menuItems.tsx
+++ b/frontend/src/container/SideNav/menuItems.tsx
@@ -1,88 +1,111 @@
-import {
- AlertOutlined,
- AlignLeftOutlined,
- BarChartOutlined,
- BugOutlined,
- DashboardFilled,
- DeploymentUnitOutlined,
- FileDoneOutlined,
- LineChartOutlined,
- MenuOutlined,
- RocketOutlined,
- SearchOutlined,
- SettingOutlined,
-} from '@ant-design/icons';
+import { RocketOutlined } from '@ant-design/icons';
import ROUTES from 'constants/routes';
+import {
+ AreaChart,
+ BarChart2,
+ BellDot,
+ BugIcon,
+ Cloudy,
+ DraftingCompass,
+ FileKey2,
+ LayoutGrid,
+ MessageSquare,
+ Receipt,
+ Route,
+ ScrollText,
+ Settings,
+ Slack,
+ UserPlus,
+} from 'lucide-react';
-import { SidebarMenu } from './sideNav.types';
+import { SecondaryMenuItemKey, SidebarItem } from './sideNav.types';
-const menuItems: SidebarMenu[] = [
- {
- key: ROUTES.GET_STARTED,
- label: 'Get Started',
- icon: ,
- },
+export const getStartedMenuItem = {
+ key: ROUTES.GET_STARTED,
+ label: 'Get Started',
+ icon: ,
+};
+
+export const inviteMemberMenuItem = {
+ key: `${ROUTES.ORG_SETTINGS}#invite-team-members`,
+ label: 'Invite Team Member',
+ icon: ,
+};
+
+export const manageLicenseMenuItem = {
+ key: ROUTES.LIST_LICENSES,
+ label: 'Manage Licenses',
+ icon: ,
+};
+
+export const helpSupportMenuItem = {
+ key: ROUTES.SUPPORT,
+ label: 'Help & Support',
+ icon: ,
+};
+
+export const slackSupportMenuItem = {
+ key: SecondaryMenuItemKey.Slack,
+ label: 'Slack Support',
+ icon: ,
+};
+
+export const trySignozCloudMenuItem: SidebarItem = {
+ key: 'trySignozCloud',
+ label: 'Try Signoz Cloud',
+ icon: ,
+};
+
+const menuItems: SidebarItem[] = [
{
key: ROUTES.APPLICATION,
label: 'Services',
- icon: ,
+ icon: ,
},
{
key: ROUTES.TRACE,
label: 'Traces',
- icon: ,
+ icon: ,
},
{
- key: ROUTES.LOGS_EXPLORER,
+ key: ROUTES.LOGS,
label: 'Logs',
- icon: ,
- children: [
- {
- key: ROUTES.LOGS_EXPLORER,
- icon: ,
- label: 'Logs Explorer',
- },
- {
- key: ROUTES.LOGS_PIPELINES,
- icon: ,
- label: 'Logs Pipelines',
- },
- ],
+ icon: ,
},
{
key: ROUTES.ALL_DASHBOARD,
label: 'Dashboards',
- icon: ,
+ icon: ,
},
{
key: ROUTES.LIST_ALL_ALERT,
label: 'Alerts',
- icon: ,
+ icon: ,
},
{
key: ROUTES.ALL_ERROR,
label: 'Exceptions',
- icon: ,
+ icon: ,
},
{
key: ROUTES.SERVICE_MAP,
label: 'Service Map',
- icon: ,
+ icon: ,
},
{
key: ROUTES.USAGE_EXPLORER,
label: 'Usage Explorer',
- icon: ,
+ icon: ,
},
{
key: ROUTES.BILLING,
label: 'Billing',
- icon: ,
+ icon: ,
},
{
key: ROUTES.SETTINGS,
label: 'Settings',
- icon: ,
+ icon: ,
},
];
@@ -90,7 +113,7 @@ const menuItems: SidebarMenu[] = [
export const NEW_ROUTES_MENU_ITEM_KEY_MAP = {
[ROUTES.TRACES_EXPLORER]: ROUTES.TRACE,
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACE,
- [ROUTES.LOGS_EXPLORER]: ROUTES.LOGS_EXPLORER,
+ [ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
};
export default menuItems;
diff --git a/frontend/src/container/SideNav/sideNav.types.ts b/frontend/src/container/SideNav/sideNav.types.ts
index 804cad8d18..8bc7860478 100644
--- a/frontend/src/container/SideNav/sideNav.types.ts
+++ b/frontend/src/container/SideNav/sideNav.types.ts
@@ -8,10 +8,9 @@ export type SidebarMenu = MenuItem & {
};
export interface SidebarItem {
- onClick: VoidFunction;
icon?: ReactNode;
text?: ReactNode;
- key: string;
+ key: string | number;
label?: ReactNode;
}
diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx
index f7b8bf5f21..d5e4941142 100644
--- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx
+++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx
@@ -22,6 +22,7 @@ const breadcrumbNameMap = {
[ROUTES.ALL_DASHBOARD]: 'Dashboard',
[ROUTES.LOGS]: 'Logs',
[ROUTES.LOGS_EXPLORER]: 'Logs Explorer',
+ [ROUTES.OLD_LOGS_EXPLORER]: 'Old Logs Explorer',
[ROUTES.LIVE_LOGS]: 'Live View',
[ROUTES.LOGS_PIPELINES]: 'Logs Pipelines',
[ROUTES.BILLING]: 'Billing',
diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts
index b99f6f6ae2..0ece952909 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/config.ts
+++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts
@@ -90,6 +90,9 @@ export const routesToSkip = [
ROUTES.BILLING,
ROUTES.SUPPORT,
ROUTES.WORKSPACE_LOCKED,
+ ROUTES.LOGS,
+ ROUTES.MY_SETTINGS,
+ ROUTES.LIST_LICENSES,
];
export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS];
diff --git a/frontend/src/container/TopNav/index.tsx b/frontend/src/container/TopNav/index.tsx
index 6592faa569..3b2667eaf6 100644
--- a/frontend/src/container/TopNav/index.tsx
+++ b/frontend/src/container/TopNav/index.tsx
@@ -4,14 +4,8 @@ import { useMemo } from 'react';
import { matchPath, useHistory } from 'react-router-dom';
import NewExplorerCTA from '../NewExplorerCTA';
-import ShowBreadcrumbs from './Breadcrumbs';
import DateTimeSelector from './DateTimeSelection';
-import {
- routesToDisable,
- routesToHideBreadCrumbs,
- routesToSkip,
-} from './DateTimeSelection/config';
-import { Container } from './styles';
+import { routesToDisable, routesToSkip } from './DateTimeSelection/config';
function TopNav(): JSX.Element | null {
const { location } = useHistory();
@@ -24,14 +18,6 @@ function TopNav(): JSX.Element | null {
[location.pathname],
);
- const isRouteToHideBreadCrumbs = useMemo(
- () =>
- routesToHideBreadCrumbs.some((route) =>
- matchPath(location.pathname, { path: route, exact: true }),
- ),
- [location.pathname],
- );
-
const isDisabled = useMemo(
() =>
routesToDisable.some((route) =>
@@ -50,15 +36,9 @@ function TopNav(): JSX.Element | null {
}
return (
-
- {!isRouteToHideBreadCrumbs && (
-
-
-
- )}
-
+
{!isRouteToSkip && (
-
+
@@ -69,7 +49,7 @@ function TopNav(): JSX.Element | null {
)}
-
+
);
}
diff --git a/frontend/src/container/TopNav/styles.ts b/frontend/src/container/TopNav/styles.ts
index ef3cb15c37..4c88c63246 100644
--- a/frontend/src/container/TopNav/styles.ts
+++ b/frontend/src/container/TopNav/styles.ts
@@ -3,6 +3,6 @@ import styled from 'styled-components';
export const Container = styled(Row)`
&&& {
- margin-top: 2rem;
+ margin-top: 1rem;
}
`;
diff --git a/frontend/src/hooks/logs/useActiveLog.ts b/frontend/src/hooks/logs/useActiveLog.ts
index 8dbd58976b..a56c13c72e 100644
--- a/frontend/src/hooks/logs/useActiveLog.ts
+++ b/frontend/src/hooks/logs/useActiveLog.ts
@@ -36,7 +36,9 @@ export const useActiveLog = (): UseActiveLog => {
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const { notifications } = useNotifications();
- const isLogsPage = useMemo(() => pathname === ROUTES.LOGS, [pathname]);
+ const isLogsPage = useMemo(() => pathname === ROUTES.OLD_LOGS_EXPLORER, [
+ pathname,
+ ]);
const [activeLog, setActiveLog] = useState(null);
@@ -135,7 +137,7 @@ export const useActiveLog = (): UseActiveLog => {
queryString,
);
- history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`);
+ history.replace(`${ROUTES.OLD_LOGS_EXPLORER}?q=${updatedQueryString}`);
},
[history, queryString],
);
diff --git a/frontend/src/hooks/useDarkMode/index.tsx b/frontend/src/hooks/useDarkMode/index.tsx
index 069e08b2de..baf0c21511 100644
--- a/frontend/src/hooks/useDarkMode/index.tsx
+++ b/frontend/src/hooks/useDarkMode/index.tsx
@@ -76,6 +76,11 @@ export const useThemeConfig = (): ThemeConfig => {
borderRadiusXS: 2,
fontFamily: 'Inter',
fontSize: 13,
+ colorPrimary: '#4E74F8',
+ colorBgBase: isDarkMode ? '#0B0C0E' : '#fff',
+ colorBgContainer: isDarkMode ? '#121317' : '#fff',
+ colorLink: '#4E74F8',
+ colorPrimaryText: '#3F5ECC',
},
};
};
diff --git a/frontend/src/index.html.ejs b/frontend/src/index.html.ejs
index f46fd07f01..8d756463cd 100644
--- a/frontend/src/index.html.ejs
+++ b/frontend/src/index.html.ejs
@@ -67,7 +67,7 @@
diff --git a/frontend/src/index.tsx b/frontend/src/index.tsx
index 17320546af..405a9c6bc4 100644
--- a/frontend/src/index.tsx
+++ b/frontend/src/index.tsx
@@ -8,7 +8,6 @@ import { createRoot } from 'react-dom/client';
import { ErrorBoundary } from 'react-error-boundary';
import { HelmetProvider } from 'react-helmet-async';
import { QueryClient, QueryClientProvider } from 'react-query';
-import { ReactQueryDevtools } from 'react-query/devtools';
import { Provider } from 'react-redux';
import store from 'store';
@@ -33,7 +32,6 @@ if (container) {
- {process.env.NODE_ENV === 'development' && }
diff --git a/frontend/src/pages/Logs/index.tsx b/frontend/src/pages/Logs/index.tsx
index d690f40f9c..cbbcc22c4c 100644
--- a/frontend/src/pages/Logs/index.tsx
+++ b/frontend/src/pages/Logs/index.tsx
@@ -30,7 +30,7 @@ import { useSelectedLogView } from './hooks';
import PopoverContent from './PopoverContent';
import SpaceContainer from './styles';
-function Logs(): JSX.Element {
+function OldLogsExplorer(): JSX.Element {
const dispatch = useDispatch>();
const { order } = useSelector((store) => store.logs);
const location = useLocation();
@@ -148,4 +148,4 @@ function Logs(): JSX.Element {
);
}
-export default Logs;
+export default OldLogsExplorer;
diff --git a/frontend/src/pages/LogsModulePage/LogsModulePage.tsx b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx
new file mode 100644
index 0000000000..ecd8d2dcfc
--- /dev/null
+++ b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx
@@ -0,0 +1,28 @@
+import RouteTab from 'components/RouteTab';
+import ROUTES from 'constants/routes';
+import history from 'lib/history';
+import LogsExplorer from 'pages/LogsExplorer';
+import Pipelines from 'pages/Pipelines';
+import { useLocation } from 'react-use';
+
+export const logsExplorer = {
+ Component: LogsExplorer,
+ name: 'Explorer',
+ route: ROUTES.LOGS,
+ key: ROUTES.LOGS,
+};
+
+export const logsPipelines = {
+ Component: Pipelines,
+ name: 'Pipelines',
+ route: ROUTES.LOGS_PIPELINES,
+ key: ROUTES.LOGS_PIPELINES,
+};
+
+export default function LogsModulePage(): JSX.Element {
+ const { pathname } = useLocation();
+
+ const routes = [logsExplorer, logsPipelines];
+
+ return ;
+}
diff --git a/frontend/src/pages/LogsModulePage/index.tsx b/frontend/src/pages/LogsModulePage/index.tsx
new file mode 100644
index 0000000000..680368481f
--- /dev/null
+++ b/frontend/src/pages/LogsModulePage/index.tsx
@@ -0,0 +1,3 @@
+import LogsModulePage from './LogsModulePage';
+
+export default LogsModulePage;
diff --git a/frontend/src/pages/Pipelines/index.tsx b/frontend/src/pages/Pipelines/index.tsx
index 1a05a4010a..d646390dda 100644
--- a/frontend/src/pages/Pipelines/index.tsx
+++ b/frontend/src/pages/Pipelines/index.tsx
@@ -81,7 +81,7 @@ function Pipelines(): JSX.Element {
return (
- ;
+
);
}
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
index f80a4925bc..7e5b32ab29 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
@@ -1,7 +1,7 @@
.workspace-locked-container {
text-align: center;
padding: 48px;
- margin: 48px;
+ margin: 24px;
}
.workpace-locked-details {
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
index 924509de82..1a19e3d6a5 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
@@ -6,6 +6,7 @@ import { Button, Card, Skeleton, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import ROUTES from 'constants/routes';
+import FullViewHeader from 'container/FullViewHeader/FullViewHeader';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
@@ -75,42 +76,46 @@ export default function WorkspaceBlocked(): JSX.Element {
}, [activeLicense?.key, updateCreditCard]);
return (
-
- {isLoadingLicenseData || !licensesData?.payload?.workSpaceBlock ? (
-
- ) : (
- <>
-
- Workspace Locked
-
- You have been locked out of your workspace because your trial ended
- without an upgrade to a paid plan. Your data will continue to be ingested
- till{' '}
- {getFormattedDate(licensesData?.payload?.gracePeriodEnd || Date.now())} ,
- at which point we will drop all the ingested data and terminate the
- account.
- {!isAdmin && 'Please contact your administrator for further help'}
-
- {isAdmin && (
- }
- size="middle"
- loading={isLoading}
- onClick={handleUpdateCreditCard}
- >
- Update Credit Card
-
- )}
-
- >
- )}
-
+ <>
+
+
+
+ {isLoadingLicenseData || !licensesData?.payload?.workSpaceBlock ? (
+
+ ) : (
+ <>
+
+ Workspace Locked
+
+ You have been locked out of your workspace because your trial ended
+ without an upgrade to a paid plan. Your data will continue to be ingested
+ till{' '}
+ {getFormattedDate(licensesData?.payload?.gracePeriodEnd || Date.now())} ,
+ at which point we will drop all the ingested data and terminate the
+ account.
+ {!isAdmin && 'Please contact your administrator for further help'}
+
+ {isAdmin && (
+ }
+ size="middle"
+ loading={isLoading}
+ onClick={handleUpdateCreditCard}
+ >
+ Update Credit Card
+
+ )}
+
+ >
+ )}
+
+ >
);
}
diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss
index 6712b4c59a..f2b1a3d413 100644
--- a/frontend/src/styles.scss
+++ b/frontend/src/styles.scss
@@ -1,3 +1,5 @@
+@import '@signozhq/design-tokens';
+
#root,
html,
body {
diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts
index ee1a7a09e9..91372d237b 100644
--- a/frontend/src/utils/permission/index.ts
+++ b/frontend/src/utils/permission/index.ts
@@ -86,4 +86,6 @@ export const routePermission: Record = {
BILLING: ['ADMIN', 'EDITOR', 'VIEWER'],
SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'],
SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'],
+ LOGS_BASE: [],
+ OLD_LOGS_EXPLORER: [],
};
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index 2099c438bb..fe33785fb5 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -3082,6 +3082,13 @@
resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz"
integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==
+"@signozhq/design-tokens@0.0.6":
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-0.0.6.tgz#42449052dca644c4d52448f9c2c521d39e535720"
+ integrity sha512-i+aG0YCuYL2KVUtRFj3qgAVDU6GbKmTdFXpqCqLUQp8diKMWH5Svzzxj4B14Q6+yE79+wbm1iZ0Nr6nYgkBA8Q==
+ dependencies:
+ style-dictionary "3.8.0"
+
"@sinclair/typebox@^0.25.16":
version "0.25.24"
resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz"
@@ -5422,6 +5429,15 @@ canvas-color-tracker@1:
dependencies:
tinycolor2 "^1.6.0"
+capital-case@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669"
+ integrity sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case-first "^2.0.2"
+
cardboard-vr-display@^1.0.19:
version "1.0.19"
resolved "https://registry.npmjs.org/cardboard-vr-display/-/cardboard-vr-display-1.0.19.tgz"
@@ -5461,6 +5477,24 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
+change-case@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12"
+ integrity sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==
+ dependencies:
+ camel-case "^4.1.2"
+ capital-case "^1.0.4"
+ constant-case "^3.0.4"
+ dot-case "^3.0.4"
+ header-case "^2.0.4"
+ no-case "^3.0.4"
+ param-case "^3.0.4"
+ pascal-case "^3.1.2"
+ path-case "^3.0.4"
+ sentence-case "^3.0.4"
+ snake-case "^3.0.4"
+ tslib "^2.0.3"
+
char-regex@^1.0.2:
version "1.0.2"
resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz"
@@ -5842,6 +5876,15 @@ connect-history-api-fallback@^2.0.0:
resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz"
integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==
+constant-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1"
+ integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case "^2.0.2"
+
content-disposition@0.5.4:
version "0.5.4"
resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz"
@@ -7993,7 +8036,7 @@ glob-to-regexp@^0.4.1:
resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz"
integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
-glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6:
+glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.0:
version "7.2.3"
resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz"
integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
@@ -8329,6 +8372,14 @@ he@^1.2.0:
resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+header-case@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/header-case/-/header-case-2.0.4.tgz#5a42e63b55177349cf405beb8d775acabb92c063"
+ integrity sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==
+ dependencies:
+ capital-case "^1.0.4"
+ tslib "^2.0.3"
+
headers-polyfill@3.2.5:
version "3.2.5"
resolved "https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-3.2.5.tgz#6e67d392c9d113d37448fe45014e0afdd168faed"
@@ -9871,6 +9922,11 @@ json5@^1.0.2:
dependencies:
minimist "^1.2.0"
+jsonc-parser@^3.0.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76"
+ integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==
+
jsonfile@^6.0.1:
version "6.1.0"
resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz"
@@ -11789,6 +11845,14 @@ pascal-case@^3.1.2:
no-case "^3.0.4"
tslib "^2.0.3"
+path-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f"
+ integrity sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
path-exists@^4.0.0:
version "4.0.0"
resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz"
@@ -13841,6 +13905,15 @@ send@0.18.0:
range-parser "~1.2.1"
statuses "2.0.1"
+sentence-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-3.0.4.tgz#3645a7b8c117c787fde8702056225bb62a45131f"
+ integrity sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case-first "^2.0.2"
+
serialize-javascript@^5.0.1:
version "5.0.1"
resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz"
@@ -14017,6 +14090,14 @@ slice-ansi@^5.0.0:
ansi-styles "^6.0.0"
is-fullwidth-code-point "^4.0.0"
+snake-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c"
+ integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
sockjs@^0.3.24:
version "0.3.24"
resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz"
@@ -14397,6 +14478,21 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+style-dictionary@3.8.0:
+ version "3.8.0"
+ resolved "https://registry.yarnpkg.com/style-dictionary/-/style-dictionary-3.8.0.tgz#7cb8d64360c53431f768d44def665f61e971a73e"
+ integrity sha512-wHlB/f5eO3mDcYv6WtOz6gvQC477jBKrwuIXe+PtHskTCBsJdAOvL8hCquczJxDui2TnwpeNE+2msK91JJomZg==
+ dependencies:
+ chalk "^4.0.0"
+ change-case "^4.1.2"
+ commander "^8.3.0"
+ fs-extra "^10.0.0"
+ glob "^7.2.0"
+ json5 "^2.2.2"
+ jsonc-parser "^3.0.0"
+ lodash "^4.17.15"
+ tinycolor2 "^1.4.1"
+
style-loader@1.3.0:
version "1.3.0"
resolved "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz"
@@ -14698,7 +14794,7 @@ tiny-warning@^1.0.0:
resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz"
integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
-tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.6.0:
+tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.4.1, tinycolor2@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.6.0.tgz#f98007460169b0263b97072c5ae92484ce02d09e"
integrity sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==
@@ -15156,6 +15252,20 @@ uplot@1.6.26:
resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.26.tgz#a6012fd141ad4a71741c75af0c71283d0ade45a7"
integrity sha512-qN0mveL6UsP40TnHzHAJkUQvpfA3y8zSLXtXKVlJo/sLfj2+vjan/Z3g81MCZjy/hEDUFNtnLftPmETDA4s7Rg==
+upper-case-first@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324"
+ integrity sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==
+ dependencies:
+ tslib "^2.0.3"
+
+upper-case@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-2.0.2.tgz#d89810823faab1df1549b7d97a76f8662bae6f7a"
+ integrity sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==
+ dependencies:
+ tslib "^2.0.3"
+
uri-js@^4.2.2:
version "4.4.1"
resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz"
From 525dea343cd0d0a761057fc0f67192bc38cc05ce Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Mon, 8 Jan 2024 01:33:04 +0530
Subject: [PATCH 05/39] chore: add prepare query for delta/unspecified table
(#4168)
---
.../app/metrics/v4/delta/table.go | 49 ++++++++
.../app/metrics/v4/delta/table_test.go | 114 ++++++++++++++++++
.../app/metrics/v4/delta/time_series_test.go | 2 +-
.../app/metrics/v4/delta/timeseries.go | 4 +-
4 files changed, 166 insertions(+), 3 deletions(-)
create mode 100644 pkg/query-service/app/metrics/v4/delta/table.go
create mode 100644 pkg/query-service/app/metrics/v4/delta/table_test.go
diff --git a/pkg/query-service/app/metrics/v4/delta/table.go b/pkg/query-service/app/metrics/v4/delta/table.go
new file mode 100644
index 0000000000..b2b42bb9a6
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/table.go
@@ -0,0 +1,49 @@
+package delta
+
+import (
+ "fmt"
+
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+// prepareMetricQueryDeltaTable builds the query to be used for fetching metrics
+func prepareMetricQueryDeltaTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/table_test.go b/pkg/query-service/app/metrics/v4/delta/table_test.go
new file mode 100644
index 0000000000..271afcd0d2
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/table_test.go
@@ -0,0 +1,114 @@
+package delta
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTableQuery(t *testing.T) {
+ // The table query is almost the same as the time series query, except that
+ // each row will be reduced to a single value using the `ReduceTo` aggregation
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := prepareMetricQueryDeltaTable(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
index d22aa12961..6eada21482 100644
--- a/pkg/query-service/app/metrics/v4/delta/time_series_test.go
+++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
@@ -113,7 +113,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareTimeAggregationSubQueryTimeSeries(
+ query, err := prepareTimeAggregationSubQuery(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
diff --git a/pkg/query-service/app/metrics/v4/delta/timeseries.go b/pkg/query-service/app/metrics/v4/delta/timeseries.go
index f9a9e265c3..83f9e2f111 100644
--- a/pkg/query-service/app/metrics/v4/delta/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/delta/timeseries.go
@@ -10,7 +10,7 @@ import (
)
// prepareTimeAggregationSubQueryTimeSeries builds the sub-query to be used for temporal aggregation
-func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
@@ -81,7 +81,7 @@ func prepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQue
var query string
- temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
if err != nil {
return "", err
}
From e7fabca38e27d94b9cc0960f6fec4a9a75c7bd9f Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Mon, 8 Jan 2024 18:00:42 +0530
Subject: [PATCH 06/39] chore: fix the query range cache gaps (#4283)
---
pkg/query-service/app/querier/helper.go | 4 +--
pkg/query-service/app/querier/querier.go | 13 ++++++---
pkg/query-service/app/querier/querier_test.go | 18 ++++++++++--
pkg/query-service/model/v3/v3.go | 29 +++++++++++++++++++
4 files changed, 56 insertions(+), 8 deletions(-)
diff --git a/pkg/query-service/app/querier/helper.go b/pkg/query-service/app/querier/helper.go
index 82bf2cc464..5bb3cc81f8 100644
--- a/pkg/query-service/app/querier/helper.go
+++ b/pkg/query-service/app/querier/helper.go
@@ -160,7 +160,7 @@ func (q *querier) runBuilderQuery(
if !params.NoCache && q.cache != nil {
var retrieveStatus status.RetrieveStatus
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
@@ -254,7 +254,7 @@ func (q *querier) runBuilderExpression(
if !params.NoCache && q.cache != nil {
var retrieveStatus status.RetrieveStatus
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go
index d51e5713bf..395659ced2 100644
--- a/pkg/query-service/app/querier/querier.go
+++ b/pkg/query-service/app/querier/querier.go
@@ -145,7 +145,7 @@ func (q *querier) execPromQuery(ctx context.Context, params *model.QueryRangePar
//
// The [End - fluxInterval, End] is always added to the list of misses, because
// the data might still be in flux and not yet available in the database.
-func findMissingTimeRanges(start, end int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
+func findMissingTimeRanges(start, end, step int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
var cachedStart, cachedEnd int64
for idx := range seriesList {
series := seriesList[idx]
@@ -160,11 +160,15 @@ func findMissingTimeRanges(start, end int64, seriesList []*v3.Series, fluxInterv
}
}
+ endMillis := time.Now().UnixMilli()
+ adjustStep := int64(math.Min(float64(step), 60))
+ roundedMillis := endMillis - (endMillis % (adjustStep * 1000))
+
// Exclude the flux interval from the cached end time
cachedEnd = int64(
math.Min(
float64(cachedEnd),
- float64(time.Now().UnixMilli()-fluxInterval.Milliseconds()),
+ float64(roundedMillis-fluxInterval.Milliseconds()),
),
)
@@ -215,7 +219,7 @@ func (q *querier) findMissingTimeRanges(start, end, step int64, cachedData []byt
// In case of error, we return the entire range as a miss
return []missInterval{{start: start, end: end}}
}
- return findMissingTimeRanges(start, end, cachedSeriesList, q.fluxInterval)
+ return findMissingTimeRanges(start, end, step, cachedSeriesList, q.fluxInterval)
}
func labelsToString(labels map[string]string) string {
@@ -258,6 +262,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
for idx := range seriesesByLabels {
series := seriesesByLabels[idx]
series.SortPoints()
+ series.RemoveDuplicatePoints()
mergedSeries = append(mergedSeries, series)
}
return mergedSeries
@@ -326,7 +331,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
// Ensure NoCache is not set and cache is not nil
if !params.NoCache && q.cache != nil {
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
diff --git a/pkg/query-service/app/querier/querier_test.go b/pkg/query-service/app/querier/querier_test.go
index f08ae82dcd..605d2f5180 100644
--- a/pkg/query-service/app/querier/querier_test.go
+++ b/pkg/query-service/app/querier/querier_test.go
@@ -23,6 +23,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name string
requestedStart int64 // in milliseconds
requestedEnd int64 // in milliseconds
+ requestedStep int64 // in seconds
cachedSeries []*v3.Series
expectedMiss []missInterval
}{
@@ -30,6 +31,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a subset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -62,6 +64,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a superset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -93,6 +96,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a left overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -125,6 +129,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a right overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -157,6 +162,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a disjoint of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -189,7 +195,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
- misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.cachedSeries, 0*time.Minute)
+ misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, tc.cachedSeries, 0*time.Minute)
if len(misses) != len(tc.expectedMiss) {
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
}
@@ -211,6 +217,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name string
requestedStart int64
requestedEnd int64
+ requestedStep int64
cachedSeries []*v3.Series
fluxInterval time.Duration
expectedMiss []missInterval
@@ -219,6 +226,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cached time range is a subset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -252,6 +260,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cached time range is a superset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -284,6 +293,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a left overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -317,6 +327,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a right overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -350,6 +361,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a disjoint of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -383,7 +395,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
- misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.cachedSeries, tc.fluxInterval)
+ misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, tc.cachedSeries, tc.fluxInterval)
if len(misses) != len(tc.expectedMiss) {
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
}
@@ -404,6 +416,7 @@ func TestQueryRange(t *testing.T) {
{
Start: 1675115596722,
End: 1675115596722 + 120*60*1000,
+ Step: 60,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
PanelType: v3.PanelTypeGraph,
@@ -436,6 +449,7 @@ func TestQueryRange(t *testing.T) {
{
Start: 1675115596722 + 60*60*1000,
End: 1675115596722 + 180*60*1000,
+ Step: 60,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
PanelType: v3.PanelTypeGraph,
diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go
index 453c6475a8..968fe188e6 100644
--- a/pkg/query-service/model/v3/v3.go
+++ b/pkg/query-service/model/v3/v3.go
@@ -680,6 +680,35 @@ func (s *Series) SortPoints() {
})
}
+func (s *Series) RemoveDuplicatePoints() {
+ if len(s.Points) == 0 {
+ return
+ }
+
+ // prioritize the last point
+ // this is to handle the case where the same point is sent twice
+ // the last point is the most recent point adjusted for the flux interval
+
+ newPoints := make([]Point, 0)
+ for i := len(s.Points) - 1; i >= 0; i-- {
+ if len(newPoints) == 0 {
+ newPoints = append(newPoints, s.Points[i])
+ continue
+ }
+ if newPoints[len(newPoints)-1].Timestamp != s.Points[i].Timestamp {
+ newPoints = append(newPoints, s.Points[i])
+ }
+ }
+
+ // reverse the points
+ for i := len(newPoints)/2 - 1; i >= 0; i-- {
+ opp := len(newPoints) - 1 - i
+ newPoints[i], newPoints[opp] = newPoints[opp], newPoints[i]
+ }
+
+ s.Points = newPoints
+}
+
type Row struct {
Timestamp time.Time `json:"timestamp"`
Data map[string]interface{} `json:"data"`
From 92717774a281720f2beabbd9da7abad29114f622 Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Mon, 8 Jan 2024 18:11:22 +0530
Subject: [PATCH 07/39] fix: throw error when response for value panel returns
more than one series (#4332)
---
pkg/query-service/app/querier/querier.go | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go
index 395659ced2..16ef778d20 100644
--- a/pkg/query-service/app/querier/querier.go
+++ b/pkg/query-service/app/querier/querier.go
@@ -507,6 +507,16 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
err = fmt.Errorf("invalid query type")
}
}
+
+ // return error if the number of series is more than one for value type panel
+ if params.CompositeQuery.PanelType == v3.PanelTypeValue {
+ if len(results) > 1 {
+ err = fmt.Errorf("there can be only one active query for value type panel")
+ } else if len(results) == 1 && len(results[0].Series) > 1 {
+ err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))
+ }
+ }
+
return results, err, errQueriesByName
}
From be6bca3717152a54e889457f0d9b742399b6724d Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Mon, 8 Jan 2024 20:04:21 +0530
Subject: [PATCH 08/39] chore: add prepare query for cumulative/unspecified
table (#4169)
---
.../app/metrics/v4/cumulative/table.go | 49 ++++++++
.../app/metrics/v4/cumulative/table_test.go | 112 ++++++++++++++++++
.../app/metrics/v4/cumulative/timeseries.go | 4 +-
.../metrics/v4/cumulative/timeseries_test.go | 2 +-
4 files changed, 164 insertions(+), 3 deletions(-)
create mode 100644 pkg/query-service/app/metrics/v4/cumulative/table.go
create mode 100644 pkg/query-service/app/metrics/v4/cumulative/table_test.go
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table.go b/pkg/query-service/app/metrics/v4/cumulative/table.go
new file mode 100644
index 0000000000..b81f3e7d8c
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/cumulative/table.go
@@ -0,0 +1,49 @@
+package cumulative
+
+import (
+ "fmt"
+
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+// prepareMetricQueryTable prepares the query to be used for fetching metrics
+func prepareMetricQueryTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table_test.go b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
new file mode 100644
index 0000000000..45a6e657ea
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
@@ -0,0 +1,112 @@
+package cumulative
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTableQuery(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := prepareMetricQueryTable(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
index 78d22be4aa..6f39a952cb 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
@@ -104,7 +104,7 @@ const (
// value to be reset to 0. This will produce an inaccurate result. The max is the best approximation we can get.
// We don't expect the process to restart very often, so this should be a good approximation.
-func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
@@ -181,7 +181,7 @@ func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.Bui
func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var query string
- temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
if err != nil {
return "", err
}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
index 671af6ac69..6b1d1e43b9 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
@@ -113,7 +113,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareTimeAggregationSubQueryTimeSeries(
+ query, err := prepareTimeAggregationSubQuery(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
From a47a90b0f3a163e799c11218426c9108bfdfdc86 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Tue, 9 Jan 2024 12:55:24 +0530
Subject: [PATCH 09/39] fix: validate password on paste, change (#4344)
Co-authored-by: Vishal Sharma
---
frontend/src/api/utils.ts | 6 +-
.../components/WelcomeLeftContainer/styles.ts | 1 +
.../ResetPassword/ResetPassword.test.tsx | 72 ++++++++++++
.../src/container/ResetPassword/index.tsx | 106 +++++++++++++-----
.../src/container/ResetPassword/styles.ts | 6 +-
5 files changed, 160 insertions(+), 31 deletions(-)
create mode 100644 frontend/src/container/ResetPassword/ResetPassword.test.tsx
diff --git a/frontend/src/api/utils.ts b/frontend/src/api/utils.ts
index 140e793e35..bd81719eee 100644
--- a/frontend/src/api/utils.ts
+++ b/frontend/src/api/utils.ts
@@ -66,7 +66,11 @@ export const Logout = (): void => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
- window.Intercom('shutdown');
+ if (window && window.Intercom) {
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+ // @ts-ignore
+ window.Intercom('shutdown');
+ }
history.push(ROUTES.LOGIN);
};
diff --git a/frontend/src/components/WelcomeLeftContainer/styles.ts b/frontend/src/components/WelcomeLeftContainer/styles.ts
index 70428a7f1d..be312edac1 100644
--- a/frontend/src/components/WelcomeLeftContainer/styles.ts
+++ b/frontend/src/components/WelcomeLeftContainer/styles.ts
@@ -13,6 +13,7 @@ export const Container = styled.div`
&&& {
display: flex;
justify-content: center;
+ gap: 16px;
align-items: center;
min-height: 100vh;
diff --git a/frontend/src/container/ResetPassword/ResetPassword.test.tsx b/frontend/src/container/ResetPassword/ResetPassword.test.tsx
new file mode 100644
index 0000000000..b3345fedf3
--- /dev/null
+++ b/frontend/src/container/ResetPassword/ResetPassword.test.tsx
@@ -0,0 +1,72 @@
+import { fireEvent, render, screen, waitFor } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { act } from 'react-dom/test-utils';
+
+import ResetPassword from './index';
+
+jest.mock('api/user/resetPassword', () => ({
+ __esModule: true,
+ default: jest.fn(),
+}));
+
+jest.useFakeTimers();
+
+describe('ResetPassword Component', () => {
+ beforeEach(() => {
+ userEvent.setup();
+ jest.clearAllMocks();
+ });
+
+ it('renders ResetPassword component correctly', () => {
+ render( );
+ expect(screen.getByText('Reset Your Password')).toBeInTheDocument();
+ expect(screen.getByLabelText('Password')).toBeInTheDocument();
+ // eslint-disable-next-line sonarjs/no-duplicate-string
+ expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument();
+ expect(
+ // eslint-disable-next-line sonarjs/no-duplicate-string
+ screen.getByRole('button', { name: 'Get Started' }),
+ ).toBeInTheDocument();
+ });
+
+ it('disables the "Get Started" button when password is invalid', async () => {
+ render( );
+
+ const passwordInput = screen.getByLabelText('Password');
+ const confirmPasswordInput = screen.getByLabelText('Confirm Password');
+ const submitButton = screen.getByRole('button', { name: 'Get Started' });
+
+ act(() => {
+ // Set invalid password
+ fireEvent.change(passwordInput, { target: { value: 'password' } });
+ fireEvent.change(confirmPasswordInput, { target: { value: 'password' } });
+ });
+
+ await waitFor(() => {
+ // Expect the "Get Started" button to be disabled
+ expect(submitButton).toBeDisabled();
+ });
+ });
+
+ it('enables the "Get Started" button when password is valid', async () => {
+ render( );
+
+ const passwordInput = screen.getByLabelText('Password');
+ const confirmPasswordInput = screen.getByLabelText('Confirm Password');
+ const submitButton = screen.getByRole('button', { name: 'Get Started' });
+
+ act(() => {
+ fireEvent.change(passwordInput, { target: { value: 'newPassword' } });
+ fireEvent.change(confirmPasswordInput, { target: { value: 'newPassword' } });
+ });
+
+ act(() => {
+ jest.advanceTimersByTime(500);
+ });
+
+ await waitFor(() => {
+ // Expect the "Get Started" button to be enabled
+ expect(submitButton).toBeEnabled();
+ });
+ });
+});
diff --git a/frontend/src/container/ResetPassword/index.tsx b/frontend/src/container/ResetPassword/index.tsx
index eac4b098cd..f4be5310e2 100644
--- a/frontend/src/container/ResetPassword/index.tsx
+++ b/frontend/src/container/ResetPassword/index.tsx
@@ -3,6 +3,7 @@ import resetPasswordApi from 'api/user/resetPassword';
import { Logout } from 'api/utils';
import WelcomeLeftContainer from 'components/WelcomeLeftContainer';
import ROUTES from 'constants/routes';
+import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { Label } from 'pages/SignUp/styles';
@@ -20,6 +21,8 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
const [confirmPasswordError, setConfirmPasswordError] = useState(
false,
);
+
+ const [isValidPassword, setIsValidPassword] = useState(false);
const [loading, setLoading] = useState(false);
const { t } = useTranslation(['common']);
const { search } = useLocation();
@@ -35,7 +38,7 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
}
}, [token]);
- const handleSubmit: () => Promise = async () => {
+ const handleFormSubmit: () => Promise = async () => {
try {
setLoading(true);
const { password } = form.getFieldsValue();
@@ -72,38 +75,88 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
});
}
};
- const handleValuesChange: (changedValues: FormValues) => void = (
- changedValues,
- ) => {
- if ('confirmPassword' in changedValues) {
- const { confirmPassword } = changedValues;
- const isSamePassword = form.getFieldValue('password') === confirmPassword;
- setConfirmPasswordError(!isSamePassword);
+ const validatePassword = (): boolean => {
+ const { password, confirmPassword } = form.getFieldsValue();
+
+ if (
+ password &&
+ confirmPassword &&
+ password.trim() &&
+ confirmPassword.trim() &&
+ password.length > 0 &&
+ confirmPassword.length > 0
+ ) {
+ return password === confirmPassword;
+ }
+
+ return false;
+ };
+
+ const handleValuesChange = useDebouncedFn((): void => {
+ const { password, confirmPassword } = form.getFieldsValue();
+
+ if (!password || !confirmPassword) {
+ setIsValidPassword(false);
+ }
+
+ if (
+ password &&
+ confirmPassword &&
+ password.trim() &&
+ confirmPassword.trim()
+ ) {
+ const isValid = validatePassword();
+
+ setIsValidPassword(isValid);
+ setConfirmPasswordError(!isValid);
+ }
+ }, 100);
+
+ const handleSubmit = (): void => {
+ const isValid = validatePassword();
+ setIsValidPassword(isValid);
+
+ if (token) {
+ handleFormSubmit();
}
};
return (
-
+
Reset Your Password
- Password
-
-
-
+ Password
+
+
+
- Confirm Password
-
-
-
+ Confirm Password
+
+
+
{confirmPasswordError && (
- Passwords don’t match. Please try again
+ The passwords entered do not match. Please double-check and re-enter
+ your passwords.
)}
@@ -124,13 +178,7 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
htmlType="submit"
data-attr="signup"
loading={loading}
- disabled={
- loading ||
- !form.getFieldValue('password') ||
- !form.getFieldValue('confirmPassword') ||
- confirmPasswordError ||
- token === null
- }
+ disabled={!isValidPassword || loading}
>
Get Started
diff --git a/frontend/src/container/ResetPassword/styles.ts b/frontend/src/container/ResetPassword/styles.ts
index e59a453695..f71860382e 100644
--- a/frontend/src/container/ResetPassword/styles.ts
+++ b/frontend/src/container/ResetPassword/styles.ts
@@ -4,8 +4,12 @@ import styled from 'styled-components';
export const FormWrapper = styled(Card)`
display: flex;
justify-content: center;
- max-width: 432px;
+ width: 432px;
flex: 1;
+
+ .ant-card-body {
+ width: 100%;
+ }
`;
export const ButtonContainer = styled.div`
From 5fe7948be9d36e0831900518aca9b9222bd50b5e Mon Sep 17 00:00:00 2001
From: Keshav Gupta
Date: Tue, 9 Jan 2024 13:26:44 +0530
Subject: [PATCH 10/39] feat: preserve the sorting, searching and pagination in
dashboard page (#4319)
* feat: preserved the sorting, searching and pagination
* fix: filter in dashboard data
---
.../ResizeTable/DynamicColumnTable.tsx | 2 +-
.../ListOfDashboard/DashboardsList.tsx | 60 +++++++++++++------
.../src/container/ListOfDashboard/utils.ts | 20 +++++++
3 files changed, 63 insertions(+), 19 deletions(-)
create mode 100644 frontend/src/container/ListOfDashboard/utils.ts
diff --git a/frontend/src/components/ResizeTable/DynamicColumnTable.tsx b/frontend/src/components/ResizeTable/DynamicColumnTable.tsx
index 55af931d5c..c0d77c967b 100644
--- a/frontend/src/components/ResizeTable/DynamicColumnTable.tsx
+++ b/frontend/src/components/ResizeTable/DynamicColumnTable.tsx
@@ -43,7 +43,7 @@ function DynamicColumnTable({
: undefined,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
- }, [columns]);
+ }, [columns, dynamicColumns]);
const onToggleHandler = (index: number) => (
checked: boolean,
diff --git a/frontend/src/container/ListOfDashboard/DashboardsList.tsx b/frontend/src/container/ListOfDashboard/DashboardsList.tsx
index 9b416c65f4..1851e1d71a 100644
--- a/frontend/src/container/ListOfDashboard/DashboardsList.tsx
+++ b/frontend/src/container/ListOfDashboard/DashboardsList.tsx
@@ -24,10 +24,14 @@ import { Dashboard } from 'types/api/dashboard/getAll';
import AppReducer from 'types/reducer/app';
import DateComponent from '../../components/ResizeTable/TableComponent/DateComponent';
+import useSortableTable from '../../hooks/ResizeTable/useSortableTable';
+import useUrlQuery from '../../hooks/useUrlQuery';
+import { GettableAlert } from '../../types/api/alerts/get';
import ImportJSON from './ImportJSON';
import { ButtonContainer, NewDashboardButton, TableContainer } from './styles';
import DeleteButton from './TableComponents/DeleteButton';
import Name from './TableComponents/Name';
+import { filterDashboard } from './utils';
const { Search } = Input;
@@ -55,8 +59,26 @@ function DashboardsList(): JSX.Element {
const [uploadedGrafana, setUploadedGrafana] = useState(false);
const [isFilteringDashboards, setIsFilteringDashboards] = useState(false);
+ const params = useUrlQuery();
+ const orderColumnParam = params.get('columnKey');
+ const orderQueryParam = params.get('order');
+ const paginationParam = params.get('page');
+ const searchParams = params.get('search');
+ const [searchString, setSearchString] = useState(searchParams || '');
+
const [dashboards, setDashboards] = useState();
+ const sortingOrder: 'ascend' | 'descend' | null =
+ orderQueryParam === 'ascend' || orderQueryParam === 'descend'
+ ? orderQueryParam
+ : null;
+
+ const { sortedInfo, handleChange } = useSortableTable(
+ sortingOrder,
+ orderColumnParam || '',
+ searchString,
+ );
+
const sortDashboardsByCreatedAt = (dashboards: Dashboard[]): void => {
const sortedDashboards = dashboards.sort(
(a, b) =>
@@ -67,7 +89,12 @@ function DashboardsList(): JSX.Element {
useEffect(() => {
sortDashboardsByCreatedAt(dashboardListResponse);
- }, [dashboardListResponse]);
+ const filteredDashboards = filterDashboard(
+ searchString,
+ dashboardListResponse,
+ );
+ setDashboards(filteredDashboards || []);
+ }, [dashboardListResponse, searchString]);
const [newDashboardState, setNewDashboardState] = useState({
loading: false,
@@ -89,6 +116,10 @@ function DashboardsList(): JSX.Element {
return prev - next;
},
render: DateComponent,
+ sortOrder:
+ sortedInfo.columnKey === DynamicColumnsKey.CreatedAt
+ ? sortedInfo.order
+ : null,
},
{
title: 'Created By',
@@ -108,6 +139,10 @@ function DashboardsList(): JSX.Element {
return prev - next;
},
render: DateComponent,
+ sortOrder:
+ sortedInfo.columnKey === DynamicColumnsKey.UpdatedAt
+ ? sortedInfo.order
+ : null,
},
{
title: 'Last Updated By',
@@ -249,28 +284,13 @@ function DashboardsList(): JSX.Element {
return menuItems;
}, [createNewDashboard, isDashboardListLoading, onNewDashboardHandler, t]);
- const searchArrayOfObjects = (searchValue: string): any[] => {
- // Convert the searchValue to lowercase for case-insensitive search
- const searchValueLowerCase = searchValue.toLowerCase();
-
- // Use the filter method to find matching objects
- return dashboardListResponse.filter((item: any) => {
- // Convert each property value to lowercase for case-insensitive search
- const itemValues = Object.values(item?.data).map((value: any) =>
- value.toString().toLowerCase(),
- );
-
- // Check if any property value contains the searchValue
- return itemValues.some((value) => value.includes(searchValueLowerCase));
- });
- };
-
const handleSearch = useDebouncedFn((event: unknown): void => {
setIsFilteringDashboards(true);
const searchText = (event as React.BaseSyntheticEvent)?.target?.value || '';
- const filteredDashboards = searchArrayOfObjects(searchText);
+ const filteredDashboards = filterDashboard(searchText, dashboardListResponse);
setDashboards(filteredDashboards);
setIsFilteringDashboards(false);
+ setSearchString(searchText);
}, 500);
const GetHeader = useMemo(
@@ -283,6 +303,7 @@ function DashboardsList(): JSX.Element {
onChange={handleSearch}
loading={isFilteringDashboards}
style={{ marginBottom: 16, marginTop: 16 }}
+ defaultValue={searchString}
/>
@@ -328,6 +349,7 @@ function DashboardsList(): JSX.Element {
newDashboardState.loading,
newDashboardState.error,
getText,
+ searchString,
],
);
@@ -349,12 +371,14 @@ function DashboardsList(): JSX.Element {
pageSize: 10,
defaultPageSize: 10,
total: data?.length || 0,
+ defaultCurrent: Number(paginationParam) || 1,
}}
showHeader
bordered
sticky
loading={isDashboardListLoading}
dataSource={data}
+ onChange={handleChange}
showSorterTooltip
/>
diff --git a/frontend/src/container/ListOfDashboard/utils.ts b/frontend/src/container/ListOfDashboard/utils.ts
new file mode 100644
index 0000000000..199b356581
--- /dev/null
+++ b/frontend/src/container/ListOfDashboard/utils.ts
@@ -0,0 +1,20 @@
+import { Dashboard } from 'types/api/dashboard/getAll';
+
+export const filterDashboard = (
+ searchValue: string,
+ dashboardList: Dashboard[],
+): any[] => {
+ // Convert the searchValue to lowercase for case-insensitive search
+ const searchValueLowerCase = searchValue.toLowerCase();
+
+ // Use the filter method to find matching objects
+ return dashboardList.filter((item: Dashboard) => {
+ // Convert each property value to lowercase for case-insensitive search
+ const itemValues = Object.values(item?.data).map((value) =>
+ value.toString().toLowerCase(),
+ );
+
+ // Check if any property value contains the searchValue
+ return itemValues.some((value) => value.includes(searchValueLowerCase));
+ });
+};
From 5b39dc36d6d788b4fd909c524c8569d0b0d08754 Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Tue, 9 Jan 2024 14:19:23 +0530
Subject: [PATCH 11/39] [Feat]: soft min and soft max in uplot chart (#4287)
* feat: soft min and soft max in uplot chart
* fix: build pipeline
* fix: tsc
* refactor: added test case
* refactor: updated logic and added unit test
* refactor: updated logic
* chore: removed placeholder
---------
Co-authored-by: Srikanth Chekuri
Co-authored-by: Yunus M
---
.../FormAlertRules/ChartPreview/index.tsx | 2 +
.../GridCard/FullView/index.tsx | 2 +
.../GridCardLayout/GridCard/index.tsx | 4 +
.../MetricsApplication.factory.ts | 2 +
.../NewDashboard/ComponentsSlider/index.tsx | 2 +
.../WidgetGraph/WidgetGraphContainer.tsx | 4 +
.../WidgetGraph/WidgetGraphs.tsx | 8 +
.../LeftContainer/WidgetGraph/index.tsx | 4 +
.../NewWidget/LeftContainer/index.tsx | 4 +
.../NewWidget/RightContainer/constants.ts | 9 +
.../NewWidget/RightContainer/index.tsx | 50 ++++-
frontend/src/container/NewWidget/index.tsx | 22 ++
frontend/src/container/NewWidget/types.ts | 2 +
.../TimeSeriesView/TimeSeriesView.tsx | 2 +
frontend/src/hooks/dashboard/utils.ts | 2 +
.../src/lib/uPlotLib/getUplotChartOptions.ts | 12 +-
.../lib/uPlotLib/utils/getYAxisScale.test.ts | 211 ++++++++++++++++++
.../src/lib/uPlotLib/utils/getYAxisScale.ts | 171 ++++++++++++--
frontend/src/types/api/dashboard/getAll.ts | 2 +
19 files changed, 498 insertions(+), 17 deletions(-)
create mode 100644 frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts
diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx
index 7e22e3a11c..73b1f4715e 100644
--- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx
+++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx
@@ -150,6 +150,8 @@ function ChartPreview({
thresholdUnit: alertDef?.condition.targetUnit,
},
],
+ softMax: null,
+ softMin: null,
}),
[
yAxisUnit,
diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
index db42625f1d..750cc6a707 100644
--- a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
@@ -132,6 +132,8 @@ function FullView({
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
+ softMax: widget.softMax,
+ softMin: widget.softMin,
});
setChartOptions(newChartOptions);
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 32454bb28f..79b7d39b4d 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -135,11 +135,15 @@ function GridCardGraph({
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
+ softMax: widget.softMax,
+ softMin: widget.softMin,
}),
[
widget?.id,
widget?.yAxisUnit,
widget.thresholds,
+ widget.softMax,
+ widget.softMin,
queryResponse.data?.payload,
containerDimensions,
isDarkMode,
diff --git a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts
index 9941308838..20becbb810 100644
--- a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts
+++ b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts
@@ -20,4 +20,6 @@ export const getWidgetQueryBuilder = ({
timePreferance: 'GLOBAL_TIME',
title,
yAxisUnit,
+ softMax: null,
+ softMin: null,
});
diff --git a/frontend/src/container/NewDashboard/ComponentsSlider/index.tsx b/frontend/src/container/NewDashboard/ComponentsSlider/index.tsx
index 1a111cf4fe..d355edfd1a 100644
--- a/frontend/src/container/NewDashboard/ComponentsSlider/index.tsx
+++ b/frontend/src/container/NewDashboard/ComponentsSlider/index.tsx
@@ -63,6 +63,8 @@ function DashboardGraphSlider(): JSX.Element {
panelTypes: name,
query: initialQueriesMap.metrics,
timePreferance: 'GLOBAL_TIME',
+ softMax: null,
+ softMin: null,
},
],
},
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphContainer.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphContainer.tsx
index 6fcbcaa8c1..c3d47880d3 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphContainer.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphContainer.tsx
@@ -14,6 +14,8 @@ function WidgetGraphContainer({
selectedTime,
thresholds,
fillSpans = false,
+ softMax,
+ softMin,
}: WidgetGraphProps): JSX.Element {
const { selectedDashboard } = useDashboard();
@@ -59,6 +61,8 @@ function WidgetGraphContainer({
selectedWidget={selectedWidget}
thresholds={thresholds}
fillSpans={fillSpans}
+ softMax={softMax}
+ softMin={softMin}
/>
);
}
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
index cc4c146a96..08b65fa9c1 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
@@ -23,6 +23,8 @@ function WidgetGraph({
yAxisUnit,
thresholds,
fillSpans,
+ softMax,
+ softMin,
}: WidgetGraphProps): JSX.Element {
const { stagedQuery } = useQueryBuilder();
@@ -83,6 +85,8 @@ function WidgetGraph({
fillSpans,
minTimeScale,
maxTimeScale,
+ softMax,
+ softMin,
}),
[
widgetId,
@@ -95,6 +99,8 @@ function WidgetGraph({
fillSpans,
minTimeScale,
maxTimeScale,
+ softMax,
+ softMin,
],
);
@@ -125,6 +131,8 @@ interface WidgetGraphProps {
SuccessResponse,
Error
>;
+ softMax: number | null;
+ softMin: number | null;
}
export default WidgetGraph;
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/index.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/index.tsx
index e92b17f90a..56846c8dec 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/index.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/index.tsx
@@ -17,6 +17,8 @@ function WidgetGraph({
selectedTime,
thresholds,
fillSpans,
+ softMax,
+ softMin,
}: WidgetGraphProps): JSX.Element {
const { currentQuery } = useQueryBuilder();
const { selectedDashboard } = useDashboard();
@@ -53,6 +55,8 @@ function WidgetGraph({
selectedGraph={selectedGraph}
yAxisUnit={yAxisUnit}
fillSpans={fillSpans}
+ softMax={softMax}
+ softMin={softMin}
/>
);
diff --git a/frontend/src/container/NewWidget/LeftContainer/index.tsx b/frontend/src/container/NewWidget/LeftContainer/index.tsx
index ad48c19a5c..0a78a084d8 100644
--- a/frontend/src/container/NewWidget/LeftContainer/index.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/index.tsx
@@ -11,6 +11,8 @@ function LeftContainer({
selectedTime,
thresholds,
fillSpans,
+ softMax,
+ softMin,
}: WidgetGraphProps): JSX.Element {
return (
<>
@@ -20,6 +22,8 @@ function LeftContainer({
selectedGraph={selectedGraph}
yAxisUnit={yAxisUnit}
fillSpans={fillSpans}
+ softMax={softMax}
+ softMin={softMin}
/>
diff --git a/frontend/src/container/NewWidget/RightContainer/constants.ts b/frontend/src/container/NewWidget/RightContainer/constants.ts
index ae12b4f3a6..0030f6927b 100644
--- a/frontend/src/container/NewWidget/RightContainer/constants.ts
+++ b/frontend/src/container/NewWidget/RightContainer/constants.ts
@@ -30,6 +30,15 @@ export const panelTypeVsThreshold: { [key in PANEL_TYPES]: boolean } = {
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
+export const panelTypeVsSoftMinMax: { [key in PANEL_TYPES]: boolean } = {
+ [PANEL_TYPES.TIME_SERIES]: true,
+ [PANEL_TYPES.VALUE]: false,
+ [PANEL_TYPES.TABLE]: false,
+ [PANEL_TYPES.LIST]: false,
+ [PANEL_TYPES.TRACE]: false,
+ [PANEL_TYPES.EMPTY_WIDGET]: false,
+} as const;
+
export const panelTypeVsDragAndDrop: { [key in PANEL_TYPES]: boolean } = {
[PANEL_TYPES.TIME_SERIES]: false,
[PANEL_TYPES.VALUE]: true,
diff --git a/frontend/src/container/NewWidget/RightContainer/index.tsx b/frontend/src/container/NewWidget/RightContainer/index.tsx
index c39039c04f..5ab1a965fc 100644
--- a/frontend/src/container/NewWidget/RightContainer/index.tsx
+++ b/frontend/src/container/NewWidget/RightContainer/index.tsx
@@ -3,6 +3,7 @@ import {
Button,
Divider,
Input,
+ InputNumber,
Select,
Space,
Switch,
@@ -16,7 +17,7 @@ import useCreateAlerts from 'hooks/queryBuilder/useCreateAlerts';
import { Dispatch, SetStateAction, useCallback } from 'react';
import { Widgets } from 'types/api/dashboard/getAll';
-import { panelTypeVsThreshold } from './constants';
+import { panelTypeVsSoftMinMax, panelTypeVsThreshold } from './constants';
import { Container, Title } from './styles';
import ThresholdSelector from './Threshold/ThresholdSelector';
import { ThresholdProps } from './Threshold/types';
@@ -42,6 +43,10 @@ function RightContainer({
selectedWidget,
isFillSpans,
setIsFillSpans,
+ softMax,
+ softMin,
+ setSoftMax,
+ setSoftMin,
}: RightContainerProps): JSX.Element {
const onChangeHandler = useCallback(
(setFunc: Dispatch>, value: string) => {
@@ -56,6 +61,21 @@ function RightContainer({
const onCreateAlertsHandler = useCreateAlerts(selectedWidget);
const allowThreshold = panelTypeVsThreshold[selectedGraph];
+ const allowSoftMinMax = panelTypeVsSoftMinMax[selectedGraph];
+
+ const softMinHandler = useCallback(
+ (value: number | null) => {
+ setSoftMin(value);
+ },
+ [setSoftMin],
+ );
+
+ const softMaxHandler = useCallback(
+ (value: number | null) => {
+ setSoftMax(value);
+ },
+ [setSoftMax],
+ );
return (
@@ -129,6 +149,30 @@ function RightContainer({
)}
+ {allowSoftMinMax && (
+ <>
+
+
+ Soft Min
+
+
+
+ Soft Max
+
+
+ >
+ )}
+
{allowThreshold && (
<>
@@ -166,6 +210,10 @@ interface RightContainerProps {
selectedWidget?: Widgets;
isFillSpans: boolean;
setIsFillSpans: Dispatch>;
+ softMin: number | null;
+ softMax: number | null;
+ setSoftMin: Dispatch>;
+ setSoftMax: Dispatch>;
}
RightContainer.defaultProps = {
diff --git a/frontend/src/container/NewWidget/index.tsx b/frontend/src/container/NewWidget/index.tsx
index 8edf04c49c..9ea47330bf 100644
--- a/frontend/src/container/NewWidget/index.tsx
+++ b/frontend/src/container/NewWidget/index.tsx
@@ -104,6 +104,18 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
const [saveModal, setSaveModal] = useState(false);
const [discardModal, setDiscardModal] = useState(false);
+ const [softMin, setSoftMin] = useState(
+ selectedWidget?.softMin === null || selectedWidget?.softMin === undefined
+ ? null
+ : selectedWidget?.softMin || 0,
+ );
+
+ const [softMax, setSoftMax] = useState(
+ selectedWidget?.softMax === null || selectedWidget?.softMax === undefined
+ ? null
+ : selectedWidget?.softMax || 0,
+ );
+
const closeModal = (): void => {
setSaveModal(false);
setDiscardModal(false);
@@ -178,6 +190,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
yAxisUnit,
panelTypes: graphType,
thresholds,
+ softMin,
+ softMax,
fillSpans: isFillSpans,
},
...afterWidgets,
@@ -213,6 +227,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
yAxisUnit,
graphType,
thresholds,
+ softMin,
+ softMax,
isFillSpans,
afterWidgets,
updateDashboardMutation,
@@ -317,6 +333,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
yAxisUnit={yAxisUnit}
thresholds={thresholds}
fillSpans={isFillSpans}
+ softMax={softMax}
+ softMin={softMin}
/>
@@ -343,6 +361,10 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
selectedWidget={selectedWidget}
isFillSpans={isFillSpans}
setIsFillSpans={setIsFillSpans}
+ softMin={softMin}
+ setSoftMin={setSoftMin}
+ softMax={softMax}
+ setSoftMax={setSoftMax}
/>
diff --git a/frontend/src/container/NewWidget/types.ts b/frontend/src/container/NewWidget/types.ts
index 83475c0104..21d2268d76 100644
--- a/frontend/src/container/NewWidget/types.ts
+++ b/frontend/src/container/NewWidget/types.ts
@@ -13,4 +13,6 @@ export interface NewWidgetProps {
export interface WidgetGraphProps extends NewWidgetProps {
selectedTime: timePreferance;
thresholds: ThresholdProps[];
+ softMin: number | null;
+ softMax: number | null;
}
diff --git a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
index c1dd107f3f..767bf16d34 100644
--- a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
+++ b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
@@ -60,6 +60,8 @@ function TimeSeriesView({
isDarkMode,
minTimeScale,
maxTimeScale,
+ softMax: null,
+ softMin: null,
});
return (
diff --git a/frontend/src/hooks/dashboard/utils.ts b/frontend/src/hooks/dashboard/utils.ts
index 930fc080b2..4dfb8ce9c3 100644
--- a/frontend/src/hooks/dashboard/utils.ts
+++ b/frontend/src/hooks/dashboard/utils.ts
@@ -33,6 +33,8 @@ export const addEmptyWidgetInDashboardJSONWithQuery = (
title: '',
timePreferance: 'GLOBAL_TIME',
panelTypes: panelTypes || PANEL_TYPES.TIME_SERIES,
+ softMax: null,
+ softMin: null,
},
],
},
diff --git a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
index a86c1ee49c..b6e61a2ae6 100644
--- a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
+++ b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
@@ -35,6 +35,8 @@ interface GetUPlotChartOptions {
fillSpans?: boolean;
minTimeScale?: number;
maxTimeScale?: number;
+ softMin: number | null;
+ softMax: number | null;
}
export const getUPlotChartOptions = ({
@@ -51,6 +53,8 @@ export const getUPlotChartOptions = ({
setGraphsVisibilityStates,
thresholds,
fillSpans,
+ softMax,
+ softMin,
}: GetUPlotChartOptions): uPlot.Options => {
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
@@ -87,11 +91,13 @@ export const getUPlotChartOptions = ({
...timeScaleProps,
},
y: {
- ...getYAxisScale(
+ ...getYAxisScale({
thresholds,
- apiResponse?.data.newResult.data.result,
+ series: apiResponse?.data.newResult.data.result,
yAxisUnit,
- ),
+ softMax,
+ softMin,
+ }),
},
},
plugins: [
diff --git a/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts
new file mode 100644
index 0000000000..ecd70084e8
--- /dev/null
+++ b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts
@@ -0,0 +1,211 @@
+import { PANEL_TYPES } from 'constants/queryBuilder';
+import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
+import { QueryDataV3 } from 'types/api/widgets/getQuery';
+
+import { GetYAxisScale, getYAxisScale } from './getYAxisScale';
+
+describe('getYAxisScale', () => {
+ const mockThresholds: ThresholdProps[] = [
+ {
+ index: '1',
+ keyIndex: 1,
+ thresholdValue: 10,
+ thresholdUnit: 'percentunit',
+ moveThreshold(dragIndex, hoverIndex): void {
+ console.log(dragIndex, hoverIndex);
+ },
+ selectedGraph: PANEL_TYPES.TIME_SERIES,
+ },
+ {
+ index: '2',
+ keyIndex: 2,
+ thresholdValue: 20,
+ thresholdUnit: 'percentunit',
+ moveThreshold(dragIndex, hoverIndex): void {
+ console.log(dragIndex, hoverIndex);
+ },
+ selectedGraph: PANEL_TYPES.TIME_SERIES,
+ },
+ ];
+
+ const mockSeriesData: QueryDataV3[] = [
+ {
+ list: null,
+ queryName: 'Mock Query',
+ series: [
+ {
+ labels: {},
+ values: [
+ { timestamp: 1, value: '15' },
+ { timestamp: 2, value: '25' },
+ ],
+ },
+ ],
+ },
+ ];
+
+ const mockYAxisUnit = 'percentunit';
+ const mockSoftMin = 5;
+ const mockSoftMax = 30;
+
+ it('threshold absent, series data absent and softmin and softmax is absent', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: [],
+ yAxisUnit: undefined,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({ auto: true });
+ });
+
+ it('Threshold absent, series data present softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+
+ it('Only series data present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({ auto: true });
+ });
+
+ it('Threshold absent, series data present, softmin present and softmax absent', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 25],
+ });
+ });
+
+ it('Threshold absent, series data present, softmin absent and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [15, 30],
+ });
+ });
+
+ it('Threshold present, series absent and softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+
+ it('Only threshold data present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [10, 20],
+ });
+ });
+
+ it('Threshold present, series absent, softmin absent and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [10, 30],
+ });
+ });
+
+ it('Threshold data present, series data absent, softmin present and softmax absent', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 20],
+ });
+ });
+
+ it('Threshold data absent, series absent, softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ range: {
+ min: { soft: mockSoftMin, mode: 2 },
+ max: { soft: mockSoftMax, mode: 2 },
+ },
+ });
+ });
+
+ it('All data present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+});
diff --git a/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts b/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
index 503abd5790..42860ea8c8 100644
--- a/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
+++ b/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
@@ -2,6 +2,7 @@ import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/typ
import { convertValue } from 'lib/getConvertedValue';
import { isFinite } from 'lodash-es';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
+import uPlot from 'uplot';
function findMinMaxValues(data: QueryDataV3[]): [number, number] {
let min = Number.MAX_SAFE_INTEGER;
@@ -71,23 +72,167 @@ function areAllSeriesEmpty(series: QueryDataV3[]): boolean {
});
}
-export const getYAxisScale = (
- thresholds?: ThresholdProps[],
- series?: QueryDataV3[],
- yAxisUnit?: string,
-): {
- auto: boolean;
- range?: [number, number];
-} => {
- if (!thresholds || !series || thresholds.length === 0) return { auto: true };
+function configSoftMinMax(
+ softMin: number | null,
+ softMax: number | null,
+): { range: uPlot.Scale.Range } {
+ return {
+ range: {
+ min: {
+ soft: softMin !== null ? softMin : undefined,
+ mode: 2,
+ },
+ max: {
+ soft: softMax !== null ? softMax : undefined,
+ mode: 2,
+ },
+ },
+ };
+}
- if (areAllSeriesEmpty(series)) return { auto: true };
+export const getYAxisScale = ({
+ thresholds,
+ series,
+ yAxisUnit,
+ softMin,
+ softMax,
+}: // eslint-disable-next-line sonarjs/cognitive-complexity
+GetYAxisScale): { auto?: boolean; range?: uPlot.Scale.Range } => {
+ // Situation: thresholds and series data is absent
+ if (
+ (!thresholds || thresholds.length === 0) &&
+ (!series || areAllSeriesEmpty(series))
+ ) {
+ // Situation: softMin is not null and softMax is null
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
- const [min, max] = getRange(thresholds, series, yAxisUnit);
+ // Situation: softMin is null and softMax is not null
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
- // Min and Max value can be same if the value is same for all the series
- if (min === max) {
+ // Situation: softMin is not null and softMax is not null
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ // Situation: softMin and softMax are null and no threshold and no series data
return { auto: true };
}
+
+ // Situation: thresholds are absent
+ if (!thresholds || thresholds.length === 0) {
+ // Situation: No thresholds data but series data is present
+ if (series && !areAllSeriesEmpty(series)) {
+ // Situation: softMin and softMax are null
+ if (softMin === null && softMax === null) {
+ return { auto: true };
+ }
+
+ // Situation: either softMin or softMax is not null
+ let [min, max] = findMinMaxValues(series);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the series
+ return { auto: true };
+ }
+
+ return { auto: false, range: [min, max] };
+ }
+
+ // Situation: No thresholds data and series data is absent but either softMin or softMax is present
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
+
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
+
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ return { auto: true };
+ }
+
+ if (!series || areAllSeriesEmpty(series)) {
+ // series data is absent but threshold is present
+ if (thresholds.length > 0) {
+ // Situation: thresholds are present and series data is absent
+ let [min, max] = findMinMaxThresholdValues(thresholds, yAxisUnit);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the thresholds
+ return { auto: true };
+ }
+
+ return { auto: false, range: [min, max] };
+ }
+
+ // Situation: softMin or softMax is not null
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
+
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
+
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ return { auto: true };
+ }
+
+ // Situation: thresholds and series data are present
+ let [min, max] = getRange(thresholds, series, yAxisUnit);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the series
+ return { auto: true };
+ }
+
return { auto: false, range: [min, max] };
};
+
+export type GetYAxisScale = {
+ thresholds?: ThresholdProps[];
+ series?: QueryDataV3[];
+ yAxisUnit?: string;
+ softMin: number | null;
+ softMax: number | null;
+};
diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts
index 3ad1c70f73..bb302c152b 100644
--- a/frontend/src/types/api/dashboard/getAll.ts
+++ b/frontend/src/types/api/dashboard/getAll.ts
@@ -73,6 +73,8 @@ export interface IBaseWidget {
stepSize?: number;
yAxisUnit?: string;
thresholds?: ThresholdProps[];
+ softMin: number | null;
+ softMax: number | null;
fillSpans?: boolean;
}
export interface Widgets extends IBaseWidget {
From 7b46f86f7fc8c9a5b4ea2806abc5c702a88c506e Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Tue, 9 Jan 2024 17:47:23 +0530
Subject: [PATCH 12/39] [Refactor]: resolve the wrong payload in timeseries log
explorer issue (#4345)
Co-authored-by: Vikrant Gupta <54737045+Vikrant2520@users.noreply.github.com>
---
frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
index d3796f96cf..c874f5e6db 100644
--- a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
@@ -32,7 +32,7 @@ export const useGetExplorerQueryRange = (
const isEnabled = useMemo(() => {
if (!options) return isEnabledQuery;
if (typeof options.enabled === 'boolean') {
- return isEnabledQuery || options.enabled;
+ return isEnabledQuery && options.enabled;
}
return isEnabledQuery;
From 361efd3b52bb3f2cbd79a8df8872cfced15207b1 Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Tue, 9 Jan 2024 22:19:03 +0530
Subject: [PATCH 13/39] chore: add querier v2 (#4170)
---
.../app/metrics/v4/query_builder.go | 22 +
pkg/query-service/app/querier/v2/helper.go | 306 ++++++++++
pkg/query-service/app/querier/v2/querier.go | 525 ++++++++++++++++++
3 files changed, 853 insertions(+)
create mode 100644 pkg/query-service/app/querier/v2/helper.go
create mode 100644 pkg/query-service/app/querier/v2/querier.go
diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go
index 70d35e8e08..5e6c18d72a 100644
--- a/pkg/query-service/app/metrics/v4/query_builder.go
+++ b/pkg/query-service/app/metrics/v4/query_builder.go
@@ -3,8 +3,11 @@ package v4
import (
"fmt"
"strings"
+ "time"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
"go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
)
@@ -84,3 +87,22 @@ func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
return filterSubQuery, nil
}
+
+// PrepareMetricQuery prepares the query to be used for fetching metrics
+// from the database
+// start and end are in milliseconds
+// step is in seconds
+func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options metricsV3.Options) (string, error) {
+
+ // TODO(srikanthccv): implement
+ return "", nil
+}
+
+func BuildPromQuery(promQuery *v3.PromQuery, step, start, end int64) *model.QueryRangeParams {
+ return &model.QueryRangeParams{
+ Query: promQuery.Query,
+ Start: time.UnixMilli(start),
+ End: time.UnixMilli(end),
+ Step: time.Duration(step * int64(time.Second)),
+ }
+}
diff --git a/pkg/query-service/app/querier/v2/helper.go b/pkg/query-service/app/querier/v2/helper.go
new file mode 100644
index 0000000000..61ab056251
--- /dev/null
+++ b/pkg/query-service/app/querier/v2/helper.go
@@ -0,0 +1,306 @@
+package v2
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "strings"
+ "sync"
+ "time"
+
+ logsV3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+ "go.signoz.io/signoz/pkg/query-service/cache/status"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.uber.org/zap"
+)
+
+func (q *querier) runBuilderQuery(
+ ctx context.Context,
+ builderQuery *v3.BuilderQuery,
+ params *v3.QueryRangeParamsV3,
+ keys map[string]v3.AttributeKey,
+ cacheKeys map[string]string,
+ ch chan channelResult,
+ wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+ queryName := builderQuery.QueryName
+
+ var preferRPM bool
+
+ if q.featureLookUp != nil {
+ preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
+ }
+
+ // TODO: handle other data sources
+ if builderQuery.DataSource == v3.DataSourceLogs {
+ var query string
+ var err error
+ // for ts query with limit replace it as it is already formed
+ if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
+ limitQuery, err := logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ placeholderQuery, err := logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: placeholderQuery, Series: nil}
+ return
+ }
+ query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
+ } else {
+ query, err = logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ }
+
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ if builderQuery.DataSource == v3.DataSourceTraces {
+
+ var query string
+ var err error
+ // for ts query with group by and limit form two queries
+ if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
+ limitQuery, err := tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ placeholderQuery, err := tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ query = fmt.Sprintf(placeholderQuery, limitQuery)
+ } else {
+ query, err = tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ }
+
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ // What is happening here?
+ // We are only caching the graph panel queries. A non-existent cache key means that the query is not cached.
+ // If the query is not cached, we execute the query and return the result without caching it.
+ if _, ok := cacheKeys[queryName]; !ok {
+ query, err := metricsV4.PrepareMetricQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ if !params.NoCache && q.cache != nil {
+ var retrieveStatus status.RetrieveStatus
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ query, err := metricsV4.PrepareMetricQuery(
+ miss.start,
+ miss.end,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ metricsV3.Options{},
+ )
+ if err != nil {
+ ch <- channelResult{
+ Err: err,
+ Name: queryName,
+ Query: query,
+ Series: nil,
+ }
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ if err != nil {
+ ch <- channelResult{
+ Err: err,
+ Name: queryName,
+ Query: query,
+ Series: nil,
+ }
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ ch <- channelResult{
+ Err: nil,
+ Name: queryName,
+ Series: mergedSeries,
+ }
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+}
+
+func (q *querier) runBuilderExpression(
+ ctx context.Context,
+ builderQuery *v3.BuilderQuery,
+ params *v3.QueryRangeParamsV3,
+ keys map[string]v3.AttributeKey,
+ cacheKeys map[string]string,
+ ch chan channelResult,
+ wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+
+ queryName := builderQuery.QueryName
+
+ queries, err := q.builder.PrepareQueries(params, keys)
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: "", Series: nil}
+ return
+ }
+
+ if _, ok := cacheKeys[queryName]; !ok {
+ query := queries[queryName]
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ if !params.NoCache && q.cache != nil {
+ var retrieveStatus status.RetrieveStatus
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ missQueries, _ := q.builder.PrepareQueries(&v3.QueryRangeParamsV3{
+ Start: miss.start,
+ End: miss.end,
+ Step: params.Step,
+ NoCache: params.NoCache,
+ CompositeQuery: params.CompositeQuery,
+ Variables: params.Variables,
+ }, keys)
+ query := missQueries[queryName]
+ series, err := q.execClickHouseQuery(ctx, query)
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ ch <- channelResult{
+ Err: nil,
+ Name: queryName,
+ Series: mergedSeries,
+ }
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+}
diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go
new file mode 100644
index 0000000000..86a472c064
--- /dev/null
+++ b/pkg/query-service/app/querier/v2/querier.go
@@ -0,0 +1,525 @@
+package v2
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "math"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ logsV3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
+ metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
+ tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+
+ "go.signoz.io/signoz/pkg/query-service/cache"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.uber.org/multierr"
+ "go.uber.org/zap"
+)
+
+type channelResult struct {
+ Series []*v3.Series
+ List []*v3.Row
+ Err error
+ Name string
+ Query string
+}
+
+type missInterval struct {
+ start, end int64 // in milliseconds
+}
+
+type querier struct {
+ cache cache.Cache
+ reader interfaces.Reader
+ keyGenerator cache.KeyGenerator
+
+ fluxInterval time.Duration
+
+ builder *queryBuilder.QueryBuilder
+ featureLookUp interfaces.FeatureLookup
+
+ // used for testing
+ // TODO(srikanthccv): remove this once we have a proper mock
+ testingMode bool
+ queriesExecuted []string
+ returnedSeries []*v3.Series
+ returnedErr error
+}
+
+type QuerierOptions struct {
+ Reader interfaces.Reader
+ Cache cache.Cache
+ KeyGenerator cache.KeyGenerator
+ FluxInterval time.Duration
+ FeatureLookup interfaces.FeatureLookup
+
+ // used for testing
+ TestingMode bool
+ ReturnedSeries []*v3.Series
+ ReturnedErr error
+}
+
+func NewQuerier(opts QuerierOptions) interfaces.Querier {
+ return &querier{
+ cache: opts.Cache,
+ reader: opts.Reader,
+ keyGenerator: opts.KeyGenerator,
+ fluxInterval: opts.FluxInterval,
+
+ builder: queryBuilder.NewQueryBuilder(queryBuilder.QueryBuilderOptions{
+ BuildTraceQuery: tracesV3.PrepareTracesQuery,
+ BuildLogQuery: logsV3.PrepareLogsQuery,
+ BuildMetricQuery: metricsV4.PrepareMetricQuery,
+ }, opts.FeatureLookup),
+ featureLookUp: opts.FeatureLookup,
+
+ testingMode: opts.TestingMode,
+ returnedSeries: opts.ReturnedSeries,
+ returnedErr: opts.ReturnedErr,
+ }
+}
+
+func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.Series, error) {
+ q.queriesExecuted = append(q.queriesExecuted, query)
+ if q.testingMode && q.reader == nil {
+ return q.returnedSeries, q.returnedErr
+ }
+ result, err := q.reader.GetTimeSeriesResultV3(ctx, query)
+ var pointsWithNegativeTimestamps int
+ // Filter out the points with negative or zero timestamps
+ for idx := range result {
+ series := result[idx]
+ points := make([]v3.Point, 0)
+ for pointIdx := range series.Points {
+ point := series.Points[pointIdx]
+ if point.Timestamp > 0 {
+ points = append(points, point)
+ } else {
+ pointsWithNegativeTimestamps++
+ }
+ }
+ series.Points = points
+ }
+ if pointsWithNegativeTimestamps > 0 {
+ zap.S().Errorf("found points with negative timestamps for query %s", query)
+ }
+ return result, err
+}
+
+func (q *querier) execPromQuery(ctx context.Context, params *model.QueryRangeParams) ([]*v3.Series, error) {
+ q.queriesExecuted = append(q.queriesExecuted, params.Query)
+ if q.testingMode && q.reader == nil {
+ return q.returnedSeries, q.returnedErr
+ }
+ promResult, _, err := q.reader.GetQueryRangeResult(ctx, params)
+ if err != nil {
+ return nil, err
+ }
+ matrix, promErr := promResult.Matrix()
+ if promErr != nil {
+ return nil, promErr
+ }
+ var seriesList []*v3.Series
+ for _, v := range matrix {
+ var s v3.Series
+ s.Labels = v.Metric.Copy().Map()
+ for idx := range v.Floats {
+ p := v.Floats[idx]
+ s.Points = append(s.Points, v3.Point{Timestamp: p.T, Value: p.F})
+ }
+ seriesList = append(seriesList, &s)
+ }
+ return seriesList, nil
+}
+
+// findMissingTimeRanges finds the missing time ranges in the seriesList
+// and returns a list of miss structs. It takes the fluxInterval into
+// account to find the missing time ranges.
+//
+// The [End - fluxInterval, End] is always added to the list of misses, because
+// the data might still be in flux and not yet available in the database.
+func findMissingTimeRanges(start, end, step int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
+ var cachedStart, cachedEnd int64
+ for idx := range seriesList {
+ series := seriesList[idx]
+ for pointIdx := range series.Points {
+ point := series.Points[pointIdx]
+ if cachedStart == 0 || point.Timestamp < cachedStart {
+ cachedStart = point.Timestamp
+ }
+ if cachedEnd == 0 || point.Timestamp > cachedEnd {
+ cachedEnd = point.Timestamp
+ }
+ }
+ }
+
+ endMillis := time.Now().UnixMilli()
+ adjustStep := int64(math.Min(float64(step), 60))
+ roundedMillis := endMillis - (endMillis % (adjustStep * 1000))
+
+ // Exclude the flux interval from the cached end time
+ cachedEnd = int64(
+ math.Min(
+ float64(cachedEnd),
+ float64(roundedMillis-fluxInterval.Milliseconds()),
+ ),
+ )
+
+ // There are five cases to consider
+ // 1. Cached time range is a subset of the requested time range
+ // 2. Cached time range is a superset of the requested time range
+ // 3. Cached time range is a left overlap of the requested time range
+ // 4. Cached time range is a right overlap of the requested time range
+ // 5. Cached time range is a disjoint of the requested time range
+ if cachedStart >= start && cachedEnd <= end {
+ // Case 1: Cached time range is a subset of the requested time range
+ // Add misses for the left and right sides of the cached time range
+ misses = append(misses, missInterval{start: start, end: cachedStart - 1})
+ misses = append(misses, missInterval{start: cachedEnd + 1, end: end})
+ } else if cachedStart <= start && cachedEnd >= end {
+ // Case 2: Cached time range is a superset of the requested time range
+ // No misses
+ } else if cachedStart <= start && cachedEnd >= start {
+ // Case 3: Cached time range is a left overlap of the requested time range
+ // Add a miss for the left side of the cached time range
+ misses = append(misses, missInterval{start: cachedEnd + 1, end: end})
+ } else if cachedStart <= end && cachedEnd >= end {
+ // Case 4: Cached time range is a right overlap of the requested time range
+ // Add a miss for the right side of the cached time range
+ misses = append(misses, missInterval{start: start, end: cachedStart - 1})
+ } else {
+ // Case 5: Cached time range is a disjoint of the requested time range
+ // Add a miss for the entire requested time range
+ misses = append(misses, missInterval{start: start, end: end})
+ }
+
+ // remove the structs with start >= end
+ var validMisses []missInterval
+ for idx := range misses {
+ miss := misses[idx]
+ if miss.start < miss.end {
+ validMisses = append(validMisses, miss)
+ }
+ }
+ return validMisses
+}
+
+// findMissingTimeRanges finds the missing time ranges in the cached data
+// and returns them as a list of misses
+func (q *querier) findMissingTimeRanges(start, end, step int64, cachedData []byte) (misses []missInterval) {
+ var cachedSeriesList []*v3.Series
+ if err := json.Unmarshal(cachedData, &cachedSeriesList); err != nil {
+ // In case of error, we return the entire range as a miss
+ return []missInterval{{start: start, end: end}}
+ }
+ return findMissingTimeRanges(start, end, step, cachedSeriesList, q.fluxInterval)
+}
+
+func labelsToString(labels map[string]string) string {
+ type label struct {
+ Key string
+ Value string
+ }
+ var labelsList []label
+ for k, v := range labels {
+ labelsList = append(labelsList, label{Key: k, Value: v})
+ }
+ sort.Slice(labelsList, func(i, j int) bool {
+ return labelsList[i].Key < labelsList[j].Key
+ })
+ labelKVs := make([]string, len(labelsList))
+ for idx := range labelsList {
+ labelKVs[idx] = labelsList[idx].Key + "=" + labelsList[idx].Value
+ }
+ return fmt.Sprintf("{%s}", strings.Join(labelKVs, ","))
+}
+
+func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
+ // Merge the missed series with the cached series by timestamp
+ mergedSeries := make([]*v3.Series, 0)
+ seriesesByLabels := make(map[string]*v3.Series)
+ for idx := range cachedSeries {
+ series := cachedSeries[idx]
+ seriesesByLabels[labelsToString(series.Labels)] = series
+ }
+
+ for idx := range missedSeries {
+ series := missedSeries[idx]
+ if _, ok := seriesesByLabels[labelsToString(series.Labels)]; !ok {
+ seriesesByLabels[labelsToString(series.Labels)] = series
+ continue
+ }
+ seriesesByLabels[labelsToString(series.Labels)].Points = append(seriesesByLabels[labelsToString(series.Labels)].Points, series.Points...)
+ }
+ // Sort the points in each series by timestamp
+ for idx := range seriesesByLabels {
+ series := seriesesByLabels[idx]
+ series.SortPoints()
+ series.RemoveDuplicatePoints()
+ mergedSeries = append(mergedSeries, series)
+ }
+ return mergedSeries
+}
+
+func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+
+ cacheKeys := q.keyGenerator.GenerateKeys(params)
+
+ ch := make(chan channelResult, len(params.CompositeQuery.BuilderQueries))
+ var wg sync.WaitGroup
+
+ for queryName, builderQuery := range params.CompositeQuery.BuilderQueries {
+ if builderQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ if queryName == builderQuery.Expression {
+ go q.runBuilderQuery(ctx, builderQuery, params, keys, cacheKeys, ch, &wg)
+ } else {
+ go q.runBuilderExpression(ctx, builderQuery, params, keys, cacheKeys, ch, &wg)
+ }
+ }
+
+ wg.Wait()
+ close(ch)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range ch {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in builder queries")
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+ channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries))
+ var wg sync.WaitGroup
+ cacheKeys := q.keyGenerator.GenerateKeys(params)
+
+ for queryName, promQuery := range params.CompositeQuery.PromQueries {
+ if promQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ go func(queryName string, promQuery *v3.PromQuery) {
+ defer wg.Done()
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ // Ensure NoCache is not set and cache is not nil
+ if !params.NoCache && q.cache != nil {
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ query := metricsV4.BuildPromQuery(promQuery, params.Step, miss.start, miss.end)
+ series, err := q.execPromQuery(ctx, query)
+ if err != nil {
+ channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: nil}
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ // ideally we should not be getting an error here
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ channelResults <- channelResult{Err: nil, Name: queryName, Query: promQuery.Query, Series: mergedSeries}
+
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+ }(queryName, promQuery)
+ }
+ wg.Wait()
+ close(channelResults)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range channelResults {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in prom queries")
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+ channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries))
+ var wg sync.WaitGroup
+ for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries {
+ if clickHouseQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ go func(queryName string, clickHouseQuery *v3.ClickHouseQuery) {
+ defer wg.Done()
+ series, err := q.execClickHouseQuery(ctx, clickHouseQuery.Query)
+ channelResults <- channelResult{Err: err, Name: queryName, Query: clickHouseQuery.Query, Series: series}
+ }(queryName, clickHouseQuery)
+ }
+ wg.Wait()
+ close(channelResults)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range channelResults {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in clickhouse queries")
+ }
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+
+ queries, err := q.builder.PrepareQueries(params, keys)
+
+ if err != nil {
+ return nil, err, nil
+ }
+
+ ch := make(chan channelResult, len(queries))
+ var wg sync.WaitGroup
+
+ for name, query := range queries {
+ wg.Add(1)
+ go func(name, query string) {
+ defer wg.Done()
+ rowList, err := q.reader.GetListResultV3(ctx, query)
+
+ if err != nil {
+ ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
+ return
+ }
+ ch <- channelResult{List: rowList, Name: name, Query: query}
+ }(name, query)
+ }
+
+ wg.Wait()
+ close(ch)
+
+ var errs []error
+ errQuriesByName := make(map[string]string)
+ res := make([]*v3.Result, 0)
+ // read values from the channel
+ for r := range ch {
+ if r.Err != nil {
+ errs = append(errs, r.Err)
+ errQuriesByName[r.Name] = r.Query
+ continue
+ }
+ res = append(res, &v3.Result{
+ QueryName: r.Name,
+ List: r.List,
+ })
+ }
+ if len(errs) != 0 {
+ return nil, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...)), errQuriesByName
+ }
+ return res, nil, nil
+}
+
+func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+ var results []*v3.Result
+ var err error
+ var errQueriesByName map[string]string
+ if params.CompositeQuery != nil {
+ switch params.CompositeQuery.QueryType {
+ case v3.QueryTypeBuilder:
+ if params.CompositeQuery.PanelType == v3.PanelTypeList || params.CompositeQuery.PanelType == v3.PanelTypeTrace {
+ results, err, errQueriesByName = q.runBuilderListQueries(ctx, params, keys)
+ } else {
+ results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys)
+ }
+ case v3.QueryTypePromQL:
+ results, err, errQueriesByName = q.runPromQueries(ctx, params)
+ case v3.QueryTypeClickHouseSQL:
+ results, err, errQueriesByName = q.runClickHouseQueries(ctx, params)
+ default:
+ err = fmt.Errorf("invalid query type")
+ }
+ }
+
+ // return error if the number of series is more than one for value type panel
+ if params.CompositeQuery.PanelType == v3.PanelTypeValue {
+ if len(results) > 1 {
+ err = fmt.Errorf("there can be only one active query for value type panel")
+ } else if len(results) == 1 && len(results[0].Series) > 1 {
+ err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))
+ }
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) QueriesExecuted() []string {
+ return q.queriesExecuted
+}
From 722a38491e9b4ed13fc3eb2bca831c18302467e4 Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Wed, 10 Jan 2024 01:16:24 +0530
Subject: [PATCH 14/39] chore: add signozspanmetrics delta temporality pipeline
(#3776)
---
.../clickhouse-setup/docker-compose.yaml | 21 +----
.../otel-collector-config.yaml | 31 ++++----
.../otel-collector-metrics-config.yaml | 64 ----------------
.../clickhouse-setup/docker-compose-core.yaml | 22 ------
.../docker-compose-local.yaml | 2 +-
.../clickhouse-setup/docker-compose.yaml | 20 +----
.../otel-collector-config.yaml | 33 ++++----
.../otel-collector-metrics-config.yaml | 69 -----------------
pkg/query-service/app/opamp/config.yaml | 76 -------------------
.../opamp/otelconfig/config_parser_test.go | 2 +-
.../app/opamp/otelconfig/testdata/basic.yaml | 6 +-
.../opamp/otelconfig/testdata/service.yaml | 2 +-
.../tests/test-deploy/docker-compose.yaml | 20 +----
.../test-deploy/otel-collector-config.yaml | 8 +-
.../otel-collector-metrics-config.yaml | 67 ----------------
15 files changed, 51 insertions(+), 392 deletions(-)
delete mode 100644 deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml
delete mode 100644 deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml
delete mode 100644 pkg/query-service/app/opamp/config.yaml
delete mode 100644 pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml
diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 9193dcc97e..c19836b5fe 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -150,7 +150,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
- "--prefer-delta=true"
+ # "--prefer-delta=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -249,25 +249,6 @@ services:
# - clickhouse-2
# - clickhouse-3
- otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.88.6
- command:
- [
- "--config=/etc/otel-collector-metrics-config.yaml",
- "--feature-gates=-pkg.translator.prometheus.NormalizeName"
- ]
- volumes:
- - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
- # ports:
- # - "1777:1777" # pprof extension
- # - "8888:8888" # OtelCollector internal metrics
- # - "13133:13133" # Health check extension
- # - "55679:55679" # zPages extension
- deploy:
- restart_policy:
- condition: on-failure
- <<: *db-depend
-
logspout:
image: "gliderlabs/logspout:v3.2.14"
volumes:
diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml
index 29409919a7..424d717b09 100644
--- a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml
@@ -15,13 +15,9 @@ receivers:
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
- expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
+ expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
- otlp/spanmetrics:
- protocols:
- grpc:
- endpoint: localhost:12345
otlp:
protocols:
grpc:
@@ -69,8 +65,8 @@ processors:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure.
timeout: 2s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
@@ -97,6 +93,20 @@ processors:
# num_workers: 4
# queue_size: 100
# retry_on_failure: true
+ signozspanmetrics/delta:
+ metrics_exporter: clickhousemetricswrite
+ latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
+ dimensions_cache_size: 100000
+ aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
+ dimensions:
+ - name: service.namespace
+ default: default
+ - name: deployment.environment
+ default: default
+ # This is added to ensure the uniqueness of the timeseries
+ # Otherwise, identical timeseries produced by multiple replicas of
+ # collectors result in incorrect APM metrics
+ - name: signoz.collector.id
exporters:
clickhousetraces:
@@ -109,8 +119,6 @@ exporters:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
- prometheus:
- endpoint: 0.0.0.0:8889
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/
@@ -140,7 +148,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
@@ -154,9 +162,6 @@ service:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
- metrics/spanmetrics:
- receivers: [otlp/spanmetrics]
- exporters: [prometheus]
logs:
receivers: [otlp, tcplog/docker]
processors: [batch]
diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml
deleted file mode 100644
index 099caa737b..0000000000
--- a/deploy/docker-swarm/clickhouse-setup/otel-collector-metrics-config.yaml
+++ /dev/null
@@ -1,64 +0,0 @@
-receivers:
- prometheus:
- config:
- scrape_configs:
- # otel-collector-metrics internal metrics
- - job_name: otel-collector-metrics
- scrape_interval: 60s
- static_configs:
- - targets:
- - localhost:8888
- labels:
- job_name: otel-collector-metrics
- # SigNoz span metrics
- - job_name: signozspanmetrics-collector
- scrape_interval: 60s
- dns_sd_configs:
- - names:
- - tasks.otel-collector
- type: A
- port: 8889
-
-processors:
- batch:
- send_batch_size: 10000
- send_batch_max_size: 11000
- timeout: 10s
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-
-exporters:
- clickhousemetricswrite:
- endpoint: tcp://clickhouse:9000/?database=signoz_metrics
-
-extensions:
- health_check:
- endpoint: 0.0.0.0:13133
- zpages:
- endpoint: 0.0.0.0:55679
- pprof:
- endpoint: 0.0.0.0:1777
-
-service:
- telemetry:
- metrics:
- address: 0.0.0.0:8888
- extensions: [health_check, zpages, pprof]
- pipelines:
- metrics:
- receivers: [prometheus]
- processors: [batch]
- exporters: [clickhousemetricswrite]
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 81969766cc..4ab1954727 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -116,28 +116,6 @@ services:
query-service:
condition: service_healthy
- otel-collector-metrics:
- container_name: signoz-otel-collector-metrics
- image: signoz/signoz-otel-collector:0.88.6
- command:
- [
- "--config=/etc/otel-collector-metrics-config.yaml",
- "--feature-gates=-pkg.translator.prometheus.NormalizeName"
- ]
- volumes:
- - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
- # ports:
- # - "1777:1777" # pprof extension
- # - "8888:8888" # OtelCollector internal metrics
- # - "13133:13133" # Health check extension
- # - "55679:55679" # zPages extension
- restart: on-failure
- depends_on:
- clickhouse:
- condition: service_healthy
- otel-collector-migrator:
- condition: service_completed_successfully
-
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
diff --git a/deploy/docker/clickhouse-setup/docker-compose-local.yaml b/deploy/docker/clickhouse-setup/docker-compose-local.yaml
index a92c3dbcd9..248c7bf9f6 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-local.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-local.yaml
@@ -25,7 +25,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
- "--prefer-delta=true"
+ # "--prefer-delta=true"
]
ports:
- "6060:6060"
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index bad6e8ba74..6647c90bc5 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -169,7 +169,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
- "--prefer-delta=true"
+ # "--prefer-delta=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -268,24 +268,6 @@ services:
query-service:
condition: service_healthy
- otel-collector-metrics:
- image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.6}
- container_name: signoz-otel-collector-metrics
- command:
- [
- "--config=/etc/otel-collector-metrics-config.yaml",
- "--feature-gates=-pkg.translator.prometheus.NormalizeName"
- ]
- volumes:
- - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
- # ports:
- # - "1777:1777" # pprof extension
- # - "8888:8888" # OtelCollector internal metrics
- # - "13133:13133" # Health check extension
- # - "55679:55679" # zPages extension
- restart: on-failure
- <<: *db-depend
-
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
diff --git a/deploy/docker/clickhouse-setup/otel-collector-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-config.yaml
index 204dcd9511..f3d6900e6c 100644
--- a/deploy/docker/clickhouse-setup/otel-collector-config.yaml
+++ b/deploy/docker/clickhouse-setup/otel-collector-config.yaml
@@ -15,13 +15,9 @@ receivers:
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
- expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
+ expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
- otlp/spanmetrics:
- protocols:
- grpc:
- endpoint: localhost:12345
otlp:
protocols:
grpc:
@@ -66,8 +62,9 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
+ metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
@@ -98,6 +95,21 @@ processors:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure.
timeout: 2s
+ signozspanmetrics/delta:
+ metrics_exporter: clickhousemetricswrite
+ metrics_flush_interval: 60s
+ latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
+ dimensions_cache_size: 100000
+ aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
+ dimensions:
+ - name: service.namespace
+ default: default
+ - name: deployment.environment
+ default: default
+ # This is added to ensure the uniqueness of the timeseries
+ # Otherwise, identical timeseries produced by multiple replicas of
+ # collectors result in incorrect APM metrics
+ - name: signoz.collector.id
extensions:
health_check:
@@ -118,8 +130,6 @@ exporters:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
- prometheus:
- endpoint: 0.0.0.0:8889
# logging: {}
clickhouselogsexporter:
@@ -145,7 +155,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
@@ -159,9 +169,6 @@ service:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
- metrics/spanmetrics:
- receivers: [otlp/spanmetrics]
- exporters: [prometheus]
logs:
receivers: [otlp, tcplog/docker]
processors: [batch]
diff --git a/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml
deleted file mode 100644
index 7543d1f6f6..0000000000
--- a/deploy/docker/clickhouse-setup/otel-collector-metrics-config.yaml
+++ /dev/null
@@ -1,69 +0,0 @@
-receivers:
- otlp:
- protocols:
- grpc:
- http:
- prometheus:
- config:
- scrape_configs:
- # otel-collector-metrics internal metrics
- - job_name: otel-collector-metrics
- scrape_interval: 60s
- static_configs:
- - targets:
- - localhost:8888
- labels:
- job_name: otel-collector-metrics
- # SigNoz span metrics
- - job_name: signozspanmetrics-collector
- scrape_interval: 60s
- static_configs:
- - targets:
- - otel-collector:8889
-
-processors:
- batch:
- send_batch_size: 10000
- send_batch_max_size: 11000
- timeout: 10s
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-
-extensions:
- health_check:
- endpoint: 0.0.0.0:13133
- zpages:
- endpoint: 0.0.0.0:55679
- pprof:
- endpoint: 0.0.0.0:1777
-
-exporters:
- clickhousemetricswrite:
- endpoint: tcp://clickhouse:9000/?database=signoz_metrics
-
-service:
- telemetry:
- metrics:
- address: 0.0.0.0:8888
- extensions:
- - health_check
- - zpages
- - pprof
- pipelines:
- metrics:
- receivers: [prometheus]
- processors: [batch]
- exporters: [clickhousemetricswrite]
diff --git a/pkg/query-service/app/opamp/config.yaml b/pkg/query-service/app/opamp/config.yaml
deleted file mode 100644
index d5ef74e00f..0000000000
--- a/pkg/query-service/app/opamp/config.yaml
+++ /dev/null
@@ -1,76 +0,0 @@
-receivers:
- otlp/spanmetrics:
- protocols:
- grpc:
- endpoint: "localhost:12345"
- otlp:
- protocols:
- grpc:
- http:
- jaeger:
- protocols:
- grpc:
- thrift_http:
- hostmetrics:
- collection_interval: 30s
- scrapers:
- cpu:
- load:
- memory:
- disk:
- filesystem:
- network:
-processors:
- batch:
- send_batch_size: 1000
- timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
- latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
- dimensions_cache_size: 10000
- dimensions:
- - name: service.namespace
- default: default
- - name: deployment.environment
- default: default
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-extensions:
- zpages: {}
-exporters:
- clickhousetraces:
- datasource: tcp://localhost:9000/?database=signoz_traces
- migrations: exporter/clickhousetracesexporter/migrations
- clickhousemetricswrite:
- endpoint: tcp://localhost:9000/?database=signoz_metrics
- resource_to_telemetry_conversion:
- enabled: true
- prometheus:
- endpoint: "0.0.0.0:8889"
-service:
- extensions: [zpages]
- pipelines:
- traces:
- receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
- exporters: [clickhousetraces]
- metrics:
- receivers: [otlp, hostmetrics]
- processors: [batch]
- exporters: [clickhousemetricswrite]
- metrics/spanmetrics:
- receivers: [otlp/spanmetrics]
- exporters: [prometheus]
\ No newline at end of file
diff --git a/pkg/query-service/app/opamp/otelconfig/config_parser_test.go b/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
index f4a3ed0b1b..0a0d3c15b7 100644
--- a/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
+++ b/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
@@ -34,7 +34,7 @@ func TestServiceConfig(t *testing.T) {
"traces": map[string]interface{}{
"receivers": []interface{}{"jaeger", "otlp"},
"processors": []interface{}{
- "signozspanmetrics/prometheus", "batch",
+ "signozspanmetrics/cumulative", "batch",
},
"exporters": []interface{}{
"clickhousetraces",
diff --git a/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml b/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
index d5ef74e00f..e8259a27e9 100644
--- a/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
+++ b/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
@@ -24,8 +24,8 @@ processors:
batch:
send_batch_size: 1000
timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000
dimensions:
@@ -65,7 +65,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp, hostmetrics]
diff --git a/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml b/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
index dd562fba0d..7e7ca3f9cb 100644
--- a/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
+++ b/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
@@ -3,7 +3,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp, hostmetrics]
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index 52b213713d..7965b65795 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -158,7 +158,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
- "--prefer-delta=true"
+ # "--prefer-delta=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -244,24 +244,6 @@ services:
query-service:
condition: service_healthy
- otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.88.6
- container_name: signoz-otel-collector-metrics
- command:
- [
- "--config=/etc/otel-collector-metrics-config.yaml",
- "--feature-gates=-pkg.translator.prometheus.NormalizeName"
- ]
- volumes:
- - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
- # ports:
- # - "1777:1777" # pprof extension
- # - "8888:8888" # OtelCollector internal metrics
- # - "13133:13133" # Health check extension
- # - "55679:55679" # zPages extension
- restart: on-failure
- <<: *db-depend
-
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
diff --git a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
index cc839e737f..8a0e899826 100644
--- a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
+++ b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
@@ -15,7 +15,7 @@ receivers:
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
- expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
+ expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
otlp/spanmetrics:
@@ -63,8 +63,8 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
@@ -135,7 +135,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
diff --git a/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml
deleted file mode 100644
index aecad4eaaf..0000000000
--- a/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-receivers:
- otlp:
- protocols:
- grpc:
- http:
- prometheus:
- config:
- scrape_configs:
- # otel-collector-metrics internal metrics
- - job_name: otel-collector-metrics
- scrape_interval: 60s
- static_configs:
- - targets:
- - localhost:8888
- # SigNoz span metrics
- - job_name: signozspanmetrics-collector
- scrape_interval: 60s
- static_configs:
- - targets:
- - otel-collector:8889
-
-processors:
- batch:
- send_batch_size: 10000
- send_batch_max_size: 11000
- timeout: 10s
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-
-extensions:
- health_check:
- endpoint: 0.0.0.0:13133
- zpages:
- endpoint: 0.0.0.0:55679
- pprof:
- endpoint: 0.0.0.0:1777
-
-exporters:
- clickhousemetricswrite:
- endpoint: tcp://clickhouse:9000/?database=signoz_metrics
-
-service:
- telemetry:
- metrics:
- address: 0.0.0.0:8888
- extensions:
- - health_check
- - zpages
- - pprof
- pipelines:
- metrics:
- receivers: [prometheus]
- processors: [batch]
- exporters: [clickhousemetricswrite]
From d65d75ef69502d36a940270c25b8cac61035c8f6 Mon Sep 17 00:00:00 2001
From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com>
Date: Wed, 10 Jan 2024 11:26:25 +0530
Subject: [PATCH 15/39] Fix: FE: pipelines: should be able to exit edit mode
without making a change (#4335)
* fix: show cancel button on entering edit mode before any changes have been made
* chore: align pipeline page save/cancel buttons to the right
---
.../PipelineListsView/PipelineListsView.tsx | 3 ++-
.../PipelineListsView/SaveConfigButton.tsx | 20 +++++++++++--------
.../PipelinePage/PipelineListsView/styles.ts | 1 +
3 files changed, 15 insertions(+), 9 deletions(-)
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
index 058c16761b..c4494569df 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
+++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
@@ -506,8 +506,9 @@ function PipelineListsView({
pagination={false}
/>
- {showSaveButton && (
+ {isEditingActionMode && (
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
index e7aa0ecedc..700665b957 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
+++ b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
@@ -4,6 +4,7 @@ import { useTranslation } from 'react-i18next';
import { SaveConfigWrapper } from './styles';
function SaveConfigButton({
+ showSaveButton,
onSaveConfigurationHandler,
onCancelConfigurationHandler,
}: SaveConfigButtonTypes): JSX.Element {
@@ -11,14 +12,16 @@ function SaveConfigButton({
return (
-
- {t('save_configuration')}
-
+ {showSaveButton && (
+
+ {t('save_configuration')}
+
+ )}
{t('cancel')}
@@ -26,6 +29,7 @@ function SaveConfigButton({
);
}
export interface SaveConfigButtonTypes {
+ showSaveButton: boolean;
onSaveConfigurationHandler: VoidFunction;
onCancelConfigurationHandler: VoidFunction;
}
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/styles.ts b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
index 0b3ddbff3d..d96eb7cd93 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
+++ b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
@@ -108,6 +108,7 @@ export const ModeAndConfigWrapper = styled.div`
export const SaveConfigWrapper = styled.div`
display: flex;
+ justify-content: flex-end;
gap: 0.938rem;
margin-top: 1.25rem;
`;
From 6f5f361a7ea4a6409fd097c15bc891389f6241c4 Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Wed, 10 Jan 2024 13:12:31 +0530
Subject: [PATCH 16/39] fix: soft min and soft max undefined issue (#4351)
---
.../src/container/GridCardLayout/GridCard/FullView/index.tsx | 4 ++--
frontend/src/container/GridCardLayout/GridCard/index.tsx | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
index 750cc6a707..4ee4c54e93 100644
--- a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx
@@ -132,8 +132,8 @@ function FullView({
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
- softMax: widget.softMax,
- softMin: widget.softMin,
+ softMax: widget.softMax === undefined ? null : widget.softMax,
+ softMin: widget.softMin === undefined ? null : widget.softMin,
});
setChartOptions(newChartOptions);
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 79b7d39b4d..1b388e0802 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -135,8 +135,8 @@ function GridCardGraph({
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
- softMax: widget.softMax,
- softMin: widget.softMin,
+ softMax: widget.softMax === undefined ? null : widget.softMax,
+ softMin: widget.softMin === undefined ? null : widget.softMin,
}),
[
widget?.id,
From 1e679a0d64bc679fba367661feeda1e68080dfa6 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Wed, 10 Jan 2024 14:07:23 +0530
Subject: [PATCH 17/39] fix: empty string search doesn't return all exceptions
(#4346)
* fix: empty string search doesn't return all exceptions
* fix: add type
---
frontend/src/container/AllError/index.tsx | 42 ++++++++++++++++-------
1 file changed, 30 insertions(+), 12 deletions(-)
diff --git a/frontend/src/container/AllError/index.tsx b/frontend/src/container/AllError/index.tsx
index 4bc7d199e1..e8c13d88cd 100644
--- a/frontend/src/container/AllError/index.tsx
+++ b/frontend/src/container/AllError/index.tsx
@@ -48,6 +48,15 @@ import {
urlKey,
} from './utils';
+type QueryParams = {
+ order: string;
+ offset: number;
+ orderParam: string;
+ pageSize: number;
+ exceptionType?: string;
+ serviceName?: string;
+};
+
function AllErrors(): JSX.Element {
const { maxTime, minTime, loading } = useSelector(
(state) => state.globalTime,
@@ -162,16 +171,23 @@ function AllErrors(): JSX.Element {
filterKey,
filterValue || '',
);
- history.replace(
- `${pathname}?${createQueryParams({
- order: updatedOrder,
- offset: getUpdatedOffset,
- orderParam: getUpdatedParams,
- pageSize: getUpdatedPageSize,
- exceptionType: exceptionFilterValue,
- serviceName: serviceFilterValue,
- })}`,
- );
+
+ const queryParams: QueryParams = {
+ order: updatedOrder,
+ offset: getUpdatedOffset,
+ orderParam: getUpdatedParams,
+ pageSize: getUpdatedPageSize,
+ };
+
+ if (exceptionFilterValue && exceptionFilterValue !== 'undefined') {
+ queryParams.exceptionType = exceptionFilterValue;
+ }
+
+ if (serviceFilterValue && serviceFilterValue !== 'undefined') {
+ queryParams.serviceName = serviceFilterValue;
+ }
+
+ history.replace(`${pathname}?${createQueryParams(queryParams)}`);
confirm();
},
[
@@ -198,8 +214,10 @@ function AllErrors(): JSX.Element {
- setSelectedKeys(e.target.value ? [e.target.value] : [])
+ onChange={
+ (e): void => setSelectedKeys(e.target.value ? [e.target.value] : [])
+
+					// Need to fix this logic; when the value is empty, it sets the string "undefined" as the value
}
allowClear
defaultValue={getDefaultFilterValue(
From c28f367f46f92292f85884e5a0bc195882efd8f5 Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Thu, 11 Jan 2024 16:22:20 +0530
Subject: [PATCH 18/39] refactor: updated height for date selector (#4331)
---
frontend/src/container/TopNav/DateTimeSelection/index.tsx | 1 +
1 file changed, 1 insertion(+)
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index b36a1ebaba..65c6b36974 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -295,6 +295,7 @@ function DateTimeSelection({
style={{
minWidth: 120,
}}
+ listHeight={400}
>
{options.map(({ value, label }) => (
From cbf150ef7bc30a6a0ae2b8890101e02f2a55dc5e Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Mon, 15 Jan 2024 18:46:18 +0530
Subject: [PATCH 19/39] fix: update correct format in URL in case of custom
date time (#4371)
---
.../src/container/TopNav/DateTimeSelection/index.tsx | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index 65c6b36974..4cb538cb57 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -223,8 +223,14 @@ function DateTimeSelection({
setLocalStorageKey('endTime', endTimeMoment.toString());
updateLocalStorageForRoutes('custom');
if (!isLogsExplorerPage) {
- urlQuery.set(QueryParams.startTime, startTimeMoment.toString());
- urlQuery.set(QueryParams.endTime, endTimeMoment.toString());
+ urlQuery.set(
+ QueryParams.startTime,
+ startTimeMoment?.toDate().getTime().toString(),
+ );
+ urlQuery.set(
+ QueryParams.endTime,
+ endTimeMoment?.toDate().getTime().toString(),
+ );
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
history.replace(generatedUrl);
}
From 739b1bf387c5daee1c182e44a4afc5391cc715af Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Tue, 16 Jan 2024 01:13:52 +0530
Subject: [PATCH 20/39] feat: custom date time value (#4367)
* feat: custom date time value
* fix: update custom date picker
* fix: old placeholder value flicker
* fix: html semantics and move styles to css
* fix: remove console logs
---------
Co-authored-by: Vikrant Gupta
---
.../CustomTimePicker.styles.scss | 88 ++++++++
.../CustomTimePicker/CustomTimePicker.tsx | 208 ++++++++++++++++++
.../TopNav/DateTimeSelection/index.tsx | 33 ++-
frontend/src/styles.scss | 6 +
4 files changed, 316 insertions(+), 19 deletions(-)
create mode 100644 frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
create mode 100644 frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
new file mode 100644
index 0000000000..9efbf8f17c
--- /dev/null
+++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
@@ -0,0 +1,88 @@
+.time-options-container {
+ .time-options-item {
+ margin: 2px 0;
+ padding: 8px;
+ border-radius: 2px;
+
+ &.active {
+ background-color: rgba($color: #000000, $alpha: 0.2);
+
+ &:hover {
+ cursor: pointer;
+ background-color: rgba($color: #000000, $alpha: 0.3);
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+ background-color: rgba($color: #000000, $alpha: 0.3);
+ }
+ }
+}
+
+.time-selection-dropdown-content {
+ min-width: 172px;
+ width: 100%;
+}
+
+.timeSelection-input {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+ padding: 4px 8px;
+ padding-left: 0px !important;
+
+ input::placeholder {
+ color: white;
+ }
+
+ input:focus::placeholder {
+ color: rgba($color: #ffffff, $alpha: 0.4);
+ }
+}
+
+.valid-format-error {
+ margin-top: 4px;
+ color: var(--bg-cherry-400, #ea6d71);
+}
+
+.lightMode {
+ .time-options-container {
+ .time-options-item {
+ &.active {
+ background-color: rgba($color: #ffffff, $alpha: 0.2);
+
+ &:hover {
+ cursor: pointer;
+ background-color: rgba($color: #ffffff, $alpha: 0.3);
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+ background-color: rgba($color: #ffffff, $alpha: 0.3);
+ }
+ }
+ }
+
+ .timeSelection-input {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+ padding: 4px 8px;
+ padding-left: 0px !important;
+
+ input::placeholder {
+ color: var(---bg-ink-300);
+ }
+
+ input:focus::placeholder {
+ color: rgba($color: #000000, $alpha: 0.4);
+ }
+ }
+
+ .valid-format-error {
+ margin-top: 4px;
+ color: var(--bg-cherry-400, #ea6d71);
+ }
+}
diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
new file mode 100644
index 0000000000..c76baaf2ac
--- /dev/null
+++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
@@ -0,0 +1,208 @@
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+import './CustomTimePicker.styles.scss';
+
+import { Input, Popover, Tooltip } from 'antd';
+import cx from 'classnames';
+import { Options } from 'container/TopNav/DateTimeSelection/config';
+import dayjs from 'dayjs';
+import debounce from 'lodash-es/debounce';
+import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
+import { ChangeEvent, useEffect, useState } from 'react';
+import { popupContainer } from 'utils/selectPopupContainer';
+
+interface CustomTimePickerProps {
+ onSelect: (value: string) => void;
+ items: any[];
+ selectedValue: string;
+ selectedTime: string;
+ onValidCustomDateChange: ([t1, t2]: any[]) => void;
+}
+
+function CustomTimePicker({
+ onSelect,
+ items,
+ selectedValue,
+ selectedTime,
+ onValidCustomDateChange,
+}: CustomTimePickerProps): JSX.Element {
+ const [open, setOpen] = useState(false);
+ const [
+ selectedTimePlaceholderValue,
+ setSelectedTimePlaceholderValue,
+ ] = useState('Select / Enter Time Range');
+
+ const [inputValue, setInputValue] = useState('');
+ const [inputStatus, setInputStatus] = useState<'' | 'error' | 'success'>('');
+ const [isInputFocused, setIsInputFocused] = useState(false);
+
+ const getSelectedTimeRangeLabel = (
+ selectedTime: string,
+ selectedTimeValue: string,
+ ): string => {
+ if (selectedTime === 'custom') {
+ return selectedTimeValue;
+ }
+
+ for (let index = 0; index < Options.length; index++) {
+ if (Options[index].value === selectedTime) {
+ return Options[index].label;
+ }
+ }
+
+ return '';
+ };
+
+ useEffect(() => {
+ const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
+
+ setSelectedTimePlaceholderValue(value);
+ }, [selectedTime, selectedValue]);
+
+ const hide = (): void => {
+ setOpen(false);
+ };
+
+ const handleOpenChange = (newOpen: boolean): void => {
+ setOpen(newOpen);
+ };
+
+ const debouncedHandleInputChange = debounce((inputValue): void => {
+ const isValidFormat = /^(\d+)([mhdw])$/.test(inputValue);
+ if (isValidFormat) {
+ setInputStatus('success');
+
+ const match = inputValue.match(/^(\d+)([mhdw])$/);
+
+ const value = parseInt(match[1], 10);
+ const unit = match[2];
+
+ const currentTime = dayjs();
+ let minTime = null;
+
+ switch (unit) {
+ case 'm':
+ minTime = currentTime.subtract(value, 'minute');
+ break;
+
+ case 'h':
+ minTime = currentTime.subtract(value, 'hour');
+ break;
+ case 'd':
+ minTime = currentTime.subtract(value, 'day');
+ break;
+ case 'w':
+ minTime = currentTime.subtract(value, 'week');
+ break;
+ default:
+ break;
+ }
+
+ onValidCustomDateChange([minTime, currentTime]);
+ } else {
+ setInputStatus('error');
+ }
+ }, 300);
+
+ const handleInputChange = (event: ChangeEvent): void => {
+ const inputValue = event.target.value;
+
+ if (inputValue.length > 0) {
+ setOpen(false);
+ } else {
+ setOpen(true);
+ }
+
+ setInputValue(inputValue);
+
+ // Call the debounced function with the input value
+ debouncedHandleInputChange(inputValue);
+ };
+
+ const content = (
+
+
+ {items.map(({ value, label }) => (
+
{
+ onSelect(value);
+ setSelectedTimePlaceholderValue(label);
+ setInputStatus('');
+ setInputValue('');
+ hide();
+ }}
+ key={value}
+ className={cx(
+ 'time-options-item',
+ selectedValue === value ? 'active' : '',
+ )}
+ >
+ {label}
+
+ ))}
+
+
+ );
+
+ const handleFocus = (): void => {
+ setIsInputFocused(true);
+ };
+
+ const handleBlur = (): void => {
+ setIsInputFocused(false);
+ };
+
+ return (
+
+
+ ) : (
+
+
+
+ )
+ }
+ suffix={
+ {
+ setOpen(!open);
+ }}
+ />
+ }
+ />
+
+ );
+}
+
+export default CustomTimePicker;
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index 4cb538cb57..0f0e47d7df 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -1,7 +1,8 @@
import { SyncOutlined } from '@ant-design/icons';
-import { Button, Select as DefaultSelect } from 'antd';
+import { Button } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageKey from 'api/browser/localstorage/set';
+import CustomTimePicker from 'components/CustomTimePicker/CustomTimePicker';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
@@ -21,7 +22,6 @@ import { GlobalTimeLoading, UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { GlobalReducer } from 'types/reducer/globalTime';
-import { popupContainer } from 'utils/selectPopupContainer';
import AutoRefresh from '../AutoRefresh';
import CustomDateTimeModal, { DateTimeRangeType } from '../CustomDateTimeModal';
@@ -29,8 +29,6 @@ import { getDefaultOption, getOptions, Time } from './config';
import RefreshText from './Refresh';
import { Form, FormContainer, FormItem } from './styles';
-const { Option } = DefaultSelect;
-
function DateTimeSelection({
location,
updateTimeInterval,
@@ -211,6 +209,7 @@ function DateTimeSelection({
};
const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => {
+ console.log('dateTimeRange', dateTimeRange);
if (dateTimeRange !== null) {
const [startTimeMoment, endTimeMoment] = dateTimeRange;
if (startTimeMoment && endTimeMoment) {
@@ -289,26 +288,22 @@ function DateTimeSelection({
initialValues={{ interval: selectedTime }}
>
- onSelectHandler(value as Time)}
- value={getInputLabel(
+ {
+ onSelectHandler(value as Time);
+ }}
+ selectedTime={selectedTime}
+ onValidCustomDateChange={(dateTime): void =>
+ onCustomDateHandler(dateTime as DateTimeRangeType)
+ }
+ selectedValue={getInputLabel(
dayjs(minTime / 1000000),
dayjs(maxTime / 1000000),
selectedTime,
)}
data-testid="dropDown"
- style={{
- minWidth: 120,
- }}
- listHeight={400}
- >
- {options.map(({ value, label }) => (
-
- {label}
-
- ))}
-
+ items={options}
+ />
Date: Tue, 16 Jan 2024 16:56:20 +0530
Subject: [PATCH 21/39] chore: add /v4/query_range endpoint (#4361)
---
ee/query-service/app/server.go | 1 +
pkg/query-service/app/http_handler.go | 83 +++-
.../app/metrics/v4/cumulative/table.go | 11 +-
.../app/metrics/v4/cumulative/table_test.go | 2 +-
.../app/metrics/v4/cumulative/timeseries.go | 25 +-
.../metrics/v4/cumulative/timeseries_test.go | 2 +-
.../app/metrics/v4/delta/helper.go | 61 ---
.../app/metrics/v4/delta/table.go | 16 +-
.../app/metrics/v4/delta/table_test.go | 4 +-
.../app/metrics/v4/delta/time_series_test.go | 4 +-
.../app/metrics/v4/delta/timeseries.go | 110 ++++-
.../helper.go => helpers/clauses.go} | 30 +-
.../app/metrics/v4/helpers/sub_query.go | 86 ++++
.../app/metrics/v4/query_builder.go | 128 +++---
.../app/metrics/v4/query_builder_test.go | 382 +++++++++++++++++-
pkg/query-service/app/server.go | 1 +
pkg/query-service/common/metrics.go | 19 +
pkg/query-service/model/v3/v3.go | 22 +-
18 files changed, 785 insertions(+), 202 deletions(-)
delete mode 100644 pkg/query-service/app/metrics/v4/delta/helper.go
rename pkg/query-service/app/metrics/v4/{cumulative/helper.go => helpers/clauses.go} (58%)
create mode 100644 pkg/query-service/app/metrics/v4/helpers/sub_query.go
create mode 100644 pkg/query-service/common/metrics.go
diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go
index 699894e691..a5c7c1db22 100644
--- a/ee/query-service/app/server.go
+++ b/ee/query-service/app/server.go
@@ -331,6 +331,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
apiHandler.RegisterMetricsRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
+ apiHandler.RegisterQueryRangeV4Routes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go
index 1d01267860..173fe39ef4 100644
--- a/pkg/query-service/app/http_handler.go
+++ b/pkg/query-service/app/http_handler.go
@@ -29,6 +29,7 @@ import (
metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
"go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/app/querier"
+ querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/auth"
@@ -78,6 +79,7 @@ type APIHandler struct {
featureFlags interfaces.FeatureLookup
ready func(http.HandlerFunc) http.HandlerFunc
querier interfaces.Querier
+ querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
preferDelta bool
preferSpanMetrics bool
@@ -142,7 +144,16 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
FeatureLookup: opts.FeatureFlags,
}
+ querierOptsV2 := querierV2.QuerierOptions{
+ Reader: opts.Reader,
+ Cache: opts.Cache,
+ KeyGenerator: queryBuilder.NewKeyGenerator(),
+ FluxInterval: opts.FluxInterval,
+ FeatureLookup: opts.FeatureFlags,
+ }
+
querier := querier.NewQuerier(querierOpts)
+ querierv2 := querierV2.NewQuerier(querierOptsV2)
aH := &APIHandler{
reader: opts.Reader,
@@ -158,6 +169,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
featureFlags: opts.FeatureFlags,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
querier: querier,
+ querierV2: querierv2,
}
builderOpts := queryBuilder.QueryBuilderOptions{
@@ -320,6 +332,11 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMid
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.liveTailLogs)).Methods(http.MethodGet)
}
+func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *AuthMiddleware) {
+ subRouter := router.PathPrefix("/api/v4").Subrouter()
+ subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeV4)).Methods(http.MethodPost)
+}
+
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)
}
@@ -542,7 +559,7 @@ func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParam
if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 {
for name := range qp.CompositeQuery.BuilderQueries {
query := qp.CompositeQuery.BuilderQueries[name]
- if query.DataSource == v3.DataSourceMetrics {
+ if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" {
if aH.preferDelta && metricNameToTemporality[query.AggregateAttribute.Key][v3.Delta] {
query.Temporality = v3.Delta
} else if metricNameToTemporality[query.AggregateAttribute.Key][v3.Cumulative] {
@@ -3241,3 +3258,67 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) {
}
}
}
+
+func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3, w http.ResponseWriter, r *http.Request) {
+
+ var result []*v3.Result
+ var err error
+ var errQuriesByName map[string]string
+ var spanKeys map[string]v3.AttributeKey
+ if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+ // check if any enrichment is required for logs if yes then enrich them
+ if logsv3.EnrichmentRequired(queryRangeParams) {
+ // get the fields if any logs query is present
+ var fields map[string]v3.AttributeKey
+ fields, err = aH.getLogFieldsV3(ctx, queryRangeParams)
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
+ RespondError(w, apiErrObj, errQuriesByName)
+ return
+ }
+ logsv3.Enrich(queryRangeParams, fields)
+ }
+
+ spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
+ RespondError(w, apiErrObj, errQuriesByName)
+ return
+ }
+ }
+
+ result, err, errQuriesByName = aH.querierV2.QueryRange(ctx, queryRangeParams, spanKeys)
+
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
+ RespondError(w, apiErrObj, errQuriesByName)
+ return
+ }
+
+ resp := v3.QueryRangeResponse{
+ Result: result,
+ }
+
+ aH.Respond(w, resp)
+}
+
+func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
+ queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
+
+ if apiErrorObj != nil {
+ zap.S().Error(apiErrorObj.Err.Error())
+ RespondError(w, apiErrorObj, nil)
+ return
+ }
+
+ // add temporality for each metric
+
+ temporalityErr := aH.addTemporality(r.Context(), queryRangeParams)
+ if temporalityErr != nil {
+ zap.S().Errorf("Error while adding temporality for metrics: %v", temporalityErr)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
+ return
+ }
+
+ aH.queryRangeV4(r.Context(), queryRangeParams, w, r)
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table.go b/pkg/query-service/app/metrics/v4/cumulative/table.go
index b81f3e7d8c..3e021a5811 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/table.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/table.go
@@ -3,11 +3,12 @@ package cumulative
import (
"fmt"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
-// prepareMetricQueryTable prepares the query to be used for fetching metrics
-func prepareMetricQueryTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+// PrepareMetricQueryCumulativeTable prepares the query to be used for fetching metrics
+func PrepareMetricQueryCumulativeTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var query string
temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
@@ -15,9 +16,9 @@ func prepareMetricQueryTable(start, end, step int64, mq *v3.BuilderQuery) (strin
return "", err
}
- groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
- orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
- selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table_test.go b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
index 45a6e657ea..d562b5d93a 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/table_test.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
@@ -99,7 +99,7 @@ func TestPrepareTableQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareMetricQueryTable(
+ query, err := PrepareMetricQueryCumulativeTable(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
index 6f39a952cb..7dfa8fef87 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
@@ -3,7 +3,7 @@ package cumulative
import (
"fmt"
- v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
@@ -107,7 +107,7 @@ const (
func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
- timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
if err != nil {
return "", err
}
@@ -127,15 +127,8 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
" GROUP BY fingerprint, ts" +
" ORDER BY fingerprint, ts"
- var selectLabelsAny string
- for _, tag := range mq.GroupBy {
- selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
- }
-
- var selectLabels string
- for _, tag := range mq.GroupBy {
- selectLabels += tag.Key + ","
- }
+ selectLabelsAny := helpers.SelectLabelsAny(mq.GroupBy)
+ selectLabels := helpers.SelectLabels(mq.GroupBy)
switch mq.TimeAggregation {
case v3.TimeAggregationAvg:
@@ -177,8 +170,8 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
return subQuery, nil
}
-// prepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
-func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+// PrepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
+func PrepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var query string
temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
@@ -186,9 +179,9 @@ func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.Build
return "", err
}
- groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
- orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
- selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
index 6b1d1e43b9..91dd1c4a1e 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
@@ -216,7 +216,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareMetricQueryCumulativeTimeSeries(
+ query, err := PrepareMetricQueryCumulativeTimeSeries(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
diff --git a/pkg/query-service/app/metrics/v4/delta/helper.go b/pkg/query-service/app/metrics/v4/delta/helper.go
deleted file mode 100644
index 972120fc15..0000000000
--- a/pkg/query-service/app/metrics/v4/delta/helper.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package delta
-
-import (
- "fmt"
- "strings"
-
- v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-)
-
-// groupingSets returns a string of comma separated tags for group by clause
-// `ts` is always added to the group by clause
-func groupingSets(tags ...string) string {
- withTs := append(tags, "ts")
- if len(withTs) > 1 {
- return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
- } else {
- return strings.Join(withTs, ", ")
- }
-}
-
-// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
-func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
- groupTags := []string{}
- for _, tag := range tags {
- groupTags = append(groupTags, tag.Key)
- }
- return groupingSets(groupTags...)
-}
-
-// groupBy returns a string of comma separated tags for group by clause
-func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
- groupTags := []string{}
- for _, tag := range tags {
- groupTags = append(groupTags, tag.Key)
- }
- groupTags = append(groupTags, "ts")
- return strings.Join(groupTags, ", ")
-}
-
-// orderBy returns a string of comma separated tags for order by clause
-// if the order is not specified, it defaults to ASC
-func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
- var orderBy []string
- for _, tag := range tags {
- found := false
- for _, item := range items {
- if item.ColumnName == tag.Key {
- found = true
- orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order))
- break
- }
- }
- if !found {
- orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag.Key))
- }
- }
-
- orderBy = append(orderBy, "ts ASC")
-
- return strings.Join(orderBy, ", ")
-}
diff --git a/pkg/query-service/app/metrics/v4/delta/table.go b/pkg/query-service/app/metrics/v4/delta/table.go
index b2b42bb9a6..bec10772f5 100644
--- a/pkg/query-service/app/metrics/v4/delta/table.go
+++ b/pkg/query-service/app/metrics/v4/delta/table.go
@@ -3,11 +3,17 @@ package delta
import (
"fmt"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
-// prepareMetricQueryDeltaTable builds the query to be used for fetching metrics
-func prepareMetricQueryDeltaTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+// PrepareMetricQueryDeltaTable builds the query to be used for fetching metrics
+func PrepareMetricQueryDeltaTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ if canShortCircuit(mq) {
+ return prepareQueryOptimized(start, end, step, mq)
+ }
+
var query string
temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
@@ -15,9 +21,9 @@ func prepareMetricQueryDeltaTable(start, end, step int64, mq *v3.BuilderQuery) (
return "", err
}
- groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
- orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
- selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
diff --git a/pkg/query-service/app/metrics/v4/delta/table_test.go b/pkg/query-service/app/metrics/v4/delta/table_test.go
index 271afcd0d2..c7bce4268c 100644
--- a/pkg/query-service/app/metrics/v4/delta/table_test.go
+++ b/pkg/query-service/app/metrics/v4/delta/table_test.go
@@ -95,13 +95,13 @@ func TestPrepareTableQuery(t *testing.T) {
},
start: 1701794980000,
end: 1701796780000,
- expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareMetricQueryDeltaTable(
+ query, err := PrepareMetricQueryDeltaTable(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
index 6eada21482..024371d328 100644
--- a/pkg/query-service/app/metrics/v4/delta/time_series_test.go
+++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
@@ -210,13 +210,13 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
},
start: 1701794980000,
end: 1701796780000,
- expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareMetricQueryDeltaTimeSeries(
+ query, err := PrepareMetricQueryDeltaTimeSeries(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
diff --git a/pkg/query-service/app/metrics/v4/delta/timeseries.go b/pkg/query-service/app/metrics/v4/delta/timeseries.go
index 83f9e2f111..3d6999f425 100644
--- a/pkg/query-service/app/metrics/v4/delta/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/delta/timeseries.go
@@ -3,18 +3,18 @@ package delta
import (
"fmt"
- v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
)
-// prepareTimeAggregationSubQueryTimeSeries builds the sub-query to be used for temporal aggregation
+// prepareTimeAggregationSubQuery builds the sub-query to be used for temporal aggregation
func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
- timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
if err != nil {
return "", err
}
@@ -34,15 +34,7 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
" GROUP BY fingerprint, ts" +
" ORDER BY fingerprint, ts"
- var selectLabelsAny string
- for _, tag := range mq.GroupBy {
- selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
- }
-
- var selectLabels string
- for _, tag := range mq.GroupBy {
- selectLabels += tag.Key + ","
- }
+ selectLabelsAny := helpers.SelectLabelsAny(mq.GroupBy)
switch mq.TimeAggregation {
case v3.TimeAggregationAvg:
@@ -76,8 +68,58 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
return subQuery, nil
}
-// prepareMetricQueryDeltaTimeSeries builds the query to be used for fetching metrics
-func prepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+// See `canShortCircuit` below for details
+func prepareQueryOptimized(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.SelectLabels(mq.GroupBy)
+
+ var query string
+
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
+ if err != nil {
+ return "", err
+ }
+
+ samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
+
+ // Select the aggregate value for interval
+ queryTmpl :=
+ "SELECT %s" +
+ " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
+ " %s as value" +
+ " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
+ " INNER JOIN" +
+ " (%s) as filtered_time_series" +
+ " USING fingerprint" +
+ " WHERE " + samplesTableFilter +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationSum:
+ op := "sum(value)"
+ if mq.TimeAggregation == v3.TimeAggregationRate {
+ op = "sum(value)/" + fmt.Sprintf("%d", step)
+ }
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ }
+ return query, nil
+}
+
+// PrepareMetricQueryDeltaTimeSeries builds the query to be used for fetching metrics
+func PrepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ if canShortCircuit(mq) {
+ return prepareQueryOptimized(start, end, step, mq)
+ }
var query string
@@ -86,9 +128,9 @@ func prepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQue
return "", err
}
- groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
- orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
- selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
@@ -118,3 +160,37 @@ func prepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQue
return query, nil
}
+
+// canShortCircuit returns true if we can use the optimized query
+// for the given query
+// This is used to avoid the group by fingerprint thus improving the performance
+// for certain queries
+// cases where we can short circuit:
+// 1. time aggregation = (rate|increase) and space aggregation = sum
+// - rate = sum(value)/step, increase = sum(value) - sum of sums is same as sum of all values
+//
+// 2. time aggregation = sum and space aggregation = sum
+// - sum of sums is same as sum of all values
+//
+// 3. time aggregation = min and space aggregation = min
+// - min of mins is same as min of all values
+//
+// 4. time aggregation = max and space aggregation = max
+// - max of maxs is same as max of all values
+//
+// all of this is true only for delta metrics
+func canShortCircuit(mq *v3.BuilderQuery) bool {
+ if (mq.TimeAggregation == v3.TimeAggregationRate || mq.TimeAggregation == v3.TimeAggregationIncrease) && mq.SpaceAggregation == v3.SpaceAggregationSum {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationSum && mq.SpaceAggregation == v3.SpaceAggregationSum {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationMin && mq.SpaceAggregation == v3.SpaceAggregationMin {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationMax && mq.SpaceAggregation == v3.SpaceAggregationMax {
+ return true
+ }
+ return false
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/helper.go b/pkg/query-service/app/metrics/v4/helpers/clauses.go
similarity index 58%
rename from pkg/query-service/app/metrics/v4/cumulative/helper.go
rename to pkg/query-service/app/metrics/v4/helpers/clauses.go
index 6e692d3f37..06f4b13cea 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/helper.go
+++ b/pkg/query-service/app/metrics/v4/helpers/clauses.go
@@ -1,4 +1,4 @@
-package cumulative
+package helpers
import (
"fmt"
@@ -18,8 +18,8 @@ func groupingSets(tags ...string) string {
}
}
-// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
-func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
+// GroupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
+func GroupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
@@ -27,8 +27,8 @@ func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
return groupingSets(groupTags...)
}
-// groupBy returns a string of comma separated tags for group by clause
-func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
+// GroupByAttributeKeyTags returns a string of comma separated tags for group by clause
+func GroupByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
@@ -37,9 +37,9 @@ func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
return strings.Join(groupTags, ", ")
}
-// orderBy returns a string of comma separated tags for order by clause
+// OrderByAttributeKeyTags returns a string of comma separated tags for order by clause
// if the order is not specified, it defaults to ASC
-func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
+func OrderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
var orderBy []string
for _, tag := range tags {
found := false
@@ -59,3 +59,19 @@ func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string
return strings.Join(orderBy, ", ")
}
+
+func SelectLabelsAny(tags []v3.AttributeKey) string {
+ var selectLabelsAny []string
+ for _, tag := range tags {
+ selectLabelsAny = append(selectLabelsAny, fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key))
+ }
+ return strings.Join(selectLabelsAny, " ")
+}
+
+func SelectLabels(tags []v3.AttributeKey) string {
+ var selectLabels []string
+ for _, tag := range tags {
+ selectLabels = append(selectLabels, fmt.Sprintf("%s,", tag.Key))
+ }
+ return strings.Join(selectLabels, " ")
+}
diff --git a/pkg/query-service/app/metrics/v4/helpers/sub_query.go b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
new file mode 100644
index 0000000000..97176e54bd
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
@@ -0,0 +1,86 @@
+package helpers
+
+import (
+ "fmt"
+ "strings"
+
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+)
+
+// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria
+func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
+ var conditions []string
+ var fs *v3.FilterSet = mq.Filters
+ var groupTags []v3.AttributeKey = mq.GroupBy
+
+ conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key)))
+ conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality))
+
+ if fs != nil && len(fs.Items) != 0 {
+ for _, item := range fs.Items {
+ toFormat := item.Value
+ op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
+ if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains {
+ toFormat = fmt.Sprintf("%%%s%%", toFormat)
+ }
+ fmtVal := utils.ClickHouseFormattedValue(toFormat)
+ switch op {
+ case v3.FilterOperatorEqual:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotEqual:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorIn:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotIn:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLike:
+ conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotLike:
+ conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorRegex:
+ conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotRegex:
+ conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorGreaterThan:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorGreaterThanOrEq:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLessThan:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLessThanOrEq:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorContains:
+ conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotContains:
+ conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorExists:
+ conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key))
+ case v3.FilterOperatorNotExists:
+ conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key))
+ default:
+ return "", fmt.Errorf("unsupported filter operator")
+ }
+ }
+ }
+ whereClause := strings.Join(conditions, " AND ")
+
+ var selectLabels string
+ for _, tag := range groupTags {
+ selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key)
+ }
+
+ // The table JOIN key always exists
+ selectLabels += "fingerprint"
+
+ filterSubQuery := fmt.Sprintf(
+ "SELECT DISTINCT %s FROM %s.%s WHERE %s",
+ selectLabels,
+ constants.SIGNOZ_METRIC_DBNAME,
+ constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME,
+ whereClause,
+ )
+
+ return filterSubQuery, nil
+}
diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go
index 5e6c18d72a..f54f2ff059 100644
--- a/pkg/query-service/app/metrics/v4/query_builder.go
+++ b/pkg/query-service/app/metrics/v4/query_builder.go
@@ -2,100 +2,66 @@ package v4
import (
"fmt"
- "strings"
"time"
metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
- "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/cumulative"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/delta"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
+ "go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
- "go.signoz.io/signoz/pkg/query-service/utils"
)
-// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria
-func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
- var conditions []string
- var fs *v3.FilterSet = mq.Filters
- var groupTags []v3.AttributeKey = mq.GroupBy
-
- conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key)))
- conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality))
-
- if fs != nil && len(fs.Items) != 0 {
- for _, item := range fs.Items {
- toFormat := item.Value
- op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
- if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains {
- toFormat = fmt.Sprintf("%%%s%%", toFormat)
- }
- fmtVal := utils.ClickHouseFormattedValue(toFormat)
- switch op {
- case v3.FilterOperatorEqual:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotEqual:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorIn:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotIn:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLike:
- conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotLike:
- conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorRegex:
- conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotRegex:
- conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorGreaterThan:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorGreaterThanOrEq:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLessThan:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLessThanOrEq:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorContains:
- conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotContains:
- conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorExists:
- conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key))
- case v3.FilterOperatorNotExists:
- conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key))
- default:
- return "", fmt.Errorf("unsupported filter operator")
- }
- }
- }
- whereClause := strings.Join(conditions, " AND ")
-
- var selectLabels string
- for _, tag := range groupTags {
- selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key)
- }
-
- // The table JOIN key always exists
- selectLabels += "fingerprint"
-
- filterSubQuery := fmt.Sprintf(
- "SELECT DISTINCT %s FROM %s.%s WHERE %s",
- selectLabels,
- constants.SIGNOZ_METRIC_DBNAME,
- constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME,
- whereClause,
- )
-
- return filterSubQuery, nil
-}
-
// PrepareMetricQuery prepares the query to be used for fetching metrics
// from the database
// start and end are in milliseconds
// step is in seconds
func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options metricsV3.Options) (string, error) {
- // TODO(srikanthcc): implement
- return "", nil
+ start, end = common.AdjustedMetricTimeRange(start, end, mq.StepInterval, mq.TimeAggregation)
+
+ groupBy := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+
+ if mq.Quantile != 0 {
+ // If quantile is set, we need to group by le
+ // and set the space aggregation to sum
+ // and time aggregation to rate
+ mq.TimeAggregation = v3.TimeAggregationRate
+ mq.SpaceAggregation = v3.SpaceAggregationSum
+ mq.GroupBy = append(mq.GroupBy, v3.AttributeKey{
+ Key: "le",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ })
+ }
+
+ var query string
+ var err error
+ if mq.Temporality == v3.Delta {
+ if panelType == v3.PanelTypeTable {
+ query, err = delta.PrepareMetricQueryDeltaTable(start, end, mq.StepInterval, mq)
+ } else {
+ query, err = delta.PrepareMetricQueryDeltaTimeSeries(start, end, mq.StepInterval, mq)
+ }
+ } else {
+ if panelType == v3.PanelTypeTable {
+ query, err = cumulative.PrepareMetricQueryCumulativeTable(start, end, mq.StepInterval, mq)
+ } else {
+ query, err = cumulative.PrepareMetricQueryCumulativeTimeSeries(start, end, mq.StepInterval, mq)
+ }
+ }
+
+ if err != nil {
+ return "", err
+ }
+
+ if mq.Quantile != 0 {
+ query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, mq.Quantile, query, groupBy, orderBy)
+ }
+
+ return query, nil
}
func BuildPromQuery(promQuery *v3.PromQuery, step, start, end int64) *model.QueryRangeParams {
diff --git a/pkg/query-service/app/metrics/v4/query_builder_test.go b/pkg/query-service/app/metrics/v4/query_builder_test.go
index eb071ecb2f..429c25b8e8 100644
--- a/pkg/query-service/app/metrics/v4/query_builder_test.go
+++ b/pkg/query-service/app/metrics/v4/query_builder_test.go
@@ -4,6 +4,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
@@ -142,7 +144,385 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := PrepareTimeseriesFilterQuery(testCase.builderQuery)
+ query, err := helpers.PrepareTimeseriesFilterQuery(testCase.builderQuery)
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{
+ {
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ {
+ Key: "endpoint",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryDeltaRate(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta, no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test temporality = cumulative, quantile = 0.99",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test temporality = cumulative, quantile = 0.99 without group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepreMetricQueryDeltaQuantile(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test temporality = delta, quantile = 0.99 group by service_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test temporality = delta, quantile = 0.99 no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryGauge(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test gauge query with no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_cpu_usage",
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ Expression: "A",
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ Disabled: false,
+ },
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test gauge query with group by host_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_cpu_usage",
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "host_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ Expression: "A",
+ Disabled: false,
+ },
+ expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
assert.Nil(t, err)
assert.Contains(t, query, testCase.expectedQueryContains)
})
diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go
index f7fa328b9f..eb50a775ce 100644
--- a/pkg/query-service/app/server.go
+++ b/pkg/query-service/app/server.go
@@ -267,6 +267,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
api.RegisterMetricsRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
+ api.RegisterQueryRangeV4Routes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
diff --git a/pkg/query-service/common/metrics.go b/pkg/query-service/common/metrics.go
new file mode 100644
index 0000000000..8596ba9d7c
--- /dev/null
+++ b/pkg/query-service/common/metrics.go
@@ -0,0 +1,19 @@
+package common
+
+import (
+ "math"
+
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func AdjustedMetricTimeRange(start, end, step int64, aggregaOperator v3.TimeAggregation) (int64, int64) {
+ start = start - (start % (step * 1000))
+ // if the query is a rate query, we adjust the start time by one more step
+ // so that we can calculate the rate for the first data point
+ if aggregaOperator.IsRateOperator() {
+ start -= step * 1000
+ }
+ adjustStep := int64(math.Min(float64(step), 60))
+ end = end - (end % (adjustStep * 1000))
+ return start, end
+}
diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go
index 968fe188e6..a11e888c15 100644
--- a/pkg/query-service/model/v3/v3.go
+++ b/pkg/query-service/model/v3/v3.go
@@ -462,6 +462,15 @@ const (
TimeAggregationIncrease TimeAggregation = "increase"
)
+func (t TimeAggregation) IsRateOperator() bool {
+ switch t {
+ case TimeAggregationRate, TimeAggregationIncrease:
+ return true
+ default:
+ return false
+ }
+}
+
type SpaceAggregation string
const (
@@ -500,6 +509,7 @@ type BuilderQuery struct {
SelectColumns []AttributeKey `json:"selectColumns,omitempty"`
TimeAggregation TimeAggregation `json:"timeAggregation,omitempty"`
SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"`
+ Quantile float64 `json:"quantile,omitempty"`
Functions []Function `json:"functions,omitempty"`
}
@@ -517,8 +527,16 @@ func (b *BuilderQuery) Validate() error {
if err := b.DataSource.Validate(); err != nil {
return fmt.Errorf("data source is invalid: %w", err)
}
- if err := b.AggregateOperator.Validate(); err != nil {
- return fmt.Errorf("aggregate operator is invalid: %w", err)
+ if b.DataSource == DataSourceMetrics {
+ if b.TimeAggregation == TimeAggregationUnspecified && b.Quantile == 0 {
+ if err := b.AggregateOperator.Validate(); err != nil {
+ return fmt.Errorf("aggregate operator is invalid: %w", err)
+ }
+ }
+ } else {
+ if err := b.AggregateOperator.Validate(); err != nil {
+ return fmt.Errorf("aggregate operator is invalid: %w", err)
+ }
}
if b.AggregateAttribute == (AttributeKey{}) && b.AggregateOperator.RequireAttribute(b.DataSource) {
return fmt.Errorf("aggregate attribute is required")
From cbce1b1847aa952b8983164bab886e9db23255d1 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Tue, 16 Jan 2024 17:35:07 +0530
Subject: [PATCH 22/39] feat: [GH-4325]: update the URL time query params when
zoom in and zoom out of charts
---
.../src/container/GridCardLayout/GridCard/index.tsx | 13 ++++++++++++-
.../container/MetricsApplication/Tabs/Overview.tsx | 12 ++++++++++--
.../LeftContainer/WidgetGraph/WidgetGraphs.tsx | 11 ++++++++++-
3 files changed, 32 insertions(+), 4 deletions(-)
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 1b388e0802..6dfe9a081b 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -1,10 +1,13 @@
+import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
+import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
+import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import isEmpty from 'lodash-es/isEmpty';
@@ -12,6 +15,7 @@ import _noop from 'lodash-es/noop';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
+import { useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -37,17 +41,24 @@ function GridCardGraph({
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const [minTimeScale, setMinTimeScale] = useState();
const [maxTimeScale, setMaxTimeScale] = useState();
+ const urlQuery = useUrlQuery();
+ const location = useLocation();
const onDragSelect = useCallback(
(start: number, end: number): void => {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
+ urlQuery.set(QueryParams.startTime, startTimestamp.toString());
+ urlQuery.set(QueryParams.endTime, endTimestamp.toString());
+ const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
+ history.replace(generatedUrl);
+
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
},
- [dispatch],
+ [dispatch, location.pathname, urlQuery],
);
const graphRef = useRef(null);
diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
index b73b78411c..36db03b567 100644
--- a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
+++ b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
@@ -13,6 +13,7 @@ import {
convertRawQueriesToTraceSelectedTags,
resourceAttributesToTagFilterItems,
} from 'hooks/useResourceAttribute/utils';
+import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { useCallback, useMemo, useState } from 'react';
@@ -52,8 +53,10 @@ function Application(): JSX.Element {
);
const { servicename } = useParams();
const [selectedTimeStamp, setSelectedTimeStamp] = useState(0);
- const { search } = useLocation();
+ const { search, pathname } = useLocation();
const { queries } = useResourceAttribute();
+ const urlQuery = useUrlQuery();
+
const selectedTags = useMemo(
() => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [],
[queries],
@@ -157,11 +160,16 @@ function Application(): JSX.Element {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
+ urlQuery.set(QueryParams.startTime, startTimestamp.toString());
+ urlQuery.set(QueryParams.endTime, endTimestamp.toString());
+ const generatedUrl = `${pathname}?${urlQuery.toString()}`;
+ history.replace(generatedUrl);
+
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
},
- [dispatch],
+ [dispatch, pathname, urlQuery],
);
const onErrorTrackHandler = (timestamp: number): (() => void) => (): void => {
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
index 08b65fa9c1..e544b7bbf0 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
@@ -1,14 +1,17 @@
+import { QueryParams } from 'constants/query';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import useUrlQuery from 'hooks/useUrlQuery';
+import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { UseQueryResult } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
+import { useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
@@ -35,6 +38,7 @@ function WidgetGraph({
const [minTimeScale, setMinTimeScale] = useState();
const [maxTimeScale, setMaxTimeScale] = useState();
+ const location = useLocation();
useEffect((): void => {
const { startTime, endTime } = getTimeRange(getWidgetQueryRange);
@@ -65,11 +69,16 @@ function WidgetGraph({
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
+ params.set(QueryParams.startTime, startTimestamp.toString());
+ params.set(QueryParams.endTime, endTimestamp.toString());
+ const generatedUrl = `${location.pathname}?${params.toString()}`;
+ history.replace(generatedUrl);
+
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
},
- [dispatch],
+ [dispatch, location.pathname, params],
);
const options = useMemo(
From 00c9ef50de1bf974432a9946a01565b6c06f6b62 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Wed, 17 Jan 2024 13:01:55 +0530
Subject: [PATCH 23/39] fix: set max 6 months for user entered time (#4384)
* fix: set max 6 months for user entered time
* fix: set max 6 months for user entered time
---
.../CustomTimePicker.styles.scss | 14 +-
.../CustomTimePicker/CustomTimePicker.tsx | 130 +++++++++++-------
.../TopNav/DateTimeSelection/index.tsx | 19 ++-
3 files changed, 102 insertions(+), 61 deletions(-)
diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
index 9efbf8f17c..3304232d65 100644
--- a/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
+++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss
@@ -1,3 +1,8 @@
+.custom-time-picker {
+ display: flex;
+ flex-direction: column;
+}
+
.time-options-container {
.time-options-item {
margin: 2px 0;
@@ -43,7 +48,9 @@
.valid-format-error {
margin-top: 4px;
- color: var(--bg-cherry-400, #ea6d71);
+ color: var(--bg-cherry-400) !important;
+ font-size: 13px !important;
+ font-weight: 400 !important;
}
.lightMode {
@@ -80,9 +87,4 @@
color: rgba($color: #000000, $alpha: 0.4);
}
}
-
- .valid-format-error {
- margin-top: 4px;
- color: var(--bg-cherry-400, #ea6d71);
- }
}
diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
index c76baaf2ac..abefa8fd6f 100644
--- a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
+++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx
@@ -2,7 +2,7 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
import './CustomTimePicker.styles.scss';
-import { Input, Popover, Tooltip } from 'antd';
+import { Input, Popover, Tooltip, Typography } from 'antd';
import cx from 'classnames';
import { Options } from 'container/TopNav/DateTimeSelection/config';
import dayjs from 'dayjs';
@@ -11,8 +11,11 @@ import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
import { ChangeEvent, useEffect, useState } from 'react';
import { popupContainer } from 'utils/selectPopupContainer';
+const maxAllowedMinTimeInMonths = 6;
+
interface CustomTimePickerProps {
onSelect: (value: string) => void;
+ onError: (value: boolean) => void;
items: any[];
selectedValue: string;
selectedTime: string;
@@ -21,6 +24,7 @@ interface CustomTimePickerProps {
function CustomTimePicker({
onSelect,
+ onError,
items,
selectedValue,
selectedTime,
@@ -34,6 +38,9 @@ function CustomTimePicker({
const [inputValue, setInputValue] = useState('');
const [inputStatus, setInputStatus] = useState<'' | 'error' | 'success'>('');
+ const [inputErrorMessage, setInputErrorMessage] = useState(
+ null,
+ );
const [isInputFocused, setIsInputFocused] = useState(false);
const getSelectedTimeRangeLabel = (
@@ -71,6 +78,8 @@ function CustomTimePicker({
const isValidFormat = /^(\d+)([mhdw])$/.test(inputValue);
if (isValidFormat) {
setInputStatus('success');
+ onError(false);
+ setInputErrorMessage(null);
const match = inputValue.match(/^(\d+)([mhdw])$/);
@@ -78,6 +87,10 @@ function CustomTimePicker({
const unit = match[2];
const currentTime = dayjs();
+ const maxAllowedMinTime = currentTime.subtract(
+ maxAllowedMinTimeInMonths,
+ 'month',
+ );
let minTime = null;
switch (unit) {
@@ -98,9 +111,17 @@ function CustomTimePicker({
break;
}
- onValidCustomDateChange([minTime, currentTime]);
+ if (minTime && minTime < maxAllowedMinTime) {
+ setInputStatus('error');
+ onError(true);
+ setInputErrorMessage('Please enter time less than 6 months');
+ } else {
+ onValidCustomDateChange([minTime, currentTime]);
+ }
} else {
setInputStatus('error');
+ onError(true);
+ setInputErrorMessage(null);
}
}, 300);
@@ -128,6 +149,8 @@ function CustomTimePicker({
onSelect(value);
setSelectedTimePlaceholderValue(label);
setInputStatus('');
+ onError(false);
+ setInputErrorMessage(null);
setInputValue('');
hide();
}}
@@ -153,55 +176,64 @@ function CustomTimePicker({
};
return (
-
-
+
- ) : (
-
-
-
- )
- }
- suffix={
- {
- setOpen(!open);
- }}
- />
- }
- />
-
+ >
+
+ ) : (
+
+
+
+ )
+ }
+ suffix={
+ {
+ setOpen(!open);
+ }}
+ />
+ }
+ />
+
+
+ {inputStatus === 'error' && inputErrorMessage && (
+
+ {inputErrorMessage}
+
+ )}
+
);
}
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index 0f0e47d7df..2c93e9eebd 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -36,6 +36,8 @@ function DateTimeSelection({
}: Props): JSX.Element {
const [formSelector] = Form.useForm();
+ const [hasSelectedTimeError, setHasSelectedTimeError] = useState(false);
+
const urlQuery = useUrlQuery();
const searchStartTime = urlQuery.get('startTime');
const searchEndTime = urlQuery.get('endTime');
@@ -292,6 +294,9 @@ function DateTimeSelection({
onSelect={(value: unknown): void => {
onSelectHandler(value as Time);
}}
+ onError={(hasError: boolean): void => {
+ setHasSelectedTimeError(hasError);
+ }}
selectedTime={selectedTime}
onValidCustomDateChange={(dateTime): void =>
onCustomDateHandler(dateTime as DateTimeRangeType)
@@ -319,12 +324,14 @@ function DateTimeSelection({
-
+ {!hasSelectedTimeError && (
+
+ )}
Date: Wed, 17 Jan 2024 13:57:05 +0530
Subject: [PATCH 24/39] feat: handle back btn changes
---
.../GridCardLayout/GridCard/index.tsx | 55 +++++++++++++++----
.../WidgetGraph/WidgetGraphs.tsx | 47 ++++++++++++++--
.../TopNav/DateTimeSelection/index.tsx | 4 +-
3 files changed, 88 insertions(+), 18 deletions(-)
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 6dfe9a081b..0616f20bdb 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -1,5 +1,6 @@
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
+import dayjs from 'dayjs';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -7,6 +8,8 @@ import { useResizeObserver } from 'hooks/useDimensions';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
+import GetMinMax from 'lib/getMinMax';
+import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
@@ -43,24 +46,61 @@ function GridCardGraph({
const [maxTimeScale, setMaxTimeScale] = useState();
const urlQuery = useUrlQuery();
const location = useLocation();
+ const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
+ AppState,
+ GlobalReducer
+ >((state) => state.globalTime);
const onDragSelect = useCallback(
(start: number, end: number): void => {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
- urlQuery.set(QueryParams.startTime, startTimestamp.toString());
- urlQuery.set(QueryParams.endTime, endTimestamp.toString());
- const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
-
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
+
+ const { maxTime, minTime } = GetMinMax('custom', [
+ startTimestamp,
+ endTimestamp,
+ ]);
+
+ urlQuery.set(QueryParams.startTime, minTime.toString());
+ urlQuery.set(QueryParams.endTime, maxTime.toString());
+ const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
+ history.push(generatedUrl);
},
[dispatch, location.pathname, urlQuery],
);
+ const handleBackNavigation = (): void => {
+ const searchParams = new URLSearchParams(window.location.search);
+ const startTime = searchParams.get(QueryParams.startTime);
+ const endTime = searchParams.get(QueryParams.endTime);
+
+ if (startTime && endTime && startTime !== endTime) {
+ console.log(startTime, endTime);
+ const startDate = dayjs(new Date(parseInt(getTimeString(startTime), 10)));
+ const endDate = dayjs(new Date(parseInt(getTimeString(endTime), 10)));
+
+ dispatch(
+ UpdateTimeInterval('custom', [
+ startDate.toDate().getTime() || 0,
+ endDate.toDate().getTime() || 0,
+ ]),
+ );
+ }
+ };
+
+ useEffect(() => {
+ window.addEventListener('popstate', handleBackNavigation);
+
+ return (): void => {
+ window.removeEventListener('popstate', handleBackNavigation);
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
const graphRef = useRef(null);
const isVisible = useIntersectionObserver(graphRef, undefined, true);
@@ -81,11 +121,6 @@ function GridCardGraph({
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
- const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
- AppState,
- GlobalReducer
- >((state) => state.globalTime);
-
const queryResponse = useGetQueryRange(
{
selectedTime: widget?.timePreferance,
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
index e544b7bbf0..c91636dd0c 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
@@ -1,10 +1,13 @@
import { QueryParams } from 'constants/query';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
+import dayjs from 'dayjs';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import useUrlQuery from 'hooks/useUrlQuery';
+import GetMinMax from 'lib/getMinMax';
+import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
@@ -68,19 +71,51 @@ function WidgetGraph({
(start: number, end: number): void => {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
-
- params.set(QueryParams.startTime, startTimestamp.toString());
- params.set(QueryParams.endTime, endTimestamp.toString());
- const generatedUrl = `${location.pathname}?${params.toString()}`;
- history.replace(generatedUrl);
-
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
+
+ const { maxTime, minTime } = GetMinMax('custom', [
+ startTimestamp,
+ endTimestamp,
+ ]);
+
+ params.set(QueryParams.startTime, minTime.toString());
+ params.set(QueryParams.endTime, maxTime.toString());
+ const generatedUrl = `${location.pathname}?${params.toString()}`;
+ history.push(generatedUrl);
},
[dispatch, location.pathname, params],
);
+ const handleBackNavigation = (): void => {
+ const searchParams = new URLSearchParams(window.location.search);
+ const startTime = searchParams.get(QueryParams.startTime);
+ const endTime = searchParams.get(QueryParams.endTime);
+
+ if (startTime && endTime && startTime !== endTime) {
+ console.log(startTime, endTime);
+ const startDate = dayjs(new Date(parseInt(getTimeString(startTime), 10)));
+ const endDate = dayjs(new Date(parseInt(getTimeString(endTime), 10)));
+
+ dispatch(
+ UpdateTimeInterval('custom', [
+ startDate.toDate().getTime() || 0,
+ endDate.toDate().getTime() || 0,
+ ]),
+ );
+ }
+ };
+
+ useEffect(() => {
+ window.addEventListener('popstate', handleBackNavigation);
+
+ return (): void => {
+ window.removeEventListener('popstate', handleBackNavigation);
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
const options = useMemo(
() =>
getUPlotChartOptions({
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index 0f0e47d7df..bbed038be8 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -194,7 +194,7 @@ function DateTimeSelection({
urlQuery.set(QueryParams.startTime, minTime.toString());
urlQuery.set(QueryParams.endTime, maxTime.toString());
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
+ history.push(generatedUrl);
}
if (!stagedQuery) {
@@ -231,7 +231,7 @@ function DateTimeSelection({
endTimeMoment?.toDate().getTime().toString(),
);
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
+ history.push(generatedUrl);
}
}
}
From 512fcda33d3b42a1289aeac27a9770bc57495600 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Wed, 17 Jan 2024 14:04:38 +0530
Subject: [PATCH 25/39] fix: address review comments
---
frontend/src/container/GridCardLayout/GridCard/index.tsx | 9 ++-------
.../NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx | 9 ++-------
2 files changed, 4 insertions(+), 14 deletions(-)
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 0616f20bdb..cf8d106224 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -1,6 +1,5 @@
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
-import dayjs from 'dayjs';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -79,14 +78,10 @@ function GridCardGraph({
const endTime = searchParams.get(QueryParams.endTime);
if (startTime && endTime && startTime !== endTime) {
- console.log(startTime, endTime);
- const startDate = dayjs(new Date(parseInt(getTimeString(startTime), 10)));
- const endDate = dayjs(new Date(parseInt(getTimeString(endTime), 10)));
-
dispatch(
UpdateTimeInterval('custom', [
- startDate.toDate().getTime() || 0,
- endDate.toDate().getTime() || 0,
+ parseInt(getTimeString(startTime), 10),
+ parseInt(getTimeString(endTime), 10),
]),
);
}
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
index c91636dd0c..f4a8ed1b22 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx
@@ -1,7 +1,6 @@
import { QueryParams } from 'constants/query';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
-import dayjs from 'dayjs';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
@@ -94,14 +93,10 @@ function WidgetGraph({
const endTime = searchParams.get(QueryParams.endTime);
if (startTime && endTime && startTime !== endTime) {
- console.log(startTime, endTime);
- const startDate = dayjs(new Date(parseInt(getTimeString(startTime), 10)));
- const endDate = dayjs(new Date(parseInt(getTimeString(endTime), 10)));
-
dispatch(
UpdateTimeInterval('custom', [
- startDate.toDate().getTime() || 0,
- endDate.toDate().getTime() || 0,
+ parseInt(getTimeString(startTime), 10),
+ parseInt(getTimeString(endTime), 10),
]),
);
}
From 4e8e7745c14658ab864f601e88a40309a36b607a Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Wed, 17 Jan 2024 15:17:51 +0530
Subject: [PATCH 26/39] [FE]: spaces in legend formatting (#4349)
* refactor: spaces in legend formatting
* refactor: spaces in legend formatting
* chore: string to const string
* refactor: replace string by const
---
frontend/src/constants/global.ts | 2 ++
.../QuerySection/QueryBuilder/clickHouse/query.tsx | 8 +++++++-
.../QuerySection/QueryBuilder/promQL/query.tsx | 8 +++++++-
.../container/QueryBuilder/components/Formula/Formula.tsx | 4 +++-
.../src/hooks/queryBuilder/useQueryBuilderOperations.ts | 7 ++++++-
frontend/src/utils/getFormatedLegend.ts | 2 ++
6 files changed, 27 insertions(+), 4 deletions(-)
create mode 100644 frontend/src/utils/getFormatedLegend.ts
diff --git a/frontend/src/constants/global.ts b/frontend/src/constants/global.ts
index d2a455ea57..42fb29720b 100644
--- a/frontend/src/constants/global.ts
+++ b/frontend/src/constants/global.ts
@@ -1,2 +1,4 @@
const MAX_RPS_LIMIT = 100;
export { MAX_RPS_LIMIT };
+
+export const LEGEND = 'legend';
diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx
index 304680c97c..522b21c803 100644
--- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx
@@ -1,9 +1,11 @@
import { Input } from 'antd';
import MonacoEditor from 'components/Editor';
+import { LEGEND } from 'constants/global';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { ChangeEvent, useCallback } from 'react';
import { IClickHouseQuery } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
import QueryHeader from '../QueryHeader';
@@ -57,7 +59,11 @@ function ClickHouseQueryBuilder({
const handleUpdateInput = useCallback(
(e: ChangeEvent) => {
- const { name, value } = e.target;
+ const { name } = e.target;
+ let { value } = e.target;
+ if (name === LEGEND) {
+ value = getFormatedLegend(value);
+ }
handleUpdateQuery(name as keyof IClickHouseQuery, value);
},
[handleUpdateQuery],
diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/query.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/query.tsx
index 3ff9c7ad7d..2b32c5a6ae 100644
--- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/query.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/promQL/query.tsx
@@ -1,8 +1,10 @@
import { Input } from 'antd';
+import { LEGEND } from 'constants/global';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { ChangeEvent, useCallback } from 'react';
import { IPromQLQuery } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
import QueryHeader from '../QueryHeader';
@@ -28,7 +30,11 @@ function PromQLQueryBuilder({
const handleUpdateQuery = useCallback(
(e: ChangeEvent) => {
- const { name, value } = e.target;
+ const { name } = e.target;
+ let { value } = e.target;
+ if (name === LEGEND) {
+ value = getFormatedLegend(value);
+ }
const newQuery: IPromQLQuery = { ...queryData, [name]: value };
handleSetQueryItemData(queryIndex, EQueryType.PROM, newQuery);
diff --git a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
index a5cf155b2f..dd39a5b1af 100644
--- a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
+++ b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
@@ -1,4 +1,5 @@
import { Col, Input, Row } from 'antd';
+import { LEGEND } from 'constants/global';
// ** Components
import {
FilterLabel,
@@ -13,6 +14,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { ChangeEvent, useCallback, useMemo } from 'react';
import { IBuilderFormula } from 'types/api/queryBuilder/queryBuilderData';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
import { AdditionalFiltersToggler } from '../AdditionalFiltersToggler';
// ** Types
@@ -58,7 +60,7 @@ export function Formula({
const { name, value } = e.target;
const newFormula: IBuilderFormula = {
...formula,
- [name]: value,
+ [name]: name === LEGEND ? getFormatedLegend(value) : value,
};
handleSetFormulaData(index, newFormula);
diff --git a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
index e87d7007a7..8e883852cb 100644
--- a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
+++ b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
@@ -1,3 +1,4 @@
+import { LEGEND } from 'constants/global';
import {
initialAutocompleteData,
initialQueryBuilderFormValuesMap,
@@ -21,6 +22,7 @@ import {
} from 'types/common/operations.types';
import { DataSource } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
export const useQueryOperations: UseQueryOperations = ({
query,
@@ -157,7 +159,10 @@ export const useQueryOperations: UseQueryOperations = ({
(key, value) => {
const newQuery: IBuilderQuery = {
...query,
- [key]: value,
+ [key]:
+ key === LEGEND && typeof value === 'string'
+ ? getFormatedLegend(value)
+ : value,
};
handleSetQueryData(index, newQuery);
diff --git a/frontend/src/utils/getFormatedLegend.ts b/frontend/src/utils/getFormatedLegend.ts
new file mode 100644
index 0000000000..1b3a5ea988
--- /dev/null
+++ b/frontend/src/utils/getFormatedLegend.ts
@@ -0,0 +1,2 @@
+export const getFormatedLegend = (value: string): string =>
+ value.replace(/\{\s*\{\s*(.*?)\s*\}\s*\}/g, '{{$1}}');
From 1163c16506e257f94e1f0d28a502f5bb8bbd0fed Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Thu, 18 Jan 2024 14:14:10 +0530
Subject: [PATCH 27/39] [Feat]: added iscolumn in option rendering (#4334)
* feat: added iscolumn in option rendering
* chore: build failure
* fix: build failure
---
.../filters/AggregatorFilter/AggregatorFilter.tsx | 1 +
.../QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx | 1 +
.../filters/QueryBuilderSearch/OptionRenderer.tsx | 6 ++++++
.../QueryBuilder/filters/QueryBuilderSearch/index.tsx | 1 +
frontend/src/container/QueryBuilder/type.ts | 1 +
frontend/src/hooks/queryBuilder/useOptions.ts | 1 +
6 files changed, 11 insertions(+)
diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
index f27336e96a..b5683f9ced 100644
--- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
@@ -81,6 +81,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
}),
)}
dataType={item.dataType}
+ isColumn={item.isColumn}
/>
),
value: `${item.key}${selectValueDivider}${createIdFromObjectFields(
diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
index 386786f70c..a3213cb4bc 100644
--- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
@@ -81,6 +81,7 @@ export const GroupByFilter = memo(function GroupByFilter({
}),
)}
dataType={item.dataType || ''}
+ isColumn={item.isColumn}
/>
),
value: `${transformStringWithPrefix({
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
index 32bf8fbd40..94cd2f2b9d 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
@@ -10,6 +10,7 @@ function OptionRenderer({
label,
value,
dataType,
+ isColumn,
}: OptionRendererProps): JSX.Element {
const optionType = getOptionType(label);
@@ -27,6 +28,10 @@ function OptionRenderer({
Data type:
{dataType}
+
+ isColumn:
+ {isColumn ? 'True' : 'False'}
+
) : (
@@ -40,6 +45,7 @@ interface OptionRendererProps {
label: string;
value: string;
dataType: string;
+ isColumn: boolean;
}
export default OptionRenderer;
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
index 975c79a4a8..b4af20c58b 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
@@ -210,6 +210,7 @@ function QueryBuilderSearch({
label={option.label}
value={option.value}
dataType={option.dataType || ''}
+ isColumn={option.isColumn || false}
/>
{option.selected && }
diff --git a/frontend/src/container/QueryBuilder/type.ts b/frontend/src/container/QueryBuilder/type.ts
index 892330ebdd..69add7d3db 100644
--- a/frontend/src/container/QueryBuilder/type.ts
+++ b/frontend/src/container/QueryBuilder/type.ts
@@ -15,4 +15,5 @@ export type Option = {
label: string;
selected?: boolean;
dataType?: string;
+ isColumn?: boolean;
};
diff --git a/frontend/src/hooks/queryBuilder/useOptions.ts b/frontend/src/hooks/queryBuilder/useOptions.ts
index 322934372c..bc599b0bcc 100644
--- a/frontend/src/hooks/queryBuilder/useOptions.ts
+++ b/frontend/src/hooks/queryBuilder/useOptions.ts
@@ -44,6 +44,7 @@ export const useOptions = (
label: `${getLabel(item)}`,
value: item.key,
dataType: item.dataType,
+ isColumn: item.isColumn,
})),
[getLabel],
);
From 4a1c48b72b4e1540af08fd0967f59d178c8589fc Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Thu, 18 Jan 2024 15:01:10 +0530
Subject: [PATCH 28/39] fix: autosave layout to layout change (#4385)
* fix: autosave layout to layout change
* fix: autosave layout to layout change
* refactor: no update api call on opening dashboard
* refactor: removed extra put api call
* refactor: if condition changed
---------
Co-authored-by: Rajat-Dabade
---
.../GridCardLayout/GridCardLayout.tsx | 56 ++++++++++++-------
.../WidgetHeader/WidgetHeader.styles.scss | 2 +
.../src/container/GridCardLayout/utils.ts | 8 +++
3 files changed, 47 insertions(+), 19 deletions(-)
create mode 100644 frontend/src/container/GridCardLayout/utils.ts
diff --git a/frontend/src/container/GridCardLayout/GridCardLayout.tsx b/frontend/src/container/GridCardLayout/GridCardLayout.tsx
index 3ee11b4008..ed93df3a07 100644
--- a/frontend/src/container/GridCardLayout/GridCardLayout.tsx
+++ b/frontend/src/container/GridCardLayout/GridCardLayout.tsx
@@ -1,6 +1,6 @@
import './GridCardLayout.styles.scss';
-import { PlusOutlined, SaveFilled } from '@ant-design/icons';
+import { PlusOutlined } from '@ant-design/icons';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
@@ -8,9 +8,12 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useNotifications } from 'hooks/useNotifications';
+import isEqual from 'lodash-es/isEqual';
import { FullscreenIcon } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
+import { useEffect, useState } from 'react';
import { FullScreen, useFullScreenHandle } from 'react-full-screen';
+import { Layout } from 'react-grid-layout';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -29,6 +32,7 @@ import {
ReactGridLayout,
} from './styles';
import { GraphLayoutProps } from './types';
+import { removeUndefinedValuesFromLayout } from './utils';
function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
const {
@@ -51,6 +55,8 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
const isDarkMode = useIsDarkMode();
+ const [dashboardLayout, setDashboardLayout] = useState(layouts);
+
const updateDashboardMutation = useUpdateDashboard();
const { notifications } = useNotifications();
@@ -78,7 +84,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
...selectedDashboard,
data: {
...selectedDashboard.data,
- layout: layouts.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
+ layout: dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
},
uuid: selectedDashboard.uuid,
};
@@ -90,9 +96,6 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
setLayouts(updatedDashboard.payload.data.layout);
setSelectedDashboard(updatedDashboard.payload);
}
- notifications.success({
- message: t('dashboard:layout_saved_successfully'),
- });
featureResponse.refetch();
},
@@ -108,6 +111,32 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
? [...ViewMenuAction, ...EditMenuAction]
: [...ViewMenuAction];
+ const handleLayoutChange = (layout: Layout[]): void => {
+ const filterLayout = removeUndefinedValuesFromLayout(layout);
+ const filterDashboardLayout = removeUndefinedValuesFromLayout(
+ dashboardLayout,
+ );
+ if (!isEqual(filterLayout, filterDashboardLayout)) {
+ setDashboardLayout(layout);
+ }
+ };
+
+ useEffect(() => {
+ if (
+ dashboardLayout &&
+ Array.isArray(dashboardLayout) &&
+ dashboardLayout.length > 0 &&
+ !isEqual(layouts, dashboardLayout) &&
+ !isDashboardLocked &&
+ saveLayoutPermission &&
+ !updateDashboardMutation.isLoading
+ ) {
+ onSaveHandler();
+ }
+
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [dashboardLayout]);
+
return (
<>
@@ -120,17 +149,6 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
{t('dashboard:full_view')}
- {!isDashboardLocked && saveLayoutPermission && (
- }
- disabled={updateDashboardMutation.isLoading}
- >
- {t('dashboard:save_layout')}
-
- )}
-
{!isDashboardLocked && addPanelPermission && (
- {layouts.map((layout) => {
+ {dashboardLayout.map((layout) => {
const { i: id } = layout;
const currentWidget = (widgets || [])?.find((e) => e.id === id);
diff --git a/frontend/src/container/GridCardLayout/WidgetHeader/WidgetHeader.styles.scss b/frontend/src/container/GridCardLayout/WidgetHeader/WidgetHeader.styles.scss
index 40d138e7df..799f0ed2f2 100644
--- a/frontend/src/container/GridCardLayout/WidgetHeader/WidgetHeader.styles.scss
+++ b/frontend/src/container/GridCardLayout/WidgetHeader/WidgetHeader.styles.scss
@@ -8,6 +8,8 @@
box-sizing: border-box;
font-size: 14px;
font-weight: 600;
+
+ cursor: move;
}
.widget-header-title {
diff --git a/frontend/src/container/GridCardLayout/utils.ts b/frontend/src/container/GridCardLayout/utils.ts
new file mode 100644
index 0000000000..2623e5b623
--- /dev/null
+++ b/frontend/src/container/GridCardLayout/utils.ts
@@ -0,0 +1,8 @@
+import { Layout } from 'react-grid-layout';
+
+export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
+ layout.map((obj) =>
+ Object.fromEntries(
+ Object.entries(obj).filter(([, value]) => value !== undefined),
+ ),
+ ) as Layout[];
From f99da73098a4e627a284cdf5d726717581a28730 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Thu, 18 Jan 2024 15:01:32 +0530
Subject: [PATCH 29/39] fix: [GH-4383]: handle special characters in the
service names (#4388)
* fix: [GH-4383]: handle special characters in the services name
---
frontend/src/container/MetricsApplication/Tabs/Overview.tsx | 6 +++++-
.../ServiceApplication/Columns/GetColumnSearchProps.tsx | 6 +++++-
2 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
index 36db03b567..032c3c0cb8 100644
--- a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
+++ b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx
@@ -16,6 +16,7 @@ import {
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
+import { defaultTo } from 'lodash-es';
import { useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
@@ -107,7 +108,10 @@ function Application(): JSX.Element {
);
const topLevelOperationsRoute = useMemo(
- () => (topLevelOperations ? topLevelOperations[servicename || ''] : []),
+ () =>
+ topLevelOperations
+ ? defaultTo(topLevelOperations[servicename || ''], [])
+ : [],
[servicename, topLevelOperations],
);
diff --git a/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
index 4257dc57ec..b272a39475 100644
--- a/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
+++ b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
@@ -26,7 +26,11 @@ export const getColumnSearchProps = (
const queryString = getQueryString(avialableParams, urlParams);
return (
-
+
{metrics}
);
From 26d6a869c676ebe5fafa6a696e1f766b22a1cbc6 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Thu, 18 Jan 2024 16:44:52 +0530
Subject: [PATCH 30/39] fix: button visibility in clickhouse and promQL headers
(#4390)
---
.../LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx
index e878c61b4d..b56b53694a 100644
--- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx
@@ -30,11 +30,10 @@ function QueryHeader({
const [collapse, setCollapse] = useState(false);
return (
-
+
: }
onClick={onDisable}
>
@@ -42,7 +41,6 @@ function QueryHeader({
: }
onClick={(): void => setCollapse(!collapse)}
/>
@@ -51,7 +49,6 @@ function QueryHeader({
{deletable && (
}
onClick={onDelete}
From 0c1a5001421368b9ee125310d3bf69a97e4b8c2e Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Thu, 18 Jan 2024 20:02:25 +0530
Subject: [PATCH 31/39] chore: update onboarding docs to send logs from the end
with a note for about beginning (#4374)
---
.../ApplicationLogs/applicationLogsFromLogFile.md | 6 ++++--
.../appplicationLogs-linuxamd64-configureReceiver.md | 4 +++-
.../appplicationLogs-linuxarm64-configureReceiver.md | 4 +++-
.../appplicationLogs-macosamd64-configureReceiver.md | 4 +++-
.../appplicationLogs-macosarm64-configureReceiver.md | 4 +++-
5 files changed, 16 insertions(+), 6 deletions(-)
diff --git a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/applicationLogsFromLogFile.md b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/applicationLogsFromLogFile.md
index d444b5ef41..cb9c6ed938 100644
--- a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/applicationLogsFromLogFile.md
+++ b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/applicationLogsFromLogFile.md
@@ -9,11 +9,13 @@
...
filelog/app:
include: [ /tmp/app.log ]
- start_at: beginning
+ start_at: end
...
```
- `start_at: beginning` can be removed once you are done testing.
+ Replace `/tmp/app.log` with the path to your log file.
+
+ Note: change the `start_at` value to `beginning` if you want to read the log file from the beginning. It may be useful if you want to send old logs to SigNoz. The log records older than the standard log retention period (default 15 days) will be discarded.
For parsing logs of different formats you will have to use operators, you can read more about operators [here](https://signoz.io/docs/userguide/logs/#operators-for-parsing-and-manipulating-logs).
diff --git a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxAMD64/appplicationLogs-linuxamd64-configureReceiver.md b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxAMD64/appplicationLogs-linuxamd64-configureReceiver.md
index 3513628a86..2b2d346f3a 100644
--- a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxAMD64/appplicationLogs-linuxamd64-configureReceiver.md
+++ b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxAMD64/appplicationLogs-linuxamd64-configureReceiver.md
@@ -7,11 +7,13 @@ receivers:
...
filelog/app:
include: [ /tmp/app.log ]
- start_at: beginning
+ start_at: end
...
```
Replace `/tmp/app.log` with the path to your log file.
+Note: change the `start_at` value to `beginning` if you want to read the log file from the beginning. It may be useful if you want to send old logs to SigNoz. The log records older than the standard log retention period (default 15 days) will be discarded.
+
For more configurations that are available for the filelog receiver, please check [here](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/filelogreceiver).
### Step 2: Include filelog receiver in the Pipeline
diff --git a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxARM64/appplicationLogs-linuxarm64-configureReceiver.md b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxARM64/appplicationLogs-linuxarm64-configureReceiver.md
index 3513628a86..2b2d346f3a 100644
--- a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxARM64/appplicationLogs-linuxarm64-configureReceiver.md
+++ b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/LinuxARM64/appplicationLogs-linuxarm64-configureReceiver.md
@@ -7,11 +7,13 @@ receivers:
...
filelog/app:
include: [ /tmp/app.log ]
- start_at: beginning
+ start_at: end
...
```
Replace `/tmp/app.log` with the path to your log file.
+Note: change the `start_at` value to `beginning` if you want to read the log file from the beginning. It may be useful if you want to send old logs to SigNoz. The log records older than the standard log retention period (default 15 days) will be discarded.
+
For more configurations that are available for the filelog receiver, please check [here](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/filelogreceiver).
### Step 2: Include filelog receiver in the Pipeline
diff --git a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsAMD64/appplicationLogs-macosamd64-configureReceiver.md b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsAMD64/appplicationLogs-macosamd64-configureReceiver.md
index 3513628a86..2b2d346f3a 100644
--- a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsAMD64/appplicationLogs-macosamd64-configureReceiver.md
+++ b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsAMD64/appplicationLogs-macosamd64-configureReceiver.md
@@ -7,11 +7,13 @@ receivers:
...
filelog/app:
include: [ /tmp/app.log ]
- start_at: beginning
+ start_at: end
...
```
Replace `/tmp/app.log` with the path to your log file.
+Note: change the `start_at` value to `beginning` if you want to read the log file from the beginning. It may be useful if you want to send old logs to SigNoz. The log records older than the standard log retention period (default 15 days) will be discarded.
+
For more configurations that are available for the filelog receiver, please check [here](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/filelogreceiver).
### Step 2: Include filelog receiver in the Pipeline
diff --git a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsARM64/appplicationLogs-macosarm64-configureReceiver.md b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsARM64/appplicationLogs-macosarm64-configureReceiver.md
index 3513628a86..2b2d346f3a 100644
--- a/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsARM64/appplicationLogs-macosarm64-configureReceiver.md
+++ b/frontend/src/container/OnboardingContainer/Modules/LogsManagement/ApplicationLogs/md-docs/MacOsARM64/appplicationLogs-macosarm64-configureReceiver.md
@@ -7,11 +7,13 @@ receivers:
...
filelog/app:
include: [ /tmp/app.log ]
- start_at: beginning
+ start_at: end
...
```
Replace `/tmp/app.log` with the path to your log file.
+Note: change the `start_at` value to `beginning` if you want to read the log file from the beginning. It may be useful if you want to send old logs to SigNoz. The log records older than the standard log retention period (default 15 days) will be discarded.
+
For more configurations that are available for the filelog receiver, please check [here](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/filelogreceiver).
### Step 2: Include filelog receiver in the Pipeline
From 46559014f753a711daec9b102d8a18f1b7930ada Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Thu, 18 Jan 2024 23:22:30 +0530
Subject: [PATCH 32/39] [feat]: persistence of graph toggle on variable change
(#4316)
* feat: persistence of graph toggle on variable change
* fix: reverted the previous changes
* chore: full view global time selector to right
---
.../FullViewHeader/FullViewHeader.styles.scss | 2 +-
.../GridCard/WidgetGraphComponent.tsx | 5 +++++
.../container/GridCardLayout/GridCard/index.tsx | 16 ++++++++++++++++
.../container/GridCardLayout/GridCard/types.ts | 1 +
4 files changed, 23 insertions(+), 1 deletion(-)
diff --git a/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss b/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
index b894ea4f12..98f2897902 100644
--- a/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
+++ b/frontend/src/container/FullViewHeader/FullViewHeader.styles.scss
@@ -1,6 +1,6 @@
.full-view-header-container {
display: flex;
- justify-content: center;
+ justify-content: flex-end;
align-items: center;
padding: 24px 0;
diff --git a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx
index 4fdbc05be7..34288a0b19 100644
--- a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx
@@ -46,6 +46,7 @@ function WidgetGraphComponent({
data,
options,
onDragSelect,
+ graphVisibility,
}: WidgetGraphComponentProps): JSX.Element {
const [deleteModal, setDeleteModal] = useState(false);
const [hovered, setHovered] = useState(false);
@@ -83,6 +84,10 @@ function WidgetGraphComponent({
setGraphsVisibilityStates(localStoredVisibilityStates);
}, [localStoredVisibilityStates]);
+ graphVisibility?.forEach((state, index) => {
+ lineChartRef.current?.toggleGraph(index, state);
+ });
+
const { setLayouts, selectedDashboard, setSelectedDashboard } = useDashboard();
const featureResponse = useSelector(
diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index cf8d106224..c1c1f99c93 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -163,6 +163,17 @@ function GridCardGraph({
? headerMenuList.filter((menu) => menu !== MenuItemKeys.CreateAlerts)
: headerMenuList;
+ const [graphVisibility, setGraphVisibility] = useState(
+ Array(queryResponse.data?.payload?.data.result.length || 0).fill(true),
+ );
+
+ useEffect(() => {
+ setGraphVisibility([
+ true,
+ ...Array(queryResponse.data?.payload?.data.result.length).fill(true),
+ ]);
+ }, [queryResponse.data?.payload?.data.result.length]);
+
const options = useMemo(
() =>
getUPlotChartOptions({
@@ -178,6 +189,8 @@ function GridCardGraph({
maxTimeScale,
softMax: widget.softMax === undefined ? null : widget.softMax,
softMin: widget.softMin === undefined ? null : widget.softMin,
+ graphsVisibilityStates: graphVisibility,
+ setGraphsVisibilityStates: setGraphVisibility,
}),
[
widget?.id,
@@ -192,6 +205,8 @@ function GridCardGraph({
onClickHandler,
minTimeScale,
maxTimeScale,
+ graphVisibility,
+ setGraphVisibility,
],
);
@@ -212,6 +227,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
onClickHandler={onClickHandler}
+ graphVisibility={graphVisibility}
/>
)}
diff --git a/frontend/src/container/GridCardLayout/GridCard/types.ts b/frontend/src/container/GridCardLayout/GridCard/types.ts
index 3674817291..2298b2b070 100644
--- a/frontend/src/container/GridCardLayout/GridCard/types.ts
+++ b/frontend/src/container/GridCardLayout/GridCard/types.ts
@@ -28,6 +28,7 @@ export interface WidgetGraphComponentProps extends UplotProps {
threshold?: ReactNode;
headerMenuList: MenuItemKeys[];
isWarning: boolean;
+ graphVisibility?: boolean[];
}
export interface GridCardGraphProps {
From c1b90491769b43ef4a8554879edcf9fdb87ba745 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Fri, 19 Jan 2024 11:11:29 +0530
Subject: [PATCH 33/39] fix: allow workspace blocked users to extend trial
(#4393)
Co-authored-by: Vishal Sharma
---
.../Steps/DataSource/DataSource.tsx | 11 ++--
.../ModuleStepsContainer.tsx | 8 ++-
.../context/OnboardingContext.tsx | 6 +-
.../WorkspaceLocked.styles.scss | 18 ++++--
.../pages/WorkspaceLocked/WorkspaceLocked.tsx | 60 ++++++++++++++++---
5 files changed, 79 insertions(+), 24 deletions(-)
diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
index 3090bf6564..9483576bc2 100644
--- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
@@ -125,8 +125,9 @@ export default function DataSource(): JSX.Element {
)}
key={dataSource.name}
onClick={(): void => {
- updateSelectedFramework('');
+ updateSelectedFramework(null);
updateSelectedDataSource(dataSource);
+ form.setFieldsValue({ selectFramework: null });
}}
>
@@ -152,6 +153,7 @@ export default function DataSource(): JSX.Element {
(defaultMetaData);
const lastStepIndex = selectedModuleSteps.length - 1;
+ // eslint-disable-next-line sonarjs/cognitive-complexity
const isValidForm = (): boolean => {
const { id: selectedModuleID } = selectedModule;
const dataSourceStep = stepsMap.dataSource;
@@ -106,7 +107,10 @@ export default function ModuleStepsContainer({
dataSource: selectedDataSource,
});
- if (doesHaveFrameworks && selectedFramework === '') {
+ if (
+ doesHaveFrameworks &&
+ (selectedFramework === null || selectedFramework === '')
+ ) {
return false;
}
@@ -177,7 +181,7 @@ export default function ModuleStepsContainer({
},
{
name: 'Framework',
- value: selectedFramework,
+ value: selectedFramework || '',
},
{
name: 'Environment',
diff --git a/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx b/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
index 0a17a6774f..68d7248bb5 100644
--- a/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
+++ b/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
@@ -14,7 +14,7 @@ interface OnboardingContextData {
ingestionData: any;
serviceName: string;
selectedEnvironment: string;
- selectedFramework: string;
+ selectedFramework: string | null;
selectedModule: ModuleProps | null;
selectedMethod: any;
selectedDataSource: DataSourceType | null;
@@ -51,7 +51,9 @@ function OnboardingContextProvider({
const [errorDetails, setErrorDetails] = useState(null);
const [selectedEnvironment, setSelectedEnvironment] = useState('');
- const [selectedFramework, setSelectedFramework] = useState('');
+ const [selectedFramework, setSelectedFramework] = useState(
+ null,
+ );
const [selectedMethod, setSelectedMethod] = useState(
OnboardingMethods.QUICK_START,
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
index 7e5b32ab29..c35284241a 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
@@ -9,11 +9,19 @@
margin: 0 auto;
}
-.update-credit-card-btn {
- margin: 24px 0;
- border-radius: 5px;
-}
-
.contact-us {
margin-top: 48px;
}
+
+.cta {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+ justify-content: center;
+
+ .update-credit-card-btn,
+ .extend-trial-btn {
+ margin: 24px 0;
+ border-radius: 5px;
+ }
+}
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
index 1a19e3d6a5..d9df5265df 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
@@ -1,12 +1,17 @@
/* eslint-disable react/no-unescaped-entities */
import './WorkspaceLocked.styles.scss';
-import { CreditCardOutlined, LockOutlined } from '@ant-design/icons';
+import {
+ CreditCardOutlined,
+ LockOutlined,
+ SendOutlined,
+} from '@ant-design/icons';
import { Button, Card, Skeleton, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import ROUTES from 'constants/routes';
import FullViewHeader from 'container/FullViewHeader/FullViewHeader';
+import useAnalytics from 'hooks/analytics/useAnalytics';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
@@ -22,6 +27,7 @@ export default function WorkspaceBlocked(): JSX.Element {
const { role } = useSelector((state) => state.app);
const isAdmin = role === 'ADMIN';
const [activeLicense, setActiveLicense] = useState(null);
+ const { trackEvent } = useAnalytics();
const { notifications } = useNotifications();
@@ -68,13 +74,36 @@ export default function WorkspaceBlocked(): JSX.Element {
);
const handleUpdateCreditCard = useCallback(async () => {
+ trackEvent('Workspace Blocked: User Clicked Update Credit Card');
+
updateCreditCard({
licenseKey: activeLicense?.key || '',
successURL: window.location.origin,
cancelURL: window.location.origin,
});
+ // eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeLicense?.key, updateCreditCard]);
+ const handleExtendTrial = (): void => {
+ trackEvent('Workspace Blocked: User Clicked Extend Trial');
+
+ const recipient = 'cloud-support@signoz.io';
+ const subject = 'Extend SigNoz Cloud Trial';
+ const body = `I'd like to request an extension for SigNoz Cloud for my account. Please find my account details below
+
+ SigNoz URL:
+ Admin Email:
+ `;
+
+ // Create the mailto link
+ const mailtoLink = `mailto:${recipient}?subject=${encodeURIComponent(
+ subject,
+ )}&body=${encodeURIComponent(body)}`;
+
+ // Open the default email client
+ window.location.href = mailtoLink;
+ };
+
return (
<>
@@ -95,18 +124,31 @@ export default function WorkspaceBlocked(): JSX.Element {
account.
{!isAdmin && 'Please contact your administrator for further help'}
- {isAdmin && (
+
+
+ {isAdmin && (
+ }
+ size="middle"
+ loading={isLoading}
+ onClick={handleUpdateCreditCard}
+ >
+ Update Credit Card
+
+ )}
+
}
+ className="extend-trial-btn"
+ type="default"
+ icon={ }
size="middle"
- loading={isLoading}
- onClick={handleUpdateCreditCard}
+ onClick={handleExtendTrial}
>
- Update Credit Card
+ Extend Trial
- )}
+
Got Questions?
From 51c1f8859389a54a06a0a08624d78953301ff233 Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Fri, 19 Jan 2024 12:05:44 +0530
Subject: [PATCH 34/39] Revert "[Feat]: added iscolumn in option rendering"
(#4396)
---
.../filters/AggregatorFilter/AggregatorFilter.tsx | 1 -
.../QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx | 1 -
.../filters/QueryBuilderSearch/OptionRenderer.tsx | 6 ------
.../QueryBuilder/filters/QueryBuilderSearch/index.tsx | 1 -
frontend/src/container/QueryBuilder/type.ts | 1 -
frontend/src/hooks/queryBuilder/useOptions.ts | 1 -
6 files changed, 11 deletions(-)
diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
index b5683f9ced..f27336e96a 100644
--- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
@@ -81,7 +81,6 @@ export const AggregatorFilter = memo(function AggregatorFilter({
}),
)}
dataType={item.dataType}
- isColumn={item.isColumn}
/>
),
value: `${item.key}${selectValueDivider}${createIdFromObjectFields(
diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
index a3213cb4bc..386786f70c 100644
--- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx
@@ -81,7 +81,6 @@ export const GroupByFilter = memo(function GroupByFilter({
}),
)}
dataType={item.dataType || ''}
- isColumn={item.isColumn}
/>
),
value: `${transformStringWithPrefix({
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
index 94cd2f2b9d..32bf8fbd40 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
@@ -10,7 +10,6 @@ function OptionRenderer({
label,
value,
dataType,
- isColumn,
}: OptionRendererProps): JSX.Element {
const optionType = getOptionType(label);
@@ -28,10 +27,6 @@ function OptionRenderer({
Data type:
{dataType}
-
- isColumn:
- {isColumn ? 'True' : 'False'}
-
) : (
@@ -45,7 +40,6 @@ interface OptionRendererProps {
label: string;
value: string;
dataType: string;
- isColumn: boolean;
}
export default OptionRenderer;
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
index b4af20c58b..975c79a4a8 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
@@ -210,7 +210,6 @@ function QueryBuilderSearch({
label={option.label}
value={option.value}
dataType={option.dataType || ''}
- isColumn={option.isColumn || false}
/>
{option.selected && }
diff --git a/frontend/src/container/QueryBuilder/type.ts b/frontend/src/container/QueryBuilder/type.ts
index 69add7d3db..892330ebdd 100644
--- a/frontend/src/container/QueryBuilder/type.ts
+++ b/frontend/src/container/QueryBuilder/type.ts
@@ -15,5 +15,4 @@ export type Option = {
label: string;
selected?: boolean;
dataType?: string;
- isColumn?: boolean;
};
diff --git a/frontend/src/hooks/queryBuilder/useOptions.ts b/frontend/src/hooks/queryBuilder/useOptions.ts
index bc599b0bcc..322934372c 100644
--- a/frontend/src/hooks/queryBuilder/useOptions.ts
+++ b/frontend/src/hooks/queryBuilder/useOptions.ts
@@ -44,7 +44,6 @@ export const useOptions = (
label: `${getLabel(item)}`,
value: item.key,
dataType: item.dataType,
- isColumn: item.isColumn,
})),
[getLabel],
);
From 4a7d972c854ca147914e8a3bf267328ba783d427 Mon Sep 17 00:00:00 2001
From: Rajat Dabade
Date: Fri, 19 Jan 2024 13:50:51 +0530
Subject: [PATCH 35/39] refactor: conditional based apdex on metrics and trace
query range (#4395)
* refactor: conditional based apdex on metrics and trace query range
* chore: add invalid float conversion
* Revert "refactor: conditional based apdex on metrics and trace query range"
This reverts commit ca44a7aedd9d5635fe28c65f1608f4822a6bf204.
* refactor: added servicename to the query params
---------
Co-authored-by: Srikanth Chekuri
---
.../src/api/metrics/ApDex/getMetricMeta.ts | 3 ++-
.../Overview/ApDex/ApDexMetricsApplication.tsx | 5 ++++-
frontend/src/hooks/apDex/useGetMetricMeta.ts | 5 +++--
pkg/query-service/app/apdex.go | 3 ++-
.../app/clickhouseReader/reader.go | 18 +++++++++++++-----
pkg/query-service/interfaces/interface.go | 2 +-
6 files changed, 25 insertions(+), 11 deletions(-)
diff --git a/frontend/src/api/metrics/ApDex/getMetricMeta.ts b/frontend/src/api/metrics/ApDex/getMetricMeta.ts
index 36466e1e69..e3045730a7 100644
--- a/frontend/src/api/metrics/ApDex/getMetricMeta.ts
+++ b/frontend/src/api/metrics/ApDex/getMetricMeta.ts
@@ -4,5 +4,6 @@ import { MetricMetaProps } from 'types/api/metrics/getApDex';
export const getMetricMeta = (
metricName: string,
+ servicename: string,
): Promise> =>
- axios.get(`/metric_meta?metricName=${metricName}`);
+ axios.get(`/metric_meta?metricName=${metricName}&serviceName=${servicename}`);
diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx
index 542a1e9e8d..3ee5fbf7d8 100644
--- a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx
+++ b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx
@@ -1,7 +1,9 @@
import Spinner from 'components/Spinner';
import { useGetMetricMeta } from 'hooks/apDex/useGetMetricMeta';
import useErrorNotification from 'hooks/useErrorNotification';
+import { useParams } from 'react-router-dom';
+import { IServiceName } from '../../types';
import ApDexMetrics from './ApDexMetrics';
import { metricMeta } from './constants';
import { ApDexDataSwitcherProps } from './types';
@@ -13,7 +15,8 @@ function ApDexMetricsApplication({
thresholdValue,
topLevelOperationsRoute,
}: ApDexDataSwitcherProps): JSX.Element {
- const { data, isLoading, error } = useGetMetricMeta(metricMeta);
+ const { servicename } = useParams();
+ const { data, isLoading, error } = useGetMetricMeta(metricMeta, servicename);
useErrorNotification(error);
if (isLoading) {
diff --git a/frontend/src/hooks/apDex/useGetMetricMeta.ts b/frontend/src/hooks/apDex/useGetMetricMeta.ts
index fd364e598d..6bf28c35a0 100644
--- a/frontend/src/hooks/apDex/useGetMetricMeta.ts
+++ b/frontend/src/hooks/apDex/useGetMetricMeta.ts
@@ -5,8 +5,9 @@ import { MetricMetaProps } from 'types/api/metrics/getApDex';
export const useGetMetricMeta = (
metricName: string,
+ servicename: string,
): UseQueryResult, AxiosError> =>
useQuery, AxiosError>({
- queryKey: [{ metricName }],
- queryFn: async () => getMetricMeta(metricName),
+ queryKey: [{ metricName, servicename }],
+ queryFn: async () => getMetricMeta(metricName, servicename),
});
diff --git a/pkg/query-service/app/apdex.go b/pkg/query-service/app/apdex.go
index 6854a91367..e3498d9c80 100644
--- a/pkg/query-service/app/apdex.go
+++ b/pkg/query-service/app/apdex.go
@@ -36,7 +36,8 @@ func (aH *APIHandler) getApdexSettings(w http.ResponseWriter, r *http.Request) {
func (aH *APIHandler) getLatencyMetricMetadata(w http.ResponseWriter, r *http.Request) {
metricName := r.URL.Query().Get("metricName")
- metricMetadata, err := aH.reader.GetLatencyMetricMetadata(r.Context(), metricName, aH.preferDelta)
+ serviceName := r.URL.Query().Get("serviceName")
+ metricMetadata, err := aH.reader.GetLatencyMetricMetadata(r.Context(), metricName, serviceName, aH.preferDelta)
if err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index 0340474596..3f11a4823a 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -4057,8 +4057,8 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
return &attributeValues, nil
}
-func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricName string, preferDelta bool) (*v3.LatencyMetricMetadataResponse, error) {
- query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s'", signozMetricDBName, signozTSTableName, metricName)
+func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricName, serviceName string, preferDelta bool) (*v3.LatencyMetricMetadataResponse, error) {
+ query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s'", signozMetricDBName, signozTSTableName, metricName, serviceName)
rows, err := r.db.Query(ctx, query, metricName)
if err != nil {
zap.S().Error(err)
@@ -4077,7 +4077,7 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN
}
}
- query = fmt.Sprintf("SELECT DISTINCT(toFloat64(JSONExtractString(labels, 'le'))) as le from %s.%s WHERE metric_name='%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName)
+ query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName, serviceName)
rows, err = r.db.Query(ctx, query, metricName)
if err != nil {
zap.S().Error(err)
@@ -4087,10 +4087,18 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN
var leFloat64 []float64
for rows.Next() {
- var le float64
- if err := rows.Scan(&le); err != nil {
+ var leStr string
+ if err := rows.Scan(&leStr); err != nil {
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
}
+ le, err := strconv.ParseFloat(leStr, 64)
+ // ignore the error and continue if the value is not a float
+ // ideally this should not happen but we have seen ClickHouse
+ // returning empty string for some values
+ if err != nil {
+ zap.S().Error("error while parsing le value: ", err)
+ continue
+ }
if math.IsInf(le, 0) {
continue
}
diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go
index e2b2b49481..a75a2f5f30 100644
--- a/pkg/query-service/interfaces/interface.go
+++ b/pkg/query-service/interfaces/interface.go
@@ -98,7 +98,7 @@ type Reader interface {
QueryDashboardVars(ctx context.Context, query string) (*model.DashboardVar, error)
CheckClickHouse(ctx context.Context) error
- GetLatencyMetricMetadata(context.Context, string, bool) (*v3.LatencyMetricMetadataResponse, error)
+ GetLatencyMetricMetadata(context.Context, string, string, bool) (*v3.LatencyMetricMetadataResponse, error)
}
type Querier interface {
From f0669a6dc1c422715df5f61faaa1ae651d40f631 Mon Sep 17 00:00:00 2001
From: Vikrant Gupta
Date: Fri, 19 Jan 2024 13:52:14 +0530
Subject: [PATCH 36/39] fix: update time stamp on the first load in URL to help
with back navigation (#4397)
* fix: update time stamp on the first load in URL to help with back navigation
---
.../src/container/TopNav/DateTimeSelection/index.tsx | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index c72ff7b82d..785f65da8f 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -279,6 +279,18 @@ function DateTimeSelection({
setRefreshButtonHidden(updatedTime === 'custom');
updateTimeInterval(updatedTime, [preStartTime, preEndTime]);
+
+ if (updatedTime !== 'custom') {
+ const { minTime, maxTime } = GetMinMax(updatedTime);
+ urlQuery.set(QueryParams.startTime, minTime.toString());
+ urlQuery.set(QueryParams.endTime, maxTime.toString());
+ } else {
+ urlQuery.set(QueryParams.startTime, preStartTime.toString());
+ urlQuery.set(QueryParams.endTime, preEndTime.toString());
+ }
+ const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
+ history.replace(generatedUrl);
+
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [location.pathname, updateTimeInterval, globalTimeLoading]);
From 7b344f7a75b0757a9d00fc9855fa4115e1df8a66 Mon Sep 17 00:00:00 2001
From: Yunus M
Date: Fri, 19 Jan 2024 17:01:52 +0530
Subject: [PATCH 37/39] Revert "fix: update time stamp on the first load in URL
to help with back navigation" (#4398)
---
.../src/container/TopNav/DateTimeSelection/index.tsx | 12 ------------
1 file changed, 12 deletions(-)
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index 785f65da8f..c72ff7b82d 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -279,18 +279,6 @@ function DateTimeSelection({
setRefreshButtonHidden(updatedTime === 'custom');
updateTimeInterval(updatedTime, [preStartTime, preEndTime]);
-
- if (updatedTime !== 'custom') {
- const { minTime, maxTime } = GetMinMax(updatedTime);
- urlQuery.set(QueryParams.startTime, minTime.toString());
- urlQuery.set(QueryParams.endTime, maxTime.toString());
- } else {
- urlQuery.set(QueryParams.startTime, preStartTime.toString());
- urlQuery.set(QueryParams.endTime, preEndTime.toString());
- }
- const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
-
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [location.pathname, updateTimeInterval, globalTimeLoading]);
From a8d70206ab48ecf5556c13620adf0eb797119eb0 Mon Sep 17 00:00:00 2001
From: Vishal Sharma
Date: Fri, 19 Jan 2024 17:32:52 +0530
Subject: [PATCH 38/39] chore: update onboarding events (#4387)
* chore: update onboarding events
* chore: update retry count
---
.../OnboardingContainer.tsx | 12 +-
.../ConnectionStatus/ConnectionStatus.tsx | 19 +++-
.../Steps/DataSource/DataSource.tsx | 37 -------
.../EnvironmentDetails/EnvironmentDetails.tsx | 25 -----
.../LogsConnectionStatus.tsx | 16 ++-
.../Steps/MarkdownStep/MarkdownStep.tsx | 23 ----
.../Steps/SelectMethod/SelectMethod.tsx | 32 +-----
.../ModuleStepsContainer.tsx | 103 +++++++++++++++++-
frontend/src/hooks/analytics/useAnalytics.tsx | 4 +-
9 files changed, 137 insertions(+), 134 deletions(-)
diff --git a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
index 4354349304..dfe2f7affe 100644
--- a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
@@ -93,7 +93,7 @@ export default function Onboarding(): JSX.Element {
} = useOnboardingContext();
useEffectOnce(() => {
- trackEvent('Onboarding Started');
+ trackEvent('Onboarding V2 Started');
});
const { status, data: ingestionData } = useQuery({
@@ -180,20 +180,12 @@ export default function Onboarding(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [selectedModule, selectedDataSource, selectedEnvironment, selectedMethod]);
- useEffect(() => {
- // on select
- trackEvent('Onboarding: Module Selected', {
- selectedModule: selectedModule.id,
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedModule]);
-
const handleNext = (): void => {
if (activeStep <= 3) {
const nextStep = activeStep + 1;
// on next
- trackEvent('Onboarding: Get Started', {
+ trackEvent('Onboarding V2: Get Started', {
selectedModule: selectedModule.id,
nextStepId: nextStep,
});
diff --git a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
index 2a7e6621ea..ca168fbf88 100644
--- a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
@@ -30,6 +30,9 @@ export default function ConnectionStatus(): JSX.Element {
const {
serviceName,
selectedDataSource,
+ selectedEnvironment,
+ activeStep,
+ selectedMethod,
selectedFramework,
} = useOnboardingContext();
const { queries } = useResourceAttribute();
@@ -40,7 +43,7 @@ export default function ConnectionStatus(): JSX.Element {
const { trackEvent } = useAnalytics();
- const [retryCount, setRetryCount] = useState(20); // Retry for 5 mins
+ const [retryCount, setRetryCount] = useState(20); // Retry for 3 mins 20s
const [loading, setLoading] = useState(true);
const [isReceivingData, setIsReceivingData] = useState(false);
const dispatch = useDispatch();
@@ -122,7 +125,12 @@ export default function ConnectionStatus(): JSX.Element {
if (data || isError) {
setRetryCount(retryCount - 1);
if (retryCount < 0) {
- trackEvent('❌ Onboarding: APM: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
serviceName,
status: 'Failed',
});
@@ -136,7 +144,12 @@ export default function ConnectionStatus(): JSX.Element {
setLoading(false);
setIsReceivingData(true);
- trackEvent('✅ Onboarding: APM: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
serviceName,
status: 'Successful',
});
diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
index 9483576bc2..a52113c572 100644
--- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
@@ -11,7 +11,6 @@ import {
getSupportedFrameworks,
hasFrameworks,
} from 'container/OnboardingContainer/utils/dataSourceUtils';
-import useAnalytics from 'hooks/analytics/useAnalytics';
import { useEffect, useState } from 'react';
import { popupContainer } from 'utils/selectPopupContainer';
@@ -25,10 +24,7 @@ export interface DataSourceType {
export default function DataSource(): JSX.Element {
const [form] = Form.useForm();
- const { trackEvent } = useAnalytics();
-
const {
- activeStep,
serviceName,
selectedModule,
selectedDataSource,
@@ -56,39 +52,6 @@ export default function DataSource(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
- useEffect(() => {
- // on language select
- trackEvent('Onboarding: Data Source Selected', {
- dataSource: selectedDataSource,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedDataSource]);
-
- useEffect(() => {
- // on framework select
- trackEvent('Onboarding: Framework Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedFramework]);
-
useEffect(() => {
if (selectedModule && selectedDataSource) {
const frameworks = hasFrameworks({
diff --git a/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx b/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx
index dc8cc85a5c..f4f8381de7 100644
--- a/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx
@@ -2,9 +2,7 @@ import { Card, Typography } from 'antd';
import cx from 'classnames';
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
import { useCases } from 'container/OnboardingContainer/OnboardingContainer';
-import useAnalytics from 'hooks/analytics/useAnalytics';
import { Server } from 'lucide-react';
-import { useEffect } from 'react';
interface SupportedEnvironmentsProps {
name: string;
@@ -36,9 +34,6 @@ const supportedEnvironments: SupportedEnvironmentsProps[] = [
export default function EnvironmentDetails(): JSX.Element {
const {
- activeStep,
- selectedDataSource,
- selectedFramework,
selectedEnvironment,
updateSelectedEnvironment,
selectedModule,
@@ -46,26 +41,6 @@ export default function EnvironmentDetails(): JSX.Element {
updateErrorDetails,
} = useOnboardingContext();
- const { trackEvent } = useAnalytics();
-
- useEffect(() => {
- // on language select
- trackEvent('Onboarding: Environment Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedEnvironment]);
-
return (
<>
diff --git a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
index 954dadd677..994234eca8 100644
--- a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
@@ -26,7 +26,11 @@ const enum ApplicationLogsType {
export default function LogsConnectionStatus(): JSX.Element {
const [loading, setLoading] = useState(true);
- const { selectedDataSource } = useOnboardingContext();
+ const {
+ selectedDataSource,
+ activeStep,
+ selectedEnvironment,
+ } = useOnboardingContext();
const { trackEvent } = useAnalytics();
const [isReceivingData, setIsReceivingData] = useState(false);
const [pollingInterval, setPollingInterval] = useState(15000); // initial Polling interval of 15 secs , Set to false after 5 mins
@@ -94,7 +98,10 @@ export default function LogsConnectionStatus(): JSX.Element {
setRetryCount(retryCount - 1);
if (retryCount < 0) {
- trackEvent('❌ Onboarding: Logs Management: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
status: 'Failed',
});
@@ -127,7 +134,10 @@ export default function LogsConnectionStatus(): JSX.Element {
setRetryCount(-1);
setPollingInterval(false);
- trackEvent('✅ Onboarding: Logs Management: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
status: 'Successful',
});
diff --git a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
index beec4d1065..d39e83ed53 100644
--- a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
@@ -8,7 +8,6 @@ import {
useOnboardingContext,
} from 'container/OnboardingContainer/context/OnboardingContext';
import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer';
-import useAnalytics from 'hooks/analytics/useAnalytics';
import { useEffect, useState } from 'react';
export interface IngestionInfoProps {
@@ -28,8 +27,6 @@ export default function MarkdownStep(): JSX.Element {
selectedMethod,
} = useOnboardingContext();
- const { trackEvent } = useAnalytics();
-
const [markdownContent, setMarkdownContent] = useState('');
const { step } = activeStep;
@@ -86,26 +83,6 @@ export default function MarkdownStep(): JSX.Element {
REGION: ingestionData?.REGION || 'region',
};
- useEffect(() => {
- trackEvent(
- `Onboarding: ${activeStep?.module?.id}: ${selectedDataSource?.name}: ${activeStep?.step?.title}`,
- {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- },
- );
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [step]);
-
return (
diff --git a/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx b/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
index 79cba00260..adde17d7be 100644
--- a/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
@@ -3,45 +3,17 @@ import {
OnboardingMethods,
useOnboardingContext,
} from 'container/OnboardingContainer/context/OnboardingContext';
-import useAnalytics from 'hooks/analytics/useAnalytics';
-import { useEffect, useState } from 'react';
+import { useState } from 'react';
export default function SelectMethod(): JSX.Element {
- const {
- activeStep,
- selectedDataSource,
- selectedFramework,
- selectedEnvironment,
- selectedMethod,
- updateSelectedMethod,
- } = useOnboardingContext();
+ const { selectedMethod, updateSelectedMethod } = useOnboardingContext();
const [value, setValue] = useState(selectedMethod);
- const { trackEvent } = useAnalytics();
-
const onChange = (e: RadioChangeEvent): void => {
setValue(e.target.value);
updateSelectedMethod(e.target.value);
};
- useEffect(() => {
- // on language select
- trackEvent('Onboarding: Environment Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedMethod]);
-
return (
diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
index efc4916790..7362e9a47d 100644
--- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
@@ -68,6 +68,7 @@ export default function ModuleStepsContainer({
selectedDataSource,
selectedEnvironment,
selectedFramework,
+ selectedMethod,
updateActiveStep,
updateErrorDetails,
resetProgress,
@@ -135,8 +136,13 @@ export default function ModuleStepsContainer({
};
const redirectToModules = (): void => {
- trackEvent('Onboarding Complete', {
+ trackEvent('Onboarding V2 Complete', {
module: selectedModule.id,
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ serviceName,
});
if (selectedModule.id === ModulesMap.APM) {
@@ -166,6 +172,101 @@ export default function ModuleStepsContainer({
module: selectedModule,
step: selectedModuleSteps[current + 1],
});
+ // on next step click track events
+ switch (selectedModuleSteps[current].id) {
+ case stepsMap.dataSource:
+ trackEvent('Onboarding V2: Data Source Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.environmentDetails:
+ trackEvent('Onboarding V2: Environment Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.selectMethod:
+ trackEvent('Onboarding V2: Method Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+
+ case stepsMap.setupOtelCollector:
+ trackEvent('Onboarding V2: Setup Otel Collector', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.instrumentApplication:
+ trackEvent('Onboarding V2: Instrument Application', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.cloneRepository:
+ trackEvent('Onboarding V2: Clone Repository', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.runApplication:
+ trackEvent('Onboarding V2: Run Application', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.addHttpDrain:
+ trackEvent('Onboarding V2: Add HTTP Drain', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.startContainer:
+ trackEvent('Onboarding V2: Start Container', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.setupLogDrains:
+ trackEvent('Onboarding V2: Setup Log Drains', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.configureReceiver:
+ trackEvent('Onboarding V2: Configure Receiver', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.configureAws:
+ trackEvent('Onboarding V2: Configure AWS', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ default:
+ break;
+ }
}
// set meta data
diff --git a/frontend/src/hooks/analytics/useAnalytics.tsx b/frontend/src/hooks/analytics/useAnalytics.tsx
index 23d535063d..28213c9579 100644
--- a/frontend/src/hooks/analytics/useAnalytics.tsx
+++ b/frontend/src/hooks/analytics/useAnalytics.tsx
@@ -26,8 +26,8 @@ const useAnalytics = (): any => {
},
};
- const updatedPropertes = { ...properties };
- updatedPropertes.userId = user.email;
+ const updatedProperties = { ...properties };
+ updatedProperties.userId = user.email;
window.analytics.track(eventName, properties, context);
}
};
From d4248fe93340f20d30499d1a8a4a0dce688b17b6 Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Sat, 20 Jan 2024 00:19:42 +0545
Subject: [PATCH 39/39] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?=
=?UTF-8?q?sions:=20SigNoz=200.37.0,=20SigNoz=20OtelCollector=200.88.8?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Prashant Shahi
---
deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++----
deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++--
deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++----
go.mod | 2 +-
go.sum | 8 ++++----
pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++--
6 files changed, 17 insertions(+), 17 deletions(-)
diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index c19836b5fe..71421a0196 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
- image: signoz/query-service:0.36.2
+ image: signoz/query-service:0.37.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
- image: signoz/frontend:0.36.2
+ image: signoz/frontend:0.37.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
- image: signoz/signoz-otel-collector:0.88.6
+ image: signoz/signoz-otel-collector:0.88.8
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:0.88.6
+ image: signoz/signoz-schema-migrator:0.88.8
deploy:
restart_policy:
condition: on-failure
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 4ab1954727..866029e73d 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
+ image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
- image: signoz/signoz-otel-collector:0.88.6
+ image: signoz/signoz-otel-collector:0.88.8
command:
[
"--config=/etc/otel-collector-config.yaml",
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 6647c90bc5..ef41cd3ae8 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
- image: signoz/query-service:${DOCKER_TAG:-0.36.2}
+ image: signoz/query-service:${DOCKER_TAG:-0.37.0}
container_name: signoz-query-service
command:
[
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
- image: signoz/frontend:${DOCKER_TAG:-0.36.2}
+ image: signoz/frontend:${DOCKER_TAG:-0.37.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
+ image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
- image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.6}
+ image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.8}
container_name: signoz-otel-collector
command:
[
diff --git a/go.mod b/go.mod
index 9ea23cfd4f..23505f3f98 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.21
require (
github.com/ClickHouse/clickhouse-go/v2 v2.15.0
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb
- github.com/SigNoz/signoz-otel-collector v0.88.6
+ github.com/SigNoz/signoz-otel-collector v0.88.8
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3
diff --git a/go.sum b/go.sum
index 0d385a233b..b7c5de4e09 100644
--- a/go.sum
+++ b/go.sum
@@ -56,8 +56,8 @@ cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq
contrib.go.opencensus.io/exporter/prometheus v0.4.2 h1:sqfsYl5GIY/L570iT+l93ehxaWJs2/OwXtiWwew3oAg=
contrib.go.opencensus.io/exporter/prometheus v0.4.2/go.mod h1:dvEHbiKmgvbr5pjaF9fpw1KeYcjrnC1J8B+JKjsZyRQ=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Azure/azure-sdk-for-go v65.0.0+incompatible h1:HzKLt3kIwMm4KeJYTdx9EbjRYTySD/t8i1Ee/W5EGXw=
-github.com/Azure/azure-sdk-for-go v65.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
+github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg=
@@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFm
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
-github.com/SigNoz/signoz-otel-collector v0.88.6 h1:rvXm9bz4b9GsYeT8c3+F/g56DHPf0IN8mK8tUfZfnw8=
-github.com/SigNoz/signoz-otel-collector v0.88.6/go.mod h1:6lR8Uy99zBd0JGPg9zt0aEBW4A4GpblUtpcbszGmg8E=
+github.com/SigNoz/signoz-otel-collector v0.88.8 h1:oa/0gSfkGhjzXtz1htzWBQx3p4VhBPs5iwMRxqfa2uo=
+github.com/SigNoz/signoz-otel-collector v0.88.8/go.mod h1:7I4FWwraVSnDywsPNbo8TdHDsPxShtYkGU5usr6dTtk=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index 7965b65795..9017f9326e 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
+ image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
- image: signoz/signoz-otel-collector:0.88.6
+ image: signoz/signoz-otel-collector:0.88.8
container_name: signoz-otel-collector
command:
[