From 493aef0241a4cdd0d19d9d92fbe4dd076cf2183e Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Wed, 21 Feb 2024 16:22:48 +0545 Subject: [PATCH 01/53] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?= =?UTF-8?q?sions:=20SigNoz=200.39.1,=20SigNoz=20OtelCollector=200.88.13?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++---- deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++-- deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++---- go.mod | 2 +- go.sum | 6 ++---- pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++-- 6 files changed, 15 insertions(+), 17 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index edc5ead22c..38581568dc 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.39.0 + image: signoz/query-service:0.39.1 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.39.0 + image: signoz/frontend:0.39.1 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.12 + image: signoz/signoz-otel-collector:0.88.13 command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.12 + image: signoz/signoz-schema-migrator:0.88.13 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 
61e03804f4..525fa5175d 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.12 + image: signoz/signoz-otel-collector:0.88.13 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index cb77c4c024..b0d11fbaf5 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.39.0} + image: signoz/query-service:${DOCKER_TAG:-0.39.1} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.39.0} + image: signoz/frontend:${DOCKER_TAG:-0.39.1} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.12} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.13} container_name: signoz-otel-collector command: [ diff --git a/go.mod b/go.mod index ec4d7506ff..4b24c57239 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.21.3 require ( github.com/ClickHouse/clickhouse-go/v2 v2.15.0 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.88.12 + github.com/SigNoz/signoz-otel-collector v0.88.13 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 diff --git a/go.sum b/go.sum index ae300b0f17..ced65a3169 100644 --- a/go.sum +++ b/go.sum @@ -94,14 +94,12 @@ github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migc github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= 
-github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFmafKx32bynV6QrzViL/s+ZDvQxH1E4= -github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY= github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.12 h1:UwkVi1o2NY9gRgCLBtWVKr+UDxb4FaTs63Sb20qgf8w= -github.com/SigNoz/signoz-otel-collector v0.88.12/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= +github.com/SigNoz/signoz-otel-collector v0.88.13 h1:VAVXokL28Hqxo6xyzlCrFS1na/bd1cgqFAVOe1lJjUE= +github.com/SigNoz/signoz-otel-collector v0.88.13/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 7c9b50199f..d88945b312 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: 
signoz/signoz-otel-collector:0.88.12 + image: signoz/signoz-otel-collector:0.88.13 container_name: signoz-otel-collector command: [ From aad840da59a8ec257ffc70869e735e8e5c287573 Mon Sep 17 00:00:00 2001 From: makeavish Date: Fri, 23 Feb 2024 14:08:17 +0530 Subject: [PATCH 02/53] chore: send language and service name events as list --- pkg/query-service/telemetry/telemetry.go | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go index ea93d75a0a..939d0cd39b 100644 --- a/pkg/query-service/telemetry/telemetry.go +++ b/pkg/query-service/telemetry/telemetry.go @@ -228,14 +228,20 @@ func createTelemetry() { telemetry.SendEvent(TELEMETRY_EVENT_ENVIRONMENT, map[string]interface{}{"value": tagsInfo.Env}, "") } - for language, _ := range tagsInfo.Languages { - telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": language}, "") + languages := []string{} + for language := range tagsInfo.Languages { + languages = append(languages, language) } - - for service, _ := range tagsInfo.Services { - telemetry.SendEvent(TELEMETRY_EVENT_SERVICE, map[string]interface{}{"serviceName": service}, "") + if len(languages) > 0 { + telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": languages}, "") + } + services := []string{} + for service := range tagsInfo.Services { + services = append(services, service) + } + if len(services) > 0 { + telemetry.SendEvent(TELEMETRY_EVENT_SERVICE, map[string]interface{}{"serviceName": services}, "") } - totalSpans, _ := telemetry.reader.GetTotalSpans(context.Background()) totalLogs, _ := telemetry.reader.GetTotalLogs(context.Background()) spansInLastHeartBeatInterval, _ := telemetry.reader.GetSpansInLastHeartBeatInterval(context.Background(), HEART_BEAT_DURATION) From 9af1c2320bd378a828a64670f9240d3d88c64cf0 Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Wed, 28 Feb 2024 19:13:23 +0545 
Subject: [PATCH 03/53] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?= =?UTF-8?q?sions:=20SigNoz=200.40.0,=20SigNoz=20OtelCollector=200.88.14?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++---- deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++-- deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++---- go.mod | 2 +- go.sum | 4 ++-- pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 2a2ae9faf1..ae0fbbd357 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.39.1 + image: signoz/query-service:0.40.0 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.39.1 + image: signoz/frontend:0.40.0 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.13 + image: signoz/signoz-otel-collector:0.88.14 command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.13 + image: signoz/signoz-schema-migrator:0.88.14 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 525fa5175d..303016b38a 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ 
b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.13 + image: signoz/signoz-otel-collector:0.88.14 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index ec3fb0e6ba..a0cc5c4f6b 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.39.1} + image: signoz/query-service:${DOCKER_TAG:-0.40.0} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.39.1} + image: signoz/frontend:${DOCKER_TAG:-0.40.0} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.13} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.14} container_name: signoz-otel-collector command: [ diff --git a/go.mod b/go.mod index 4b24c57239..0ca9965546 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.21.3 require ( github.com/ClickHouse/clickhouse-go/v2 v2.15.0 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.88.13 + github.com/SigNoz/signoz-otel-collector v0.88.14 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 diff --git a/go.sum b/go.sum index ced65a3169..8ba2afd692 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY= github.com/SigNoz/prometheus v1.9.78/go.mod 
h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.13 h1:VAVXokL28Hqxo6xyzlCrFS1na/bd1cgqFAVOe1lJjUE= -github.com/SigNoz/signoz-otel-collector v0.88.13/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= +github.com/SigNoz/signoz-otel-collector v0.88.14 h1:/40pH8au6M8PhUhdCXd4c+7nJ9h0VgoDaV9ERKbUtf4= +github.com/SigNoz/signoz-otel-collector v0.88.14/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 4144f81d4e..dcbd059364 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.13} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: signoz/signoz-otel-collector:0.88.13 + image: signoz/signoz-otel-collector:0.88.14 container_name: signoz-otel-collector command: [ From 6c2a3d5d43fa7ce6e9439a72a22b46bc4da47b90 Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Thu, 14 Mar 2024 00:23:51 +0545 Subject: [PATCH 04/53] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?= =?UTF-8?q?sions:=20SigNoz=200.41.0,=20SigNoz=20OtelCollector=200.88.15?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++---- 
deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++-- deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++---- go.mod | 2 +- go.sum | 4 ++-- pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 6cdb6283ea..604c2d3f67 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.40.0 + image: signoz/query-service:0.41.0 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.40.0 + image: signoz/frontend:0.41.0 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.14 + image: signoz/signoz-otel-collector:0.88.15 command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.14 + image: signoz/signoz-schema-migrator:0.88.15 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 214427318f..f595b86e64 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for 
Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.14 + image: signoz/signoz-otel-collector:0.88.15 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 8f9cbb72bc..217135c72b 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.40.0} + image: signoz/query-service:${DOCKER_TAG:-0.41.0} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.40.0} + image: signoz/frontend:${DOCKER_TAG:-0.41.0} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.14} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.15} container_name: signoz-otel-collector command: [ diff --git a/go.mod b/go.mod index a9c0d9ff3a..213be786b1 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/ClickHouse/clickhouse-go/v2 v2.20.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate 
v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.88.14 + github.com/SigNoz/signoz-otel-collector v0.88.15 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 diff --git a/go.sum b/go.sum index 9f0ad8c689..625e71e882 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.9.79-0.1 h1:RjsOw7oXVKx7IDA+/sRXW2x5pnw60/tT9MMuEz3+8DU= github.com/SigNoz/prometheus v1.9.79-0.1/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.14 h1:/40pH8au6M8PhUhdCXd4c+7nJ9h0VgoDaV9ERKbUtf4= -github.com/SigNoz/signoz-otel-collector v0.88.14/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= +github.com/SigNoz/signoz-otel-collector v0.88.15 h1:JUi9wzlj7WonPiXD4fak7yv/JMgd39sYFBGKTJIvP2Q= +github.com/SigNoz/signoz-otel-collector v0.88.15/go.mod h1:Dst94AfUCw8+w2R32FvOwTpjzL//ZaY3tIPGpyJ4iqw= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 3c6a2ad79c..679fb7f401 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14} + image: 
signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: signoz/signoz-otel-collector:0.88.14 + image: signoz/signoz-otel-collector:0.88.15 container_name: signoz-otel-collector command: [ From c04d0e9419fb8aa6d08fa2dfcc0a0826a9f2ad74 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 15 Mar 2024 01:26:31 +0530 Subject: [PATCH 05/53] Revert "Explorer Toolbar maximised and minimised (#4656)" (#4705) This reverts commit aadb962b6ccf716375f71791c4215c833f4406f5. (cherry picked from commit cf2203956211d5ba1e4b119000127aad5a2395cb) --- frontend/src/constants/localStorage.ts | 1 - .../ExplorerOptions/ExplorerOptionWrapper.tsx | 56 ----- .../ExplorerOptions.styles.scss | 4 +- .../ExplorerOptions/ExplorerOptions.tsx | 220 +++++++----------- .../ExplorerOptionsDroppableArea.styles.scss | 55 ----- .../ExplorerOptionsDroppableArea.tsx | 83 ------- .../src/container/ExplorerOptions/utils.ts | 52 ----- .../src/container/LogsExplorerViews/index.tsx | 4 +- frontend/src/pages/TracesExplorer/index.tsx | 6 +- 9 files changed, 88 insertions(+), 393 deletions(-) delete mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx delete mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss delete mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx diff --git a/frontend/src/constants/localStorage.ts b/frontend/src/constants/localStorage.ts index 0ba6cac302..296735b286 100644 --- a/frontend/src/constants/localStorage.ts +++ b/frontend/src/constants/localStorage.ts @@ -16,5 +16,4 @@ export enum LOCALSTORAGE { CHAT_SUPPORT = 'CHAT_SUPPORT', IS_IDENTIFIED_USER = 'IS_IDENTIFIED_USER', DASHBOARD_VARIABLES = 'DASHBOARD_VARIABLES', - SHOW_EXPLORER_TOOLBAR = 'SHOW_EXPLORER_TOOLBAR', } diff --git 
a/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx deleted file mode 100644 index bdb300c404..0000000000 --- a/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx +++ /dev/null @@ -1,56 +0,0 @@ -import { DndContext, DragEndEvent } from '@dnd-kit/core'; -import { useEffect, useState } from 'react'; - -import ExplorerOptions, { ExplorerOptionsProps } from './ExplorerOptions'; -import { - getExplorerToolBarVisibility, - setExplorerToolBarVisibility, -} from './utils'; - -type ExplorerOptionsWrapperProps = Omit< - ExplorerOptionsProps, - 'isExplorerOptionDrop' ->; - -function ExplorerOptionWrapper({ - disabled, - query, - isLoading, - onExport, - sourcepage, -}: ExplorerOptionsWrapperProps): JSX.Element { - const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false); - - useEffect(() => { - const toolbarVisibility = getExplorerToolBarVisibility(sourcepage); - setIsExplorerOptionHidden(!toolbarVisibility); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); - - const handleDragEnd = (event: DragEndEvent): void => { - const { active, over } = event; - if ( - over !== null && - active.id === 'explorer-options-draggable' && - over.id === 'explorer-options-droppable' - ) { - setIsExplorerOptionHidden(true); - setExplorerToolBarVisibility(false, sourcepage); - } - }; - return ( - - - - ); -} - -export default ExplorerOptionWrapper; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss index 9f4441904d..d76d18bb4f 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss @@ -3,7 +3,7 @@ } .explorer-update { position: fixed; - bottom: 24px; + bottom: 16px; left: calc(50% - 225px); display: flex; align-items: center; @@ -47,7 +47,7 @@ box-shadow: 4px 4px 16px 4px 
rgba(0, 0, 0, 0.25); backdrop-filter: blur(20px); position: fixed; - bottom: 24px; + bottom: 16px; left: calc(50% + 240px); transform: translate(calc(-50% - 120px), 0); transition: left 0.2s linear; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx index ab26e03abc..8322c694d6 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx @@ -1,7 +1,5 @@ -/* eslint-disable react/jsx-props-no-spreading */ import './ExplorerOptions.styles.scss'; -import { useDraggable } from '@dnd-kit/core'; import { Color } from '@signozhq/design-tokens'; import { Button, @@ -33,15 +31,7 @@ import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; import { useNotifications } from 'hooks/useNotifications'; import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery'; import { Check, ConciergeBell, Disc3, Plus, X, XCircle } from 'lucide-react'; -import { - CSSProperties, - Dispatch, - SetStateAction, - useCallback, - useMemo, - useRef, - useState, -} from 'react'; +import { CSSProperties, useCallback, useMemo, useRef, useState } from 'react'; import { useSelector } from 'react-redux'; import { useHistory } from 'react-router-dom'; import { AppState } from 'store/reducers'; @@ -51,7 +41,6 @@ import { DataSource } from 'types/common/queryBuilder'; import AppReducer from 'types/reducer/app'; import { USER_ROLES } from 'types/roles'; -import ExplorerOptionsDroppableArea from './ExplorerOptionsDroppableArea'; import { DATASOURCE_VS_ROUTES, generateRGBAFromHex, @@ -68,8 +57,6 @@ function ExplorerOptions({ onExport, query, sourcepage, - isExplorerOptionHidden = false, - setIsExplorerOptionHidden, }: ExplorerOptionsProps): JSX.Element { const [isExport, setIsExport] = useState(false); const [isSaveModalOpen, setIsSaveModalOpen] = useState(false); @@ -79,7 +66,6 @@ 
function ExplorerOptions({ const history = useHistory(); const ref = useRef(null); const isDarkMode = useIsDarkMode(); - const [isDragEnabled, setIsDragEnabled] = useState(false); const onModalToggle = useCallback((value: boolean) => { setIsExport(value); @@ -271,31 +257,11 @@ function ExplorerOptions({ [isDarkMode], ); - const { - attributes, - listeners, - setNodeRef, - transform, - isDragging, - } = useDraggable({ - id: 'explorer-options-draggable', - disabled: isDragEnabled, - }); - const isEditDeleteSupported = allowedRoles.includes(role as string); - const style: React.CSSProperties | undefined = transform - ? { - transform: `translate3d(${transform.x - 338}px, ${transform.y}px, 0)`, - width: `${400 - transform.y * 6}px`, - maxWidth: '440px', // initial width of the explorer options - overflow: 'hidden', - } - : undefined; - return ( <> - {isQueryUpdated && !isExplorerOptionHidden && !isDragging && ( + {isQueryUpdated && (
)} - {!isExplorerOptionHidden && ( -
-
- - showSearch - placeholder="Select a view" - loading={viewsIsLoading || isRefetching} - value={viewName || undefined} - onSelect={handleSelect} - style={{ - minWidth: 170, - }} - dropdownStyle={dropdownStyle} - className="views-dropdown" - allowClear={{ - clearIcon: , - }} - onDropdownVisibleChange={(open): void => { - setIsDragEnabled(open); - }} - onClear={handleClearSelect} - ref={ref} - > - {viewsData?.data?.data?.map((view) => { - const extraData = - view.extraData !== '' ? JSON.parse(view.extraData) : ''; - let bgColor = getRandomColor(); - if (extraData !== '') { - bgColor = extraData.color; - } - return ( - -
- {' '} - {view.name} -
-
- ); - })} - +
+
+ + showSearch + placeholder="Select a view" + loading={viewsIsLoading || isRefetching} + value={viewName || undefined} + onSelect={handleSelect} + style={{ + minWidth: 170, + }} + dropdownStyle={dropdownStyle} + className="views-dropdown" + allowClear={{ + clearIcon: , + }} + onClear={handleClearSelect} + ref={ref} + > + {viewsData?.data?.data?.map((view) => { + const extraData = + view.extraData !== '' ? JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + return ( + +
+ {' '} + {view.name} +
+
+ ); + })} + - -
- -
- -
- - - - - - - -
+
- )} - +
+ +
+ + + + + + + +
+
>; } -ExplorerOptions.defaultProps = { - isLoading: false, - isExplorerOptionHidden: false, - setIsExplorerOptionHidden: undefined, -}; +ExplorerOptions.defaultProps = { isLoading: false }; export default ExplorerOptions; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss deleted file mode 100644 index e092229bb9..0000000000 --- a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss +++ /dev/null @@ -1,55 +0,0 @@ -.explorer-option-droppable-container { - position: fixed; - bottom: 0; - width: -webkit-fill-available; - height: 24px; - display: flex; - justify-content: center; - border-radius: 10px 10px 0px 0px; - // box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); - // backdrop-filter: blur(20px); - - .explorer-actions-btn { - display: flex; - gap: 8px; - margin-right: 8px; - - .action-btn { - display: flex; - justify-content: center; - align-items: center; - border-radius: 10px 10px 0px 0px; - box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); - backdrop-filter: blur(20px); - height: 24px !important; - border: none; - } - } - - .explorer-show-btn { - border-radius: 10px 10px 0px 0px; - border: 1px solid var(--bg-slate-400); - background: rgba(22, 24, 29, 0.40); - box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); - backdrop-filter: blur(20px); - align-self: center; - padding: 8px 12px; - height: 24px !important; - - .menu-bar { - border-radius: 50px; - background: var(--bg-slate-200); - height: 4px; - width: 50px; - } - } -} - -.lightMode { - .explorer-option-droppable-container { - - .explorer-show-btn { - background: var(--bg-vanilla-400); - } - } -} \ No newline at end of file diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx deleted file mode 100644 index 33bef7c984..0000000000 --- 
a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx +++ /dev/null @@ -1,83 +0,0 @@ -/* eslint-disable no-nested-ternary */ -import './ExplorerOptionsDroppableArea.styles.scss'; - -import { useDroppable } from '@dnd-kit/core'; -import { Color } from '@signozhq/design-tokens'; -import { Button, Tooltip } from 'antd'; -import { Disc3, X } from 'lucide-react'; -import { Dispatch, SetStateAction } from 'react'; -import { DataSource } from 'types/common/queryBuilder'; - -import { setExplorerToolBarVisibility } from './utils'; - -interface DroppableAreaProps { - isQueryUpdated: boolean; - isExplorerOptionHidden?: boolean; - sourcepage: DataSource; - setIsExplorerOptionHidden?: Dispatch>; - handleClearSelect: () => void; - onUpdateQueryHandler: () => void; -} - -function ExplorerOptionsDroppableArea({ - isQueryUpdated, - isExplorerOptionHidden, - sourcepage, - setIsExplorerOptionHidden, - handleClearSelect, - onUpdateQueryHandler, -}: DroppableAreaProps): JSX.Element { - const { setNodeRef } = useDroppable({ - id: 'explorer-options-droppable', - }); - - const handleShowExplorerOption = (): void => { - if (setIsExplorerOptionHidden) { - setIsExplorerOptionHidden(false); - setExplorerToolBarVisibility(true, sourcepage); - } - }; - - return ( -
- {isExplorerOptionHidden && ( - <> - {isQueryUpdated && ( -
- -
- )} - - - )} -
- ); -} - -ExplorerOptionsDroppableArea.defaultProps = { - isExplorerOptionHidden: undefined, - setIsExplorerOptionHidden: undefined, -}; - -export default ExplorerOptionsDroppableArea; diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts index d94e64161e..e3ac710609 100644 --- a/frontend/src/container/ExplorerOptions/utils.ts +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -1,6 +1,5 @@ import { Color } from '@signozhq/design-tokens'; import { showErrorNotification } from 'components/ExplorerCard/utils'; -import { LOCALSTORAGE } from 'constants/localStorage'; import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; @@ -68,54 +67,3 @@ export const generateRGBAFromHex = (hex: string, opacity: number): string => hex.slice(3, 5), 16, )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; - -export const getExplorerToolBarVisibility = (dataSource: string): boolean => { - try { - const showExplorerToolbar = localStorage.getItem( - LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, - ); - if (showExplorerToolbar === null) { - const parsedShowExplorerToolbar: { - [DataSource.LOGS]: boolean; - [DataSource.TRACES]: boolean; - [DataSource.METRICS]: boolean; - } = { - [DataSource.METRICS]: true, - [DataSource.TRACES]: true, - [DataSource.LOGS]: true, - }; - localStorage.setItem( - LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, - JSON.stringify(parsedShowExplorerToolbar), - ); - return true; - } - const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar || '{}'); - return parsedShowExplorerToolbar[dataSource]; - } catch (error) { - console.error(error); - return false; - } -}; - -export const setExplorerToolBarVisibility = ( - value: boolean, - dataSource: string, -): void => { - try { - const showExplorerToolbar = localStorage.getItem( - LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, - ); - if (showExplorerToolbar) { 
- const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar); - parsedShowExplorerToolbar[dataSource] = value; - localStorage.setItem( - LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, - JSON.stringify(parsedShowExplorerToolbar), - ); - return; - } - } catch (error) { - console.error(error); - } -}; diff --git a/frontend/src/container/LogsExplorerViews/index.tsx b/frontend/src/container/LogsExplorerViews/index.tsx index 45b33d01af..a12fd80997 100644 --- a/frontend/src/container/LogsExplorerViews/index.tsx +++ b/frontend/src/container/LogsExplorerViews/index.tsx @@ -14,7 +14,7 @@ import { PANEL_TYPES, } from 'constants/queryBuilder'; import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config'; -import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper'; +import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions'; import GoToTop from 'container/GoToTop'; import LogsExplorerChart from 'container/LogsExplorerChart'; import LogsExplorerList from 'container/LogsExplorerList'; @@ -634,7 +634,7 @@ function LogsExplorerViews({ - - From 7c062163a10e8ca9ba7734657c6f327fa9ad772a Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Fri, 15 Mar 2024 02:24:09 +0545 Subject: [PATCH 06/53] =?UTF-8?q?chore(release):=20=F0=9F=93=8C=20pin=20ve?= =?UTF-8?q?rsions:=20SigNoz=200.41.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 4 ++-- deploy/docker/clickhouse-setup/docker-compose.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 604c2d3f67..6c1bb497c6 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: 
signoz/query-service:0.41.0 + image: signoz/query-service:0.41.1 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.41.0 + image: signoz/frontend:0.41.1 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 217135c72b..43e6eccb19 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.41.0} + image: signoz/query-service:${DOCKER_TAG:-0.41.1} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.41.0} + image: signoz/frontend:${DOCKER_TAG:-0.41.1} container_name: signoz-frontend restart: on-failure depends_on: From 5a8479f4e99cfd733dddc8c3fcaa449405045959 Mon Sep 17 00:00:00 2001 From: CheetoDa Date: Wed, 13 Mar 2024 10:32:26 +0530 Subject: [PATCH 07/53] feat:php flow --- .../php-kubernetes-installOtelCollector.md | 24 +++++ .../php-kubernetes-instrumentApplication.md | 64 +++++++++++++ .../php-kubernetes-runApplication.md | 16 ++++ ...xamd64-quickStart-instrumentApplication.md | 60 ++++++++++++ ...hp-linuxamd64-quickStart-runApplication.md | 16 ++++ ...xamd64-recommended-installOtelCollector.md | 96 +++++++++++++++++++ ...amd64-recommended-instrumentApplication.md | 62 ++++++++++++ ...p-linuxamd64-recommended-runApplication.md | 41 ++++++++ ...xarm64-quickStart-instrumentApplication.md | 60 ++++++++++++ ...hp-linuxarm64-quickStart-runApplication.md | 16 ++++ ...xarm64-recommended-installOtelCollector.md | 96 +++++++++++++++++++ 
...arm64-recommended-instrumentApplication.md | 62 ++++++++++++ ...p-linuxarm64-recommended-runApplication.md | 41 ++++++++ ...samd64-quickStart-instrumentApplication.md | 60 ++++++++++++ ...hp-macosamd64-quickStart-runApplication.md | 16 ++++ ...samd64-recommended-installOtelCollector.md | 96 +++++++++++++++++++ ...amd64-recommended-instrumentApplication.md | 62 ++++++++++++ ...p-macosamd64-recommended-runApplication.md | 41 ++++++++ ...sarm64-quickStart-instrumentApplication.md | 60 ++++++++++++ ...hp-macosarm64-quickStart-runApplication.md | 16 ++++ ...sarm64-recommended-installOtelCollector.md | 96 +++++++++++++++++++ ...arm64-recommended-instrumentApplication.md | 62 ++++++++++++ ...p-macosarm64-recommended-runApplication.md | 41 ++++++++ .../constants/apmDocFilePaths.ts | 65 ++++++++++++- .../utils/dataSourceUtils.ts | 11 ++- frontend/src/utils/app.ts | 2 + 26 files changed, 1279 insertions(+), 3 deletions(-) create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md create mode 100644 
frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md create mode 100644 
frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md create mode 100644 frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md new file mode 100644 index 0000000000..946b7fbdbf --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md @@ -0,0 +1,24 @@ +## Install otel-collector in your Kubernetes infra +  + +Add the SigNoz Helm Chart repository +```bash +helm repo add signoz https://charts.signoz.io +``` +  + +If the chart is already present, update the chart to the latest using: +```bash +helm repo update +``` +  + +Install the Kubernetes Infrastructure chart provided by SigNoz +```bash +helm install my-release signoz/k8s-infra \ +--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \ +--set otelInsecure=false \ +--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \ +--set global.clusterName= +``` +- Replace `` with the name of the Kubernetes cluster or a unique identifier of the cluster. 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md new file mode 100644 index 0000000000..c9138e8a5c --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md @@ -0,0 +1,64 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your PHP Application + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` + + diff --git 
a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md new file mode 100644 index 0000000000..9fa8f823e2 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md @@ -0,0 +1,16 @@ +### Set environment variables and run app + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME={MYAPP} \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT= \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Endpoint at which the collector is running. Ex. -> `http://localhost:4317` +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..2b28b1ab8b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md @@ -0,0 +1,60 @@ +  + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..587a1b4373 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md @@ -0,0 +1,16 @@ +### Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME={{MYAPP}} \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \ + 
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Run command for your PHP application diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..a659f36474 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # 
Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a59e7cd63e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your PHP Application + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md new file mode 100644 index 0000000000..f69dd3b393 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md @@ -0,0 +1,41 @@ +  + +Once you are done instrumenting your PHP application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME= \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT= \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Endpoint at which the collector is running. Ex. -> `http://localhost:4317` +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..2b28b1ab8b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md @@ -0,0 +1,60 @@ +  + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..587a1b4373 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md @@ -0,0 +1,16 @@ +### Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME={{MYAPP}} \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \ + 
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Run command for your PHP application diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..cbabb8077b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # 
Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a59e7cd63e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your PHP Application + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md new file mode 100644 index 0000000000..a11e47198f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md @@ -0,0 +1,41 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME= \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT= \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Endpoint at which the collector is running. Ex. -> `http://localhost:4317` +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..2b28b1ab8b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md @@ -0,0 +1,60 @@ +  + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..7b61210f9d --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md @@ -0,0 +1,16 @@ +### Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME={{MYAPP}} \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \ + 
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..843e86a411 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +### Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: 
+ detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a59e7cd63e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your PHP Application + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md new file mode 100644 index 0000000000..a11e47198f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md @@ -0,0 +1,41 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME= \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT= \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Endpoint at which the collector is running. Ex. -> `http://localhost:4317` +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..2b28b1ab8b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md @@ -0,0 +1,60 @@ +  + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..587a1b4373 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md @@ -0,0 +1,16 @@ +### Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME={{MYAPP}} \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \ + 
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Run command for your PHP application diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..3a780bb8de --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # 
Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a59e7cd63e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your PHP Application + +### Step 1: Setup Development Environment +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +To configure our PHP application to send data, you need to use OpenTelemetry PHP extension. 
Since the extension is built from the source, you need to have the build tools, which can be installed using the following command: + +**Linux**: +```bash +sudo apt-get install gcc make autoconf +``` + +**MacOs(Homebrew)**: +```bash +brew install gcc make autoconf +``` + +  + +### Step 2: Build the extension + +With our environment set up we can install the extension using [PECL](https://pecl.php.net/): + +```bash +pecl install opentelemetry +``` + +After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project: + +```bash +[opentelemetry] +extension=opentelemetry.so +``` + +Verify that the extension is enabled by running: + +```bash +php -m | grep opentelemetry +``` + +Running the above command will **output**: + +```bash +opentelemetry +``` + +  + +### Step 3: Add the dependencies + +Add dependencies required to perform automatic instrumentation using this command : + +```bash +composer config allow-plugins.php-http/discovery false +composer require \ + open-telemetry/sdk \ + open-telemetry/exporter-otlp \ + php-http/guzzle7-adapter \ + open-telemetry/transport-grpc +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md new file mode 100644 index 0000000000..a11e47198f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md @@ -0,0 +1,41 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your PHP application + +We will pass environment variables at the runtime: + +```bash +env OTEL_PHP_AUTOLOAD_ENABLED=true \ + OTEL_SERVICE_NAME= \ + OTEL_TRACES_EXPORTER=otlp \ + OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \ + OTEL_EXPORTER_OTLP_ENDPOINT= \ + OTEL_PROPAGATORS=baggage,tracecontext \ + +``` + +- - Endpoint at which the collector is running. Ex. -> `http://localhost:4317` +- - Run command for your PHP application \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts index 7bf505f30d..485a33382c 100644 --- a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts +++ b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts @@ -403,6 +403,38 @@ import APM_javascript_reactjs_macOsARM64_quickStart_runApplication from '../Modu import APM_javascript_reactjs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-installOtelCollector.md'; import APM_javascript_reactjs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-instrumentApplication.md'; import APM_javascript_reactjs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-runApplication.md'; +// PHP-Kubernetes +import APM_php_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md'; +import APM_php_kubernetes_recommendedSteps_instrumentApplication from 
'../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md'; +import APM_php_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md'; +// PHP-LinuxAMD64-quickstart +import APM_php_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md'; +import APM_php_linuxAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md'; +// PHP-LinuxAMD64-recommended +import APM_php_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md'; +import APM_php_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md'; +import APM_php_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md'; +// PHP-LinuxARM64-quickstart +import APM_php_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md'; +import APM_php_linuxARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md'; +// PHP-LinuxARM64-recommended +import APM_php_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md'; +import APM_php_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md'; +import APM_php_linuxARM64_recommendedSteps_runApplication from 
'../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md'; +// PHP-MacOsAMD64-quickstart +import APM_php_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md'; +import APM_php_macOsAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md'; +// PHP-MacOsAMD64-recommended +import APM_php_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md'; +import APM_php_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md'; +import APM_php_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md'; +// PHP-MacOsARM64-quickstart +import APM_php_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md'; +import APM_php_macOsARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md'; +// PHP-MacOsARM64-recommended +import APM_php_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md'; +import APM_php_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md'; +import APM_php_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md'; /// ////// Javascript Done /// ///// Python Start // Django @@ -575,7 +607,6 @@ import 
APM_python_other_macOsARM64_recommendedSteps_setupOtelCollector from '../ import APM_python_other_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-instrumentApplication.md'; import APM_python_other_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-runApplication.md'; // ---------------------------------------------------------------------------- -/// ////// Go Done /// ///// ROR Start // ROR-Kubernetes import APM_rails_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/RubyOnRails/md-docs/Kubernetes/ror-kubernetes-installOtelCollector.md'; @@ -1546,4 +1577,36 @@ export const ApmDocFilePaths = { APM_swift_macOsARM64_recommendedSteps_setupOtelCollector, APM_swift_macOsARM64_recommendedSteps_instrumentApplication, APM_swift_macOsARM64_recommendedSteps_runApplication, + + APM_php_kubernetes_recommendedSteps_setupOtelCollector, + APM_php_kubernetes_recommendedSteps_instrumentApplication, + APM_php_kubernetes_recommendedSteps_runApplication, + + APM_php_linuxAMD64_quickStart_instrumentApplication, + APM_php_linuxAMD64_quickStart_runApplication, + + APM_php_linuxAMD64_recommendedSteps_setupOtelCollector, + APM_php_linuxAMD64_recommendedSteps_instrumentApplication, + APM_php_linuxAMD64_recommendedSteps_runApplication, + + APM_php_linuxARM64_quickStart_instrumentApplication, + APM_php_linuxARM64_quickStart_runApplication, + + APM_php_linuxARM64_recommendedSteps_setupOtelCollector, + APM_php_linuxARM64_recommendedSteps_instrumentApplication, + APM_php_linuxARM64_recommendedSteps_runApplication, + + APM_php_macOsAMD64_quickStart_instrumentApplication, + APM_php_macOsAMD64_quickStart_runApplication, + + APM_php_macOsAMD64_recommendedSteps_setupOtelCollector, + APM_php_macOsAMD64_recommendedSteps_instrumentApplication, + APM_php_macOsAMD64_recommendedSteps_runApplication, + + 
APM_php_macOsARM64_quickStart_instrumentApplication, + APM_php_macOsARM64_quickStart_runApplication, + + APM_php_macOsARM64_recommendedSteps_setupOtelCollector, + APM_php_macOsARM64_recommendedSteps_instrumentApplication, + APM_php_macOsARM64_recommendedSteps_runApplication, }; diff --git a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts index 77f1210858..517cc38171 100644 --- a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts +++ b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts @@ -132,6 +132,11 @@ const supportedLanguages = [ id: 'swift', imgURL: `/Logos/swift.png`, }, + { + name: 'php', + id: 'php', + imgURL: `/Logos/php.png`, + }, ]; export const defaultLogsType = { @@ -293,7 +298,8 @@ export const getSupportedFrameworks = ({ (moduleID === ModulesMap.APM && dataSourceName === '.NET') || (moduleID === ModulesMap.APM && dataSourceName === 'rust') || (moduleID === ModulesMap.APM && dataSourceName === 'elixir') || - (moduleID === ModulesMap.APM && dataSourceName === 'swift') + (moduleID === ModulesMap.APM && dataSourceName === 'swift') || + (moduleID === ModulesMap.APM && dataSourceName === 'php') ) { return []; } @@ -322,7 +328,8 @@ export const hasFrameworks = ({ (moduleID === ModulesMap.APM && dataSourceName === '.NET') || (moduleID === ModulesMap.APM && dataSourceName === 'rust') || (moduleID === ModulesMap.APM && dataSourceName === 'elixir') || - (moduleID === ModulesMap.APM && dataSourceName === 'swift') + (moduleID === ModulesMap.APM && dataSourceName === 'swift') || + (moduleID === ModulesMap.APM && dataSourceName === 'php') ) { return false; } diff --git a/frontend/src/utils/app.ts b/frontend/src/utils/app.ts index 0ab9e6fca7..d0b859d108 100644 --- a/frontend/src/utils/app.ts +++ b/frontend/src/utils/app.ts @@ -15,6 +15,8 @@ export function extractDomain(email: string): string { export const isCloudUser = (): boolean => { const 
{ hostname } = window.location; + return true; + return hostname?.endsWith('signoz.cloud'); }; From ad9d77d33f953804e25c8b90e4bda4f070709d6f Mon Sep 17 00:00:00 2001 From: CheetoDa Date: Wed, 13 Mar 2024 10:40:32 +0530 Subject: [PATCH 08/53] feat:php flow --- frontend/src/utils/app.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/src/utils/app.ts b/frontend/src/utils/app.ts index d0b859d108..0ab9e6fca7 100644 --- a/frontend/src/utils/app.ts +++ b/frontend/src/utils/app.ts @@ -15,8 +15,6 @@ export function extractDomain(email: string): string { export const isCloudUser = (): boolean => { const { hostname } = window.location; - return true; - return hostname?.endsWith('signoz.cloud'); }; From 63f0ae1c7c9b1879dacb9f359cf73fe9f2e6d625 Mon Sep 17 00:00:00 2001 From: Vishal Sharma Date: Wed, 20 Mar 2024 19:59:28 +0530 Subject: [PATCH 09/53] chore: update events (#4725) * chore: update events * chore: disable TELEMETRY_EVENT_QUERY_RANGE_API for saas * chore: don't use mustCompile as it can cause panics --- ee/query-service/app/server.go | 17 +++--- .../app/clickhouseReader/reader.go | 19 +++++++ pkg/query-service/app/http_handler.go | 42 ++++++++++++++- pkg/query-service/app/server.go | 17 +++--- pkg/query-service/interfaces/interface.go | 1 + pkg/query-service/model/response.go | 6 +++ pkg/query-service/telemetry/telemetry.go | 52 +++++++++++-------- 7 files changed, 117 insertions(+), 37 deletions(-) diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 469632ac7f..11ef8dffe0 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -419,30 +419,33 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface signozMetricsUsed := false signozLogsUsed := false - dataSources := []string{} + signozTracesUsed := false if postData != nil { if postData.CompositeQuery != nil { data["queryType"] = postData.CompositeQuery.QueryType data["panelType"] = postData.CompositeQuery.PanelType - 
signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData) + signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData) } } - if signozMetricsUsed || signozLogsUsed { + if signozMetricsUsed || signozLogsUsed || signozTracesUsed { if signozMetricsUsed { - dataSources = append(dataSources, "metrics") telemetry.GetInstance().AddActiveMetricsUser() } if signozLogsUsed { - dataSources = append(dataSources, "logs") telemetry.GetInstance().AddActiveLogsUser() } - data["dataSources"] = dataSources + if signozTracesUsed { + telemetry.GetInstance().AddActiveTracesUser() + } + data["metricsUsed"] = signozMetricsUsed + data["logsUsed"] = signozLogsUsed + data["tracesUsed"] = signozTracesUsed userEmail, err := baseauth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail) } } return data, true diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 56a901a7fb..00f3ca1ba6 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -44,6 +44,7 @@ import ( "go.uber.org/zap" "go.signoz.io/signoz/pkg/query-service/app/dashboards" + "go.signoz.io/signoz/pkg/query-service/app/explorer" "go.signoz.io/signoz/pkg/query-service/app/logs" "go.signoz.io/signoz/pkg/query-service/app/services" "go.signoz.io/signoz/pkg/query-service/auth" @@ -3623,6 +3624,24 @@ func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo return &alertsInfo, nil } +func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) { + savedViewsInfo := model.SavedViewsInfo{} + savedViews, err := explorer.GetViews() + if err != nil { + zap.S().Debug("Error in fetching saved 
views info: ", err) + return &savedViewsInfo, err + } + savedViewsInfo.TotalSavedViews = len(savedViews) + for _, view := range savedViews { + if view.SourcePage == "traces" { + savedViewsInfo.TracesSavedViews += 1 + } else if view.SourcePage == "logs" { + savedViewsInfo.LogsSavedViews += 1 + } + } + return &savedViewsInfo, nil +} + func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) { // response will contain top level fields from the otel log model response := model.GetFieldsResponse{ diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 676abc1070..16b741e572 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -3479,11 +3479,11 @@ func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParam dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) if err != nil { - zap.S().Errorf("error while matching the referrer: %v", err) + zap.S().Errorf("error while matching the dashboard: %v", err) } alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) if err != nil { - zap.S().Errorf("error while matching the referrer: %v", err) + zap.S().Errorf("error while matching the alert: %v", err) } if alertMatched || dashboardMatched { @@ -3494,22 +3494,60 @@ func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParam if err == nil { signozLogsUsed, signozMetricsUsed, signozTracesUsed := telemetry.GetInstance().CheckSigNozSignals(queryRangeParams) if signozLogsUsed || signozMetricsUsed || signozTracesUsed { + if dashboardMatched { + var dashboardID, widgetID string + var dashboardIDMatch, widgetIDMatch []string + dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`) + if err == nil { + dashboardIDMatch = dashboardIDRegex.FindStringSubmatch(referrer) + } else { + zap.S().Errorf("error while matching the 
dashboardIDRegex: %v", err) + } + widgetIDRegex, err := regexp.Compile(`widgetId=([a-f0-9\-]+)`) + if err == nil { + widgetIDMatch = widgetIDRegex.FindStringSubmatch(referrer) + } else { + zap.S().Errorf("error while matching the widgetIDRegex: %v", err) + } + + if len(dashboardIDMatch) > 1 { + dashboardID = dashboardIDMatch[1] + } + + if len(widgetIDMatch) > 1 { + widgetID = widgetIDMatch[1] + } telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY, map[string]interface{}{ "queryType": queryRangeParams.CompositeQuery.QueryType, "panelType": queryRangeParams.CompositeQuery.PanelType, "tracesUsed": signozTracesUsed, "logsUsed": signozLogsUsed, "metricsUsed": signozMetricsUsed, + "dashboardId": dashboardID, + "widgetId": widgetID, }, userEmail) } if alertMatched { + var alertID string + var alertIDMatch []string + alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`) + if err != nil { + zap.S().Errorf("error while matching the alertIDRegex: %v", err) + } else { + alertIDMatch = alertIDRegex.FindStringSubmatch(referrer) + } + + if len(alertIDMatch) > 1 { + alertID = alertIDMatch[1] + } telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY, map[string]interface{}{ "queryType": queryRangeParams.CompositeQuery.QueryType, "panelType": queryRangeParams.CompositeQuery.PanelType, "tracesUsed": signozTracesUsed, "logsUsed": signozLogsUsed, "metricsUsed": signozMetricsUsed, + "alertId": alertID, }, userEmail) } } diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index e9c80c2507..81ef4e9c13 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -426,30 +426,33 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface signozMetricsUsed := false signozLogsUsed := false - dataSources := []string{} + signozTracesUsed := false if postData != nil { if postData.CompositeQuery != nil { data["queryType"] = 
postData.CompositeQuery.QueryType data["panelType"] = postData.CompositeQuery.PanelType - signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData) + signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData) } } - if signozMetricsUsed || signozLogsUsed { + if signozMetricsUsed || signozLogsUsed || signozTracesUsed { if signozMetricsUsed { - dataSources = append(dataSources, "metrics") telemetry.GetInstance().AddActiveMetricsUser() } if signozLogsUsed { - dataSources = append(dataSources, "logs") telemetry.GetInstance().AddActiveLogsUser() } - data["dataSources"] = dataSources + if signozTracesUsed { + telemetry.GetInstance().AddActiveTracesUser() + } + data["metricsUsed"] = signozMetricsUsed + data["logsUsed"] = signozLogsUsed + data["tracesUsed"] = signozTracesUsed userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail) } } return data, true diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go index eefb10a0c0..dfe24c9064 100644 --- a/pkg/query-service/interfaces/interface.go +++ b/pkg/query-service/interfaces/interface.go @@ -77,6 +77,7 @@ type Reader interface { GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) + GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) GetTotalSpans(ctx context.Context) (uint64, error) GetTotalLogs(ctx context.Context) (uint64, error) GetTotalSamples(ctx context.Context) (uint64, error) diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go index aad137714c..a8e09b9d6e 100644 --- a/pkg/query-service/model/response.go +++ b/pkg/query-service/model/response.go @@ -634,6 
+634,12 @@ type AlertsInfo struct { TracesBasedAlerts int `json:"tracesBasedAlerts"` } +type SavedViewsInfo struct { + TotalSavedViews int `json:"totalSavedViews"` + TracesSavedViews int `json:"tracesSavedViews"` + LogsSavedViews int `json:"logsSavedViews"` +} + type DashboardsInfo struct { TotalDashboards int `json:"totalDashboards"` TotalDashboardsWithPanelAndName int `json:"totalDashboardsWithPanelAndName"` // dashboards with panel and name without sample title diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go index ff2ed9aa1a..9202a32168 100644 --- a/pkg/query-service/telemetry/telemetry.go +++ b/pkg/query-service/telemetry/telemetry.go @@ -38,7 +38,7 @@ const ( TELEMETRY_EVENT_SERVICE = "ServiceName" TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters" TELEMETRY_EVENT_DISTRIBUTED = "Distributed" - TELEMETRY_EVENT_QUERY_RANGE_V3 = "Query Range V3 Metadata" + TELEMETRY_EVENT_QUERY_RANGE_API = "Query Range API" TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info" TELEMETRY_EVENT_ACTIVE_USER = "Active User" TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2" @@ -61,6 +61,7 @@ var SAAS_EVENTS_LIST = map[string]struct{}{ TELEMETRY_EVENT_DASHBOARDS_ALERTS: {}, TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY: {}, TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY: {}, + // TELEMETRY_EVENT_QUERY_RANGE_API: {}, // this event is not part of SAAS_EVENTS_LIST as it may cause too many events to be sent } const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz" @@ -282,30 +283,39 @@ func createTelemetry() { telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "") alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background()) - if err != nil { - telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "") - } else { + if err == nil { dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background()) if err == nil { - dashboardsAlertsData := map[string]interface{}{ - 
"totalDashboards": dashboardsInfo.TotalDashboards, - "totalDashboardsWithPanelAndName": dashboardsInfo.TotalDashboardsWithPanelAndName, - "logsBasedPanels": dashboardsInfo.LogsBasedPanels, - "metricBasedPanels": dashboardsInfo.MetricBasedPanels, - "tracesBasedPanels": dashboardsInfo.TracesBasedPanels, - "totalAlerts": alertsInfo.TotalAlerts, - "logsBasedAlerts": alertsInfo.LogsBasedAlerts, - "metricBasedAlerts": alertsInfo.MetricBasedAlerts, - "tracesBasedAlerts": alertsInfo.TracesBasedAlerts, + channels, err := telemetry.reader.GetChannels() + if err == nil { + savedViewsInfo, err := telemetry.reader.GetSavedViewsInfo(context.Background()) + if err == nil { + dashboardsAlertsData := map[string]interface{}{ + "totalDashboards": dashboardsInfo.TotalDashboards, + "totalDashboardsWithPanelAndName": dashboardsInfo.TotalDashboardsWithPanelAndName, + "logsBasedPanels": dashboardsInfo.LogsBasedPanels, + "metricBasedPanels": dashboardsInfo.MetricBasedPanels, + "tracesBasedPanels": dashboardsInfo.TracesBasedPanels, + "totalAlerts": alertsInfo.TotalAlerts, + "logsBasedAlerts": alertsInfo.LogsBasedAlerts, + "metricBasedAlerts": alertsInfo.MetricBasedAlerts, + "tracesBasedAlerts": alertsInfo.TracesBasedAlerts, + "totalChannels": len(*channels), + "totalSavedViews": savedViewsInfo.TotalSavedViews, + "logsSavedViews": savedViewsInfo.LogsSavedViews, + "tracesSavedViews": savedViewsInfo.TracesSavedViews, + } + // send event only if there are dashboards or alerts or channels + if dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0 { + telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "") + } + } } - // send event only if there are dashboards or alerts - if dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 { - telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "") - } - } else { - telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, 
map[string]interface{}{"error": err.Error()}, "") } } + if err != nil { + telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "") + } getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background()) telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval, "") @@ -419,7 +429,7 @@ func (a *Telemetry) checkEvents(event string) bool { func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEmail string, opts ...bool) { // ignore telemetry for default user - if userEmail == DEFAULT_CLOUD_EMAIL { + if userEmail == DEFAULT_CLOUD_EMAIL || a.GetUserEmail() == DEFAULT_CLOUD_EMAIL { return } From 0df86454ce42796ec9b1015aa0fadd6141f254a1 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Thu, 21 Mar 2024 16:31:59 +0530 Subject: [PATCH 10/53] fix: [SIG-567]: prevented stage-&-run API on legend change (#4720) * fix: prevented stage-&-run API on legend change * fix: code refactor --------- Co-authored-by: Sagar Rajput --- .../LeftContainer/WidgetGraph/WidgetGraphs.tsx | 2 ++ .../src/lib/uPlotLib/getUplotChartOptions.ts | 4 ++++ .../src/lib/uPlotLib/utils/getSeriesData.ts | 17 +++++++++++------ 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx index 647b746c2d..aa7553af53 100644 --- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx @@ -133,6 +133,7 @@ function WidgetGraph({ softMax, softMin, panelType: selectedGraph, + currentQuery, }), [ widgetId, @@ -148,6 +149,7 @@ function WidgetGraph({ softMax, softMin, selectedGraph, + currentQuery, ], ); diff --git 
a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts index 50f6c5fbc4..0b281506f6 100644 --- a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts +++ b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts @@ -12,6 +12,7 @@ import { Dimensions } from 'hooks/useDimensions'; import { convertValue } from 'lib/getConvertedValue'; import _noop from 'lodash-es/noop'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; import uPlot from 'uplot'; import onClickPlugin, { OnClickPluginOpts } from './plugins/onClickPlugin'; @@ -40,6 +41,7 @@ export interface GetUPlotChartOptions { maxTimeScale?: number; softMin: number | null; softMax: number | null; + currentQuery?: Query; } export const getUPlotChartOptions = ({ @@ -59,6 +61,7 @@ export const getUPlotChartOptions = ({ softMax, softMin, panelType, + currentQuery, }: GetUPlotChartOptions): uPlot.Options => { const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale); @@ -223,6 +226,7 @@ export const getUPlotChartOptions = ({ widgetMetaData: apiResponse?.data.result, graphsVisibilityStates, panelType, + currentQuery, }), axes: getAxes(isDarkMode, yAxisUnit), }; diff --git a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts index cf60a632cb..574b8dc1de 100644 --- a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts +++ b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts @@ -3,6 +3,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import { themeColors } from 'constants/theme'; import getLabelName from 'lib/getLabelName'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { QueryData } from 'types/api/widgets/getQuery'; import { drawStyles, lineInterpolations } from './constants'; @@ -31,6 +32,7 @@ const getSeries = ({ widgetMetaData, 
graphsVisibilityStates, panelType, + currentQuery, }: GetSeriesProps): uPlot.Options['series'] => { const configurations: uPlot.Series[] = [ { label: 'Timestamp', stroke: 'purple' }, @@ -40,13 +42,15 @@ const getSeries = ({ const newGraphVisibilityStates = graphsVisibilityStates?.slice(1); for (let i = 0; i < seriesList?.length; i += 1) { - const { metric = {}, queryName = '', legend = '' } = widgetMetaData[i] || {}; + const { metric = {}, queryName = '', legend: lgd } = widgetMetaData[i] || {}; - const label = getLabelName( - metric, - queryName || '', // query - legend || '', - ); + const newLegend = + currentQuery?.builder.queryData.find((item) => item.queryName === queryName) + ?.legend || ''; + + const legend = newLegend || lgd || ''; + + const label = getLabelName(metric, queryName || '', legend); const color = generateColor(label, themeColors.chartcolors); @@ -87,6 +91,7 @@ export type GetSeriesProps = { widgetMetaData: QueryData[]; graphsVisibilityStates?: boolean[]; panelType?: PANEL_TYPES; + currentQuery?: Query; }; export default getSeries; From 0df3c26f04fe43cccda2126182ea16b79e555f6b Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 22 Mar 2024 13:28:38 +0530 Subject: [PATCH 11/53] feat: implement download logs feature for logs explorer new design (#4728) * feat: implement download logs feature for logs explorer new design * feat: address review comments * feat: added timestamp and body to the start --------- Co-authored-by: Nityananda Gohain --- .../LogsFormatOptionsMenu.styles.scss | 6 +- .../DownloadV2/DownloadV2.styles.scss | 84 +++++++++++++++++++ .../src/container/DownloadV2/DownloadV2.tsx | 84 +++++++++++++++++++ .../container/DownloadV2/DownloadV2.types.ts | 10 +++ .../src/container/LogsExplorerViews/index.tsx | 27 +++++- 5 files changed, 207 insertions(+), 4 deletions(-) create mode 100644 frontend/src/container/DownloadV2/DownloadV2.styles.scss create mode 100644 frontend/src/container/DownloadV2/DownloadV2.tsx create mode 100644 
frontend/src/container/DownloadV2/DownloadV2.types.ts diff --git a/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss b/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss index efd668ffe1..af325a2d25 100644 --- a/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss +++ b/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss @@ -27,7 +27,7 @@ line-height: 18px; letter-spacing: 0.08em; text-align: left; - color: var(--bg-slate-200, #52575c); + color: #52575c; } .menu-items { @@ -65,7 +65,7 @@ padding: 12px; .title { - color: var(--bg-slate-200, #52575c); + color: #52575c; font-family: Inter; font-size: 11px; font-style: normal; @@ -149,7 +149,7 @@ } .title { - color: var(--bg-slate-200, #52575c); + color: #52575c; font-family: Inter; font-size: 11px; font-style: normal; diff --git a/frontend/src/container/DownloadV2/DownloadV2.styles.scss b/frontend/src/container/DownloadV2/DownloadV2.styles.scss new file mode 100644 index 0000000000..850c1c7d16 --- /dev/null +++ b/frontend/src/container/DownloadV2/DownloadV2.styles.scss @@ -0,0 +1,84 @@ +.download-logs-popover { + .ant-popover-inner { + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + padding: 12px 18px 12px 14px; + + .download-logs-content { + display: flex; + flex-direction: column; + gap: 8px; + align-items: flex-start; + + .action-btns { + padding: 4px 0px !important; + width: 159px; + display: flex; + align-items: center; + color: var(--bg-vanilla-400); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; + gap: 6px; + + .ant-btn-icon { + margin-inline-end: 0px; + } + } + + .action-btns:hover { + &.ant-btn-text { + background-color: rgba(171, 189, 
255, 0.04) !important; + } + } + + .export-heading { + color: #52575c; + font-size: 11px; + font-style: normal; + font-weight: 600; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + } + } + } +} + +.lightMode { + .download-logs-popover { + .ant-popover-inner { + border: 1px solid var(--bg-vanilla-300); + background: linear-gradient( + 139deg, + rgba(255, 255, 255, 0.8) 0%, + rgba(255, 255, 255, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2); + + .download-logs-content { + .action-btns { + color: var(--bg-ink-400); + } + .action-btns:hover { + &.ant-btn-text { + background-color: var(--bg-vanilla-300) !important; + } + } + .export-heading { + color: var(--bg-ink-200); + } + } + } + } +} diff --git a/frontend/src/container/DownloadV2/DownloadV2.tsx b/frontend/src/container/DownloadV2/DownloadV2.tsx new file mode 100644 index 0000000000..95630efcb9 --- /dev/null +++ b/frontend/src/container/DownloadV2/DownloadV2.tsx @@ -0,0 +1,84 @@ +import './DownloadV2.styles.scss'; + +import { Button, Popover, Typography } from 'antd'; +import { Excel } from 'antd-table-saveas-excel'; +import { FileDigit, FileDown, Sheet } from 'lucide-react'; +import { unparse } from 'papaparse'; + +import { DownloadProps } from './DownloadV2.types'; + +function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element { + const downloadExcelFile = (): void => { + const headers = Object.keys(Object.assign({}, ...data)).map((item) => { + const updatedTitle = item + .split('_') + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); + return { + title: updatedTitle, + dataIndex: item, + }; + }); + const excel = new Excel(); + excel + .addSheet(fileName) + .addColumns(headers) + .addDataSource(data, { + str2Percent: true, + }) + .saveAs(`${fileName}.xlsx`); + }; + + const downloadCsvFile = (): void => { + const csv = unparse(data); + const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' 
}); + const csvUrl = URL.createObjectURL(csvBlob); + const downloadLink = document.createElement('a'); + downloadLink.href = csvUrl; + downloadLink.download = `${fileName}.csv`; + downloadLink.click(); + downloadLink.remove(); + }; + + return ( + + Export As + + +
+ } + > + - - - + <> +
+
+
max lines per row
+
+ + +
- - )} +
+
{!addNewColumn &&
} diff --git a/frontend/src/container/LiveLogs/LiveLogsList/index.tsx b/frontend/src/container/LiveLogs/LiveLogsList/index.tsx index 7be2927445..39a39ab990 100644 --- a/frontend/src/container/LiveLogs/LiveLogsList/index.tsx +++ b/frontend/src/container/LiveLogs/LiveLogsList/index.tsx @@ -71,6 +71,7 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element { key={log.id} logData={log} selectedFields={selectedFields} + linesPerRow={options.maxLines} onAddToQuery={onAddToQuery} onSetActiveLog={onSetActiveLog} /> diff --git a/frontend/src/container/LogsExplorerList/index.tsx b/frontend/src/container/LogsExplorerList/index.tsx index 21b03cf413..fc5a1f6800 100644 --- a/frontend/src/container/LogsExplorerList/index.tsx +++ b/frontend/src/container/LogsExplorerList/index.tsx @@ -90,6 +90,7 @@ function LogsExplorerList({ onAddToQuery={onAddToQuery} onSetActiveLog={onSetActiveLog} activeLog={activeLog} + linesPerRow={options.maxLines} /> ); }, diff --git a/frontend/src/container/LogsTable/index.tsx b/frontend/src/container/LogsTable/index.tsx index c87d4232f6..b10c3503dd 100644 --- a/frontend/src/container/LogsTable/index.tsx +++ b/frontend/src/container/LogsTable/index.tsx @@ -74,6 +74,7 @@ function LogsTable(props: LogsTableProps): JSX.Element { key={log.id} logData={log} selectedFields={selected} + linesPerRow={linesPerRow} onAddToQuery={onAddToQuery} onSetActiveLog={onSetActiveLog} /> From 5745727031fad9bcfe78e3f326ef3be552ae1a9e Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 22 Mar 2024 14:59:43 +0530 Subject: [PATCH 15/53] fix: [SIG-565]: design feedback for integrations (#4723) * fix: [SIG-565]: design feedback for integrations * feat: added dotted line in the test connection modal * feat: handle the URL change for integration details page to support back navigation * feat: added ghost loading states * feat: added margin for details header * feat: added margin for details header * feat: increase the list sizes to 20 * fix: handle icons * fix: remove 
unused classes --- frontend/public/Icons/cable-car.svg | 1 + frontend/public/Icons/configure.svg | 1 + frontend/public/Icons/group.svg | 1 + .../src/assets/Integrations/ConfigureIcon.tsx | 23 ++++++++++ frontend/src/constants/query.ts | 1 + .../IntegrationDetailContent.tsx | 9 ++-- .../Configure.tsx | 26 ++++++------ .../DataCollected.tsx | 4 +- .../IntegrationDetailContentTabs.styles.scss | 9 +++- .../IntegrationDetailHeader.tsx | 42 +++++++++++++++---- .../IntegrationDetailPage.styles.scss | 28 +++++++++++-- .../IntegrationDetailPage.tsx | 5 ++- .../src/pages/Integrations/Integrations.tsx | 29 +++++++++++-- 13 files changed, 140 insertions(+), 39 deletions(-) create mode 100644 frontend/public/Icons/cable-car.svg create mode 100644 frontend/public/Icons/configure.svg create mode 100644 frontend/public/Icons/group.svg create mode 100644 frontend/src/assets/Integrations/ConfigureIcon.tsx diff --git a/frontend/public/Icons/cable-car.svg b/frontend/public/Icons/cable-car.svg new file mode 100644 index 0000000000..0c7318debd --- /dev/null +++ b/frontend/public/Icons/cable-car.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/configure.svg b/frontend/public/Icons/configure.svg new file mode 100644 index 0000000000..088dfa9447 --- /dev/null +++ b/frontend/public/Icons/configure.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/group.svg b/frontend/public/Icons/group.svg new file mode 100644 index 0000000000..e293cebcd0 --- /dev/null +++ b/frontend/public/Icons/group.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/assets/Integrations/ConfigureIcon.tsx b/frontend/src/assets/Integrations/ConfigureIcon.tsx new file mode 100644 index 0000000000..84ddef5de0 --- /dev/null +++ b/frontend/src/assets/Integrations/ConfigureIcon.tsx @@ -0,0 +1,23 @@ +import { Color } from '@signozhq/design-tokens'; +import { useIsDarkMode } from 'hooks/useDarkMode'; + +function ConfigureIcon(): JSX.Element { 
+ const isDarkMode = useIsDarkMode(); + return ( + + + + + + + ); +} + +export default ConfigureIcon; diff --git a/frontend/src/constants/query.ts b/frontend/src/constants/query.ts index d3bd2729d1..31ec5fcd20 100644 --- a/frontend/src/constants/query.ts +++ b/frontend/src/constants/query.ts @@ -27,5 +27,6 @@ export enum QueryParams { viewName = 'viewName', viewKey = 'viewKey', expandedWidgetId = 'expandedWidgetId', + integration = 'integration', pagination = 'pagination', } diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx index 6083489b58..ec81d51db6 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx @@ -1,7 +1,8 @@ import './IntegrationDetailPage.styles.scss'; import { Button, Tabs, TabsProps, Typography } from 'antd'; -import { Drum, Hammer, Table2 } from 'lucide-react'; +import ConfigureIcon from 'assets/Integrations/ConfigureIcon'; +import { CableCar, Group } from 'lucide-react'; import { IntegrationDetailedProps } from 'types/api/integrations/types'; import Configure from './IntegrationDetailContentTabs/Configure'; @@ -24,7 +25,7 @@ function IntegrationDetailContent( @@ -43,7 +44,7 @@ function IntegrationDetailContent( @@ -56,7 +57,7 @@ function IntegrationDetailContent( diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx index ede3b41137..92a5e0c823 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx @@ -1,6 +1,6 @@ import './IntegrationDetailContentTabs.styles.scss'; -import { Button, 
Tooltip, Typography } from 'antd'; +import { Button, Typography } from 'antd'; import cx from 'classnames'; import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer'; import { useState } from 'react'; @@ -21,18 +21,18 @@ function Configure(props: ConfigurationProps): JSX.Element {
{configuration.map((config, index) => ( - - - + ))}
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx index a3c387dc3a..1c605ec863 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx @@ -59,7 +59,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element { index % 2 === 0 ? 'table-row-dark' : '' } dataSource={logsData} - pagination={{ pageSize: 3 }} + pagination={{ pageSize: 20 }} className="logs-section-table" />
@@ -74,7 +74,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element { index % 2 === 0 ? 'table-row-dark' : '' } dataSource={metricsData} - pagination={{ pageSize: 3 }} + pagination={{ pageSize: 20 }} className="metrics-section-table" />
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss index 8340d0d4c0..81dcb6bf59 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss @@ -4,7 +4,7 @@ .integration-detail-overview-left-container { display: flex; flex-direction: column; - width: 25%; + width: 30%; gap: 26px; border-right: 1px solid var(--bg-slate-500); padding: 16px 0; @@ -185,13 +185,14 @@ .configure-menu { display: flex; flex-direction: column; - width: 25%; + width: 30%; padding: 16px 16px 0px 0px; border-right: 1px solid var(--bg-slate-500); gap: 8px; .configure-menu-item { padding: 4px 8px; + height: auto; text-align: start; color: var(--bg-vanilla-100); font-family: Inter; @@ -199,6 +200,10 @@ font-style: normal; font-weight: 400; line-height: 18px; /* 128.571% */ + + .configure-text { + text-wrap: pretty; + } } .configure-menu-item:hover { diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx index 6b2a7b7c34..cab49391f5 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx @@ -1,7 +1,7 @@ /* eslint-disable no-nested-ternary */ import './IntegrationDetailPage.styles.scss'; -import { Button, Modal, Typography } from 'antd'; +import { Button, Modal, Tooltip, Typography } from 'antd'; import installIntegration from 'api/Integrations/installIntegration'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import dayjs from 
'dayjs'; @@ -22,6 +22,7 @@ interface IntegrationDetailHeaderProps { connectionState: ConnectionStates; connectionData: IntegrationConnectionStatus; } +// eslint-disable-next-line sonarjs/cognitive-complexity function IntegrationDetailHeader( props: IntegrationDetailHeaderProps, ): JSX.Element { @@ -154,19 +155,42 @@ function IntegrationDetailHeader( Last recieved from - - {latestData.last_received_from} - +
+ + + {latestData.last_received_from} + +
Last recieved at - - {latestData.last_received_ts_ms - ? dayjs(latestData.last_received_ts_ms).format('DD MMM YYYY HH:mm') - : ''} - +
+ + + {latestData.last_received_ts_ms + ? dayjs(latestData.last_received_ts_ms).format('DD MMM YYYY HH:mm') + : ''} + +
) : connectionState === ConnectionStates.TestingConnection ? ( diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss index d9982c3aab..b7630491ae 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss @@ -53,9 +53,17 @@ .loading-integration-details { display: flex; - height: 400px; - justify-content: center; - align-items: center; + flex-direction: column; + gap: 16px; + + .skeleton-1 { + height: 125px; + width: 100%; + } + .skeleton-2 { + height: 250px; + width: 100%; + } } .all-integrations-btn { @@ -254,6 +262,7 @@ border-radius: 4px; border: 1px solid rgba(218, 85, 101, 0.2); background: rgba(218, 85, 101, 0.06); + gap: 32px; .unintall-integration-bar-text { display: flex; @@ -429,6 +438,15 @@ .data-info { display: flex; justify-content: space-between; + align-items: center; + + .connection-line { + border: 1px dashed var(--bg-slate-200); + min-width: 20px; + height: 0px; + flex-grow: 1; + margin: 0px 8px; + } .last-data { color: var(--bg-vanilla-400); @@ -447,6 +465,7 @@ font-style: normal; font-weight: 400; line-height: 18px; /* 150% */ + max-width: 320px; } } .testingConnection { @@ -622,6 +641,9 @@ .connection-content { .data-info { + .connection-line { + border: 1px dashed var(--bg-vanilla-400); + } .last-data { color: var(--bg-slate-400); } diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx index e9a975001a..88be0dc3a3 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx @@ -4,7 +4,7 @@ import './IntegrationDetailPage.styles.scss'; import 
{ Color } from '@signozhq/design-tokens'; -import { Button, Typography } from 'antd'; +import { Button, Skeleton, Typography } from 'antd'; import { useGetIntegration } from 'hooks/Integrations/useGetIntegration'; import { useGetIntegrationStatus } from 'hooks/Integrations/useGetIntegrationStatus'; import { defaultTo } from 'lodash-es'; @@ -71,7 +71,8 @@ function IntegrationDetailPage(props: IntegrationDetailPageProps): JSX.Element { {loading ? (
- Please wait.. While we load the integration details + +
) : isError ? (
diff --git a/frontend/src/pages/Integrations/Integrations.tsx b/frontend/src/pages/Integrations/Integrations.tsx index 6d25a20a6f..bda4184eab 100644 --- a/frontend/src/pages/Integrations/Integrations.tsx +++ b/frontend/src/pages/Integrations/Integrations.tsx @@ -1,17 +1,38 @@ import './Integrations.styles.scss'; -import { useState } from 'react'; +import useUrlQuery from 'hooks/useUrlQuery'; +import { useCallback, useMemo, useState } from 'react'; +import { useHistory, useLocation } from 'react-router-dom'; import Header from './Header'; import IntegrationDetailPage from './IntegrationDetailPage/IntegrationDetailPage'; import IntegrationsList from './IntegrationsList'; function Integrations(): JSX.Element { - const [selectedIntegration, setSelectedIntegration] = useState( - null, + const urlQuery = useUrlQuery(); + const history = useHistory(); + const location = useLocation(); + + const selectedIntegration = useMemo(() => urlQuery.get('integration'), [ + urlQuery, + ]); + + const setSelectedIntegration = useCallback( + (integration: string | null) => { + if (integration) { + urlQuery.set('integration', integration); + } else { + urlQuery.set('integration', ''); + } + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + }, + [history, location.pathname, urlQuery], ); - const [activeDetailTab, setActiveDetailTab] = useState(null); + const [activeDetailTab, setActiveDetailTab] = useState( + 'overview', + ); const [searchTerm, setSearchTerm] = useState(''); return ( From f24135f5b00d2598b06a5d8d130830775fd657d9 Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com> Date: Sat, 23 Mar 2024 11:39:28 +0530 Subject: [PATCH 16/53] Feat: QS: postgres integration: instructions for collecting and parsing logs (#4738) * chore: offer metrics config instructions for signoz cloud only * chore: some more cleanups * chore: get log collection instructions started * feat: flesh out log collection 
otel config for postgres * chore: some cleanup * chore: some more cleanup * chore: some more cleanup --- .../postgres/config/collect-logs.md | 109 +++++++++++++ .../postgres/config/collect-metrics.md | 101 ++++++++++++ .../config/configure-otel-collector.md | 146 ------------------ .../postgres/config/prerequisites.md | 50 ++++-- .../postgres/integration.json | 82 ++++++---- .../builtin_integrations/postgres/overview.md | 4 +- 6 files changed, 299 insertions(+), 193 deletions(-) create mode 100644 pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md create mode 100644 pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-metrics.md delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md new file mode 100644 index 0000000000..f49e722856 --- /dev/null +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md @@ -0,0 +1,109 @@ +### Collect Postgres Logs + +#### Create collector config file + +Save the following config for collecting postgres logs in a file named `postgres-logs-collection-config.yaml` + +```yaml +receivers: + filelog/postgresql: + include: ["${env:POSTGRESQL_LOG_FILE}"] + operators: + # Parse default postgresql text log format. + # `log_line_prefix` postgres setting defaults to '%m [%p] ' which logs the timestamp and the process ID + # See https://www.postgresql.org/docs/current/runtime-config-logging.html#GUC-LOG-LINE-PREFIX for more details + - type: regex_parser + if: body matches '^(?P\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}.?[0-9]*? [A-Z]*) \\[(?P[0-9]+)\\] (?P[A-Z]*). (?P.*)$' + parse_from: body + regex: '^(?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.?[0-9]*? [A-Z]*) \[(?P[0-9]+)\] (?P[A-Z]*). 
(?P.*)$' + timestamp: + parse_from: attributes.ts + layout: '%Y-%m-%d %H:%M:%S %Z' + severity: + parse_from: attributes.log_level + mapping: + debug: + - DEBUG1 + - DEBUG2 + - DEBUG3 + - DEBUG4 + - DEBUG5 + info: + - INFO + - LOG + - NOTICE + - DETAIL + warning: WARNING + error: ERROR + fatal: + - FATAL + - PANIC + on_error: send + - type: move + if: attributes.message != nil + from: attributes.message + to: body + - type: remove + if: attributes.log_level != nil + field: attributes.log_level + - type: remove + if: attributes.ts != nil + field: attributes.ts + - type: add + field: attributes.source + value: postgres + +processors: + batch: + send_batch_size: 10000 + send_batch_max_size: 11000 + timeout: 10s + +exporters: + # export to SigNoz cloud + otlp/postgres-logs: + endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" + tls: + insecure: false + headers: + "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" + + # export to local collector + # otlp/local: + # endpoint: "localhost:4317" + # tls: + # insecure: true + +service: + pipelines: + postgresql: + receivers: [filelog/postgresql] + processors: [batch] + exporters: [otlp/postgresql-logs] +``` + +#### Set Environment Variables + +Set the following environment variables in your otel-collector environment: + +```bash + +# path of Postgres server log file. must be accessible by the otel collector +export POSTGRESQL_LOG_FILE=/usr/local/var/log/postgres.log + +# region specific SigNoz cloud ingestion endpoint +export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" + +# your SigNoz ingestion key +export SIGNOZ_INGESTION_KEY="signoz-ingestion-key" + +``` + +#### Use collector config file + +Make the collector config file available to your otel collector and use it by adding the following flag to the command for running your collector +```bash +--config postgres-logs-collection-config.yaml +``` +Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. 
+ diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-metrics.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-metrics.md new file mode 100644 index 0000000000..ad1971fe35 --- /dev/null +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-metrics.md @@ -0,0 +1,101 @@ +### Collect Postgres Metrics + +You can configure Postgres metrics collection by providing the required collector config to your collector. + +#### Create collector config file + +Save the following config for collecting postgres metrics in a file named `postgres-metrics-collection-config.yaml` + +```yaml +receivers: + postgresql: + # The endpoint of the postgresql server. Whether using TCP or Unix sockets, this value should be host:port. If transport is set to unix, the endpoint will internally be translated from host:port to /host.s.PGSQL.port + endpoint: ${env:POSTGRESQL_ENDPOINT} + # The frequency at which to collect metrics from the Postgres instance. + collection_interval: 60s + # The username used to access the postgres instance + username: ${env:POSTGRESQL_USERNAME} + # The password used to access the postgres instance + password: ${env:POSTGRESQL_PASSWORD} + # The list of databases for which the receiver will attempt to collect statistics. If an empty list is provided, the receiver will attempt to collect statistics for all non-template databases + databases: [] + # # Defines the network to use for connecting to the server. 
Valid Values are `tcp` or `unix` + # transport: tcp + tls: + # set to false if SSL is enabled on the server + insecure: true + # ca_file: /etc/ssl/certs/ca-certificates.crt + # cert_file: /etc/ssl/certs/postgres.crt + # key_file: /etc/ssl/certs/postgres.key + metrics: + postgresql.database.locks: + enabled: true + postgresql.deadlocks: + enabled: true + postgresql.sequential_scans: + enabled: true + +processors: + # enriches the data with additional host information + # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor + resourcedetection/system: + # add additional detectors if needed + detectors: ["system"] + system: + hostname_sources: ["os"] + +exporters: + # export to SigNoz cloud + otlp/postgres: + endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" + tls: + insecure: false + headers: + "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" + + # export to local collector + # otlp/local: + # endpoint: "localhost:4317" + # tls: + # insecure: true + +service: + pipelines: + metrics/postgresql: + receivers: [postgresql] + # note: remove this processor if the collector host is not running on the same host as the postgres instance + processors: [resourcedetection/system] + exporters: [otlp/postgres] +``` + +#### Set Environment Variables + +Set the following environment variables in your otel-collector environment: + +```bash + +# password for Postgres monitoring user" +export POSTGRESQL_USERNAME="monitoring" + +# password for Postgres monitoring user" +export POSTGRESQL_PASSWORD="" + +# Postgres endpoint reachable from the otel collector" +export POSTGRESQL_ENDPOINT="host:port" + + +# region specific SigNoz cloud ingestion endpoint +export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" + +# your SigNoz ingestion key +export SIGNOZ_INGESTION_KEY="signoz-ingestion-key" + +``` + +#### Use collector config file + +Make the collector config file available to your otel 
collector and use it by adding the following flag to the command for running your collector +```bash +--config postgres-metrics-collection-config.yaml +``` +Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md deleted file mode 100644 index 24fc840a30..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md +++ /dev/null @@ -1,146 +0,0 @@ -### Configure otel collector - -#### Create collector config file - -Save the collector config for monitoring postgres in a file named `postgres-collector-config.yaml` - -Use the following configuration for SigNoz cloud. See further below for configuration for self hosted SigNoz - -```yaml -receivers: - postgresql: - # The endpoint of the postgresql server. Whether using TCP or Unix sockets, this value should be host:port. If transport is set to unix, the endpoint will internally be translated from host:port to /host.s.PGSQL.port - endpoint: ${env:POSTGRESQL_ENDPOINT} - # The frequency at which to collect metrics from the Postgres instance. - collection_interval: 60s - # The username used to access the postgres instance - username: monitoring - # The password used to access the postgres instance - password: ${env:POSTGRESQL_PASSWORD} - # The list of databases for which the receiver will attempt to collect statistics. If an empty list is provided, the receiver will attempt to collect statistics for all non-template databases - databases: [] - # # Defines the network to use for connecting to the server. 
Valid Values are `tcp` or `unix` - # transport: tcp - tls: - # set to false if SSL is enabled on the server - insecure: true - # ca_file: /etc/ssl/certs/ca-certificates.crt - # cert_file: /etc/ssl/certs/postgres.crt - # key_file: /etc/ssl/certs/postgres.key - metrics: - postgresql.database.locks: - enabled: true - postgresql.deadlocks: - enabled: true - postgresql.sequential_scans: - enabled: true - -processors: - # enriches the data with additional host information - # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor - resourcedetection/system: - # add additional detectors if needed - detectors: ["system"] - system: - hostname_sources: ["os"] - -exporters: - # export to SigNoz cloud - otlp/postgres: - endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" - tls: - insecure: ${env:OTLP_DESTINATION_TLS_INSECURE} - headers: - "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" - -service: - pipelines: - metrics/postgresql: - receivers: [postgresql] - # note: remove this processor if the collector host is not running on the same host as the postgres instance - processors: [resourcedetection/system] - exporters: [otlp/postgres] -``` - -Use the following config if using self-hosted SigNoz. See the config above if using SigNoz cloud -```yaml -receivers: - postgresql: - # The endpoint of the postgresql server. Whether using TCP or Unix sockets, this value should be host:port. If transport is set to unix, the endpoint will internally be translated from host:port to /host.s.PGSQL.port - endpoint: ${env:POSTGRESQL_ENDPOINT} - # The frequency at which to collect metrics from the Postgres instance. - collection_interval: 60s - # The username used to access the postgres instance - username: monitoring - # The password used to access the postgres instance - password: ${env:POSTGRESQL_PASSWORD} - # The list of databases for which the receiver will attempt to collect statistics. 
If an empty list is provided, the receiver will attempt to collect statistics for all non-template databases - databases: [] - # # Defines the network to use for connecting to the server. Valid Values are `tcp` or `unix` - # transport: tcp - tls: - # set to false if SSL is enabled on the server - insecure: true - # ca_file: /etc/ssl/certs/ca-certificates.crt - # cert_file: /etc/ssl/certs/postgres.crt - # key_file: /etc/ssl/certs/postgres.key - metrics: - postgresql.database.locks: - enabled: true - postgresql.deadlocks: - enabled: true - postgresql.sequential_scans: - enabled: true - -processors: - # enriches the data with additional host information - # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor - resourcedetection/system: - # add additional detectors if needed - detectors: ["system"] - system: - hostname_sources: ["os"] - -exporters: - otlp/postgres: - endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" - tls: - insecure: ${env:OTLP_DESTINATION_TLS_INSECURE} - -service: - pipelines: - metrics/postgresql: - receivers: [postgresql] - # note: remove this processor if the collector host is not running on the same host as the postgres instance - processors: [resourcedetection/system] - exporters: [otlp/postgres] -``` - - -#### Set Environment Variables - -Set the following environment variables in your otel-collector environment: - -```bash - -# password for postgres monitoring user" -export POSTGRESQL_PASSWORD="password" - -# postgres endpoint reachable from the otel collector" -export POSTGRESQL_ENDPOINT="host:port" - -# A reachable OTLP destination for collected metrics. 
Eg: localhost:4317 or signoz cloud ingestion endpoint -export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" - -# Set to true if using an endpoint without TLS -export OTLP_DESTINATION_TLS_INSECURE="false" - -# your signoz ingestion key if using SigNoz cloud -export SIGNOZ_INGESTION_KEY="key" - -``` - -#### Use collector config file - -Make the `postgres-collector-config.yaml` file available to your otel collector and add the flag `--config postgres-collector-config.yaml` to the command for running your otel collector. -Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md index fbfc9e9052..e50282d2a8 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md @@ -1,22 +1,40 @@ -### Prepare postgres for monitoring +## Before You Begin -- Have a running postgresql instance -- Have the monitoring user created -- Have the monitoring user granted the necessary permissions +To configure metrics and logs collection for a Postgres server, you need the following. -This receiver supports PostgreSQL versions 9.6+ +### Ensure Postgres server is prepared for monitoring -For PostgreSQL versions 10+, run: +- **Ensure that the Postgres server is running a supported version** + Postgres versions 9.6+ are supported. 
+ You can use the following SQL statement to determine server version + ```SQL + SELECT version(); + ``` -```sql -create user monitoring with password ''; -grant pg_monitor to monitoring; -grant SELECT ON pg_stat_database to monitoring; -``` +- **If collecting metrics, ensure that there is a Postgres user with required permissions** + To create a monitoring user for Postgres versions 10+, run: + ```SQL + create user monitoring with password ''; + grant pg_monitor to monitoring; + grant SELECT ON pg_stat_database to monitoring; + ``` + + To create a monitoring user for Postgres versions >= 9.6 and <10, run: + ```SQL + create user monitoring with password ''; + grant SELECT ON pg_stat_database to monitoring; + ``` + -For PostgreSQL versions >= 9.6 and <10, run: +### Ensure OTEL Collector is running with access to the Postgres server -```sql -create user monitoring with password ''; -grant SELECT ON pg_stat_database to monitoring; -``` +- **Ensure that an OTEL collector is running in your deployment environment** + If needed, please [install an OTEL Collector](https://signoz.io/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/) + If already installed, ensure that the collector version is v0.88.0 or newer. + + Also ensure that you can provide config files to the collector and that you can set environment variables and command line flags used for running it. + +- **Ensure that the OTEL collector can access the Postgres server** + In order to collect metrics, the collector must be able to access the Postgres server as a client using the monitoring user. + + In order to collect logs, the collector must be able to read the Postgres server log file. 
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json index 687ca31993..c796a886ee 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json @@ -1,7 +1,7 @@ { "id": "postgres", "title": "PostgreSQL", - "description": "Monitor postgres using logs and metrics.", + "description": "Monitor Postgres with metrics and logs", "author": { "name": "SigNoz", "email": "integrations@signoz.io", @@ -18,8 +18,12 @@ "instructions": "file://config/prerequisites.md" }, { - "title": "Configure Otel Collector", - "instructions": "file://config/configure-otel-collector.md" + "title": "Collect Metrics", + "instructions": "file://config/collect-metrics.md" + }, + { + "title": "Collect Logs", + "instructions": "file://config/collect-logs.md" } ], "assets": { @@ -48,30 +52,48 @@ } }, "data_collected": { - "logs": [], + "logs": [ + { + "name": "Process ID", + "path": "attributes.pid", + "type": "string" + }, { + "name": "Timestamp", + "path": "timestamp", + "type": "timestamp" + }, { + "name": "Severity Text", + "path": "severity_text", + "type": "string" + }, { + "name": "Severity Number", + "path": "severity_number", + "type": "number" + } + ], "metrics": [ { "name": "postgresql.backends", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of backends." }, { "name": "postgresql.bgwriter.buffers.allocated", "type": "sum", - "unit": "{buffers}", + "unit": "number", "description": "Number of buffers allocated." }, { "name": "postgresql.bgwriter.buffers.writes", "type": "sum", - "unit": "{buffers}", + "unit": "number", "description": "Number of buffers written." 
}, { "name": "postgresql.bgwriter.checkpoint.count", "type": "sum", - "unit": "{checkpoints}", + "unit": "number", "description": "The number of checkpoints performed." }, { @@ -83,133 +105,133 @@ { "name": "postgresql.bgwriter.maxwritten", "type": "sum", - "unit": "1", + "unit": "number", "description": "Number of times the background writer stopped a cleaning scan because it had written too many buffers." }, { "name": "postgresql.blocks_read", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of blocks read." }, { "name": "postgresql.commits", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of commits." }, { "name": "postgresql.connection.max", "type": "gauge", - "unit": "{connections}", + "unit": "number", "description": "Configured maximum number of client connections allowed" }, { "name": "postgresql.database.count", "type": "sum", - "unit": "{databases}", + "unit": "number", "description": "Number of user databases." }, { "name": "postgresql.database.locks", "type": "gauge", - "unit": "{lock}", + "unit": "number", "description": "The number of database locks." }, { "name": "postgresql.db_size", "type": "sum", - "unit": "By", + "unit": "Bytes", "description": "The database disk usage." }, { "name": "postgresql.deadlocks", "type": "sum", - "unit": "{deadlock}", + "unit": "number", "description": "The number of deadlocks." }, { "name": "postgresql.index.scans", "type": "sum", - "unit": "{scans}", + "unit": "number", "description": "The number of index scans on a table." }, { "name": "postgresql.index.size", "type": "gauge", - "unit": "By", + "unit": "Bytes", "description": "The size of the index on disk." }, { "name": "postgresql.operations", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of db row operations." }, { "name": "postgresql.replication.data_delay", "type": "gauge", - "unit": "By", + "unit": "Bytes", "description": "The amount of data delayed in replication." 
}, { "name": "postgresql.rollbacks", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of rollbacks." }, { "name": "postgresql.rows", "type": "sum", - "unit": "1", + "unit": "number", "description": "The number of rows in the database." }, { "name": "postgresql.sequential_scans", "type": "sum", - "unit": "{sequential_scan}", + "unit": "number", "description": "The number of sequential scans." }, { "name": "postgresql.table.count", "type": "sum", - "unit": "{table}", + "unit": "number", "description": "Number of user tables in a database." }, { "name": "postgresql.table.size", "type": "sum", - "unit": "By", + "unit": "Bytes", "description": "Disk space used by a table." }, { "name": "postgresql.table.vacuum.count", "type": "sum", - "unit": "{vacuums}", + "unit": "number", "description": "Number of times a table has manually been vacuumed." }, { "name": "postgresql.temp_files", "type": "sum", - "unit": "{temp_file}", + "unit": "number", "description": "The number of temp files." }, { "name": "postgresql.wal.age", "type": "gauge", - "unit": "s", + "unit": "seconds", "description": "Age of the oldest WAL file." }, { "name": "postgresql.wal.delay", "type": "gauge", - "unit": "s", + "unit": "seconds", "description": "Time between flushing recent WAL locally and receiving notification that the standby server has completed an operation with it." }, { "name": "postgresql.wal.lag", "type": "gauge", - "unit": "s", + "unit": "seconds", "description": "Time between flushing recent WAL locally and receiving notification that the standby server has completed an operation with it." 
} ] diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md index 4af57e6b20..ac6e061eca 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md @@ -1,3 +1,5 @@ ### Monitor Postgres with SigNoz -Parse your Postgres logs and collect key metrics. +Collect key Postgres metrics and view them with an out of the box dashboard. + +Collect and parse Postgres logs to populate timestamp, severity, and other log attributes for better querying and aggregation. From 994814864cf578a0852b2e7d9a6696929c676b8a Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Tue, 26 Mar 2024 06:20:35 +0530 Subject: [PATCH 17/53] fix: send 403 on wrong password entry during change password operation (#4733) --- pkg/query-service/app/http_handler.go | 7 +++---- pkg/query-service/auth/auth.go | 11 +++++------ pkg/query-service/model/response.go | 7 +++++++ 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 16b741e572..964850cbf8 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -2363,10 +2363,9 @@ func (aH *APIHandler) changePassword(w http.ResponseWriter, r *http.Request) { return } - if err := auth.ChangePassword(context.Background(), req); err != nil { - if aH.HandleError(w, err, http.StatusInternalServerError) { - return - } + if apiErr := auth.ChangePassword(context.Background(), req); apiErr != nil { + RespondError(w, apiErr, nil) + return } aH.WriteJSON(w, r, map[string]string{"data": "password changed successfully"}) diff --git a/pkg/query-service/auth/auth.go b/pkg/query-service/auth/auth.go index 6b96a6da85..e307f401ab 100644 --- a/pkg/query-service/auth/auth.go +++ b/pkg/query-service/auth/auth.go @@ -234,24 +234,23 @@ 
func ResetPassword(ctx context.Context, req *model.ResetPasswordRequest) error { return nil } -func ChangePassword(ctx context.Context, req *model.ChangePasswordRequest) error { - +func ChangePassword(ctx context.Context, req *model.ChangePasswordRequest) *model.ApiError { user, apiErr := dao.DB().GetUser(ctx, req.UserId) if apiErr != nil { - return errors.Wrap(apiErr.Err, "failed to query user from the DB") + return apiErr } if user == nil || !passwordMatch(user.Password, req.OldPassword) { - return ErrorInvalidCreds + return model.ForbiddenError(ErrorInvalidCreds) } hash, err := PasswordHash(req.NewPassword) if err != nil { - return errors.Wrap(err, "Failed to generate password hash") + return model.InternalError(errors.New("Failed to generate password hash")) } if apiErr := dao.DB().UpdateUserPassword(ctx, hash, user.Id); apiErr != nil { - return apiErr.Err + return apiErr } return nil diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go index a8e09b9d6e..1f3970e0d4 100644 --- a/pkg/query-service/model/response.go +++ b/pkg/query-service/model/response.go @@ -112,6 +112,13 @@ func UnavailableError(err error) *ApiError { } } +func ForbiddenError(err error) *ApiError { + return &ApiError{ + Typ: ErrorForbidden, + Err: err, + } +} + func WrapApiError(err *ApiError, msg string) *ApiError { return &ApiError{ Typ: err.Type(), From 83f68f13db3dbedf692f7d2b0eb25c4ea99410cb Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Tue, 26 Mar 2024 12:40:53 +0530 Subject: [PATCH 18/53] feat: add ability to customize alert frequency (#4697) --- frontend/public/locales/en-GB/alerts.json | 1 + frontend/public/locales/en/alerts.json | 1 + .../container/FormAlertRules/RuleOptions.tsx | 108 ++++++++++++------ .../src/container/FormAlertRules/styles.ts | 7 ++ frontend/src/pages/AlertList/index.tsx | 5 + frontend/src/types/api/alerts/def.ts | 1 + pkg/query-service/rules/manager.go | 4 +- pkg/query-service/rules/thresholdRule.go | 2 +- 8 files 
changed, 90 insertions(+), 39 deletions(-) diff --git a/frontend/public/locales/en-GB/alerts.json b/frontend/public/locales/en-GB/alerts.json index fb360e579b..4dffb641d3 100644 --- a/frontend/public/locales/en-GB/alerts.json +++ b/frontend/public/locales/en-GB/alerts.json @@ -112,6 +112,7 @@ "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", "field_unit": "Threshold unit", "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", "text_for": "minutes", "selected_query_placeholder": "Select query" } diff --git a/frontend/public/locales/en/alerts.json b/frontend/public/locales/en/alerts.json index 0349568c70..33714d4429 100644 --- a/frontend/public/locales/en/alerts.json +++ b/frontend/public/locales/en/alerts.json @@ -112,6 +112,7 @@ "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", "field_unit": "Threshold unit", "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", "text_for": "minutes", "selected_query_placeholder": "Select query" } diff --git a/frontend/src/container/FormAlertRules/RuleOptions.tsx b/frontend/src/container/FormAlertRules/RuleOptions.tsx index 88e7c83979..d62b39f30f 100644 --- a/frontend/src/container/FormAlertRules/RuleOptions.tsx +++ b/frontend/src/container/FormAlertRules/RuleOptions.tsx @@ -1,5 +1,6 @@ import { Checkbox, + Collapse, Form, InputNumber, InputNumberProps, @@ -24,7 +25,12 @@ import { import { EQueryType } from 'types/common/dashboard'; import { popupContainer } from 'utils/selectPopupContainer'; -import { FormContainer, InlineSelect, StepHeading } from './styles'; +import { + FormContainer, + InlineSelect, + StepHeading, + VerticalLine, +} from './styles'; function RuleOptions({ alertDef, @@ -238,42 +244,72 @@ function RuleOptions({ /> - - - { - setAlertDef({ - ...alertDef, - condition: { - ...alertDef.condition, 
- alertOnAbsent: e.target.checked, - }, - }); - }} - /> - - {t('text_alert_on_absent')} + + + + + + {t('text_alert_frequency')} + + { + setAlertDef({ + ...alertDef, + frequency: Number(value) || 0, + }); + }} + type="number" + onWheel={(e): void => e.currentTarget.blur()} + /> + + {t('text_for')} + + - - { - setAlertDef({ - ...alertDef, - condition: { - ...alertDef.condition, - absentFor: Number(value) || 0, - }, - }); - }} - type="number" - onWheel={(e): void => e.currentTarget.blur()} - /> - - {t('text_for')} - + + + + { + setAlertDef({ + ...alertDef, + condition: { + ...alertDef.condition, + alertOnAbsent: e.target.checked, + }, + }); + }} + /> + + {t('text_alert_on_absent')} + + + { + setAlertDef({ + ...alertDef, + condition: { + ...alertDef.condition, + absentFor: Number(value) || 0, + }, + }); + }} + type="number" + onWheel={(e): void => e.currentTarget.blur()} + /> + + {t('text_for')} + + + + + diff --git a/frontend/src/container/FormAlertRules/styles.ts b/frontend/src/container/FormAlertRules/styles.ts index 9fcaf4c59c..11205c0ab4 100644 --- a/frontend/src/container/FormAlertRules/styles.ts +++ b/frontend/src/container/FormAlertRules/styles.ts @@ -67,6 +67,13 @@ export const SeveritySelect = styled(Select)` width: 25% !important; `; +export const VerticalLine = styled.div` + border-left: 2px solid #e8e8e8; /* Adjust color and thickness as desired */ + padding-left: 20px; /* Adjust spacing to content as needed */ + margin-left: 20px; /* Adjust margin as desired */ + height: 100%; /* Adjust based on your layout needs */ +`; + export const InputSmall = styled(Input)` width: 40% !important; `; diff --git a/frontend/src/pages/AlertList/index.tsx b/frontend/src/pages/AlertList/index.tsx index 336c399a2f..33f3ada0f9 100644 --- a/frontend/src/pages/AlertList/index.tsx +++ b/frontend/src/pages/AlertList/index.tsx @@ -12,6 +12,11 @@ function AllAlertList(): JSX.Element { children: , }, // { + // label: 'Planned Downtime', + // key: 'Planned Downtime', + // // 
children: , + // }, + // { // label: 'Map Alert Channels', // key = 'Map Alert Channels', // children: , diff --git a/frontend/src/types/api/alerts/def.ts b/frontend/src/types/api/alerts/def.ts index af3a4bc912..96fa86654f 100644 --- a/frontend/src/types/api/alerts/def.ts +++ b/frontend/src/types/api/alerts/def.ts @@ -14,6 +14,7 @@ export interface AlertDef { alertType?: string; alert?: string; ruleType?: string; + frequency?: number | undefined; condition: RuleCondition; labels?: Labels; annotations?: Labels; diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index 530bb30d14..95181eade6 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -525,7 +525,7 @@ func (m *Manager) prepareTask(acquireLock bool, r *PostableRule, taskName string rules = append(rules, tr) // create ch rule task for evalution - task = newTask(TaskTypeCh, taskName, taskNamesuffix, time.Duration(r.Frequency), rules, m.opts, m.prepareNotifyFunc()) + task = newTask(TaskTypeCh, taskName, taskNamesuffix, time.Duration(r.Frequency*Duration(time.Minute)), rules, m.opts, m.prepareNotifyFunc()) // add rule to memory m.rules[ruleId] = tr @@ -547,7 +547,7 @@ func (m *Manager) prepareTask(acquireLock bool, r *PostableRule, taskName string rules = append(rules, pr) // create promql rule task for evalution - task = newTask(TaskTypeProm, taskName, taskNamesuffix, time.Duration(r.Frequency), rules, m.opts, m.prepareNotifyFunc()) + task = newTask(TaskTypeProm, taskName, taskNamesuffix, time.Duration(r.Frequency*Duration(time.Minute)), rules, m.opts, m.prepareNotifyFunc()) // add rule to memory m.rules[ruleId] = pr diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go index 0fdb3745ca..f358d80393 100644 --- a/pkg/query-service/rules/thresholdRule.go +++ b/pkg/query-service/rules/thresholdRule.go @@ -713,7 +713,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer 
zap.S().Debugf("ruleid:", r.ID(), "\t resultmap(potential alerts):", len(resultMap)) // if the data is missing for `For` duration then we should send alert - if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(r.Condition().AbsentFor).Before(time.Now()) { + if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(r.Condition().AbsentFor*time.Minute).Before(time.Now()) { zap.S().Debugf("ruleid:", r.ID(), "\t msg: no data found for rule condition") lbls := labels.NewBuilder(labels.Labels{}) if !r.lastTimestampWithDatapoints.IsZero() { From 4c91dbcff0b70dd06594596e01fffb9f8513c12f Mon Sep 17 00:00:00 2001 From: Rajat Dabade Date: Tue, 26 Mar 2024 17:09:13 +0530 Subject: [PATCH 19/53] Explorer Toolbar maximised and minimised (#4721) --- frontend/src/constants/localStorage.ts | 1 + .../ExplorerOptions/ExplorerOptionWrapper.tsx | 39 ++++ .../ExplorerOptions.styles.scss | 27 ++- .../ExplorerOptions/ExplorerOptions.tsx | 212 +++++++++++------- .../ExplorerOptionsHideArea.styles.scss | 55 +++++ .../ExplorerOptionsHideArea.tsx | 78 +++++++ .../src/container/ExplorerOptions/utils.ts | 52 +++++ .../src/container/LogsExplorerViews/index.tsx | 4 +- frontend/src/pages/TracesExplorer/index.tsx | 6 +- 9 files changed, 376 insertions(+), 98 deletions(-) create mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx create mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss create mode 100644 frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx diff --git a/frontend/src/constants/localStorage.ts b/frontend/src/constants/localStorage.ts index 296735b286..0ba6cac302 100644 --- a/frontend/src/constants/localStorage.ts +++ b/frontend/src/constants/localStorage.ts @@ -16,4 +16,5 @@ export enum LOCALSTORAGE { CHAT_SUPPORT = 'CHAT_SUPPORT', IS_IDENTIFIED_USER = 'IS_IDENTIFIED_USER', DASHBOARD_VARIABLES = 'DASHBOARD_VARIABLES', + SHOW_EXPLORER_TOOLBAR = 'SHOW_EXPLORER_TOOLBAR', } diff --git 
a/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx new file mode 100644 index 0000000000..a2e0eff9c8 --- /dev/null +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx @@ -0,0 +1,39 @@ +import { useEffect, useState } from 'react'; + +import ExplorerOptions, { ExplorerOptionsProps } from './ExplorerOptions'; +import { getExplorerToolBarVisibility } from './utils'; + +type ExplorerOptionsWrapperProps = Omit< + ExplorerOptionsProps, + 'isExplorerOptionDrop' +>; + +function ExplorerOptionWrapper({ + disabled, + query, + isLoading, + onExport, + sourcepage, +}: ExplorerOptionsWrapperProps): JSX.Element { + const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false); + + useEffect(() => { + const toolbarVisibility = getExplorerToolBarVisibility(sourcepage); + setIsExplorerOptionHidden(!toolbarVisibility); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + return ( + + ); +} + +export default ExplorerOptionWrapper; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss index d76d18bb4f..cddeb356b8 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss @@ -3,8 +3,8 @@ } .explorer-update { position: fixed; - bottom: 16px; - left: calc(50% - 225px); + bottom: 24px; + left: calc(50% - 250px); display: flex; align-items: center; gap: 12px; @@ -37,21 +37,24 @@ } } + .explorer-options { - display: flex; - gap: 16px; + position: fixed; + bottom: 24px; + left: calc(50% + 240px); padding: 10px 12px; - border-radius: 50px; + transform: translate(calc(-50% - 120px), 0); + transition: left 0.2s linear; border: 1px solid var(--bg-slate-400); + border-radius: 50px; background: rgba(22, 24, 29, 0.6); box-shadow: 4px 4px 16px 4px rgba(0, 0, 0, 0.25); 
backdrop-filter: blur(20px); - position: fixed; - bottom: 16px; - left: calc(50% + 240px); - transform: translate(calc(-50% - 120px), 0); - transition: left 0.2s linear; + cursor: default; + display: flex; + gap: 16px; + z-index: 1; .ant-select-selector { padding: 0 !important; } @@ -236,9 +239,9 @@ .lightMode { .explorer-options { + background: transparent; + box-shadow: none; border: 1px solid var(--bg-vanilla-300); - background: rgba(255, 255, 255, 0.8); - box-shadow: 4px 4px 16px 4px rgba(255, 255, 255, 0.55); backdrop-filter: blur(20px); hr { diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx index 8322c694d6..635d085e1e 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx @@ -1,3 +1,4 @@ +/* eslint-disable react/jsx-props-no-spreading */ import './ExplorerOptions.styles.scss'; import { Color } from '@signozhq/design-tokens'; @@ -30,8 +31,24 @@ import useErrorNotification from 'hooks/useErrorNotification'; import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; import { useNotifications } from 'hooks/useNotifications'; import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery'; -import { Check, ConciergeBell, Disc3, Plus, X, XCircle } from 'lucide-react'; -import { CSSProperties, useCallback, useMemo, useRef, useState } from 'react'; +import { + Check, + ConciergeBell, + Disc3, + PanelBottomClose, + Plus, + X, + XCircle, +} from 'lucide-react'; +import { + CSSProperties, + Dispatch, + SetStateAction, + useCallback, + useMemo, + useRef, + useState, +} from 'react'; import { useSelector } from 'react-redux'; import { useHistory } from 'react-router-dom'; import { AppState } from 'store/reducers'; @@ -41,11 +58,13 @@ import { DataSource } from 'types/common/queryBuilder'; import AppReducer from 'types/reducer/app'; import { 
USER_ROLES } from 'types/roles'; +import ExplorerOptionsHideArea from './ExplorerOptionsHideArea'; import { DATASOURCE_VS_ROUTES, generateRGBAFromHex, getRandomColor, saveNewViewHandler, + setExplorerToolBarVisibility, } from './utils'; const allowedRoles = [USER_ROLES.ADMIN, USER_ROLES.AUTHOR, USER_ROLES.EDITOR]; @@ -57,6 +76,8 @@ function ExplorerOptions({ onExport, query, sourcepage, + isExplorerOptionHidden = false, + setIsExplorerOptionHidden, }: ExplorerOptionsProps): JSX.Element { const [isExport, setIsExport] = useState(false); const [isSaveModalOpen, setIsSaveModalOpen] = useState(false); @@ -257,11 +278,18 @@ function ExplorerOptions({ [isDarkMode], ); + const hideToolbar = (): void => { + setExplorerToolBarVisibility(false, sourcepage); + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(true); + } + }; + const isEditDeleteSupported = allowedRoles.includes(role as string); return ( <> - {isQueryUpdated && ( + {isQueryUpdated && !isExplorerOptionHidden && (
)} -
-
- - showSearch - placeholder="Select a view" - loading={viewsIsLoading || isRefetching} - value={viewName || undefined} - onSelect={handleSelect} - style={{ - minWidth: 170, - }} - dropdownStyle={dropdownStyle} - className="views-dropdown" - allowClear={{ - clearIcon: , - }} - onClear={handleClearSelect} - ref={ref} - > - {viewsData?.data?.data?.map((view) => { - const extraData = - view.extraData !== '' ? JSON.parse(view.extraData) : ''; - let bgColor = getRandomColor(); - if (extraData !== '') { - bgColor = extraData.color; - } - return ( - -
- {' '} - {view.name} -
-
- ); - })} - - - -
- -
- -
- - - + {viewsData?.data?.data?.map((view) => { + const extraData = + view.extraData !== '' ? JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + return ( + +
+ {' '} + {view.name} +
+
+ ); + })} + - - - +
+ +
+ +
+ + + + + + + + + + + +
-
+ )} + + >; } -ExplorerOptions.defaultProps = { isLoading: false }; +ExplorerOptions.defaultProps = { + isLoading: false, + isExplorerOptionHidden: false, + setIsExplorerOptionHidden: undefined, +}; export default ExplorerOptions; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss new file mode 100644 index 0000000000..e45b9e893c --- /dev/null +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss @@ -0,0 +1,55 @@ +.explorer-option-droppable-container { + position: fixed; + bottom: 0; + width: -webkit-fill-available; + height: 24px; + display: flex; + justify-content: center; + border-radius: 10px 10px 0px 0px; + // box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + // backdrop-filter: blur(20px); + + .explorer-actions-btn { + display: flex; + gap: 8px; + margin-right: 8px; + + .action-btn { + display: flex; + justify-content: center; + align-items: center; + border-radius: 10px 10px 0px 0px; + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + height: 24px !important; + border: none; + } + } + + .explorer-show-btn { + border-radius: 10px 10px 0px 0px; + border: 1px solid var(--bg-slate-400); + background: rgba(22, 24, 29, 0.40); + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + align-self: center; + padding: 8px 12px; + height: 24px !important; + + .menu-bar { + border-radius: 50px; + background: var(--bg-slate-200); + height: 4px; + width: 50px; + } + } +} + +.lightMode { + .explorer-option-droppable-container { + + .explorer-show-btn { + background: var(--bg-vanilla-200); + } + } +} \ No newline at end of file diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx new file mode 100644 index 0000000000..f5e7faf0dc --- /dev/null +++ 
b/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx @@ -0,0 +1,78 @@ +/* eslint-disable no-nested-ternary */ +import './ExplorerOptionsHideArea.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Button, Tooltip } from 'antd'; +import { Disc3, X } from 'lucide-react'; +import { Dispatch, SetStateAction } from 'react'; +import { DataSource } from 'types/common/queryBuilder'; + +import { setExplorerToolBarVisibility } from './utils'; + +interface DroppableAreaProps { + isQueryUpdated: boolean; + isExplorerOptionHidden?: boolean; + sourcepage: DataSource; + setIsExplorerOptionHidden?: Dispatch>; + handleClearSelect: () => void; + onUpdateQueryHandler: () => void; +} + +function ExplorerOptionsHideArea({ + isQueryUpdated, + isExplorerOptionHidden, + sourcepage, + setIsExplorerOptionHidden, + handleClearSelect, + onUpdateQueryHandler, +}: DroppableAreaProps): JSX.Element { + const handleShowExplorerOption = (): void => { + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(false); + setExplorerToolBarVisibility(true, sourcepage); + } + }; + + return ( +
+ {isExplorerOptionHidden && ( + <> + {isQueryUpdated && ( +
+ +
+ )} + + + )} +
+ ); +} + +ExplorerOptionsHideArea.defaultProps = { + isExplorerOptionHidden: undefined, + setIsExplorerOptionHidden: undefined, +}; + +export default ExplorerOptionsHideArea; diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts index e3ac710609..d94e64161e 100644 --- a/frontend/src/container/ExplorerOptions/utils.ts +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -1,5 +1,6 @@ import { Color } from '@signozhq/design-tokens'; import { showErrorNotification } from 'components/ExplorerCard/utils'; +import { LOCALSTORAGE } from 'constants/localStorage'; import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; @@ -67,3 +68,54 @@ export const generateRGBAFromHex = (hex: string, opacity: number): string => hex.slice(3, 5), 16, )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; + +export const getExplorerToolBarVisibility = (dataSource: string): boolean => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar === null) { + const parsedShowExplorerToolbar: { + [DataSource.LOGS]: boolean; + [DataSource.TRACES]: boolean; + [DataSource.METRICS]: boolean; + } = { + [DataSource.METRICS]: true, + [DataSource.TRACES]: true, + [DataSource.LOGS]: true, + }; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return true; + } + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar || '{}'); + return parsedShowExplorerToolbar[dataSource]; + } catch (error) { + console.error(error); + return false; + } +}; + +export const setExplorerToolBarVisibility = ( + value: boolean, + dataSource: string, +): void => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar) { + const 
parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar); + parsedShowExplorerToolbar[dataSource] = value; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return; + } + } catch (error) { + console.error(error); + } +}; diff --git a/frontend/src/container/LogsExplorerViews/index.tsx b/frontend/src/container/LogsExplorerViews/index.tsx index e07450229a..c814ac8cb6 100644 --- a/frontend/src/container/LogsExplorerViews/index.tsx +++ b/frontend/src/container/LogsExplorerViews/index.tsx @@ -15,7 +15,7 @@ import { } from 'constants/queryBuilder'; import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config'; import Download from 'container/DownloadV2/DownloadV2'; -import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions'; +import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper'; import GoToTop from 'container/GoToTop'; import LogsExplorerChart from 'container/LogsExplorerChart'; import LogsExplorerList from 'container/LogsExplorerList'; @@ -659,7 +659,7 @@ function LogsExplorerViews({ - - From 2b3d1c8ee5ad8eb83e0d3eae245a226738bc4fde Mon Sep 17 00:00:00 2001 From: Tan Wei Been <63707630+wbtan7@users.noreply.github.com> Date: Tue, 26 Mar 2024 19:39:59 +0800 Subject: [PATCH 20/53] [Fix]: Using exported dashboards as input to dashboard provisioning #2 (#4726) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(be,fe): upsert dashboard on provision, export with uuid from frontend * chore(fe): formatting in dashboard description * fix: miss out while merging --------- Co-authored-by: HÃ¥vard Co-authored-by: Srikanth Chekuri Co-authored-by: Haavasma <61970295+Haavasma@users.noreply.github.com> --- .../DashboardDescription/index.tsx | 7 ++++- frontend/src/types/api/dashboard/getAll.ts | 1 + pkg/query-service/app/dashboards/model.go | 20 +++++++------ pkg/query-service/app/dashboards/provision.go | 29 ++++++++++++++----- 4 
files changed, 40 insertions(+), 17 deletions(-) diff --git a/frontend/src/container/NewDashboard/DashboardDescription/index.tsx b/frontend/src/container/NewDashboard/DashboardDescription/index.tsx index 996c508da4..c916ec7501 100644 --- a/frontend/src/container/NewDashboard/DashboardDescription/index.tsx +++ b/frontend/src/container/NewDashboard/DashboardDescription/index.tsx @@ -23,7 +23,12 @@ function DashboardDescription(): JSX.Element { handleDashboardLockToggle, } = useDashboard(); - const selectedData = selectedDashboard?.data || ({} as DashboardData); + const selectedData = selectedDashboard + ? { + ...selectedDashboard.data, + uuid: selectedDashboard.uuid, + } + : ({} as DashboardData); const { title = '', tags, description } = selectedData || {}; diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts index e616ee28ea..ba23e55186 100644 --- a/frontend/src/types/api/dashboard/getAll.ts +++ b/frontend/src/types/api/dashboard/getAll.ts @@ -55,6 +55,7 @@ export interface Dashboard { } export interface DashboardData { + uuid?: string; description?: string; tags?: string[]; name?: string; diff --git a/pkg/query-service/app/dashboards/model.go b/pkg/query-service/app/dashboards/model.go index 698b697279..6e777f49c9 100644 --- a/pkg/query-service/app/dashboards/model.go +++ b/pkg/query-service/app/dashboards/model.go @@ -25,12 +25,14 @@ import ( var db *sqlx.DB // User for mapping job,instance from grafana -var instanceEQRE = regexp.MustCompile("instance(?s)=(?s)\\\"{{.instance}}\\\"") -var nodeEQRE = regexp.MustCompile("instance(?s)=(?s)\\\"{{.node}}\\\"") -var jobEQRE = regexp.MustCompile("job(?s)=(?s)\\\"{{.job}}\\\"") -var instanceRERE = regexp.MustCompile("instance(?s)=~(?s)\\\"{{.instance}}\\\"") -var nodeRERE = regexp.MustCompile("instance(?s)=~(?s)\\\"{{.node}}\\\"") -var jobRERE = regexp.MustCompile("job(?s)=~(?s)\\\"{{.job}}\\\"") +var ( + instanceEQRE = 
regexp.MustCompile("instance(?s)=(?s)\\\"{{.instance}}\\\"") + nodeEQRE = regexp.MustCompile("instance(?s)=(?s)\\\"{{.node}}\\\"") + jobEQRE = regexp.MustCompile("job(?s)=(?s)\\\"{{.job}}\\\"") + instanceRERE = regexp.MustCompile("instance(?s)=~(?s)\\\"{{.instance}}\\\"") + nodeRERE = regexp.MustCompile("instance(?s)=~(?s)\\\"{{.node}}\\\"") + jobRERE = regexp.MustCompile("job(?s)=~(?s)\\\"{{.job}}\\\"") +) // InitDB sets up setting up the connection pool global variable. func InitDB(dataSourceName string) (*sqlx.DB, error) { @@ -188,6 +190,9 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interf dash.UpdateBy = &userEmail dash.UpdateSlug() dash.Uuid = uuid.New().String() + if data["uuid"] != nil { + dash.Uuid = data["uuid"].(string) + } mapData, err := json.Marshal(dash.Data) if err != nil { @@ -211,7 +216,6 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interf return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } lastInsertId, err := result.LastInsertId() - if err != nil { return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -255,7 +259,6 @@ func DeleteDashboard(ctx context.Context, uuid string, fm interfaces.FeatureLook query := `DELETE FROM dashboards WHERE uuid=?` result, err := db.Exec(query, uuid) - if err != nil { return &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -419,7 +422,6 @@ func (d *Dashboard) UpdateSlug() { } func IsPostDataSane(data *map[string]interface{}) error { - val, ok := (*data)["title"] if !ok || val == nil { return fmt.Errorf("title not found in post data") diff --git a/pkg/query-service/app/dashboards/provision.go b/pkg/query-service/app/dashboards/provision.go index 6f60dc50fe..049ae42e72 100644 --- a/pkg/query-service/app/dashboards/provision.go +++ b/pkg/query-service/app/dashboards/provision.go @@ -10,6 +10,7 @@ import ( "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/interfaces" + 
"go.signoz.io/signoz/pkg/query-service/model" ) func readCurrentDir(dir string, fm interfaces.FeatureLookup) error { @@ -43,22 +44,36 @@ func readCurrentDir(dir string, fm interfaces.FeatureLookup) error { continue } - _, apiErr := GetDashboard(context.Background(), data["uuid"].(string)) - if apiErr == nil { - zap.S().Infof("Creating Dashboards: Error in file: %s\t%s", filename, "Dashboard already present in database") + id := data["uuid"] + if id == nil { + _, apiErr := CreateDashboard(context.Background(), data, fm) + if apiErr != nil { + zap.S().Errorf("Creating Dashboards: Error in file: %s\t%s", filename, apiErr.Err) + } continue } - _, apiErr = CreateDashboard(context.Background(), data, fm) + apiErr := upsertDashboard(id.(string), data, filename, fm) if apiErr != nil { - zap.S().Errorf("Creating Dashboards: Error in file: %s\t%s", filename, apiErr.Err) - continue + zap.S().Errorf("Creating Dashboards: Error upserting dashboard: %s\t%s", filename, apiErr.Err) } - } return nil } +func upsertDashboard(uuid string, data map[string]interface{}, filename string, fm interfaces.FeatureLookup) *model.ApiError { + _, apiErr := GetDashboard(context.Background(), uuid) + if apiErr == nil { + zap.S().Infof("Creating Dashboards: Already exists: %s\t%s", filename, "Dashboard already present in database, Updating dashboard") + _, apiErr := UpdateDashboard(context.Background(), uuid, data, fm) + return apiErr + } + + zap.S().Infof("Creating Dashboards: UUID not found: %s\t%s", filename, "Dashboard not present in database, Creating dashboard") + _, apiErr = CreateDashboard(context.Background(), data, fm) + return apiErr +} + func LoadDashboardFiles(fm interfaces.FeatureLookup) error { dashboardsPath := constants.GetOrDefaultEnv("DASHBOARDS_PATH", "./config/dashboards") return readCurrentDir(dashboardsPath, fm) From 9e02147d4ca49c4a137dcdaa14d304cb215dcf9a Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Tue, 26 Mar 2024 23:54:31 +0530 Subject: [PATCH 21/53] fix: [SIG-574]: 
support __ in the groupBy clause (#4747) --- .../filters/GroupByFilter/GroupByFilter.tsx | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx index 386786f70c..e7b00756f5 100644 --- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx @@ -1,11 +1,7 @@ import { Select, Spin } from 'antd'; import { getAggregateKeys } from 'api/queryBuilder/getAttributeKeys'; // ** Constants -import { - idDivider, - QueryBuilderKeys, - selectValueDivider, -} from 'constants/queryBuilder'; +import { idDivider, QueryBuilderKeys } from 'constants/queryBuilder'; import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig'; import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys'; import useDebounce from 'hooks/useDebounce'; @@ -83,11 +79,7 @@ export const GroupByFilter = memo(function GroupByFilter({ dataType={item.dataType || ''} /> ), - value: `${transformStringWithPrefix({ - str: item.key, - prefix: item.type || '', - condition: !item.isColumn, - })}${selectValueDivider}${item.id}`, + value: `${item.id}`, })) || []; setOptionsData(options); @@ -135,7 +127,8 @@ export const GroupByFilter = memo(function GroupByFilter({ const keys = await getAttributeKeys(); const groupByValues: BaseAutocompleteData[] = values.map((item) => { - const [currentValue, id] = item.value.split(selectValueDivider); + const id = item.value; + const currentValue = item.value.split(idDivider)[0]; if (id && id.includes(idDivider)) { const attribute = keys.find((item) => item.id === id); @@ -174,11 +167,7 @@ export const GroupByFilter = memo(function GroupByFilter({ condition: !item.isColumn, }), )}`, - value: `${transformStringWithPrefix({ - str: item.key, - prefix: item.type || '', - condition: 
!item.isColumn, - })}${selectValueDivider}${item.id}`, + value: `${item.id}`, }), ); From ae594061e9e7d75c6b5f9663b2c8505c2c9e24f0 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Wed, 27 Mar 2024 00:07:29 +0530 Subject: [PATCH 22/53] chore: fix query-service logging (#4696) --- ee/query-service/app/api/auth.go | 40 +- ee/query-service/app/api/license.go | 8 +- ee/query-service/app/api/metrics.go | 4 +- ee/query-service/app/api/pat.go | 12 +- ee/query-service/app/api/traces.go | 4 +- ee/query-service/app/db/metrics.go | 49 +-- ee/query-service/app/db/trace.go | 4 +- ee/query-service/app/server.go | 28 +- ee/query-service/auth/auth.go | 12 +- ee/query-service/dao/sqlite/auth.go | 24 +- ee/query-service/dao/sqlite/domain.go | 22 +- ee/query-service/dao/sqlite/pat.go | 12 +- .../integrations/signozio/signozio.go | 6 +- ee/query-service/license/db.go | 4 +- ee/query-service/license/manager.go | 24 +- ee/query-service/main.go | 39 +- ee/query-service/model/domain.go | 45 +- ee/query-service/sso/saml/request.go | 2 +- ee/query-service/usage/manager.go | 22 +- pkg/query-service/agentConf/db.go | 12 +- pkg/query-service/agentConf/manager.go | 22 +- .../app/clickhouseReader/options.go | 2 +- .../app/clickhouseReader/reader.go | 411 ++++++++---------- pkg/query-service/app/dashboards/model.go | 36 +- pkg/query-service/app/dashboards/provision.go | 16 +- pkg/query-service/app/http_handler.go | 86 ++-- .../logparsingpipeline/collector_config.go | 2 +- .../app/logparsingpipeline/controller.go | 6 +- .../app/logparsingpipeline/db.go | 8 +- .../app/opamp/configure_ingestionRules.go | 14 +- pkg/query-service/app/opamp/model/agent.go | 4 +- pkg/query-service/app/opamp/model/agents.go | 4 +- pkg/query-service/app/opamp/opamp_server.go | 15 +- .../app/opamp/pipeline_builder.go | 8 +- pkg/query-service/app/querier/helper.go | 24 +- pkg/query-service/app/querier/querier.go | 10 +- pkg/query-service/app/querier/v2/helper.go | 16 +- pkg/query-service/app/querier/v2/querier.go | 10 
+- .../app/queryBuilder/query_builder.go | 2 +- pkg/query-service/app/server.go | 32 +- pkg/query-service/auth/auth.go | 40 +- pkg/query-service/auth/jwt.go | 2 +- pkg/query-service/cache/redis/redis.go | 6 +- pkg/query-service/constants/constants.go | 4 +- pkg/query-service/dao/sqlite/connection.go | 2 +- pkg/query-service/featureManager/manager.go | 6 +- .../integrations/alertManager/manager.go | 24 +- .../integrations/alertManager/notifier.go | 12 +- pkg/query-service/main.go | 6 +- .../queryBuilderToExpr/queryBuilderToExpr.go | 4 +- pkg/query-service/rules/alerting.go | 10 +- pkg/query-service/rules/apiParams.go | 6 - pkg/query-service/rules/db.go | 18 +- pkg/query-service/rules/manager.go | 89 ++-- pkg/query-service/rules/promRule.go | 8 +- pkg/query-service/rules/promRuleTask.go | 6 +- pkg/query-service/rules/ruleTask.go | 14 +- pkg/query-service/rules/thresholdRule.go | 53 ++- pkg/query-service/telemetry/telemetry.go | 2 +- pkg/query-service/tests/docker.go | 3 +- pkg/query-service/utils/format.go | 4 +- pkg/query-service/utils/time.go | 2 +- pkg/query-service/version/version.go | 4 +- 63 files changed, 689 insertions(+), 737 deletions(-) diff --git a/ee/query-service/app/api/auth.go b/ee/query-service/app/api/auth.go index a469b99e33..9ec99a4cc1 100644 --- a/ee/query-service/app/api/auth.go +++ b/ee/query-service/app/api/auth.go @@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() requestBody, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("received no input in api\n", err) + zap.L().Error("received no input in api", zap.Error(err)) RespondError(w, model.BadRequest(err), nil) return } @@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { err = json.Unmarshal(requestBody, &req) if err != nil { - zap.S().Errorf("received invalid user registration request", zap.Error(err)) + zap.L().Error("received invalid user registration request", 
zap.Error(err)) RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil) return } @@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { // get invite object invite, err := baseauth.ValidateInvite(ctx, req) if err != nil { - zap.S().Errorf("failed to validate invite token", err) + zap.L().Error("failed to validate invite token", zap.Error(err)) RespondError(w, model.BadRequest(err), nil) return } if invite == nil { - zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err) + zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err)) RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil) return } @@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { // get auth domain from email domain domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email) if apierr != nil { - zap.S().Errorf("failed to get domain from email", apierr) + zap.L().Error("failed to get domain from email", zap.Error(apierr)) RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil) } @@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) ctx := context.Background() if !ah.CheckFeature(model.SSO) { - zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO) + zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain") http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) return } q := r.URL.Query() if errType := q.Get("error"); errType != "" { - zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description")) + zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), 
zap.String("error_description", q.Get("error_description"))) http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently) return } relayState := q.Get("state") - zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) + zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) parsedState, err := url.Parse(relayState) if err != nil || relayState == "" { - zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r) + zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) handleSsoError(w, r, redirectUri) return } @@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) identity, err := callbackHandler.HandleCallback(r) if err != nil { - zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err)) + zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err)) handleSsoError(w, r, redirectUri) return } nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email) if err != nil { - zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err)) + zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) handleSsoError(w, r, redirectUri) return } @@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { ctx := context.Background() if !ah.CheckFeature(model.SSO) { - zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO) + zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain") http.Redirect(w, 
r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) return } err := r.ParseForm() if err != nil { - zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r) + zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) handleSsoError(w, r, redirectUri) return } @@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { // the relay state is sent when a login request is submitted to // Idp. relayState := r.FormValue("RelayState") - zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState)) + zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState)) parsedState, err := url.Parse(relayState) if err != nil || relayState == "" { - zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r) + zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) handleSsoError(w, r, redirectUri) return } @@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { sp, err := domain.PrepareSamlRequest(parsedState) if err != nil { - zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err) + zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err)) handleSsoError(w, r, redirectUri) return } assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse")) if err != nil { - zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err) + zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err)) 
handleSsoError(w, r, redirectUri) return } if assertionInfo.WarningInfo.InvalidTime { - zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err) + zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err)) handleSsoError(w, r, redirectUri) return } email := assertionInfo.NameID if email == "" { - zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String()) + zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String())) handleSsoError(w, r, redirectUri) return } nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email) if err != nil { - zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err)) + zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) handleSsoError(w, r, redirectUri) return } diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go index 5c397020b1..51cfddefb1 100644 --- a/ee/query-service/app/api/license.go +++ b/ee/query-service/app/api/license.go @@ -191,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey) req, err := http.NewRequest("GET", url, nil) if err != nil { - zap.S().Error("Error while creating request for trial details", err) + zap.L().Error("Error while creating request for trial details", zap.Error(err)) // If there is an error in fetching trial details, we will still return the license details // to avoid blocking the UI ah.Respond(w, resp) @@ -200,7 +200,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) trialResp, err := hClient.Do(req) if err != nil { - 
zap.S().Error("Error while fetching trial details", err) + zap.L().Error("Error while fetching trial details", zap.Error(err)) // If there is an error in fetching trial details, we will still return the license details // to avoid incorrectly blocking the UI ah.Respond(w, resp) @@ -211,7 +211,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { trialRespBody, err := io.ReadAll(trialResp.Body) if err != nil || trialResp.StatusCode != http.StatusOK { - zap.S().Error("Error while fetching trial details", err) + zap.L().Error("Error while fetching trial details", zap.Error(err)) // If there is an error in fetching trial details, we will still return the license details // to avoid incorrectly blocking the UI ah.Respond(w, resp) @@ -222,7 +222,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { var trialRespData model.SubscriptionServerResp if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil { - zap.S().Error("Error while decoding trial details", err) + zap.L().Error("Error while decoding trial details", zap.Error(err)) // If there is an error in fetching trial details, we will still return the license details // to avoid incorrectly blocking the UI ah.Respond(w, resp) diff --git a/ee/query-service/app/api/metrics.go b/ee/query-service/app/api/metrics.go index 81af7035b7..7c0e320f45 100644 --- a/ee/query-service/app/api/metrics.go +++ b/ee/query-service/app/api/metrics.go @@ -18,14 +18,14 @@ import ( func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) { if !ah.CheckFeature(basemodel.CustomMetricsFunction) { - zap.S().Info("CustomMetricsFunction feature is not enabled in this plan") + zap.L().Info("CustomMetricsFunction feature is not enabled in this plan") ah.APIHandler.QueryRangeMetricsV2(w, r) return } metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + 
zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err)) RespondError(w, apiErrorObj, nil) return } diff --git a/ee/query-service/app/api/pat.go b/ee/query-service/app/api/pat.go index ea43f47fb0..3ff8be74a2 100644 --- a/ee/query-service/app/api/pat.go +++ b/ee/query-service/app/api/pat.go @@ -43,8 +43,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) { return } pat := model.PAT{ - Name: req.Name, - Role: req.Role, + Name: req.Name, + Role: req.Role, ExpiresAt: req.ExpiresInDays, } err = validatePATRequest(pat) @@ -65,7 +65,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) { pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60) } - zap.S().Debugf("Got Create PAT request: %+v", pat) + zap.L().Info("Got Create PAT request", zap.Any("pat", pat)) var apierr basemodel.BaseApiError if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil { RespondError(w, apierr, nil) @@ -115,7 +115,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) { req.UpdatedByUserID = user.Id id := mux.Vars(r)["id"] req.UpdatedAt = time.Now().Unix() - zap.S().Debugf("Got Update PAT request: %+v", req) + zap.L().Info("Got Update PAT request", zap.Any("pat", req)) var apierr basemodel.BaseApiError if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil { RespondError(w, apierr, nil) @@ -135,7 +135,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) { }, nil) return } - zap.S().Infof("Get PATs for user: %+v", user.Id) + zap.L().Info("Get PATs for user", zap.String("user_id", user.Id)) pats, apierr := ah.AppDao().ListPATs(ctx) if apierr != nil { RespondError(w, apierr, nil) @@ -156,7 +156,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) { return } - zap.S().Debugf("Revoke PAT with id: %+v", id) + zap.L().Info("Revoke PAT with id", zap.String("id", id)) if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil { 
RespondError(w, apierr, nil) return diff --git a/ee/query-service/app/api/traces.go b/ee/query-service/app/api/traces.go index 22d66f7a82..ee18b2f50b 100644 --- a/ee/query-service/app/api/traces.go +++ b/ee/query-service/app/api/traces.go @@ -15,7 +15,7 @@ import ( func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) { if !ah.CheckFeature(basemodel.SmartTraceDetail) { - zap.S().Info("SmartTraceDetail feature is not enabled in this plan") + zap.L().Info("SmartTraceDetail feature is not enabled in this plan") ah.APIHandler.SearchTraces(w, r) return } @@ -26,7 +26,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) { } spanLimit, err := strconv.Atoi(constants.SpanLimitStr) if err != nil { - zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err) + zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err)) return } result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm) diff --git a/ee/query-service/app/db/metrics.go b/ee/query-service/app/db/metrics.go index 3bafc6a638..c7b41b17f5 100644 --- a/ee/query-service/app/db/metrics.go +++ b/ee/query-service/app/db/metrics.go @@ -22,7 +22,7 @@ import ( func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) { defer utils.Elapsed("GetMetricResult")() - zap.S().Infof("Executing metric result query: %s", query) + zap.L().Info("Executing metric result query: ", zap.String("query", query)) var hash string // If getSubTreeSpans function is used in the clickhouse query @@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) } rows, err := r.conn.Query(ctx, query) - zap.S().Debug(query) if err != nil { - zap.S().Debug("Error in processing query: ", err) + zap.L().Error("Error in processing query", zap.Error(err)) return nil, "", fmt.Errorf("error in 
processing query") } @@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()) } default: - zap.S().Errorf("invalid var found in metric builder query result", v, colName) + zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName)) } } sort.Strings(groupBy) @@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) } // err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash) // if err != nil { - // zap.S().Error("Error in dropping temporary table: ", err) + // zap.L().Error("Error in dropping temporary table: ", err) // return nil, err // } if hash == "" { @@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) { - zap.S().Debugf("Executing getSubTreeSpans function") + zap.L().Debug("Executing getSubTreeSpans function") // str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;` @@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash) if err != nil { - zap.S().Error("Error in dropping temporary table: ", err) + zap.L().Error("Error in dropping temporary table", zap.Error(err)) return query, hash, err } // Create temporary table to store the 
getSubTreeSpans() results - zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash)) err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)") if err != nil { - zap.S().Error("Error in creating temporary table: ", err) + zap.L().Error("Error in creating temporary table", zap.Error(err)) return query, hash, err } var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse getSpansSubQuery := subtreeInput // Execute the subTree query - zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery) + zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery)) err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery) - // zap.S().Info(getSpansSubQuery) + // zap.L().Info(getSpansSubQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, fmt.Errorf("Error in processing sql query") } @@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu if len(getSpansSubQueryDBResponses) == 0 { return query, hash, fmt.Errorf("No spans found for the given query") } - zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery) + zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", 
zap.String("modelQuery", modelQuery)) err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, fmt.Errorf("Error in processing sql query") } // Process model to fetch the spans - zap.S().Debugf("Processing model to fetch the spans") + zap.L().Debug("Processing model to fetch the spans") searchSpanResponses := []basemodel.SearchSpanResponseItem{} for _, item := range searchScanResponses { var jsonItem basemodel.SearchSpanResponseItem @@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu } // Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash // Use map to store pointer to the spans to avoid duplicates and save memory - zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses) if err != nil { - zap.S().Error("Error in getSubTreeAlgorithm function: ", err) + zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err)) return query, hash, err } - zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash)) if err != nil { - zap.S().Error("Error in preparing batch statement: ", err) + zap.L().Error("Error in preparing batch statement", zap.Error(err)) return query, hash, err } for _, span := range treeSearchResponse { @@ -251,14 +250,14 
@@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu span.Events, ) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return query, hash, err } } - zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash) + zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) err = statement.Send() if err != nil { - zap.S().Error("Error in sending statement: ", err) + zap.L().Error("Error in sending statement", zap.Error(err)) return query, hash, err } return query, hash, nil @@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub spans = append(spans, span) } - zap.S().Debug("Building Tree") + zap.L().Debug("Building Tree") roots, err := buildSpanTrees(&spans) if err != nil { return nil, err @@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub // For each root, get the subtree spans for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses { targetSpan := &model.SpanForTraceDetails{} - // zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) + // zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) // Search target span object in the tree for _, root := range roots { targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID) @@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub break } if err != nil { - zap.S().Error("Error during BreadthFirstSearch(): ", err) + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) return nil, err } } diff --git a/ee/query-service/app/db/trace.go 
b/ee/query-service/app/db/trace.go index 529a9a93fd..c6fe9045cf 100644 --- a/ee/query-service/app/db/trace.go +++ b/ee/query-service/app/db/trace.go @@ -49,7 +49,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI break } if err != nil { - zap.S().Error("Error during BreadthFirstSearch(): ", err) + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) return nil, err } } @@ -186,7 +186,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra // If the parent span is not found, add current span to list of roots if parent == nil { - // zap.S().Debug("Parent Span not found parent_id: ", span.ParentID) + // zap.L().Debug("Parent Span not found parent_id: ", span.ParentID) roots = append(roots, span) span.ParentID = "" continue diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 11ef8dffe0..c742eef01b 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -134,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { var reader interfaces.DataConnector storage := os.Getenv("STORAGE") if storage == "clickhouse" { - zap.S().Info("Using ClickHouse as datastore ...") + zap.L().Info("Using ClickHouse as datastore ...") qb := db.NewDataConnector( localDB, serverOptions.PromConfigPath, @@ -525,7 +525,7 @@ func (s *Server) initListeners() error { return err } - zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) // listen on private port to support internal services privateHostPort := s.serverOptions.PrivateHostPort @@ -538,7 +538,7 @@ func (s *Server) initListeners() error { if err != nil { return err } - zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on 
private port %s...", s.serverOptions.PrivateHostPort)) return nil } @@ -550,7 +550,7 @@ func (s *Server) Start() error { if !s.serverOptions.DisableRules { s.ruleManager.Start() } else { - zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE") + zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE") } err := s.initListeners() @@ -564,23 +564,23 @@ func (s *Server) Start() error { } go func() { - zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) + zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) switch err := s.httpServer.Serve(s.httpConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do default: - zap.S().Error("Could not start HTTP server", zap.Error(err)) + zap.L().Error("Could not start HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable }() go func() { - zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort)) + zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort)) err = http.ListenAndServe(baseconst.DebugHttpPort, nil) if err != nil { - zap.S().Error("Could not start pprof server", zap.Error(err)) + zap.L().Error("Could not start pprof server", zap.Error(err)) } }() @@ -590,14 +590,14 @@ func (s *Server) Start() error { } go func() { - zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) + zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) switch err := s.privateHTTP.Serve(s.privateConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do - zap.S().Info("private http server closed") + zap.L().Info("private http server closed") default: - zap.S().Error("Could not start private HTTP server", 
zap.Error(err)) + zap.L().Error("Could not start private HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable @@ -605,10 +605,10 @@ func (s *Server) Start() error { }() go func() { - zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) + zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) err := s.opampServer.Start(baseconst.OpAmpWsEndpoint) if err != nil { - zap.S().Info("opamp ws server failed to start", err) + zap.L().Error("opamp ws server failed to start", zap.Error(err)) s.unavailableChannel <- healthcheck.Unavailable } }() @@ -684,7 +684,7 @@ func makeRulesManager( return nil, fmt.Errorf("rule manager error: %v", err) } - zap.S().Info("rules manager is ready") + zap.L().Info("rules manager is ready") return manager, nil } diff --git a/ee/query-service/auth/auth.go b/ee/query-service/auth/auth.go index 8c06384549..d45d050cca 100644 --- a/ee/query-service/auth/auth.go +++ b/ee/query-service/auth/auth.go @@ -17,25 +17,25 @@ import ( func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) { patToken := r.Header.Get("SIGNOZ-API-KEY") if len(patToken) > 0 { - zap.S().Debugf("Received a non-zero length PAT token") + zap.L().Debug("Received a non-zero length PAT token") ctx := context.Background() dao := apiHandler.AppDao() pat, err := dao.GetPAT(ctx, patToken) if err == nil && pat != nil { - zap.S().Debugf("Found valid PAT: %+v", pat) + zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat)) if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 { - zap.S().Debugf("PAT has expired: %+v", pat) + zap.L().Info("PAT has expired: ", zap.Any("pat", pat)) return nil, fmt.Errorf("PAT has expired") } group, apiErr := dao.GetGroupByName(ctx, pat.Role) if apiErr != nil { - zap.S().Debugf("Error while getting group for PAT: %+v", apiErr) + zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr)) 
return nil, apiErr } user, err := dao.GetUser(ctx, pat.UserID) if err != nil { - zap.S().Debugf("Error while getting user for PAT: %+v", err) + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) return nil, err } telemetry.GetInstance().SetPatTokenUser() @@ -48,7 +48,7 @@ func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel }, nil } if err != nil { - zap.S().Debugf("Error while getting user for PAT: %+v", err) + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) return nil, err } } diff --git a/ee/query-service/dao/sqlite/auth.go b/ee/query-service/dao/sqlite/auth.go index 664323eaaf..4418b04cbf 100644 --- a/ee/query-service/dao/sqlite/auth.go +++ b/ee/query-service/dao/sqlite/auth.go @@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) ( domain, apierr := m.GetDomainByEmail(ctx, email) if apierr != nil { - zap.S().Errorf("failed to get domain from email", apierr) + zap.L().Error("failed to get domain from email", zap.Error(apierr)) return nil, model.InternalErrorStr("failed to get domain from email") } hash, err := baseauth.PasswordHash(utils.GeneratePassowrd()) if err != nil { - zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err)) + zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err)) return nil, model.InternalErrorStr("failed to generate password hash") } group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup) if apiErr != nil { - zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err) + zap.L().Error("GetGroupByName failed", zap.Error(apiErr)) return nil, apiErr } @@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) ( user, apiErr = m.CreateUser(ctx, user, false) if apiErr != nil { - zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err) + zap.L().Error("CreateUser failed", 
zap.Error(apiErr)) return nil, apiErr } @@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st userPayload, apierr := m.GetUserByEmail(ctx, email) if !apierr.IsNil() { - zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error()) + zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error())) return "", model.BadRequestStr("invalid user email received from the auth provider") } @@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st newUser, apiErr := m.createUserForSAMLRequest(ctx, email) user = newUser if apiErr != nil { - zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error()) + zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr)) return "", apiErr } } else { @@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st tokenStore, err := baseauth.GenerateJWTForUser(user) if err != nil { - zap.S().Errorf("failed to generate token for SSO login user", err) + zap.L().Error("failed to generate token for SSO login user", zap.Error(err)) return "", model.InternalErrorStr("failed to generate token for the user") } @@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( // do nothing, just skip sso ssoAvailable = false default: - zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) - return resp, model.BadRequest(err) + zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) + return resp, model.BadRequestStr(err.Error()) } } @@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( if len(emailComponents) > 0 { emailDomain = emailComponents[1] } - zap.S().Errorf("failed to get org domain from email", 
zap.String("emailDomain", emailDomain), apierr.ToError()) + zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError())) return resp, apierr } @@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( escapedUrl, _ := url.QueryUnescape(sourceUrl) siteUrl, err := url.Parse(escapedUrl) if err != nil { - zap.S().Errorf("failed to parse referer", err) + zap.L().Error("failed to parse referer", zap.Error(err)) return resp, model.InternalError(fmt.Errorf("failed to generate login request")) } @@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) ( resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl) if err != nil { - zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err) + zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err)) return resp, model.InternalError(err) } diff --git a/ee/query-service/dao/sqlite/domain.go b/ee/query-service/dao/sqlite/domain.go index b515af49c9..fbaa4fe332 100644 --- a/ee/query-service/dao/sqlite/domain.go +++ b/ee/query-service/dao/sqlite/domain.go @@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url if domainIdStr != "" { domainId, err := uuid.Parse(domainIdStr) if err != nil { - zap.S().Errorf("failed to parse domainId from relay state", err) + zap.L().Error("failed to parse domainId from relay state", zap.Error(err)) return nil, fmt.Errorf("failed to parse domainId from IdP response") } domain, err = m.GetDomain(ctx, domainId) if (err != nil) || domain == nil { - zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error()) + zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err)) return nil, fmt.Errorf("invalid credentials") } } @@ -64,7 +64,7 @@ func (m *modelDao) 
GetDomainFromSsoResponse(ctx context.Context, relayState *url domainFromDB, err := m.GetDomainByName(ctx, domainNameStr) domain = domainFromDB if (err != nil) || domain == nil { - zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error()) + zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err)) return nil, fmt.Errorf("invalid credentials") } } @@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo for _, s := range stored { domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId} if err := domain.LoadConfig(s.Data); err != nil { - zap.S().Errorf("ListDomains() failed", zap.Error(err)) + zap.L().Error("ListDomains() failed", zap.Error(err)) } domains = append(domains, domain) } @@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba configJson, err := json.Marshal(domain) if err != nil { - zap.S().Errorf("failed to unmarshal domain config", zap.Error(err)) + zap.L().Error("failed to unmarshal domain config", zap.Error(err)) return model.InternalError(fmt.Errorf("domain creation failed")) } @@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba time.Now().Unix()) if err != nil { - zap.S().Errorf("failed to insert domain in db", zap.Error(err)) + zap.L().Error("failed to insert domain in db", zap.Error(err)) return model.InternalError(fmt.Errorf("domain creation failed")) } @@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError { if domain.Id == uuid.Nil { - zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) return model.InternalError(fmt.Errorf("domain update failed")) } configJson, 
err := json.Marshal(domain) if err != nil { - zap.S().Errorf("domain update failed", zap.Error(err)) + zap.L().Error("domain update failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain update failed")) } @@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba domain.Id) if err != nil { - zap.S().Errorf("domain update failed", zap.Error(err)) + zap.L().Error("domain update failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain update failed")) } @@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError { if id == uuid.Nil { - zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) return model.InternalError(fmt.Errorf("domain delete failed")) } @@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas id) if err != nil { - zap.S().Errorf("domain delete failed", zap.Error(err)) + zap.L().Error("domain delete failed", zap.Error(err)) return model.InternalError(fmt.Errorf("domain delete failed")) } diff --git a/ee/query-service/dao/sqlite/pat.go b/ee/query-service/dao/sqlite/pat.go index b2af1640c3..75169db685 100644 --- a/ee/query-service/dao/sqlite/pat.go +++ b/ee/query-service/dao/sqlite/pat.go @@ -26,12 +26,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem p.Revoked, ) if err != nil { - zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err)) return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) } id, err := result.LastInsertId() if err != nil { - zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err)) + zap.L().Error("Failed to get last 
inserted id, err: %v", zap.Error(err)) return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) } p.Id = strconv.Itoa(int(id)) @@ -62,7 +62,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemo p.UpdatedByUserID, id) if err != nil { - zap.S().Errorf("Failed to update PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT update failed")) } return nil @@ -74,7 +74,7 @@ func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed lastUsed, token) if err != nil { - zap.S().Errorf("Failed to update PAT last used in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT last used update failed")) } return nil @@ -84,7 +84,7 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApi pats := []model.PAT{} if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil { - zap.S().Errorf("Failed to fetch PATs err: %v", zap.Error(err)) + zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err)) return nil, model.InternalError(fmt.Errorf("failed to fetch PATs")) } for i := range pats { @@ -129,7 +129,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) base "UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3", userID, updatedAt, id) if err != nil { - zap.S().Errorf("Failed to revoke PAT in db, err: %v", zap.Error(err)) + zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err)) return model.InternalError(fmt.Errorf("PAT revoke failed")) } return nil diff --git a/ee/query-service/integrations/signozio/signozio.go b/ee/query-service/integrations/signozio/signozio.go index c1ad5e57e4..c18cfb6572 100644 --- 
a/ee/query-service/integrations/signozio/signozio.go +++ b/ee/query-service/integrations/signozio/signozio.go @@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString)) if err != nil { - zap.S().Errorf("failed to connect to license.signoz.io", err) + zap.L().Error("failed to connect to license.signoz.io", zap.Error(err)) return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection")) } httpBody, err := io.ReadAll(httpResponse.Body) if err != nil { - zap.S().Errorf("failed to read activation response from license.signoz.io", err) + zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err)) return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io")) } @@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) result := ActivationResult{} err = json.Unmarshal(httpBody, &result) if err != nil { - zap.S().Errorf("failed to marshal activation response from license.signoz.io", err) + zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err)) return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response")) } diff --git a/ee/query-service/license/db.go b/ee/query-service/license/db.go index 8d2f7065ff..bf71e9376d 100644 --- a/ee/query-service/license/db.go +++ b/ee/query-service/license/db.go @@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error { l.ValidationMessage) if err != nil { - zap.S().Errorf("error in inserting license data: ", zap.Error(err)) + zap.L().Error("error in inserting license data: ", zap.Error(err)) return fmt.Errorf("failed to insert license in db: %v", err) } @@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context, 
_, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key) if err != nil { - zap.S().Errorf("error in updating license: ", zap.Error(err)) + zap.L().Error("error in updating license: ", zap.Error(err)) return fmt.Errorf("failed to update license in db: %v", err) } diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go index dcfa8235b1..56cb685fec 100644 --- a/ee/query-service/license/manager.go +++ b/ee/query-service/license/manager.go @@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) { err := lm.InitFeatures(lm.activeFeatures) if err != nil { - zap.S().Panicf("Couldn't activate features: %v", err) + zap.L().Panic("Couldn't activate features", zap.Error(err)) } if !lm.validatorRunning { // we want to make sure only one validator runs, @@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error { if active != nil { lm.SetActive(active) } else { - zap.S().Info("No active license found, defaulting to basic plan") + zap.L().Info("No active license found, defaulting to basic plan") // if no active license is found, we default to basic(free) plan with all default features lm.activeFeatures = model.BasicPlan setDefaultFeatures(lm) err := lm.InitFeatures(lm.activeFeatures) if err != nil { - zap.S().Error("Couldn't initialize features: ", err) + zap.L().Error("Couldn't initialize features", zap.Error(err)) return err } } @@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) { // Validate validates the current active license func (lm *Manager) Validate(ctx context.Context) (reterr error) { - zap.S().Info("License validation started") + zap.L().Info("License validation started") if lm.activeLicense == nil { return nil } @@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { lm.lastValidated = time.Now().Unix() if reterr != nil { - zap.S().Errorf("License validation completed with error", reterr) + zap.L().Error("License validation completed with error", 
zap.Error(reterr)) atomic.AddUint64(&lm.failedAttempts, 1) telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, map[string]interface{}{"err": reterr.Error()}, "") } else { - zap.S().Info("License validation completed with no errors") + zap.L().Info("License validation completed with no errors") } lm.mutex.Unlock() @@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId) if apiError != nil { - zap.S().Errorf("failed to validate license", apiError) + zap.L().Error("failed to validate license", zap.Error(apiError.Err)) return apiError.Err } @@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { } if err := l.ParsePlan(); err != nil { - zap.S().Errorf("failed to parse updated license", zap.Error(err)) + zap.L().Error("failed to parse updated license", zap.Error(err)) return err } @@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { if err != nil { // unexpected db write issue but we can let the user continue // and wait for update to work in next cycle. 
- zap.S().Errorf("failed to validate license", zap.Error(err)) + zap.L().Error("failed to validate license", zap.Error(err)) } } @@ -270,7 +270,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m response, apiError := validate.ActivateLicense(key, "") if apiError != nil { - zap.S().Errorf("failed to activate license", zap.Error(apiError.Err)) + zap.L().Error("failed to activate license", zap.Error(apiError.Err)) return nil, apiError } @@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m err := l.ParsePlan() if err != nil { - zap.S().Errorf("failed to activate license", zap.Error(err)) + zap.L().Error("failed to activate license", zap.Error(err)) return nil, model.InternalError(err) } // store the license before activating it err = lm.repo.InsertLicense(ctx, l) if err != nil { - zap.S().Errorf("failed to activate license", zap.Error(err)) + zap.L().Error("failed to activate license", zap.Error(err)) return nil, model.InternalError(err) } diff --git a/ee/query-service/main.go b/ee/query-service/main.go index 427f78059b..3323e5bdbd 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -14,10 +14,10 @@ import ( semconv "go.opentelemetry.io/otel/semconv/v1.4.0" "go.signoz.io/signoz/ee/query-service/app" "go.signoz.io/signoz/pkg/query-service/auth" - "go.signoz.io/signoz/pkg/query-service/constants" baseconst "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/version" "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder" zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync" @@ -27,18 +27,19 @@ import ( ) func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { - config := zap.NewDevelopmentConfig() + config := zap.NewProductionConfig() ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt) defer stop() - 
config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder - otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig) - consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig) - defaultLogLevel := zapcore.DebugLevel - config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder + config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder + config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder config.EncoderConfig.TimeKey = "timestamp" config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder + otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig) + consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig) + defaultLogLevel := zapcore.InfoLevel + res := resource.NewWithAttributes( semconv.SchemaURL, semconv.ServiceNameKey.String("query-service"), @@ -48,14 +49,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel), ) - if enableQueryServiceLogOTLPExport == true { - conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30)) + if enableQueryServiceLogOTLPExport { + ctx, _ := context.WithTimeout(ctx, time.Second*30) + conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials())) if err != nil { - log.Println("failed to connect to otlp collector to export query service logs with error:", err) + log.Fatalf("failed to establish connection: %v", err) } else { logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize) if err != nil { - logExportBatchSizeInt = 1000 + logExportBatchSizeInt = 512 } ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{ BatchSize: logExportBatchSizeInt, @@ -113,7 +115,6 @@ func main() { zap.ReplaceGlobals(loggerMgr) defer loggerMgr.Sync() // flushes buffer, if any - logger := loggerMgr.Sugar() version.PrintVersion() 
serverOptions := &app.ServerOptions{ @@ -137,22 +138,22 @@ func main() { auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET") if len(auth.JwtSecret) == 0 { - zap.S().Warn("No JWT secret key is specified.") + zap.L().Warn("No JWT secret key is specified.") } else { - zap.S().Info("No JWT secret key set successfully.") + zap.L().Info("JWT secret key set successfully.") } server, err := app.NewServer(serverOptions) if err != nil { - logger.Fatal("Failed to create server", zap.Error(err)) + zap.L().Fatal("Failed to create server", zap.Error(err)) } if err := server.Start(); err != nil { - logger.Fatal("Could not start servers", zap.Error(err)) + zap.L().Fatal("Could not start server", zap.Error(err)) } if err := auth.InitAuthCache(context.Background()); err != nil { - logger.Fatal("Failed to initialize auth cache", zap.Error(err)) + zap.L().Fatal("Failed to initialize auth cache", zap.Error(err)) } signalsChannel := make(chan os.Signal, 1) @@ -161,9 +162,9 @@ func main() { for { select { case status := <-server.HealthCheckStatus(): - logger.Info("Received HealthCheck status: ", zap.Int("status", int(status))) + zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status))) case <-signalsChannel: - logger.Fatal("Received OS Interrupt Signal ... ") + zap.L().Fatal("Received OS Interrupt Signal ... 
") server.Stop() } } diff --git a/ee/query-service/model/domain.go b/ee/query-service/model/domain.go index beadd66a51..4d5ff66df2 100644 --- a/ee/query-service/model/domain.go +++ b/ee/query-service/model/domain.go @@ -9,8 +9,8 @@ import ( "github.com/google/uuid" "github.com/pkg/errors" saml2 "github.com/russellhaering/gosaml2" - "go.signoz.io/signoz/ee/query-service/sso/saml" "go.signoz.io/signoz/ee/query-service/sso" + "go.signoz.io/signoz/ee/query-service/sso/saml" basemodel "go.signoz.io/signoz/pkg/query-service/model" "go.uber.org/zap" ) @@ -24,16 +24,16 @@ const ( // OrgDomain identify org owned web domains for auth and other purposes type OrgDomain struct { - Id uuid.UUID `json:"id"` - Name string `json:"name"` - OrgId string `json:"orgId"` - SsoEnabled bool `json:"ssoEnabled"` - SsoType SSOType `json:"ssoType"` + Id uuid.UUID `json:"id"` + Name string `json:"name"` + OrgId string `json:"orgId"` + SsoEnabled bool `json:"ssoEnabled"` + SsoType SSOType `json:"ssoType"` - SamlConfig *SamlConfig `json:"samlConfig"` + SamlConfig *SamlConfig `json:"samlConfig"` GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"` - Org *basemodel.Organization + Org *basemodel.Organization } func (od *OrgDomain) String() string { @@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string { return "" } -// PrepareGoogleOAuthProvider creates GoogleProvider that is used in -// requesting OAuth and also used in processing response from google +// PrepareGoogleOAuthProvider creates GoogleProvider that is used in +// requesting OAuth and also used in processing response from google func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) { if od.GoogleAuthConfig == nil { return nil, fmt.Errorf("Google auth is not setup correctly for this domain") @@ -137,38 +137,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro } func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { - 
fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1) - + // build redirect url from window.location sent by frontend redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path) // prepare state that gets relayed back when the auth provider // calls back our url. here we pass the app url (where signoz runs) // and the domain Id. The domain Id helps in identifying sso config - // when the call back occurs and the app url is useful in redirecting user - // back to the right path. + // when the call back occurs and the app url is useful in redirecting user + // back to the right path. // why do we need to pass app url? the callback typically is handled by backend // and sometimes backend might right at a different port or is unaware of frontend // endpoint (unless SITE_URL param is set). hence, we receive this build sso request - // along with frontend window.location and use it to relay the information through - // auth provider to the backend (HandleCallback or HandleSSO method). + // along with frontend window.location and use it to relay the information through + // auth provider to the backend (HandleCallback or HandleSSO method). 
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId) - - switch (od.SsoType) { + switch od.SsoType { case SAML: sp, err := od.PrepareSamlRequest(siteUrl) if err != nil { return "", err } - + return sp.BuildAuthURL(relayState) - + case GoogleAuth: - + googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl) if err != nil { return "", err @@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { return googleProvider.BuildAuthURL(relayState) default: - zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) - return "", fmt.Errorf("unsupported SSO config for the domain") + zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) + return "", fmt.Errorf("unsupported SSO config for the domain") } - } diff --git a/ee/query-service/sso/saml/request.go b/ee/query-service/sso/saml/request.go index 01af7afe28..c9788d0ff3 100644 --- a/ee/query-service/sso/saml/request.go +++ b/ee/query-service/sso/saml/request.go @@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (* IDPCertificateStore: certStore, SPKeyStore: randomKeyStore, } - zap.S().Debugf("SAML request:", sp) + zap.L().Debug("SAML request", zap.Any("sp", sp)) return sp, nil } diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go index 99158b4345..72535c9ae5 100644 --- a/ee/query-service/usage/manager.go +++ b/ee/query-service/usage/manager.go @@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() { // check if license is present or not license, err := lm.licenseRepo.GetActiveLicense(ctx) if err != nil { - zap.S().Errorf("failed to get active license: %v", zap.Error(err)) + zap.L().Error("failed to get active license", zap.Error(err)) return } if license == nil { // we will not start the usage reporting if license is not present. 
- zap.S().Info("no license present, skipping usage reporting") + zap.L().Info("no license present, skipping usage reporting") return } @@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() { dbusages := []model.UsageDB{} err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) if err != nil && !strings.Contains(err.Error(), "doesn't exist") { - zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err)) + zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) return } for _, u := range dbusages { @@ -133,16 +133,16 @@ func (lm *Manager) UploadUsage() { } if len(usages) <= 0 { - zap.S().Info("no snapshots to upload, skipping.") + zap.L().Info("no snapshots to upload, skipping.") return } - zap.S().Info("uploading usage data") + zap.L().Info("uploading usage data") orgName := "" orgNames, orgError := lm.modelDao.GetOrgs(ctx) if orgError != nil { - zap.S().Errorf("failed to get org data: %v", zap.Error(orgError)) + zap.L().Error("failed to get org data: %v", zap.Error(orgError)) } if len(orgNames) == 1 { orgName = orgNames[0].Name @@ -152,14 +152,14 @@ func (lm *Manager) UploadUsage() { for _, usage := range usages { usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data)) if err != nil { - zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err)) + zap.L().Error("error while decrypting usage data: %v", zap.Error(err)) return } usageData := model.Usage{} err = json.Unmarshal(usageDataBytes, &usageData) if err != nil { - zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err)) + zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err)) return } @@ -184,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload for i := 1; i <= MaxRetries; i++ { apiErr := licenseserver.SendUsage(ctx, payload) if apiErr != nil && i == MaxRetries { - zap.S().Errorf("retries stopped : 
%v", zap.Error(apiErr)) + zap.L().Error("retries stopped : %v", zap.Error(apiErr)) // not returning error here since it is captured in the failed count return } else if apiErr != nil { // sleeping for exponential backoff sleepDuration := RetryInterval * time.Duration(i) - zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err)) + zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err)) time.Sleep(sleepDuration) } else { break @@ -201,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload func (lm *Manager) Stop() { lm.scheduler.Stop() - zap.S().Debug("sending usage data before shutting down") + zap.L().Info("sending usage data before shutting down") // send usage before shutting down lm.UploadUsage() diff --git a/pkg/query-service/agentConf/db.go b/pkg/query-service/agentConf/db.go index ffbc2f53a8..04ab780db6 100644 --- a/pkg/query-service/agentConf/db.go +++ b/pkg/query-service/agentConf/db.go @@ -151,7 +151,7 @@ func (r *Repo) insertConfig( // allowing empty elements for logs - use case is deleting all pipelines if len(elements) == 0 && c.ElementType != ElementTypeLogPipelines { - zap.S().Error("insert config called with no elements ", c.ElementType) + zap.L().Error("insert config called with no elements ", zap.String("ElementType", string(c.ElementType))) return model.BadRequest(fmt.Errorf("config must have atleast one element")) } @@ -159,7 +159,7 @@ func (r *Repo) insertConfig( // the version can not be set by the user, we want to auto-assign the versions // in a monotonically increasing order starting with 1. hence, we reject insert // requests with version anything other than 0. 
here, 0 indicates un-assigned - zap.S().Error("invalid version assignment while inserting agent config", c.Version, c.ElementType) + zap.L().Error("invalid version assignment while inserting agent config", zap.Int("version", c.Version), zap.String("ElementType", string(c.ElementType))) return model.BadRequest(fmt.Errorf( "user defined versions are not supported in the agent config", )) @@ -167,7 +167,7 @@ func (r *Repo) insertConfig( configVersion, err := r.GetLatestVersion(ctx, c.ElementType) if err != nil && err.Type() != model.ErrorNotFound { - zap.S().Error("failed to fetch latest config version", err) + zap.L().Error("failed to fetch latest config version", zap.Error(err)) return model.InternalError(fmt.Errorf("failed to fetch latest config version")) } @@ -212,7 +212,7 @@ func (r *Repo) insertConfig( c.DeployResult) if dbErr != nil { - zap.S().Error("error in inserting config version: ", zap.Error(dbErr)) + zap.L().Error("error in inserting config version: ", zap.Error(dbErr)) return model.InternalError(errors.Wrap(dbErr, "failed to insert ingestion rule")) } @@ -258,7 +258,7 @@ func (r *Repo) updateDeployStatus(ctx context.Context, _, err := r.db.ExecContext(ctx, updateQuery, status, result, lastHash, lastconf, version, string(elementType)) if err != nil { - zap.S().Error("failed to update deploy status", err) + zap.L().Error("failed to update deploy status", zap.Error(err)) return model.BadRequest(fmt.Errorf("failed to update deploy status")) } @@ -276,7 +276,7 @@ func (r *Repo) updateDeployStatusByHash( _, err := r.db.ExecContext(ctx, updateQuery, status, result, confighash) if err != nil { - zap.S().Error("failed to update deploy status", err) + zap.L().Error("failed to update deploy status", zap.Error(err)) return model.InternalError(errors.Wrap(err, "failed to update deploy status")) } diff --git a/pkg/query-service/agentConf/manager.go b/pkg/query-service/agentConf/manager.go index 0fdab4e990..c9a7335e0b 100644 --- 
a/pkg/query-service/agentConf/manager.go +++ b/pkg/query-service/agentConf/manager.go @@ -224,19 +224,19 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr configVersion, err := GetConfigVersion(ctx, typ, version) if err != nil { - zap.S().Debug("failed to fetch config version during redeploy", err) + zap.L().Error("failed to fetch config version during redeploy", zap.Error(err)) return model.WrapApiError(err, "failed to fetch details of the config version") } if configVersion == nil || (configVersion != nil && configVersion.LastConf == "") { - zap.S().Debug("config version has no conf yaml", configVersion) + zap.L().Debug("config version has no conf yaml", zap.Any("configVersion", configVersion)) return model.BadRequest(fmt.Errorf("the config version can not be redeployed")) } switch typ { case ElementTypeSamplingRules: var config *tsp.Config if err := yaml.Unmarshal([]byte(configVersion.LastConf), &config); err != nil { - zap.S().Error("failed to read last conf correctly", err) + zap.L().Debug("failed to read last conf correctly", zap.Error(err)) return model.BadRequest(fmt.Errorf("failed to read the stored config correctly")) } @@ -248,7 +248,7 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr opamp.AddToTracePipelineSpec("signoz_tail_sampling") configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate) if err != nil { - zap.S().Error("failed to call agent config update for trace processor:", err) + zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) return model.InternalError(fmt.Errorf("failed to deploy the config")) } @@ -256,7 +256,7 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr case ElementTypeDropRules: var filterConfig *filterprocessor.Config if err := yaml.Unmarshal([]byte(configVersion.LastConf), &filterConfig); err != nil { - zap.S().Error("failed to read last conf correctly", 
err) + zap.L().Error("failed to read last conf correctly", zap.Error(err)) return model.InternalError(fmt.Errorf("failed to read the stored config correctly")) } processorConf := map[string]interface{}{ @@ -266,7 +266,7 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr opamp.AddToMetricsPipelineSpec("filter") configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate) if err != nil { - zap.S().Error("failed to call agent config update for trace processor:", err) + zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) return err } @@ -292,13 +292,13 @@ func UpsertFilterProcessor(ctx context.Context, version int, config *filterproce opamp.AddToMetricsPipelineSpec("filter") configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate) if err != nil { - zap.S().Error("failed to call agent config update for trace processor:", err) + zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) return err } processorConfYaml, yamlErr := yaml.Marshal(config) if yamlErr != nil { - zap.S().Warnf("unexpected error while transforming processor config to yaml", yamlErr) + zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr)) } m.updateDeployStatus(ctx, ElementTypeDropRules, version, string(DeployInitiated), "Deployment started", configHash, string(processorConfYaml)) @@ -317,7 +317,7 @@ func (m *Manager) OnConfigUpdate(agentId string, hash string, err error) { message := "Deployment was successful" defer func() { - zap.S().Info(status, zap.String("agentId", agentId), zap.String("agentResponse", message)) + zap.L().Info(status, zap.String("agentId", agentId), zap.String("agentResponse", message)) }() if err != nil { @@ -343,13 +343,13 @@ func UpsertSamplingProcessor(ctx context.Context, version int, config *tsp.Confi opamp.AddToTracePipelineSpec("signoz_tail_sampling") 
configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate) if err != nil { - zap.S().Error("failed to call agent config update for trace processor:", err) + zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) return err } processorConfYaml, yamlErr := yaml.Marshal(config) if yamlErr != nil { - zap.S().Warnf("unexpected error while transforming processor config to yaml", yamlErr) + zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr)) } m.updateDeployStatus(ctx, ElementTypeSamplingRules, version, string(DeployInitiated), "Deployment started", configHash, string(processorConfYaml)) diff --git a/pkg/query-service/app/clickhouseReader/options.go b/pkg/query-service/app/clickhouseReader/options.go index 0defced7ed..d92b5ee38f 100644 --- a/pkg/query-service/app/clickhouseReader/options.go +++ b/pkg/query-service/app/clickhouseReader/options.go @@ -106,7 +106,7 @@ func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) { options.DialTimeout = cfg.DialTimeout } - zap.S().Infof("Connecting to Clickhouse at %s, Secure: %t, MaxIdleConns: %d, MaxOpenConns: %d, DialTimeout: %s", options.Addr, options.TLS != nil, options.MaxIdleConns, options.MaxOpenConns, options.DialTimeout) + zap.L().Info("Connecting to Clickhouse", zap.String("at", options.Addr[0]), zap.Int("MaxIdleConns", options.MaxIdleConns), zap.Int("MaxOpenConns", options.MaxOpenConns), zap.Duration("DialTimeout", options.DialTimeout)) db, err := clickhouse.Open(options) if err != nil { return nil, err diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 00f3ca1ba6..a1b12d9415 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -141,8 +141,7 @@ func NewReader( db, err := initialize(options) if err != nil { - zap.S().Error("failed to initialize ClickHouse: ", 
err) - os.Exit(1) + zap.L().Fatal("failed to initialize ClickHouse", zap.Error(err)) } return NewReaderFromClickhouseConnection(db, options, localDB, configFile, featureFlag, cluster) @@ -158,8 +157,8 @@ func NewReaderFromClickhouseConnection( ) *ClickHouseReader { alertManager, err := am.New("") if err != nil { - zap.S().Errorf("msg: failed to initialize alert manager: ", "/t error:", err) - zap.S().Errorf("msg: check if the alert manager URL is correctly set and valid") + zap.L().Error("failed to initialize alert manager", zap.Error(err)) + zap.L().Error("check if the alert manager URL is correctly set and valid") os.Exit(1) } @@ -347,20 +346,6 @@ func (r *ClickHouseReader) Start(readerReady chan bool) { reloadReady.Close() - // ! commented the alert manager can now - // call query service to do this - // channels, apiErrorObj := r.GetChannels() - - // if apiErrorObj != nil { - // zap.S().Errorf("Not able to read channels from DB") - // } - // for _, channel := range *channels { - // apiErrorObj = r.LoadChannel(&channel) - // if apiErrorObj != nil { - // zap.S().Errorf("Not able to load channel with id=%d loaded from DB", channel.Id, channel.Data) - // } - // } - <-cancel return nil @@ -444,14 +429,14 @@ func (r *ClickHouseReader) LoadChannel(channel *model.ChannelItem) *model.ApiErr response, err := http.Post(constants.GetAlertManagerApiPrefix()+"v1/receivers", "application/json", bytes.NewBuffer([]byte(channel.Data))) if err != nil { - zap.S().Errorf("Error in getting response of API call to alertmanager/v1/receivers\n", err) + zap.L().Error("Error in getting response of API call to alertmanager/v1/receivers", zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if response.StatusCode > 299 { responseData, _ := io.ReadAll(response.Body) - err := fmt.Errorf("Error in getting 2xx response in API call to alertmanager/v1/receivers\n Status: %s \n Data: %s", response.Status, string(responseData)) - zap.S().Error(err) + err := 
fmt.Errorf("Error in getting 2xx response in API call to alertmanager/v1/receivers") + zap.L().Error("Error in getting 2xx response in API call to alertmanager/v1/receivers", zap.String("Status", response.Status), zap.String("Data", string(responseData))) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -468,17 +453,15 @@ func (r *ClickHouseReader) GetChannel(id string) (*model.ChannelItem, *model.Api stmt, err := r.localDB.Preparex(query) - zap.S().Info(query, idInt) - if err != nil { - zap.S().Debug("Error in preparing sql query for GetChannel : ", err) + zap.L().Error("Error in preparing sql query for GetChannel", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } err = stmt.Get(&channel, idInt) if err != nil { - zap.S().Debug(fmt.Sprintf("Error in getting channel with id=%d : ", idInt), err) + zap.L().Error("Error in getting channel with id", zap.Int("id", idInt), zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -504,14 +487,14 @@ func (r *ClickHouseReader) DeleteChannel(id string) *model.ApiError { { stmt, err := tx.Prepare(`DELETE FROM notification_channels WHERE id=$1;`) if err != nil { - zap.S().Errorf("Error in preparing statement for INSERT to notification_channels\n", err) + zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err)) tx.Rollback() return &model.ApiError{Typ: model.ErrorInternal, Err: err} } defer stmt.Close() if _, err := stmt.Exec(idInt); err != nil { - zap.S().Errorf("Error in Executing prepared statement for INSERT to notification_channels\n", err) + zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err)) tx.Rollback() // return an error too, we may want to wrap them return &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -525,7 +508,7 @@ func (r *ClickHouseReader) DeleteChannel(id string) *model.ApiError { err = tx.Commit() if err != nil { - 
zap.S().Errorf("Error in committing transaction for DELETE command to notification_channels\n", err) + zap.L().Error("Error in committing transaction for DELETE command to notification_channels", zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -541,10 +524,10 @@ func (r *ClickHouseReader) GetChannels() (*[]model.ChannelItem, *model.ApiError) err := r.localDB.Select(&channels, query) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -609,7 +592,7 @@ func (r *ClickHouseReader) EditChannel(receiver *am.Receiver, id string) (*am.Re // check if channel type is supported in the current user plan if err := r.featureFlags.CheckFeature(fmt.Sprintf("ALERT_CHANNEL_%s", strings.ToUpper(channel_type))); err != nil { - zap.S().Warn("an unsupported feature was blocked", err) + zap.L().Warn("an unsupported feature was blocked", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unsupported feature. 
please upgrade your plan to access this feature")} } @@ -619,14 +602,14 @@ func (r *ClickHouseReader) EditChannel(receiver *am.Receiver, id string) (*am.Re stmt, err := tx.Prepare(`UPDATE notification_channels SET updated_at=$1, type=$2, data=$3 WHERE id=$4;`) if err != nil { - zap.S().Errorf("Error in preparing statement for UPDATE to notification_channels\n", err) + zap.L().Error("Error in preparing statement for UPDATE to notification_channels", zap.Error(err)) tx.Rollback() return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } defer stmt.Close() if _, err := stmt.Exec(time.Now(), channel_type, string(receiverString), idInt); err != nil { - zap.S().Errorf("Error in Executing prepared statement for UPDATE to notification_channels\n", err) + zap.L().Error("Error in Executing prepared statement for UPDATE to notification_channels", zap.Error(err)) tx.Rollback() // return an error too, we may want to wrap them return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -640,7 +623,7 @@ func (r *ClickHouseReader) EditChannel(receiver *am.Receiver, id string) (*am.Re err = tx.Commit() if err != nil { - zap.S().Errorf("Error in committing transaction for INSERT to notification_channels\n", err) + zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -654,7 +637,7 @@ func (r *ClickHouseReader) CreateChannel(receiver *am.Receiver) (*am.Receiver, * // check if channel type is supported in the current user plan if err := r.featureFlags.CheckFeature(fmt.Sprintf("ALERT_CHANNEL_%s", strings.ToUpper(channel_type))); err != nil { - zap.S().Warn("an unsupported feature was blocked", err) + zap.L().Warn("an unsupported feature was blocked", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unsupported feature. 
please upgrade your plan to access this feature")} } @@ -668,14 +651,14 @@ func (r *ClickHouseReader) CreateChannel(receiver *am.Receiver) (*am.Receiver, * { stmt, err := tx.Prepare(`INSERT INTO notification_channels (created_at, updated_at, name, type, data) VALUES($1,$2,$3,$4,$5);`) if err != nil { - zap.S().Errorf("Error in preparing statement for INSERT to notification_channels\n", err) + zap.L().Error("Error in preparing statement for INSERT to notification_channels", zap.Error(err)) tx.Rollback() return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } defer stmt.Close() if _, err := stmt.Exec(time.Now(), time.Now(), receiver.Name, channel_type, string(receiverString)); err != nil { - zap.S().Errorf("Error in Executing prepared statement for INSERT to notification_channels\n", err) + zap.L().Error("Error in Executing prepared statement for INSERT to notification_channels", zap.Error(err)) tx.Rollback() // return an error too, we may want to wrap them return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -689,7 +672,7 @@ func (r *ClickHouseReader) CreateChannel(receiver *am.Receiver) (*am.Receiver, * err = tx.Commit() if err != nil { - zap.S().Errorf("Error in committing transaction for INSERT to notification_channels\n", err) + zap.L().Error("Error in committing transaction for INSERT to notification_channels", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -742,10 +725,10 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro rows, err := r.db.Query(ctx, query) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, fmt.Errorf("Error in processing sql query") } @@ -773,7 +756,7 @@ func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig rows, err := r.db.Query(ctx, query) if err != nil { - zap.S().Error("Error 
in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")} } @@ -874,7 +857,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G query += subQuery args = append(args, argsSubQuery...) if errStatus != nil { - zap.S().Error("Error in processing sql query: ", errStatus) + zap.L().Error("Error in processing sql query", zap.Error(errStatus)) return } err := r.db.QueryRow( @@ -888,19 +871,19 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G } if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return } subQuery, argsSubQuery, errStatus = buildQueryWithTagParams(ctx, tags) if errStatus != nil { - zap.S().Error("Error building query with tag params: ", err) + zap.L().Error("Error building query with tag params", zap.Error(errStatus)) return } query += subQuery args = append(args, argsSubQuery...) err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors) if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return } @@ -966,11 +949,11 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams * query += " GROUP BY time ORDER BY time DESC" err := r.db.Select(ctx, &serviceOverviewItems, query, args...) 
- zap.S().Debug(query) + zap.L().Debug("running query", zap.String("query", query)) if err != nil { - zap.S().Error("Error in processing sql query: ", err) - return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} + zap.L().Error("Error in processing sql query", zap.Error(err)) + return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")} } serviceErrorItems := []model.ServiceErrorItem{} @@ -994,10 +977,8 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams * query += " GROUP BY time ORDER BY time DESC" err = r.db.Select(ctx, &serviceErrorItems, query, args...) - zap.S().Debug(query) - if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -1133,10 +1114,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY serviceName" var dBResponse []model.DBResponseServiceName err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1150,10 +1131,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY httpCode" var dBResponse []model.DBResponseHttpCode err := r.db.Select(ctx, &dBResponse, finalQuery, args...) 
- zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1167,10 +1148,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY httpRoute" var dBResponse []model.DBResponseHttpRoute err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1184,10 +1165,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY httpUrl" var dBResponse []model.DBResponseHttpUrl err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1201,10 +1182,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY httpMethod" var dBResponse []model.DBResponseHttpMethod err := r.db.Select(ctx, &dBResponse, finalQuery, args...) 
- zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1218,10 +1199,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY httpHost" var dBResponse []model.DBResponseHttpHost err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1235,10 +1216,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY name" var dBResponse []model.DBResponseOperation err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1252,10 +1233,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY component" var dBResponse []model.DBResponseComponent err := r.db.Select(ctx, &dBResponse, finalQuery, args...) 
- zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1268,10 +1249,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += query var dBResponse []model.DBResponseTotal err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } @@ -1279,10 +1260,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery2 += query var dBResponse2 []model.DBResponseTotal err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...) - zap.S().Info(finalQuery2) + zap.L().Info(finalQuery2) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } if len(dBResponse) > 0 && len(dBResponse2) > 0 { @@ -1304,9 +1285,9 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += query var dBResponse []model.DBResponseMinMax err = r.db.Select(ctx, &dBResponse, finalQuery, args...) 
- zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } if len(dBResponse) > 0 { @@ -1319,10 +1300,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " ORDER BY durationNano LIMIT 1" var dBResponse []model.DBResponseTotal err = r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } @@ -1331,10 +1312,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " ORDER BY durationNano DESC LIMIT 1" var dBResponse2 []model.DBResponseTotal err = r.db.Select(ctx, &dBResponse2, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} } if len(dBResponse) > 0 { @@ -1350,10 +1331,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY rpcMethod" var dBResponse []model.DBResponseRPCMethod err := r.db.Select(ctx, &dBResponse, finalQuery, args...) 
- zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1368,10 +1349,10 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode finalQuery += " GROUP BY responseStatusCode" var dBResponse []model.DBResponseStatusCodeMethod err := r.db.Select(ctx, &dBResponse, finalQuery, args...) - zap.S().Info(finalQuery) + zap.L().Info(finalQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)} } for _, service := range dBResponse { @@ -1496,10 +1477,10 @@ func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *mo projectionOptQuery := "SET allow_experimental_projection_optimization = 1" err := r.db.Exec(ctx, projectionOptQuery) - zap.S().Info(projectionOptQuery) + zap.L().Info(projectionOptQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if queryParams.Order == constants.Descending { @@ -1534,10 +1515,10 @@ func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *mo } } - zap.S().Info(baseQuery) + zap.L().Info(baseQuery) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -1774,10 +1755,10 @@ func (r *ClickHouseReader) GetTagFilters(ctx context.Context, 
queryParams *model finalQuery += query err := r.db.Select(ctx, &tagFilters, finalQuery, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } tagFiltersResult := model.TagFilters{ @@ -1896,10 +1877,10 @@ func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model. args = append(args, clickhouse.Named("limit", queryParams.Limit)) err := r.db.Select(ctx, &tagValues, finalQuery, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -1958,10 +1939,8 @@ func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *mo } err := r.db.Select(ctx, &topOperationsItems, query, args...) - zap.S().Debug(query) - if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")} } @@ -1990,10 +1969,10 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU err := r.db.Select(ctx, &usageItems, query, namedArgs...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, fmt.Errorf("Error in processing sql query") } @@ -2018,14 +1997,14 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string, spa err := r.db.Select(ctx, &searchScanResponses, query, traceId) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) - return nil, fmt.Errorf("Error in processing sql query") + zap.L().Error("Error in processing sql query", zap.Error(err)) + return nil, fmt.Errorf("error in processing sql query") } end := time.Now() - zap.S().Debug("getTraceSQLQuery took: ", end.Sub(start)) + zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start))) searchSpansResult := []model.SearchSpansResult{{ Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"}, Events: make([][]interface{}, len(searchScanResponses)), @@ -2041,7 +2020,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string, spa searchSpanResponses = append(searchSpanResponses, jsonItem) } end = time.Now() - zap.S().Debug("getTraceSQLQuery unmarshal took: ", end.Sub(start)) + zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start))) err = r.featureFlags.CheckFeature(model.SmartTraceDetail) smartAlgoEnabled := err == nil @@ -2052,7 +2031,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string, spa return nil, err } end = time.Now() - zap.S().Debug("smartTraceAlgo took: ", end.Sub(start)) + zap.L().Debug("smartTraceAlgo took: ", zap.Duration("duration", end.Sub(start))) } else { for i, item := range searchSpanResponses { spanEvents := item.GetValues() @@ -2099,12 +2078,12 @@ func (r *ClickHouseReader) 
GetDependencyGraph(ctx context.Context, queryParams * query += filterQuery + " GROUP BY src, dest;" args = append(args, filterArgs...) - zap.S().Debug(query, args) + zap.L().Debug("GetDependencyGraph query", zap.String("query", query), zap.Any("args", args)) err := r.db.Select(ctx, &response, query, args...) if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, fmt.Errorf("error in processing sql query %w", err) } @@ -2252,10 +2231,10 @@ func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, query err := r.db.Select(ctx, &SpanAggregatesDBResponseItems, query, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -2338,7 +2317,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, go func(tableName string) { _, dbErr := r.localDB.Exec("INSERT INTO ttl_status (transaction_id, created_at, updated_at, table_name, ttl, status, cold_storage_ttl) VALUES (?, ?, ?, ?, ?, ?, ?)", uuid, time.Now(), time.Now(), tableName, params.DelDuration, constants.StatusPending, coldStorageDuration) if dbErr != nil { - zap.S().Error(fmt.Errorf("Error in inserting to ttl_status table: %s", dbErr.Error())) + zap.L().Error("Error in inserting to ttl_status table", zap.Error(dbErr)) return } req := fmt.Sprintf( @@ -2350,32 +2329,32 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, } err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume) if err != nil { - zap.S().Error(fmt.Errorf("Error in setting cold storage: %s", err.Err.Error())) + zap.L().Error("Error in setting cold storage", zap.Error(err)) statusItem, err := r.checkTTLStatusItem(ctx, tableName) if err == nil { _, dbErr := r.localDB.Exec("UPDATE 
ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } } return } req += fmt.Sprint(" SETTINGS distributed_ddl_task_timeout = -1;") - zap.S().Debugf("Executing TTL request: %s\n", req) + zap.L().Info("Executing TTL request: ", zap.String("request", req)) statusItem, _ := r.checkTTLStatusItem(ctx, tableName) if err := r.db.Exec(context.Background(), req); err != nil { - zap.S().Error(fmt.Errorf("Error in executing set TTL query: %s", err.Error())) + zap.L().Error("Error in executing set TTL query", zap.Error(err)) _, dbErr := r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } return } _, dbErr = r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ?
WHERE id = ?", time.Now(), constants.StatusSuccess, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } }(tableName) @@ -2393,7 +2372,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, go func(tableName string) { _, dbErr := r.localDB.Exec("INSERT INTO ttl_status (transaction_id, created_at, updated_at, table_name, ttl, status, cold_storage_ttl) VALUES (?, ?, ?, ?, ?, ?, ?)", uuid, time.Now(), time.Now(), tableName, params.DelDuration, constants.StatusPending, coldStorageDuration) if dbErr != nil { - zap.S().Error(fmt.Errorf("Error in inserting to ttl_status table: %s", dbErr.Error())) + zap.L().Error("Error in inserting to ttl_status table", zap.Error(dbErr)) return } req := fmt.Sprintf( @@ -2406,32 +2385,32 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, } err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume) if err != nil { - zap.S().Error(fmt.Errorf("Error in setting cold storage: %s", err.Err.Error())) + zap.L().Error("Error in setting cold storage", zap.Error(err)) statusItem, err := r.checkTTLStatusItem(ctx, tableName) if err == nil { _, dbErr := r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } } return } req += fmt.Sprint(" SETTINGS distributed_ddl_task_timeout = -1") - zap.S().Debugf("Executing TTL request: %s\n", req) + zap.L().Info("Executing TTL request: ", zap.String("request", req)) statusItem, _ := r.checkTTLStatusItem(ctx, tableName) if err := r.db.Exec(ctx, req); err != nil { - zap.S().Error(fmt.Errorf("error while setting ttl. 
Err=%v", err)) + zap.L().Error("error while setting ttl.", zap.Error(err)) _, dbErr := r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } return } _, dbErr = r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusSuccess, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } }(tableName) @@ -2447,7 +2426,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, go func(tableName string) { _, dbErr := r.localDB.Exec("INSERT INTO ttl_status (transaction_id, created_at, updated_at, table_name, ttl, status, cold_storage_ttl) VALUES (?, ?, ?, ?, ?, ?, ?)", uuid, time.Now(), time.Now(), tableName, params.DelDuration, constants.StatusPending, coldStorageDuration) if dbErr != nil { - zap.S().Error(fmt.Errorf("error in inserting to ttl_status table: %s", dbErr.Error())) + zap.L().Error("error in inserting to ttl_status table", zap.Error(dbErr)) return } req := fmt.Sprintf( @@ -2460,32 +2439,32 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, } err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume) if err != nil { - zap.S().Error(fmt.Errorf("error in setting cold storage: %s", err.Err.Error())) + zap.L().Error("error in setting cold storage", zap.Error(err)) statusItem, err := r.checkTTLStatusItem(ctx, tableName) if err == nil { _, dbErr := r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? 
WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } } return } req += fmt.Sprint(" SETTINGS distributed_ddl_task_timeout = -1") - zap.S().Debugf("Executing TTL request: %s\n", req) + zap.L().Info("Executing TTL request: ", zap.String("request", req)) statusItem, _ := r.checkTTLStatusItem(ctx, tableName) if err := r.db.Exec(ctx, req); err != nil { - zap.S().Error(fmt.Errorf("error while setting ttl. Err=%v", err)) + zap.L().Error("error while setting ttl", zap.Error(err)) _, dbErr := r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? WHERE id = ?", time.Now(), constants.StatusFailed, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } return } _, dbErr = r.localDB.Exec("UPDATE ttl_status SET updated_at = ?, status = ? 
WHERE id = ?", time.Now(), constants.StatusSuccess, statusItem.Id) if dbErr != nil { - zap.S().Debug("Error in processing ttl_status update sql query: ", dbErr) + zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr)) return } }(tableName) @@ -2501,7 +2480,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, func (r *ClickHouseReader) deleteTtlTransactions(ctx context.Context, numberOfTransactionsStore int) { _, err := r.localDB.Exec("DELETE FROM ttl_status WHERE transaction_id NOT IN (SELECT distinct transaction_id FROM ttl_status ORDER BY created_at DESC LIMIT ?)", numberOfTransactionsStore) if err != nil { - zap.S().Debug("Error in processing ttl_status delete sql query: ", err) + zap.L().Error("Error in processing ttl_status delete sql query", zap.Error(err)) } } @@ -2511,12 +2490,12 @@ func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, tableName str query := `SELECT id, status, ttl, cold_storage_ttl FROM ttl_status WHERE table_name = ? 
ORDER BY created_at DESC` - zap.S().Info(query, tableName) + zap.L().Info("checkTTLStatusItem query", zap.String("query", query), zap.String("tableName", tableName)) stmt, err := r.localDB.Preparex(query) if err != nil { - zap.S().Debug("Error preparing query for checkTTLStatusItem: ", err) + zap.L().Error("Error preparing query for checkTTLStatusItem", zap.Error(err)) return model.TTLStatusItem{}, &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -2526,7 +2505,7 @@ func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, tableName str return model.TTLStatusItem{}, nil } if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return model.TTLStatusItem{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing ttl_status check sql query")} } return statusItem[0], nil @@ -2567,9 +2546,9 @@ func (r *ClickHouseReader) setColdStorage(ctx context.Context, tableName string, if len(coldStorageVolume) > 0 { policyReq := fmt.Sprintf("ALTER TABLE %s ON CLUSTER %s MODIFY SETTING storage_policy='tiered'", tableName, r.cluster) - zap.S().Debugf("Executing Storage policy request: %s\n", policyReq) + zap.L().Info("Executing Storage policy request: ", zap.String("request", policyReq)) if err := r.db.Exec(ctx, policyReq); err != nil { - zap.S().Error(fmt.Errorf("error while setting storage policy. Err=%v", err)) + zap.L().Error("error while setting storage policy", zap.Error(err)) return &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while setting storage policy. 
Err=%v", err)} } } @@ -2582,12 +2561,10 @@ func (r *ClickHouseReader) GetDisks(ctx context.Context) (*[]model.DiskItem, *mo query := "SELECT name,type FROM system.disks" if err := r.db.Select(ctx, &diskItems, query); err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while getting disks. Err=%v", err)} } - zap.S().Infof("Got response: %+v\n", diskItems) - return &diskItems, nil } @@ -2605,7 +2582,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa parseTTL := func(queryResp string) (int, int) { - zap.S().Debugf("Parsing TTL from: %s", queryResp) + zap.L().Info("Parsing TTL from: ", zap.String("queryResp", queryResp)) deleteTTLExp := regexp.MustCompile(`toIntervalSecond\(([0-9]*)\)`) moveTTLExp := regexp.MustCompile(`toIntervalSecond\(([0-9]*)\) TO VOLUME`) @@ -2640,7 +2617,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa err := r.db.Select(ctx, &dbResp, query) if err != nil { - zap.S().Error(fmt.Errorf("error while getting ttl. Err=%v", err)) + zap.L().Error("error while getting ttl", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while getting ttl. Err=%v", err)} } if len(dbResp) == 0 { @@ -2658,7 +2635,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa err := r.db.Select(ctx, &dbResp, query) if err != nil { - zap.S().Error(fmt.Errorf("error while getting ttl. Err=%v", err)) + zap.L().Error("error while getting ttl", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while getting ttl. 
Err=%v", err)} } if len(dbResp) == 0 { @@ -2676,7 +2653,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, ttlParams *model.GetTTLPa err := r.db.Select(ctx, &dbResp, query) if err != nil { - zap.S().Error(fmt.Errorf("error while getting ttl. Err=%v", err)) + zap.L().Error("error while getting ttl", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while getting ttl. Err=%v", err)} } if len(dbResp) == 0 { @@ -2798,7 +2775,7 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li args = append(args, argsSubQuery...) if errStatus != nil { - zap.S().Error("Error in processing tags: ", errStatus) + zap.L().Error("Error in processing tags", zap.Error(errStatus)) return nil, errStatus } query = query + " GROUP BY groupID" @@ -2826,10 +2803,10 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li } err := r.db.Select(ctx, &getErrorResponses, query, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -2858,15 +2835,15 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C args = append(args, argsSubQuery...) 
if errStatus != nil { - zap.S().Error("Error in processing tags: ", errStatus) + zap.L().Error("Error in processing tags", zap.Error(errStatus)) return 0, errStatus } err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return 0, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -2876,7 +2853,7 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C func (r *ClickHouseReader) GetErrorFromErrorID(ctx context.Context, queryParams *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError) { if queryParams.ErrorID == "" { - zap.S().Debug("errorId missing from params") + zap.L().Error("errorId missing from params") return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")} } var getErrorWithSpanReponse []model.ErrorWithSpan @@ -2885,10 +2862,10 @@ func (r *ClickHouseReader) GetErrorFromErrorID(ctx context.Context, queryParams args := []interface{}{clickhouse.Named("errorID", queryParams.ErrorID), clickhouse.Named("groupID", queryParams.GroupID), clickhouse.Named("timestamp", strconv.FormatInt(queryParams.Timestamp.UnixNano(), 10))} err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -2909,10 +2886,10 @@ func (r *ClickHouseReader) GetErrorFromGroupID(ctx context.Context, queryParams err := r.db.Select(ctx, &getErrorWithSpanReponse, query, args...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } @@ -2927,7 +2904,7 @@ func (r *ClickHouseReader) GetErrorFromGroupID(ctx context.Context, queryParams func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams *model.GetErrorParams) (*model.NextPrevErrorIDs, *model.ApiError) { if queryParams.ErrorID == "" { - zap.S().Debug("errorId missing from params") + zap.L().Error("errorId missing from params") return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")} } var err *model.ApiError @@ -2936,12 +2913,12 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams } getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, err = r.getNextErrorID(ctx, queryParams) if err != nil { - zap.S().Debug("Unable to get next error ID due to err: ", err) + zap.L().Error("Unable to get next error ID due to err: ", zap.Error(err)) return nil, err } getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, err = r.getPrevErrorID(ctx, queryParams) if err != nil { - zap.S().Debug("Unable to get prev error ID due to err: ", err) + zap.L().Error("Unable to get prev error ID due to err: ", zap.Error(err)) return nil, err } return &getNextPrevErrorIDsResponse, nil @@ -2957,17 +2934,17 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getNextErrorIDReponse) == 0 { - zap.S().Info("NextErrorID not found") + zap.L().Info("NextErrorID not found") return "", time.Time{}, nil } else if len(getNextErrorIDReponse) == 1 { - zap.S().Info("NextErrorID found") + zap.L().Info("NextErrorID found") return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil } else { if getNextErrorIDReponse[0].Timestamp.UnixNano() == getNextErrorIDReponse[1].Timestamp.UnixNano() { @@ -2978,10 +2955,10 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getNextErrorIDReponse) == 0 { @@ -2992,26 +2969,26 @@ func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getNextErrorIDReponse, query, args...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getNextErrorIDReponse) == 0 { - zap.S().Info("NextErrorID not found") + zap.L().Info("NextErrorID not found") return "", time.Time{}, nil } else { - zap.S().Info("NextErrorID found") + zap.L().Info("NextErrorID found") return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil } } else { - zap.S().Info("NextErrorID found") + zap.L().Info("NextErrorID found") return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil } } else { - zap.S().Info("NextErrorID found") + zap.L().Info("NextErrorID found") return getNextErrorIDReponse[0].NextErrorID, getNextErrorIDReponse[0].NextTimestamp, nil } } @@ -3026,17 +3003,17 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getPrevErrorIDReponse) == 0 { - zap.S().Info("PrevErrorID not found") + zap.L().Info("PrevErrorID not found") return "", time.Time{}, nil } else if len(getPrevErrorIDReponse) == 1 { - zap.S().Info("PrevErrorID found") + zap.L().Info("PrevErrorID found") return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil } else { if getPrevErrorIDReponse[0].Timestamp.UnixNano() == getPrevErrorIDReponse[1].Timestamp.UnixNano() { @@ -3047,10 +3024,10 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getPrevErrorIDReponse) == 0 { @@ -3061,26 +3038,26 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode err := r.db.Select(ctx, &getPrevErrorIDReponse, query, args...) 
- zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return "", time.Time{}, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")} } if len(getPrevErrorIDReponse) == 0 { - zap.S().Info("PrevErrorID not found") + zap.L().Info("PrevErrorID not found") return "", time.Time{}, nil } else { - zap.S().Info("PrevErrorID found") + zap.L().Info("PrevErrorID found") return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil } } else { - zap.S().Info("PrevErrorID found") + zap.L().Info("PrevErrorID found") return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil } } else { - zap.S().Info("PrevErrorID found") + zap.L().Info("PrevErrorID found") return getPrevErrorIDReponse[0].PrevErrorID, getPrevErrorIDReponse[0].PrevTimestamp, nil } } @@ -3111,7 +3088,7 @@ func (r *ClickHouseReader) GetMetricAutocompleteTagKey(ctx context.Context, para } if err != nil { - zap.S().Error(err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -3150,7 +3127,7 @@ func (r *ClickHouseReader) GetMetricAutocompleteTagValue(ctx context.Context, pa } if err != nil { - zap.S().Error(err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -3180,7 +3157,7 @@ func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", matchText)) if err != nil { - zap.S().Error(err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -3198,7 +3175,7 @@ func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, } func (r *ClickHouseReader) GetMetricResultEE(ctx 
context.Context, query string) ([]*model.Series, string, error) { - zap.S().Error("GetMetricResultEE is not implemented for opensource version") + zap.L().Error("GetMetricResultEE is not implemented for opensource version") return nil, "", fmt.Errorf("GetMetricResultEE is not implemented for opensource version") } @@ -3207,12 +3184,12 @@ func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([ defer utils.Elapsed("GetMetricResult")() - zap.S().Infof("Executing metric result query: %s", query) + zap.L().Info("Executing metric result query: ", zap.String("query", query)) rows, err := r.db.Query(ctx, query) if err != nil { - zap.S().Debug("Error in processing query: ", err) + zap.L().Error("Error in processing query", zap.Error(err)) return nil, err } @@ -3289,7 +3266,7 @@ func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([ groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()) } default: - zap.S().Errorf("invalid var found in metric builder query result", v, colName) + zap.L().Error("invalid var found in metric builder query result", zap.Any("v", v), zap.String("colName", colName)) } } sort.Strings(groupBy) @@ -3457,8 +3434,7 @@ func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Contex err := r.db.Select(ctx, &tagTelemetryDataList, queryStr) if err != nil { - zap.S().Info(queryStr) - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query: ", zap.Error(err)) return nil, err } @@ -3515,7 +3491,7 @@ func (r *ClickHouseReader) GetDashboardsInfo(ctx context.Context) (*model.Dashbo var dashboardsData []dashboards.Dashboard err := r.localDB.Select(&dashboardsData, query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return &dashboardsInfo, err } totalDashboardsWithPanelAndName := 0 @@ -3601,14 +3577,14 @@ func (r *ClickHouseReader) 
GetAlertsInfo(ctx context.Context) (*model.AlertsInfo var alertsData []string err := r.localDB.Select(&alertsData, query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return &alertsInfo, err } for _, alert := range alertsData { var rule rules.GettableRule err = json.Unmarshal([]byte(alert), &rule) if err != nil { - zap.S().Errorf("msg:", "invalid rule data", "\t err:", err) + zap.L().Error("invalid rule data", zap.Error(err)) continue } if rule.AlertType == "LOGS_BASED_ALERT" { @@ -3837,7 +3813,6 @@ func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilter } query = fmt.Sprintf("%s order by %s %s limit %d", query, params.OrderBy, params.Order, params.Limit) - zap.S().Debug(query) err = r.db.Select(ctx, &response, query) if err != nil { return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal} @@ -3897,7 +3872,7 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC case <-ctx.Done(): done := true client.Done <- &done - zap.S().Debug("closing go routine : " + client.Name) + zap.L().Debug("closing go routine : " + client.Name) return case <-ticker.C: // get the new 100 logs as anything more older won't make sense @@ -3909,11 +3884,10 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC tmpQuery = fmt.Sprintf("%s and id > '%s'", tmpQuery, idStart) } tmpQuery = fmt.Sprintf("%s order by timestamp desc, id desc limit 100", tmpQuery) - zap.S().Debug(tmpQuery) response := []model.SignozLog{} err := r.db.Select(ctx, &response, tmpQuery) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while getting logs", zap.Error(err)) client.Error <- err return } @@ -3922,7 +3896,7 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC case <-ctx.Done(): done := true client.Done <- &done - zap.S().Debug("closing go routine while sending logs : " + client.Name) + 
zap.L().Debug("closing go routine while sending logs : " + client.Name) return default: client.Logs <- &response[i] @@ -3987,7 +3961,6 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs query = fmt.Sprintf("%s GROUP BY ts_start_interval ORDER BY ts_start_interval", query) } - zap.S().Debug(query) err = r.db.Select(ctx, &logAggregatesDBResponseItems, query) if err != nil { return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal} @@ -4026,10 +3999,10 @@ func (r *ClickHouseReader) QueryDashboardVars(ctx context.Context, query string) var result model.DashboardVar rows, err := r.db.Query(ctx, query) - zap.S().Info(query) + zap.L().Info(query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } @@ -4072,7 +4045,7 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText)) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4121,7 +4094,7 @@ func (r *ClickHouseReader) GetMetricAttributeKeys(ctx context.Context, req *v3.F } rows, err = r.db.Query(ctx, query, req.AggregateAttribute, common.PastDayRoundOff(), fmt.Sprintf("%%%s%%", req.SearchText)) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4157,7 +4130,7 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3 rows, err = r.db.Query(ctx, query, req.FilterAttributeKey, req.AggregateAttribute, req.FilterAttributeKey, fmt.Sprintf("%%%s%%", req.SearchText), common.PastDayRoundOff()) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", 
zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4179,7 +4152,7 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s'", signozMetricDBName, signozTSTableName, metricName, serviceName) rows, err := r.db.Query(ctx, query, metricName) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4198,7 +4171,7 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName, serviceName) rows, err = r.db.Query(ctx, query, metricName) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4214,7 +4187,7 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN // ideally this should not happen but we have seen ClickHouse // returning empty string for some values if err != nil { - zap.S().Error("error while parsing le value: ", err) + zap.L().Error("error while parsing le value", zap.Error(err)) continue } if math.IsInf(le, 0) { @@ -4236,7 +4209,7 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, se query := fmt.Sprintf("SELECT DISTINCT temporality, description, type, unit, is_monotonic from %s.%s WHERE metric_name=$1", signozMetricDBName, signozTSTableNameV41Day) rows, err := r.db.Query(ctx, query, metricName) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while 
fetching metric metadata", zap.Error(err)) return nil, fmt.Errorf("error while fetching metric metadata: %s", err.Error()) } defer rows.Close() @@ -4255,7 +4228,7 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, se query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name=$1 AND type = 'Histogram' AND JSONExtractString(labels, 'service_name') = $2 ORDER BY le", signozMetricDBName, signozTSTableNameV41Day) rows, err = r.db.Query(ctx, query, metricName, serviceName) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4271,7 +4244,7 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, se // ideally this should not happen but we have seen ClickHouse // returning empty string for some values if err != nil { - zap.S().Error("error while parsing le value: ", err) + zap.L().Error("error while parsing le value", zap.Error(err)) continue } if math.IsInf(le, 0) { @@ -4405,7 +4378,7 @@ func (r *ClickHouseReader) GetLogAggregateAttributes(ctx context.Context, req *v query = fmt.Sprintf("SELECT DISTINCT(tagKey), tagType, tagDataType from %s.%s WHERE %s limit $2", r.logsDB, r.logsTagAttributeTable, where) rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4459,7 +4432,7 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt } if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4571,7 +4544,7 @@ func (r *ClickHouseReader) 
GetLogAttributeValues(ctx context.Context, req *v3.Fi } if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4632,7 +4605,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str var metric map[string]string err := json.Unmarshal([]byte(*v), &metric) if err != nil { - zap.S().Errorf("unexpected error encountered %v", err) + zap.L().Error("unexpected error encountered", zap.Error(err)) } for key, val := range metric { groupBy = append(groupBy, val) @@ -4688,7 +4661,7 @@ func readRow(vars []interface{}, columnNames []string) ([]string, map[string]str groupAttributes[colName] = fmt.Sprintf("%v", *v) default: - zap.S().Errorf("unsupported var type %v found in query builder query result for column %s", v, colName) + zap.L().Error("unsupported var type found in query builder query result", zap.Any("v", v), zap.String("colName", colName)) } } return groupBy, groupAttributes, groupAttributesArray, point @@ -4786,7 +4759,7 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri rows, err := r.db.Query(ctx, query) if err != nil { - zap.S().Errorf("error while reading time series result %v", err) + zap.L().Error("error while reading time series result", zap.Error(err)) return nil, err } defer rows.Close() @@ -4811,7 +4784,7 @@ func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([ rows, err := r.db.Query(ctx, query) if err != nil { - zap.S().Errorf("error while reading time series result %v", err) + zap.L().Error("error while reading time series result", zap.Error(err)) return nil, err } defer rows.Close() @@ -4954,7 +4927,7 @@ func (r *ClickHouseReader) GetTraceAggregateAttributes(ctx context.Context, req rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText)) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing 
query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -4995,7 +4968,7 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText)) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -5049,7 +5022,7 @@ func (r *ClickHouseReader) GetTraceAttributeValues(ctx context.Context, req *v3. query = fmt.Sprintf("SELECT DISTINCT stringTagValue from %s.%s WHERE tagKey = $1 AND stringTagValue ILIKE $2 AND tagType=$3 limit $4", r.TraceDB, r.spanAttributeTable) rows, err = r.db.Query(ctx, query, req.FilterAttributeKey, fmt.Sprintf("%%%s%%", req.SearchText), req.TagType, req.Limit) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -5065,7 +5038,7 @@ func (r *ClickHouseReader) GetTraceAttributeValues(ctx context.Context, req *v3. 
query = fmt.Sprintf("SELECT DISTINCT float64TagValue from %s.%s where tagKey = $1 AND toString(float64TagValue) ILIKE $2 AND tagType=$3 limit $4", r.TraceDB, r.spanAttributeTable) rows, err = r.db.Query(ctx, query, req.FilterAttributeKey, fmt.Sprintf("%%%s%%", req.SearchText), req.TagType, req.Limit) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -5099,7 +5072,7 @@ func (r *ClickHouseReader) GetSpanAttributeKeys(ctx context.Context) (map[string rows, err = r.db.Query(ctx, query) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while executing query", zap.Error(err)) return nil, fmt.Errorf("error while executing query: %s", err.Error()) } defer rows.Close() @@ -5137,7 +5110,7 @@ func (r *ClickHouseReader) LiveTailLogsV3(ctx context.Context, query string, tim case <-ctx.Done(): done := true client.Done <- &done - zap.S().Debug("closing go routine : " + client.Name) + zap.L().Debug("closing go routine : " + client.Name) return case <-ticker.C: // get the new 100 logs as anything more older won't make sense @@ -5152,7 +5125,7 @@ func (r *ClickHouseReader) LiveTailLogsV3(ctx context.Context, query string, tim response := []model.SignozLog{} err := r.db.Select(ctx, &response, tmpQuery) if err != nil { - zap.S().Error(err) + zap.L().Error("Error while getting logs", zap.Error(err)) client.Error <- err return } diff --git a/pkg/query-service/app/dashboards/model.go b/pkg/query-service/app/dashboards/model.go index 6e777f49c9..c69f30a6bd 100644 --- a/pkg/query-service/app/dashboards/model.go +++ b/pkg/query-service/app/dashboards/model.go @@ -196,7 +196,7 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interf mapData, err := json.Marshal(dash.Data) if err != nil { - zap.S().Errorf("Error in marshalling data field in dashboard: ", dash, err) + zap.L().Error("Error in marshalling data field 
in dashboard: ", zap.Any("dashboard", dash), zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -212,7 +212,7 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interf dash.Uuid, dash.CreatedAt, userEmail, dash.UpdatedAt, userEmail, mapData) if err != nil { - zap.S().Errorf("Error in inserting dashboard data: ", dash, err) + zap.L().Error("Error in inserting dashboard data: ", zap.Any("dashboard", dash), zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } lastInsertId, err := result.LastInsertId() @@ -246,7 +246,7 @@ func DeleteDashboard(ctx context.Context, uuid string, fm interfaces.FeatureLook dashboard, dErr := GetDashboard(ctx, uuid) if dErr != nil { - zap.S().Errorf("Error in getting dashboard: ", uuid, dErr) + zap.L().Error("Error in getting dashboard: ", zap.String("uuid", uuid), zap.Any("error", dErr)) return dErr } @@ -296,7 +296,7 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface mapData, err := json.Marshal(data) if err != nil { - zap.S().Errorf("Error in marshalling data field in dashboard: ", data, err) + zap.L().Error("Error in marshalling data field in dashboard: ", zap.Any("data", data), zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} } @@ -337,7 +337,7 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface dashboard.UpdatedAt, userEmail, mapData, dashboard.Uuid) if err != nil { - zap.S().Errorf("Error in inserting dashboard data: ", data, err) + zap.L().Error("Error in inserting dashboard data", zap.Any("data", data), zap.Error(err)) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} } if existingCount != newCount { @@ -358,7 +358,7 @@ func LockUnlockDashboard(ctx context.Context, uuid string, lock bool) *model.Api _, err := db.Exec(query, uuid) if err != nil { - zap.S().Errorf("Error in updating dashboard: ", uuid, err) + zap.L().Error("Error in updating 
dashboard", zap.String("uuid", uuid), zap.Error(err)) return &model.ApiError{Typ: model.ErrorExec, Err: err} } @@ -370,10 +370,10 @@ func updateFeatureUsage(fm interfaces.FeatureLookup, usage int64) *model.ApiErro if err != nil { switch err.(type) { case model.ErrFeatureUnavailable: - zap.S().Errorf("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) + zap.L().Error("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) return model.BadRequest(err) default: - zap.S().Errorf("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) + zap.L().Error("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) return model.BadRequest(err) } } @@ -397,10 +397,10 @@ func checkFeatureUsage(fm interfaces.FeatureLookup, usage int64) *model.ApiError if err != nil { switch err.(type) { case model.ErrFeatureUnavailable: - zap.S().Errorf("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) + zap.L().Error("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) return model.BadRequest(err) default: - zap.S().Errorf("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) + zap.L().Error("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err)) return model.BadRequest(err) } } @@ -535,13 +535,13 @@ func TransformGrafanaJSONToSignoz(grafanaJSON model.GrafanaJSON) model.Dashboard if template.Type == "query" { if template.Datasource == nil { - zap.S().Warnf("Skipping panel %d as it has no datasource", templateIdx) + zap.L().Warn("Skipping panel as it has no datasource", zap.Int("templateIdx", templateIdx)) continue } // Skip if the source is not prometheus source, stringOk := template.Datasource.(string) if stringOk && !strings.Contains(strings.ToLower(source), "prometheus") { - zap.S().Warnf("Skipping 
template %d as it is not prometheus", templateIdx) + zap.L().Warn("Skipping template as it is not prometheus", zap.Int("templateIdx", templateIdx)) continue } var result model.Datasource @@ -553,12 +553,12 @@ func TransformGrafanaJSONToSignoz(grafanaJSON model.GrafanaJSON) model.Dashboard } } if result.Type != "prometheus" && result.Type != "" { - zap.S().Warnf("Skipping template %d as it is not prometheus", templateIdx) + zap.L().Warn("Skipping template as it is not prometheus", zap.Int("templateIdx", templateIdx)) continue } if !stringOk && !structOk { - zap.S().Warnf("Didn't recognize source, skipping") + zap.L().Warn("Didn't recognize source, skipping") continue } typ = "QUERY" @@ -629,13 +629,13 @@ func TransformGrafanaJSONToSignoz(grafanaJSON model.GrafanaJSON) model.Dashboard continue } if panel.Datasource == nil { - zap.S().Warnf("Skipping panel %d as it has no datasource", idx) + zap.L().Warn("Skipping panel as it has no datasource", zap.Int("idx", idx)) continue } // Skip if the datasource is not prometheus source, stringOk := panel.Datasource.(string) if stringOk && !strings.Contains(strings.ToLower(source), "prometheus") { - zap.S().Warnf("Skipping panel %d as it is not prometheus", idx) + zap.L().Warn("Skipping panel as it is not prometheus", zap.Int("idx", idx)) continue } var result model.Datasource @@ -647,12 +647,12 @@ func TransformGrafanaJSONToSignoz(grafanaJSON model.GrafanaJSON) model.Dashboard } } if result.Type != "prometheus" && result.Type != "" { - zap.S().Warnf("Skipping panel %d as it is not prometheus", idx) + zap.L().Warn("Skipping panel as it is not prometheus", zap.Int("idx", idx)) continue } if !stringOk && !structOk { - zap.S().Warnf("Didn't recognize source, skipping") + zap.L().Warn("Didn't recognize source, skipping") continue } diff --git a/pkg/query-service/app/dashboards/provision.go b/pkg/query-service/app/dashboards/provision.go index 049ae42e72..fb97a960c1 100644 --- a/pkg/query-service/app/dashboards/provision.go +++ 
b/pkg/query-service/app/dashboards/provision.go @@ -16,31 +16,31 @@ import ( func readCurrentDir(dir string, fm interfaces.FeatureLookup) error { file, err := os.Open(dir) if err != nil { - zap.S().Errorf("failed opening directory: %s", err) - return err + zap.L().Warn("failed opening directory", zap.Error(err)) + return nil } defer file.Close() list, _ := file.Readdirnames(0) // 0 to read all files and folders for _, filename := range list { - zap.S().Info("Provisioning dashboard: ", filename) + zap.L().Info("Provisioning dashboard: ", zap.String("filename", filename)) // using filepath.Join for platform specific path creation // which is equivalent to "dir+/+filename" (on unix based systems) but cleaner plan, err := os.ReadFile(filepath.Join(dir, filename)) if err != nil { - zap.S().Errorf("Creating Dashboards: Error in reading json fron file: %s\t%s", filename, err) + zap.L().Error("Creating Dashboards: Error in reading json fron file", zap.String("filename", filename), zap.Error(err)) continue } var data map[string]interface{} err = json.Unmarshal(plan, &data) if err != nil { - zap.S().Errorf("Creating Dashboards: Error in unmarshalling json from file: %s\t%s", filename, err) + zap.L().Error("Creating Dashboards: Error in unmarshalling json from file", zap.String("filename", filename), zap.Error(err)) continue } err = IsPostDataSane(&data) if err != nil { - zap.S().Infof("Creating Dashboards: Error in file: %s\t%s", filename, err) + zap.L().Info("Creating Dashboards: Error in file", zap.String("filename", filename), zap.Error(err)) continue } @@ -48,14 +48,14 @@ func readCurrentDir(dir string, fm interfaces.FeatureLookup) error { if id == nil { _, apiErr := CreateDashboard(context.Background(), data, fm) if apiErr != nil { - zap.S().Errorf("Creating Dashboards: Error in file: %s\t%s", filename, apiErr.Err) + zap.L().Error("Creating Dashboards: Error in file", zap.String("filename", filename), zap.Error(apiErr.Err)) } continue } apiErr := 
upsertDashboard(id.(string), data, filename, fm) if apiErr != nil { - zap.S().Errorf("Creating Dashboards: Error upserting dashboard: %s\t%s", filename, apiErr.Err) + zap.L().Error("Creating Dashboards: Error upserting dashboard", zap.String("filename", filename), zap.Error(apiErr.Err)) } } return nil diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 964850cbf8..c025345cef 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -207,7 +207,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { hasUsers, err := aH.appDao.GetUsersWithOpts(context.Background(), 1) if err.Error() != "" { // raise warning but no panic as this is a recoverable condition - zap.S().Warnf("unexpected error while fetch user count while initializing base api handler", err.Error()) + zap.L().Warn("unexpected error while fetch user count while initializing base api handler", zap.Error(err)) } if len(hasUsers) != 0 { // first user is already created, we can mark the app ready for general use. 
@@ -273,7 +273,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa Data: data, }) if err != nil { - zap.S().Error("msg", "error marshalling json response", "err", err) + zap.L().Error("error marshalling json response", zap.Error(err)) http.Error(w, err.Error(), http.StatusInternalServerError) return } @@ -303,7 +303,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa w.Header().Set("Content-Type", "application/json") w.WriteHeader(code) if n, err := w.Write(b); err != nil { - zap.S().Error("msg", "error writing response", "bytesWritten", n, "err", err) + zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err)) } } @@ -314,7 +314,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) { Data: data, }) if err != nil { - zap.S().Error("msg", "error marshalling json response", "err", err) + zap.L().Error("error marshalling json response", zap.Error(err)) http.Error(w, err.Error(), http.StatusInternalServerError) return } @@ -322,7 +322,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) if n, err := w.Write(b); err != nil { - zap.S().Error("msg", "error writing response", "bytesWritten", n, "err", err) + zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err)) } } @@ -567,7 +567,7 @@ func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParam var err error if aH.preferDelta { - zap.S().Debug("fetching metric temporality") + zap.L().Debug("fetching metric temporality") metricNameToTemporality, err = aH.reader.FetchTemporality(ctx, metricNames) if err != nil { return err @@ -595,7 +595,7 @@ func (aH *APIHandler) QueryRangeMetricsV2(w http.ResponseWriter, r *http.Request metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + 
zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err)) RespondError(w, apiErrorObj, nil) return } @@ -1130,7 +1130,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("Error in getting req body in test rule API\n", err) + zap.L().Error("Error in getting req body in test rule API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1173,7 +1173,7 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("msg: error in getting req body of patch rule API\n", "\t error:", err) + zap.L().Error("error in getting req body of patch rule API\n", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1194,7 +1194,7 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("msg: error in getting req body of edit rule API\n", "\t error:", err) + zap.L().Error("error in getting req body of edit rule API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1245,14 +1245,14 @@ func (aH *APIHandler) testChannel(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("Error in getting req body of testChannel API\n", err) + zap.L().Error("Error in getting req body of testChannel API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } receiver := &am.Receiver{} if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer - zap.S().Errorf("Error in parsing req body of testChannel API\n", err) + zap.L().Error("Error in parsing req body of 
testChannel API\n", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1272,14 +1272,14 @@ func (aH *APIHandler) editChannel(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("Error in getting req body of editChannel API\n", err) + zap.L().Error("Error in getting req body of editChannel API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } receiver := &am.Receiver{} if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer - zap.S().Errorf("Error in parsing req body of editChannel API\n", err) + zap.L().Error("Error in parsing req body of editChannel API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1300,14 +1300,14 @@ func (aH *APIHandler) createChannel(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("Error in getting req body of createChannel API\n", err) + zap.L().Error("Error in getting req body of createChannel API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } receiver := &am.Receiver{} if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer - zap.S().Errorf("Error in parsing req body of createChannel API\n", err) + zap.L().Error("Error in parsing req body of createChannel API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return } @@ -1347,7 +1347,7 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("Error in getting req body for create rule API\n", err) + zap.L().Error("Error in getting req body for create rule API", zap.Error(err)) RespondError(w, &model.ApiError{Typ: 
model.ErrorBadData, Err: err}, nil) return } @@ -1374,7 +1374,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request) return } - // zap.S().Info(query, apiError) + // zap.L().Info(query, apiError) ctx := r.Context() if to := r.FormValue("timeout"); to != "" { @@ -1396,7 +1396,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request) } if res.Err != nil { - zap.S().Error(res.Err) + zap.L().Error("error in query range metrics", zap.Error(res.Err)) } if res.Err != nil { @@ -1429,7 +1429,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) { return } - // zap.S().Info(query, apiError) + // zap.L().Info(query, apiError) ctx := r.Context() if to := r.FormValue("timeout"); to != "" { @@ -1451,7 +1451,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) { } if res.Err != nil { - zap.S().Error(res.Err) + zap.L().Error("error in query range metrics", zap.Error(res.Err)) } if res.Err != nil { @@ -2045,7 +2045,7 @@ func (aH *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) listUsers(w http.ResponseWriter, r *http.Request) { users, err := dao.DB().GetUsers(context.Background()) if err != nil { - zap.S().Debugf("[listUsers] Failed to query list of users, err: %v", err) + zap.L().Error("[listUsers] Failed to query list of users", zap.Error(err)) RespondError(w, err, nil) return } @@ -2062,7 +2062,7 @@ func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) { ctx := context.Background() user, err := dao.DB().GetUser(ctx, id) if err != nil { - zap.S().Debugf("[getUser] Failed to query user, err: %v", err) + zap.L().Error("[getUser] Failed to query user", zap.Error(err)) RespondError(w, err, "Failed to get user") return } @@ -2092,7 +2092,7 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) { ctx := context.Background() old, apiErr := dao.DB().GetUser(ctx, id) if apiErr != nil { - 
zap.S().Debugf("[editUser] Failed to query user, err: %v", err) + zap.L().Error("[editUser] Failed to query user", zap.Error(err)) RespondError(w, apiErr, nil) return } @@ -2176,7 +2176,7 @@ func (aH *APIHandler) patchUserFlag(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() b, err := io.ReadAll(r.Body) if err != nil { - zap.S().Errorf("failed read user flags from http request for userId ", userId, "with error: ", err) + zap.L().Error("failed read user flags from http request for userId ", zap.String("userId", userId), zap.Error(err)) RespondError(w, model.BadRequestStr("received user flags in invalid format"), nil) return } @@ -2184,7 +2184,7 @@ func (aH *APIHandler) patchUserFlag(w http.ResponseWriter, r *http.Request) { err = json.Unmarshal(b, &flags) if err != nil { - zap.S().Errorf("failed parsing user flags for userId ", userId, "with error: ", err) + zap.L().Error("failed parsing user flags for userId ", zap.String("userId", userId), zap.Error(err)) RespondError(w, model.BadRequestStr("received user flags in invalid format"), nil) return } @@ -2348,7 +2348,7 @@ func (aH *APIHandler) resetPassword(w http.ResponseWriter, r *http.Request) { } if err := auth.ResetPassword(context.Background(), req); err != nil { - zap.S().Debugf("resetPassword failed, err: %v\n", err) + zap.L().Error("resetPassword failed", zap.Error(err)) if aH.HandleError(w, err, http.StatusInternalServerError) { return } @@ -2391,7 +2391,7 @@ func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode i return false } if statusCode == http.StatusInternalServerError { - zap.S().Error("HTTP handler, Internal Server Error", zap.Error(err)) + zap.L().Error("HTTP handler, Internal Server Error", zap.Error(err)) } structuredResp := structuredResponse{ Errors: []structuredError{ @@ -2809,10 +2809,10 @@ func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) { fmt.Fprintf(w, "data: %v\n\n", buf.String()) flusher.Flush() case <-client.Done: - 
zap.S().Debug("done!") + zap.L().Debug("done!") return case err := <-client.Error: - zap.S().Error("error occured!", err) + zap.L().Error("error occured", zap.Error(err)) return } } @@ -2963,7 +2963,7 @@ func (ah *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request) postable []logparsingpipeline.PostablePipeline, ) (*logparsingpipeline.PipelinesResponse, *model.ApiError) { if len(postable) == 0 { - zap.S().Warnf("found no pipelines in the http request, this will delete all the pipelines") + zap.L().Warn("found no pipelines in the http request, this will delete all the pipelines") } for _, p := range postable { @@ -3403,7 +3403,7 @@ func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request) queryRangeParams, apiErrorObj := ParseQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + zap.L().Error(apiErrorObj.Err.Error()) RespondError(w, apiErrorObj, nil) return } @@ -3478,11 +3478,11 @@ func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParam dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) if err != nil { - zap.S().Errorf("error while matching the dashboard: %v", err) + zap.L().Error("error while matching the referrer", zap.Error(err)) } alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) if err != nil { - zap.S().Errorf("error while matching the alert: %v", err) + zap.L().Error("error while matching the alert: ", zap.Error(err)) } if alertMatched || dashboardMatched { @@ -3559,7 +3559,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) { queryRangeParams, apiErrorObj := ParseQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err)) RespondError(w, apiErrorObj, nil) return } @@ -3568,7 +3568,7 @@ func (aH *APIHandler) QueryRangeV3(w 
http.ResponseWriter, r *http.Request) { temporalityErr := aH.addTemporality(r.Context(), queryRangeParams) if temporalityErr != nil { - zap.S().Errorf("Error while adding temporality for metrics: %v", temporalityErr) + zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr)) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil) return } @@ -3584,7 +3584,7 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) { queryRangeParams, apiErrorObj := ParseQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + zap.L().Error(apiErrorObj.Err.Error()) RespondError(w, apiErrorObj, nil) return } @@ -3645,10 +3645,10 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) { fmt.Fprintf(w, "data: %v\n\n", buf.String()) flusher.Flush() case <-client.Done: - zap.S().Debug("done!") + zap.L().Debug("done!") return case err := <-client.Error: - zap.S().Error("error occurred!", err) + zap.L().Error("error occurred", zap.Error(err)) fmt.Fprintf(w, "event: error\ndata: %v\n\n", err.Error()) flusher.Flush() return @@ -3725,7 +3725,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) { queryRangeParams, apiErrorObj := ParseQueryRangeParams(r) if apiErrorObj != nil { - zap.S().Errorf(apiErrorObj.Err.Error()) + zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err)) RespondError(w, apiErrorObj, nil) return } @@ -3733,7 +3733,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) { // add temporality for each metric temporalityErr := aH.populateTemporality(r.Context(), queryRangeParams) if temporalityErr != nil { - zap.S().Errorf("Error while adding temporality for metrics: %v", temporalityErr) + zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr)) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil) return } @@ 
-3777,12 +3777,12 @@ func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, evalFuncs()) // This shouldn't happen here, because it should have been caught earlier in validation if err != nil { - zap.S().Errorf("error in expression: %s", err.Error()) + zap.L().Error("error in expression", zap.Error(err)) return nil, err } formulaResult, err := processResults(result, expression) if err != nil { - zap.S().Errorf("error in expression: %s", err.Error()) + zap.L().Error("error in expression", zap.Error(err)) return nil, err } formulaResult.QueryName = query.QueryName diff --git a/pkg/query-service/app/logparsingpipeline/collector_config.go b/pkg/query-service/app/logparsingpipeline/collector_config.go index c370441210..17b8d96c1e 100644 --- a/pkg/query-service/app/logparsingpipeline/collector_config.go +++ b/pkg/query-service/app/logparsingpipeline/collector_config.go @@ -138,7 +138,7 @@ func buildLogsProcessors(current []string, logsParserPipeline []string) ([]strin func checkDuplicateString(pipeline []string) bool { exists := make(map[string]bool, len(pipeline)) - zap.S().Debugf("checking duplicate processors in the pipeline:", pipeline) + zap.L().Debug("checking duplicate processors in the pipeline:", zap.Any("pipeline", pipeline)) for _, processor := range pipeline { name := processor if _, ok := exists[name]; ok { diff --git a/pkg/query-service/app/logparsingpipeline/controller.go b/pkg/query-service/app/logparsingpipeline/controller.go index 9527fe9e8d..2e6b0ba4d3 100644 --- a/pkg/query-service/app/logparsingpipeline/controller.go +++ b/pkg/query-service/app/logparsingpipeline/controller.go @@ -104,7 +104,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion( if version >= 0 { savedPipelines, errors := ic.getPipelinesByVersion(ctx, version) if errors != nil { - zap.S().Errorf("failed to get pipelines for version %d, %w", version, 
errors) + zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Errors("errors", errors)) return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version")) } result = savedPipelines @@ -158,7 +158,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion( ) (*PipelinesResponse, *model.ApiError) { pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, version) if errors != nil { - zap.S().Errorf("failed to get pipelines for version %d, %w", version, errors) + zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(errors)) return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version")) } @@ -166,7 +166,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion( if version >= 0 { cv, err := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, version) if err != nil { - zap.S().Errorf("failed to get config for version %d, %s", version, err.Error()) + zap.L().Error("failed to get config for version", zap.Int("version", version), zap.Error(err)) return nil, model.WrapApiError(err, "failed to get config for given version") } configVersion = cv diff --git a/pkg/query-service/app/logparsingpipeline/db.go b/pkg/query-service/app/logparsingpipeline/db.go index df187f0de3..618060d105 100644 --- a/pkg/query-service/app/logparsingpipeline/db.go +++ b/pkg/query-service/app/logparsingpipeline/db.go @@ -99,7 +99,7 @@ func (r *Repo) insertPipeline( insertRow.RawConfig) if err != nil { - zap.S().Errorf("error in inserting pipeline data: ", zap.Error(err)) + zap.L().Error("error in inserting pipeline data", zap.Error(err)) return nil, model.InternalError(errors.Wrap(err, "failed to insert pipeline")) } @@ -171,19 +171,19 @@ func (r *Repo) GetPipeline( err := r.db.SelectContext(ctx, &pipelines, pipelineQuery, id) if err != nil { - zap.S().Errorf("failed to get ingestion pipeline from db", err) + zap.L().Error("failed to get ingestion 
pipeline from db", zap.Error(err)) return nil, model.InternalError(errors.Wrap(err, "failed to get ingestion pipeline from db")) } if len(pipelines) == 0 { - zap.S().Warnf("No row found for ingestion pipeline id", id) + zap.L().Warn("No row found for ingestion pipeline id", zap.String("id", id)) return nil, model.NotFoundError(fmt.Errorf("No row found for ingestion pipeline id %v", id)) } if len(pipelines) == 1 { err := pipelines[0].ParseRawConfig() if err != nil { - zap.S().Errorf("invalid pipeline config found", id, err) + zap.L().Error("invalid pipeline config found", zap.String("id", id), zap.Error(err)) return nil, model.InternalError( errors.Wrap(err, "found an invalid pipeline config"), ) diff --git a/pkg/query-service/app/opamp/configure_ingestionRules.go b/pkg/query-service/app/opamp/configure_ingestionRules.go index bd71aa38b0..ec9c9e5b88 100644 --- a/pkg/query-service/app/opamp/configure_ingestionRules.go +++ b/pkg/query-service/app/opamp/configure_ingestionRules.go @@ -27,10 +27,10 @@ func UpsertControlProcessors( // AddToTracePipeline() or RemoveFromTracesPipeline() prior to calling // this method - zap.S().Debug("initiating ingestion rules deployment config", signal, processors) + zap.L().Debug("initiating ingestion rules deployment config", zap.String("signal", signal), zap.Any("processors", processors)) if signal != string(Metrics) && signal != string(Traces) { - zap.S().Error("received invalid signal int UpsertControlProcessors", signal) + zap.L().Error("received invalid signal int UpsertControlProcessors", zap.String("signal", signal)) fnerr = coreModel.BadRequest(fmt.Errorf( "signal not supported in ingestion rules: %s", signal, )) @@ -51,7 +51,7 @@ func UpsertControlProcessors( } if len(agents) > 1 && signal == string(Traces) { - zap.S().Debug("found multiple agents. this feature is not supported for traces pipeline (sampling rules)") + zap.L().Debug("found multiple agents. 
this feature is not supported for traces pipeline (sampling rules)") fnerr = coreModel.BadRequest(fmt.Errorf("multiple agents not supported in sampling rules")) return } @@ -60,7 +60,7 @@ func UpsertControlProcessors( agenthash, err := addIngestionControlToAgent(agent, signal, processors, false) if err != nil { - zap.S().Error("failed to push ingestion rules config to agent", agent.ID, err) + zap.L().Error("failed to push ingestion rules config to agent", zap.String("agentID", agent.ID), zap.Error(err)) continue } @@ -89,7 +89,7 @@ func addIngestionControlToAgent(agent *model.Agent, signal string, processors ma // add ingestion control spec err = makeIngestionControlSpec(agentConf, Signal(signal), processors) if err != nil { - zap.S().Error("failed to prepare ingestion control processors for agent ", agent.ID, err) + zap.L().Error("failed to prepare ingestion control processors for agent", zap.String("agentID", agent.ID), zap.Error(err)) return confHash, err } @@ -99,7 +99,7 @@ func addIngestionControlToAgent(agent *model.Agent, signal string, processors ma return confHash, err } - zap.S().Debugf("sending new config", string(configR)) + zap.L().Debug("sending new config", zap.String("config", string(configR))) hash := sha256.New() _, err = hash.Write(configR) if err != nil { @@ -140,7 +140,7 @@ func makeIngestionControlSpec(agentConf *confmap.Conf, signal Signal, processors // merge tracesPipelinePlan with current pipeline mergedPipeline, err := buildPipeline(signal, currentPipeline) if err != nil { - zap.S().Error("failed to build pipeline", signal, err) + zap.L().Error("failed to build pipeline", zap.String("signal", string(signal)), zap.Error(err)) return err } diff --git a/pkg/query-service/app/opamp/model/agent.go b/pkg/query-service/app/opamp/model/agent.go index 1eef7bb4cf..5751bd255b 100644 --- a/pkg/query-service/app/opamp/model/agent.go +++ b/pkg/query-service/app/opamp/model/agent.go @@ -276,7 +276,7 @@ func (agent *Agent) processStatusUpdate( func 
(agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool { recommendedConfig, confId, err := configProvider.RecommendAgentConfig([]byte(agent.EffectiveConfig)) if err != nil { - zap.S().Error("could not generate config recommendation for agent:", agent.ID, err) + zap.L().Error("could not generate config recommendation for agent", zap.String("agentID", agent.ID), zap.Error(err)) return false } @@ -293,7 +293,7 @@ func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool if len(confId) < 1 { // Should never happen. Handle gracefully if it does by some chance. - zap.S().Errorf("config provider recommended a config with empty confId. Using content hash for configId") + zap.L().Error("config provider recommended a config with empty confId. Using content hash for configId") hash := sha256.New() for k, v := range cfg.Config.ConfigMap { diff --git a/pkg/query-service/app/opamp/model/agents.go b/pkg/query-service/app/opamp/model/agents.go index 2e2118e216..e984cafce2 100644 --- a/pkg/query-service/app/opamp/model/agents.go +++ b/pkg/query-service/app/opamp/model/agents.go @@ -131,8 +131,8 @@ func (agents *Agents) RecommendLatestConfigToAll( // Recommendation is same as current config if string(newConfig) == agent.EffectiveConfig { - zap.S().Infof( - "Recommended config same as current effective config for agent %s", agent.ID, + zap.L().Info( + "Recommended config same as current effective config for agent", zap.String("agentID", agent.ID), ) return nil } diff --git a/pkg/query-service/app/opamp/opamp_server.go b/pkg/query-service/app/opamp/opamp_server.go index 2a7ba4c6fa..75d8d877be 100644 --- a/pkg/query-service/app/opamp/opamp_server.go +++ b/pkg/query-service/app/opamp/opamp_server.go @@ -40,7 +40,7 @@ func InitializeServer( agents: agents, agentConfigProvider: agentConfigProvider, } - opAmpServer.server = server.New(zap.S()) + opAmpServer.server = server.New(zap.L().Sugar()) return opAmpServer } @@ -58,8 +58,8 @@ func (srv 
*Server) Start(listener string) error { unsubscribe := srv.agentConfigProvider.SubscribeToConfigUpdates(func() { err := srv.agents.RecommendLatestConfigToAll(srv.agentConfigProvider) if err != nil { - zap.S().Errorf( - "could not roll out latest config recommendation to connected agents: %w", err, + zap.L().Error( + "could not roll out latest config recommendation to connected agents", zap.Error(err), ) } }) @@ -85,15 +85,14 @@ func (srv *Server) OnMessage(conn types.Connection, msg *protobufs.AgentToServer agent, created, err := srv.agents.FindOrCreateAgent(agentID, conn) if err != nil { - zap.S().Error("Failed to find or create agent %q: %v", agentID, err) + zap.L().Error("Failed to find or create agent", zap.String("agentID", agentID), zap.Error(err)) // TODO: handle error } if created { agent.CanLB = model.ExtractLbFlag(msg.AgentDescription) - zap.S().Debugf( - "New agent added:", - zap.Bool("canLb", agent.CanLB), + zap.L().Debug( + "New agent added", zap.Bool("canLb", agent.CanLB), zap.String("ID", agent.ID), zap.Any("status", agent.CurrentStatus), ) @@ -117,7 +116,7 @@ func Ready() bool { return false } if opAmpServer.agents.Count() == 0 { - zap.S().Warnf("no agents available, all agent config requests will be rejected") + zap.L().Warn("no agents available, all agent config requests will be rejected") return false } return true diff --git a/pkg/query-service/app/opamp/pipeline_builder.go b/pkg/query-service/app/opamp/pipeline_builder.go index 841a9ce5c6..7654fe8c4f 100644 --- a/pkg/query-service/app/opamp/pipeline_builder.go +++ b/pkg/query-service/app/opamp/pipeline_builder.go @@ -89,7 +89,7 @@ func RemoveFromMetricsPipelineSpec(name string) { func checkDuplicates(pipeline []interface{}) bool { exists := make(map[string]bool, len(pipeline)) - zap.S().Debugf("checking duplicate processors in the pipeline:", pipeline) + zap.L().Debug("checking duplicate processors in the pipeline", zap.Any("pipeline", pipeline)) for _, processor := range pipeline { name := 
processor.(string) if _, ok := exists[name]; ok { @@ -149,7 +149,7 @@ func buildPipeline(signal Signal, current []interface{}) ([]interface{}, error) currentPos := loc + inserts // if disabled then remove from the pipeline if !m.Enabled { - zap.S().Debugf("build_pipeline: found a disabled item, removing from pipeline at position", currentPos-1, " ", m.Name) + zap.L().Debug("build_pipeline: found a disabled item, removing from pipeline at position", zap.Int("position", currentPos-1), zap.String("processor", m.Name)) if currentPos-1 <= 0 { pipeline = pipeline[currentPos+1:] } else { @@ -170,10 +170,10 @@ func buildPipeline(signal Signal, current []interface{}) ([]interface{}, error) // right after last matched processsor (e.g. insert filters after tail_sampling for existing list of [batch, tail_sampling]) if lastMatched <= 0 { - zap.S().Debugf("build_pipeline: found a new item to be inserted, inserting at position 0:", m.Name) + zap.L().Debug("build_pipeline: found a new item to be inserted, inserting at position 0", zap.String("processor", m.Name)) pipeline = append([]interface{}{m.Name}, pipeline[lastMatched+1:]...) 
} else { - zap.S().Debugf("build_pipeline: found a new item to be inserted, inserting at position :", lastMatched, " ", m.Name) + zap.L().Debug("build_pipeline: found a new item to be inserted, inserting at position", zap.Int("position", lastMatched), zap.String("processor", m.Name)) prior := make([]interface{}, len(pipeline[:lastMatched])) next := make([]interface{}, len(pipeline[lastMatched:])) copy(prior, pipeline[:lastMatched]) diff --git a/pkg/query-service/app/querier/helper.go b/pkg/query-service/app/querier/helper.go index 47f65fe007..71ee5da72d 100644 --- a/pkg/query-service/app/querier/helper.go +++ b/pkg/query-service/app/querier/helper.go @@ -116,7 +116,7 @@ func (q *querier) runBuilderQuery( if !params.NoCache && q.cache != nil { var retrieveStatus status.RetrieveStatus data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -143,7 +143,7 @@ func (q *querier) runBuilderQuery( missedSeries = append(missedSeries, series...) 
} if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -154,7 +154,7 @@ func (q *querier) runBuilderQuery( // caching the data mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) if marshallingErr != nil { - zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + zap.L().Error("error marshalling merged series", zap.Error(marshallingErr)) } } @@ -172,7 +172,7 @@ func (q *querier) runBuilderQuery( // caching the data err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } @@ -251,7 +251,7 @@ func (q *querier) runBuilderQuery( if !params.NoCache && q.cache != nil { var retrieveStatus status.RetrieveStatus data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -290,7 +290,7 @@ func (q *querier) runBuilderQuery( missedSeries = append(missedSeries, series...) 
} if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) var mergedSeriesData []byte @@ -300,7 +300,7 @@ func (q *querier) runBuilderQuery( // caching the data mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) if marshallingErr != nil { - zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + zap.L().Error("error marshalling merged series", zap.Error(marshallingErr)) } } @@ -316,7 +316,7 @@ func (q *querier) runBuilderQuery( if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil { err := q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } @@ -353,7 +353,7 @@ func (q *querier) runBuilderExpression( if !params.NoCache && q.cache != nil { var retrieveStatus status.RetrieveStatus data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -379,7 +379,7 @@ func (q *querier) runBuilderExpression( missedSeries = append(missedSeries, series...) 
} if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -390,7 +390,7 @@ func (q *querier) runBuilderExpression( // caching the data mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) if marshallingErr != nil { - zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + zap.L().Error("error marshalling merged series", zap.Error(marshallingErr)) } } @@ -406,7 +406,7 @@ func (q *querier) runBuilderExpression( if len(missedSeries) > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil { err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go index 103660f8bc..d735e00a1f 100644 --- a/pkg/query-service/app/querier/querier.go +++ b/pkg/query-service/app/querier/querier.go @@ -108,7 +108,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3. 
series.Points = points } if pointsWithNegativeTimestamps > 0 { - zap.S().Errorf("found points with negative timestamps for query %s", query) + zap.L().Error("found points with negative timestamps for query", zap.String("query", query)) } return result, err } @@ -346,7 +346,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam // Ensure NoCache is not set and cache is not nil if !params.NoCache && q.cache != nil { data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -365,7 +365,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam } if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { // ideally we should not be getting an error here - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -375,12 +375,12 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { mergedSeriesData, err := json.Marshal(mergedSeries) if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) + zap.L().Error("error marshalling merged series", zap.Error(err)) return } err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } diff --git a/pkg/query-service/app/querier/v2/helper.go b/pkg/query-service/app/querier/v2/helper.go index 24738806d3..e564956f19 100644 --- a/pkg/query-service/app/querier/v2/helper.go +++ b/pkg/query-service/app/querier/v2/helper.go @@ -169,7 +169,7 @@ func (q 
*querier) runBuilderQuery( if !params.NoCache && q.cache != nil { var retrieveStatus status.RetrieveStatus data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -208,7 +208,7 @@ func (q *querier) runBuilderQuery( missedSeries = append(missedSeries, series...) } if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -221,12 +221,12 @@ func (q *querier) runBuilderQuery( if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { mergedSeriesData, err := json.Marshal(mergedSeries) if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) + zap.L().Error("error marshalling merged series", zap.Error(err)) return } err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } @@ -263,7 +263,7 @@ func (q *querier) runBuilderExpression( if !params.NoCache && q.cache != nil { var retrieveStatus status.RetrieveStatus data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -289,7 +289,7 @@ func (q *querier) runBuilderExpression( missedSeries = append(missedSeries, series...) 
} if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -302,12 +302,12 @@ func (q *querier) runBuilderExpression( if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { mergedSeriesData, err := json.Marshal(mergedSeries) if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) + zap.L().Error("error marshalling merged series", zap.Error(err)) return } err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go index 50f19b89b1..e45153da7d 100644 --- a/pkg/query-service/app/querier/v2/querier.go +++ b/pkg/query-service/app/querier/v2/querier.go @@ -108,7 +108,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3. 
series.Points = points } if pointsWithNegativeTimestamps > 0 { - zap.S().Errorf("found points with negative timestamps for query %s", query) + zap.L().Error("found points with negative timestamps for query", zap.String("query", query)) } return result, err } @@ -326,7 +326,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam // Ensure NoCache is not set and cache is not nil if !params.NoCache && q.cache != nil { data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) - zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + zap.L().Info("cache retrieve status", zap.String("status", retrieveStatus.String())) if err == nil { cachedData = data } @@ -345,7 +345,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam } if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { // ideally we should not be getting an error here - zap.S().Error("error unmarshalling cached data", zap.Error(err)) + zap.L().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) @@ -355,12 +355,12 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { mergedSeriesData, err := json.Marshal(mergedSeries) if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) + zap.L().Error("error marshalling merged series", zap.Error(err)) return } err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { - zap.S().Error("error storing merged series", zap.Error(err)) + zap.L().Error("error storing merged series", zap.Error(err)) return } } diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index 647edd191b..693bc88f44 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ b/pkg/query-service/app/queryBuilder/query_builder.go 
@@ -246,7 +246,7 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in } queries[queryName] = queryString default: - zap.S().Errorf("Unknown data source %s", query.DataSource) + zap.L().Error("Unknown data source", zap.String("dataSource", string(query.DataSource))) } } } diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 81ef4e9c13..549e74e976 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -115,7 +115,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { var reader interfaces.Reader storage := os.Getenv("STORAGE") if storage == "clickhouse" { - zap.S().Info("Using ClickHouse as datastore ...") + zap.L().Info("Using ClickHouse as datastore ...") clickhouseReader := clickhouseReader.NewReader( localDB, serverOptions.PromConfigPath, @@ -304,7 +304,7 @@ func loggingMiddleware(next http.Handler) http.Handler { path, _ := route.GetPathTemplate() startTime := time.Now() next.ServeHTTP(w, r) - zap.S().Info(path+"\ttimeTaken:"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path)) + zap.L().Info(path+"\ttimeTaken:"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path)) }) } @@ -375,7 +375,7 @@ func loggingMiddlewarePrivate(next http.Handler) http.Handler { path, _ := route.GetPathTemplate() startTime := time.Now() next.ServeHTTP(w, r) - zap.S().Info(path+"\tprivatePort: true \ttimeTaken"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true)) + zap.L().Info(path+"\tprivatePort: true \ttimeTaken"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true)) }) } @@ -550,7 +550,7 @@ func (s *Server) initListeners() error { return err } - 
zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort)) // listen on private port to support internal services privateHostPort := s.serverOptions.PrivateHostPort @@ -563,7 +563,7 @@ func (s *Server) initListeners() error { if err != nil { return err } - zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort)) + zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort)) return nil } @@ -575,7 +575,7 @@ func (s *Server) Start() error { if !s.serverOptions.DisableRules { s.ruleManager.Start() } else { - zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE") + zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE") } err := s.initListeners() @@ -589,23 +589,23 @@ func (s *Server) Start() error { } go func() { - zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) + zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) switch err := s.httpServer.Serve(s.httpConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do default: - zap.S().Error("Could not start HTTP server", zap.Error(err)) + zap.L().Error("Could not start HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable }() go func() { - zap.S().Info("Starting pprof server", zap.String("addr", constants.DebugHttpPort)) + zap.L().Info("Starting pprof server", zap.String("addr", constants.DebugHttpPort)) err = http.ListenAndServe(constants.DebugHttpPort, nil) if err != nil { - zap.S().Error("Could not start pprof server", zap.Error(err)) + zap.L().Error("Could not start pprof server", zap.Error(err)) } }() @@ -615,14 +615,14 @@ func (s *Server) Start() error { 
} fmt.Println("starting private http") go func() { - zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) + zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) switch err := s.privateHTTP.Serve(s.privateConn); err { case nil, http.ErrServerClosed, cmux.ErrListenerClosed: // normal exit, nothing to do - zap.S().Info("private http server closed") + zap.L().Info("private http server closed") default: - zap.S().Error("Could not start private HTTP server", zap.Error(err)) + zap.L().Error("Could not start private HTTP server", zap.Error(err)) } s.unavailableChannel <- healthcheck.Unavailable @@ -630,10 +630,10 @@ func (s *Server) Start() error { }() go func() { - zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", constants.OpAmpWsEndpoint)) + zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", constants.OpAmpWsEndpoint)) err := s.opampServer.Start(constants.OpAmpWsEndpoint) if err != nil { - zap.S().Info("opamp ws server failed to start", err) + zap.L().Info("opamp ws server failed to start", zap.Error(err)) s.unavailableChannel <- healthcheck.Unavailable } }() @@ -706,7 +706,7 @@ func makeRulesManager( return nil, fmt.Errorf("rule manager error: %v", err) } - zap.S().Info("rules manager is ready") + zap.L().Info("rules manager is ready") return manager, nil } diff --git a/pkg/query-service/auth/auth.go b/pkg/query-service/auth/auth.go index e307f401ab..0a90c8c730 100644 --- a/pkg/query-service/auth/auth.go +++ b/pkg/query-service/auth/auth.go @@ -40,7 +40,7 @@ type InviteEmailData struct { // The root user should be able to invite people to create account on SigNoz cluster. 
func Invite(ctx context.Context, req *model.InviteRequest) (*model.InviteResponse, error) { - zap.S().Debugf("Got an invite request for email: %s\n", req.Email) + zap.L().Debug("Got an invite request for email", zap.String("email", req.Email)) token, err := utils.RandomHex(opaqueTokenSize) if err != nil { @@ -110,13 +110,13 @@ func inviteEmail(req *model.InviteRequest, au *model.UserPayload, token string) tmpl, err := template.ParseFiles(constants.InviteEmailTemplate) if err != nil { - zap.S().Errorf("failed to send email", err) + zap.L().Error("failed to send email", zap.Error(err)) return } var body bytes.Buffer if err := tmpl.Execute(&body, data); err != nil { - zap.S().Errorf("failed to send email", err) + zap.L().Error("failed to send email", zap.Error(err)) return } @@ -126,7 +126,7 @@ func inviteEmail(req *model.InviteRequest, au *model.UserPayload, token string) body.String(), ) if err != nil { - zap.S().Errorf("failed to send email", err) + zap.L().Error("failed to send email", zap.Error(err)) return } return @@ -134,7 +134,7 @@ func inviteEmail(req *model.InviteRequest, au *model.UserPayload, token string) // RevokeInvite is used to revoke the invitation for the given email. func RevokeInvite(ctx context.Context, email string) error { - zap.S().Debugf("RevokeInvite method invoked for email: %s\n", email) + zap.L().Debug("RevokeInvite method invoked for email", zap.String("email", email)) if !isValidEmail(email) { return ErrorInvalidInviteToken @@ -148,7 +148,7 @@ func RevokeInvite(ctx context.Context, email string) error { // GetInvite returns an invitation object for the given token. 
func GetInvite(ctx context.Context, token string) (*model.InvitationResponseObject, error) { - zap.S().Debugf("GetInvite method invoked for token: %s\n", token) + zap.L().Debug("GetInvite method invoked for token", zap.String("token", token)) inv, apiErr := dao.DB().GetInviteFromToken(ctx, token) if apiErr != nil { @@ -282,13 +282,13 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*model.User, org, apierr := dao.DB().CreateOrg(ctx, &model.Organization{Name: req.OrgName}) if apierr != nil { - zap.S().Debugf("CreateOrg failed, err: %v\n", zap.Error(apierr.ToError())) + zap.L().Error("CreateOrg failed", zap.Error(apierr.ToError())) return nil, apierr } group, apiErr := dao.DB().GetGroupByName(ctx, groupName) if apiErr != nil { - zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err) + zap.L().Error("GetGroupByName failed", zap.Error(apiErr.Err)) return nil, apiErr } @@ -297,7 +297,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*model.User, hash, err = PasswordHash(req.Password) if err != nil { - zap.S().Errorf("failed to generate password hash when registering a user", zap.Error(err)) + zap.L().Error("failed to generate password hash when registering a user", zap.Error(err)) return nil, model.InternalError(model.ErrSignupFailed{}) } @@ -328,7 +328,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b invite, err := ValidateInvite(ctx, req) if err != nil { - zap.S().Errorf("failed to validate invite token", err) + zap.L().Error("failed to validate invite token", zap.Error(err)) return nil, model.BadRequest(model.ErrSignupFailed{}) } @@ -337,7 +337,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b // in the same transaction at the end of this function userPayload, apierr := dao.DB().GetUserByEmail(ctx, invite.Email) if apierr != nil { - zap.S().Debugf("failed to get user by email", apierr.Err) + zap.L().Error("failed to get user by email", 
zap.Error(apierr.Err)) return nil, apierr } @@ -347,7 +347,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b } if invite.OrgId == "" { - zap.S().Errorf("failed to find org in the invite") + zap.L().Error("failed to find org in the invite") return nil, model.InternalError(fmt.Errorf("invalid invite, org not found")) } @@ -358,7 +358,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b group, apiErr := dao.DB().GetGroupByName(ctx, invite.Role) if apiErr != nil { - zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err) + zap.L().Error("GetGroupByName failed", zap.Error(apiErr.Err)) return nil, model.InternalError(model.ErrSignupFailed{}) } @@ -368,13 +368,13 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b if req.Password != "" { hash, err = PasswordHash(req.Password) if err != nil { - zap.S().Errorf("failed to generate password hash when registering a user", zap.Error(err)) + zap.L().Error("failed to generate password hash when registering a user", zap.Error(err)) return nil, model.InternalError(model.ErrSignupFailed{}) } } else { hash, err = PasswordHash(utils.GeneratePassowrd()) if err != nil { - zap.S().Errorf("failed to generate password hash when registering a user", zap.Error(err)) + zap.L().Error("failed to generate password hash when registering a user", zap.Error(err)) return nil, model.InternalError(model.ErrSignupFailed{}) } } @@ -393,13 +393,13 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b // TODO(Ahsan): Ideally create user and delete invitation should happen in a txn. 
user, apiErr = dao.DB().CreateUser(ctx, user, false) if apiErr != nil { - zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err) + zap.L().Error("CreateUser failed", zap.Error(apiErr.Err)) return nil, apiErr } apiErr = dao.DB().DeleteInvitation(ctx, user.Email) if apiErr != nil { - zap.S().Debugf("delete invitation failed, err: %v\n", apiErr.Err) + zap.L().Error("delete invitation failed", zap.Error(apiErr.Err)) return nil, apiErr } @@ -428,17 +428,17 @@ func Register(ctx context.Context, req *RegisterRequest) (*model.User, *model.Ap // Login method returns access and refresh tokens on successful login, else it errors out. func Login(ctx context.Context, request *model.LoginRequest) (*model.LoginResponse, error) { - zap.S().Debugf("Login method called for user: %s\n", request.Email) + zap.L().Debug("Login method called for user", zap.String("email", request.Email)) user, err := authenticateLogin(ctx, request) if err != nil { - zap.S().Debugf("Failed to authenticate login request, %v", err) + zap.L().Error("Failed to authenticate login request", zap.Error(err)) return nil, err } userjwt, err := GenerateJWTForUser(&user.User) if err != nil { - zap.S().Debugf("Failed to generate JWT against login creds, %v", err) + zap.L().Error("Failed to generate JWT against login creds", zap.Error(err)) return nil, err } diff --git a/pkg/query-service/auth/jwt.go b/pkg/query-service/auth/jwt.go index 90e2f7008d..b27d43fb9d 100644 --- a/pkg/query-service/auth/jwt.go +++ b/pkg/query-service/auth/jwt.go @@ -60,7 +60,7 @@ func validateUser(tok string) (*model.UserPayload, error) { func AttachJwtToContext(ctx context.Context, r *http.Request) context.Context { token, err := ExtractJwtFromRequest(r) if err != nil { - zap.S().Debugf("Error while getting token from header, %v", err) + zap.L().Error("Error while getting token from header", zap.Error(err)) return ctx } diff --git a/pkg/query-service/cache/redis/redis.go b/pkg/query-service/cache/redis/redis.go index 
22278c52ed..6338eca6f3 100644 --- a/pkg/query-service/cache/redis/redis.go +++ b/pkg/query-service/cache/redis/redis.go @@ -59,7 +59,7 @@ func (c *cache) Retrieve(cacheKey string, allowExpired bool) ([]byte, status.Ret func (c *cache) SetTTL(cacheKey string, ttl time.Duration) { err := c.client.Expire(context.Background(), cacheKey, ttl).Err() if err != nil { - zap.S().Error("error setting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Duration("ttl", ttl), zap.Error(err)) + zap.L().Error("error setting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Duration("ttl", ttl), zap.Error(err)) } } @@ -67,7 +67,7 @@ func (c *cache) SetTTL(cacheKey string, ttl time.Duration) { func (c *cache) Remove(cacheKey string) { err := c.client.Del(context.Background(), cacheKey).Err() if err != nil { - zap.S().Error("error deleting cache key", zap.String("cacheKey", cacheKey), zap.Error(err)) + zap.L().Error("error deleting cache key", zap.String("cacheKey", cacheKey), zap.Error(err)) } } @@ -102,7 +102,7 @@ func (c *cache) GetOptions() *Options { func (c *cache) GetTTL(cacheKey string) time.Duration { ttl, err := c.client.TTL(context.Background(), cacheKey).Result() if err != nil { - zap.S().Error("error getting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Error(err)) + zap.L().Error("error getting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Error(err)) } return ttl } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 6181a66ea8..54fc819478 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -58,8 +58,8 @@ var InviteEmailTemplate = GetOrDefaultEnv("INVITE_EMAIL_TEMPLATE", "/root/templa // Alert manager channel subpath var AmChannelApiPath = GetOrDefaultEnv("ALERTMANAGER_API_CHANNEL_PATH", "v1/routes") -var OTLPTarget = GetOrDefaultEnv("OTLP_TARGET", "") -var LogExportBatchSize = GetOrDefaultEnv("LOG_EXPORT_BATCH_SIZE", "1000") +var 
OTLPTarget = GetOrDefaultEnv("OTEL_EXPORTER_OTLP_ENDPOINT", "") +var LogExportBatchSize = GetOrDefaultEnv("OTEL_BLRP_MAX_EXPORT_BATCH_SIZE", "512") var RELATIONAL_DATASOURCE_PATH = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db") diff --git a/pkg/query-service/dao/sqlite/connection.go b/pkg/query-service/dao/sqlite/connection.go index a7335d6426..a2545e9531 100644 --- a/pkg/query-service/dao/sqlite/connection.go +++ b/pkg/query-service/dao/sqlite/connection.go @@ -180,7 +180,7 @@ func (mds *ModelDaoSqlite) createGroupIfNotPresent(ctx context.Context, return group, nil } - zap.S().Debugf("%s is not found, creating it", name) + zap.L().Debug("group is not found, creating it", zap.String("group_name", name)) group, cErr := mds.CreateGroup(ctx, &model.Group{Name: name}) if cErr != nil { return nil, cErr.Err diff --git a/pkg/query-service/featureManager/manager.go b/pkg/query-service/featureManager/manager.go index 15175b1882..439b8b7bd2 100644 --- a/pkg/query-service/featureManager/manager.go +++ b/pkg/query-service/featureManager/manager.go @@ -43,12 +43,12 @@ func (fm *FeatureManager) GetFeatureFlags() (model.FeatureSet, error) { } func (fm *FeatureManager) InitFeatures(req model.FeatureSet) error { - zap.S().Error("InitFeatures not implemented in OSS") + zap.L().Error("InitFeatures not implemented in OSS") return nil } func (fm *FeatureManager) UpdateFeatureFlag(req model.Feature) error { - zap.S().Error("UpdateFeatureFlag not implemented in OSS") + zap.L().Error("UpdateFeatureFlag not implemented in OSS") return nil } @@ -63,4 +63,4 @@ func (fm *FeatureManager) GetFeatureFlag(key string) (model.Feature, error) { } } return model.Feature{}, model.ErrFeatureUnavailable{Key: key} -} \ No newline at end of file +} diff --git a/pkg/query-service/integrations/alertManager/manager.go b/pkg/query-service/integrations/alertManager/manager.go index 3b7df3ce56..d80893010e 100644 --- a/pkg/query-service/integrations/alertManager/manager.go +++ 
b/pkg/query-service/integrations/alertManager/manager.go @@ -83,13 +83,12 @@ func (m *manager) AddRoute(receiver *Receiver) *model.ApiError { response, err := http.Post(amURL, contentType, bytes.NewBuffer(receiverString)) if err != nil { - zap.S().Errorf(fmt.Sprintf("Error in getting response of API call to alertmanager(POST %s)\n", amURL), err) + zap.L().Error("Error in getting response of API call to alertmanager", zap.String("url", amURL), zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if response.StatusCode > 299 { - err := fmt.Errorf(fmt.Sprintf("Error in getting 2xx response in API call to alertmanager(POST %s)\n", amURL), response.Status) - zap.S().Error(err) + err = fmt.Errorf("error in getting 2xx response in API call to alertmanager(POST %s), status: %s", amURL, response.Status); zap.L().Error("Error in getting 2xx response in API call to alertmanager", zap.String("url", amURL), zap.String("status", response.Status)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } return nil @@ -102,7 +101,7 @@ func (m *manager) EditRoute(receiver *Receiver) *model.ApiError { req, err := http.NewRequest(http.MethodPut, amURL, bytes.NewBuffer(receiverString)) if err != nil { - zap.S().Errorf(fmt.Sprintf("Error creating new update request for API call to alertmanager(PUT %s)\n", amURL), err) + zap.L().Error("Error creating new update request for API call to alertmanager", zap.String("url", amURL), zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -112,13 +111,12 @@ func (m *manager) EditRoute(receiver *Receiver) *model.ApiError { response, err := client.Do(req) if err != nil { - zap.S().Errorf(fmt.Sprintf("Error in getting response of API call to alertmanager(PUT %s)\n", amURL), err) + zap.L().Error("Error in getting response of API call to alertmanager", zap.String("url", amURL), zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if response.StatusCode > 299 { - err := fmt.Errorf(fmt.Sprintf("Error in getting 2xx response in PUT API call to alertmanager(PUT %s)\n", amURL), response.Status) -
zap.S().Error(err) + err = fmt.Errorf("error in getting 2xx response in PUT API call to alertmanager(PUT %s), status: %s", amURL, response.Status); zap.L().Error("Error in getting 2xx response in PUT API call to alertmanager", zap.String("url", amURL), zap.String("status", response.Status)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } return nil @@ -132,7 +130,7 @@ func (m *manager) DeleteRoute(name string) *model.ApiError { req, err := http.NewRequest(http.MethodDelete, amURL, bytes.NewBuffer(requestData)) if err != nil { - zap.S().Errorf("Error in creating new delete request to alertmanager/v1/receivers\n", err) + zap.L().Error("Error in creating new delete request to alertmanager/v1/receivers", zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } @@ -142,13 +140,13 @@ func (m *manager) DeleteRoute(name string) *model.ApiError { response, err := client.Do(req) if err != nil { - zap.S().Errorf(fmt.Sprintf("Error in getting response of API call to alertmanager(DELETE %s)\n", amURL), err) + zap.L().Error("Error in getting response of API call to alertmanager", zap.String("url", amURL), zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if response.StatusCode > 299 { err := fmt.Errorf(fmt.Sprintf("Error in getting 2xx response in PUT API call to alertmanager(DELETE %s)\n", amURL), response.Status) - zap.S().Error(err) + zap.L().Error("Error in getting 2xx response in DELETE API call to alertmanager", zap.String("url", amURL), zap.String("status", response.Status)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } return nil @@ -162,19 +160,19 @@ func (m *manager) TestReceiver(receiver *Receiver) *model.ApiError { response, err := http.Post(amTestURL, contentType, bytes.NewBuffer(receiverBytes)) if err != nil { - zap.S().Errorf(fmt.Sprintf("Error in getting response of API call to alertmanager(POST %s)\n", amTestURL), err) + zap.L().Error("Error in getting response of API call to alertmanager", zap.String("url", amTestURL), zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if
response.StatusCode > 201 && response.StatusCode < 400 { err := fmt.Errorf(fmt.Sprintf("Invalid parameters in test alert api for alertmanager(POST %s)\n", amTestURL), response.Status) - zap.S().Error(err) + zap.L().Error("Invalid parameters in test alert api for alertmanager", zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } if response.StatusCode > 400 { err := fmt.Errorf(fmt.Sprintf("Received Server Error response for API call to alertmanager(POST %s)\n", amTestURL), response.Status) - zap.S().Error(err) + zap.L().Error("Received Server Error response for API call to alertmanager", zap.Error(err)) return &model.ApiError{Typ: model.ErrorInternal, Err: err} } diff --git a/pkg/query-service/integrations/alertManager/notifier.go b/pkg/query-service/integrations/alertManager/notifier.go index 148d489ed0..e86cf28c5e 100644 --- a/pkg/query-service/integrations/alertManager/notifier.go +++ b/pkg/query-service/integrations/alertManager/notifier.go @@ -87,11 +87,11 @@ func NewNotifier(o *NotifierOptions, logger log.Logger) (*Notifier, error) { amset, err := newAlertmanagerSet(o.AlertManagerURLs, timeout, logger) if err != nil { - zap.S().Errorf("failed to parse alert manager urls") + zap.L().Error("failed to parse alert manager urls") return n, err } n.alertmanagers = amset - zap.S().Info("Starting notifier with alert manager:", o.AlertManagerURLs) + zap.L().Info("Starting notifier with alert manager", zap.Strings("urls", o.AlertManagerURLs)) return n, nil } @@ -123,7 +123,7 @@ func (n *Notifier) nextBatch() []*Alert { // Run dispatches notifications continuously. func (n *Notifier) Run() { - zap.S().Info("msg: Initiating alert notifier...") + zap.L().Info("msg: Initiating alert notifier...") for { select { case <-n.ctx.Done(): @@ -133,7 +133,7 @@ func (n *Notifier) Run() { alerts := n.nextBatch() if !n.sendAll(alerts...) 
{ - zap.S().Warn("msg: dropped alerts", "\t count:", len(alerts)) + zap.L().Warn("msg: dropped alerts", zap.Int("count", len(alerts))) // n.metrics.dropped.Add(float64(len(alerts))) } // If the queue still has items left, kick off the next iteration. @@ -205,7 +205,7 @@ func (n *Notifier) sendAll(alerts ...*Alert) bool { b, err := json.Marshal(alerts) if err != nil { - zap.S().Errorf("msg", "Encoding alerts failed", "err", err) + zap.L().Error("Encoding alerts failed", zap.Error(err)) return false } @@ -229,7 +229,7 @@ func (n *Notifier) sendAll(alerts ...*Alert) bool { go func(ams *alertmanagerSet, am Manager) { u := am.URLPath(alertPushEndpoint).String() if err := n.sendOne(ctx, ams.client, u, b); err != nil { - zap.S().Errorf("alertmanager", u, "count", len(alerts), "msg", "Error calling alert API", "err", err) + zap.L().Error("Error calling alert API", zap.String("alertmanager", u), zap.Int("count", len(alerts)), zap.Error(err)) } else { atomic.AddUint64(&numSuccess, 1) } diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index f0602c4dcd..ec68c61939 100644 --- a/pkg/query-service/main.go +++ b/pkg/query-service/main.go @@ -18,7 +18,7 @@ import ( ) func initZapLog() *zap.Logger { - config := zap.NewDevelopmentConfig() + config := zap.NewProductionConfig() config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder config.EncoderConfig.TimeKey = "timestamp" config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder @@ -85,9 +85,9 @@ func main() { auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET") if len(auth.JwtSecret) == 0 { - zap.S().Warn("No JWT secret key is specified.") + zap.L().Warn("No JWT secret key is specified.") } else { - zap.S().Info("No JWT secret key set successfully.") + zap.L().Info("JWT secret key set successfully.") } server, err := app.NewServer(serverOptions) diff --git a/pkg/query-service/queryBuilderToExpr/queryBuilderToExpr.go b/pkg/query-service/queryBuilderToExpr/queryBuilderToExpr.go index
0139792dfa..e853a37685 100644 --- a/pkg/query-service/queryBuilderToExpr/queryBuilderToExpr.go +++ b/pkg/query-service/queryBuilderToExpr/queryBuilderToExpr.go @@ -143,11 +143,11 @@ func exprFormattedValue(v interface{}) string { case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64, bool: return strings.Join(strings.Fields(fmt.Sprint(x)), ",") default: - zap.S().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0]))) + zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0]))) return "" } default: - zap.S().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x))) + zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x))) return "" } } diff --git a/pkg/query-service/rules/alerting.go b/pkg/query-service/rules/alerting.go index b2ee0b53d0..3e56c2d0c7 100644 --- a/pkg/query-service/rules/alerting.go +++ b/pkg/query-service/rules/alerting.go @@ -15,15 +15,9 @@ import ( // this file contains common structs and methods used by // rule engine -// how long before re-sending the alert -const resolvedRetention = 15 * time.Minute - const ( - // AlertMetricName is the metric name for synthetic alert timeseries. - alertMetricName = "ALERTS" - - // AlertForStateMetricName is the metric name for 'for' state of alert. 
- alertForStateMetricName = "ALERTS_FOR_STATE" + // how long before re-sending the alert + resolvedRetention = 15 * time.Minute TestAlertPostFix = "_TEST_ALERT" ) diff --git a/pkg/query-service/rules/apiParams.go b/pkg/query-service/rules/apiParams.go index 0ccf885b3d..1393f59697 100644 --- a/pkg/query-service/rules/apiParams.go +++ b/pkg/query-service/rules/apiParams.go @@ -10,7 +10,6 @@ import ( "github.com/pkg/errors" "go.signoz.io/signoz/pkg/query-service/model" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" - "go.uber.org/zap" "go.signoz.io/signoz/pkg/query-service/utils/times" "go.signoz.io/signoz/pkg/query-service/utils/timestamp" @@ -74,18 +73,15 @@ func parseIntoRule(initRule PostableRule, content []byte, kind string) (*Postabl var err error if kind == "json" { if err = json.Unmarshal(content, rule); err != nil { - zap.S().Debugf("postable rule content", string(content), "\t kind:", kind) return nil, []error{fmt.Errorf("failed to load json")} } } else if kind == "yaml" { if err = yaml.Unmarshal(content, rule); err != nil { - zap.S().Debugf("postable rule content", string(content), "\t kind:", kind) return nil, []error{fmt.Errorf("failed to load yaml")} } } else { return nil, []error{fmt.Errorf("invalid data type")} } - zap.S().Debugf("postable rule(parsed):", rule) if rule.RuleCondition == nil && rule.Expr != "" { // account for legacy rules @@ -126,8 +122,6 @@ func parseIntoRule(initRule PostableRule, content []byte, kind string) (*Postabl } } - zap.S().Debugf("postable rule:", rule, "\t condition", rule.RuleCondition.String()) - if errs := rule.Validate(); len(errs) > 0 { return nil, errs } diff --git a/pkg/query-service/rules/db.go b/pkg/query-service/rules/db.go index f0b1bb3281..cf903884fd 100644 --- a/pkg/query-service/rules/db.go +++ b/pkg/query-service/rules/db.go @@ -73,7 +73,7 @@ func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (int64, Tx, erro stmt, err := tx.Prepare(`INSERT into rules (created_at, created_by, updated_at, 
updated_by, data) VALUES($1,$2,$3,$4,$5);`) if err != nil { - zap.S().Errorf("Error in preparing statement for INSERT to rules\n", err) + zap.L().Error("Error in preparing statement for INSERT to rules", zap.Error(err)) tx.Rollback() return lastInsertId, nil, err } @@ -82,14 +82,14 @@ func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (int64, Tx, erro result, err := stmt.Exec(createdAt, userEmail, updatedAt, userEmail, rule) if err != nil { - zap.S().Errorf("Error in Executing prepared statement for INSERT to rules\n", err) + zap.L().Error("Error in Executing prepared statement for INSERT to rules", zap.Error(err)) tx.Rollback() // return an error too, we may want to wrap them return lastInsertId, nil, err } lastInsertId, err = result.LastInsertId() if err != nil { - zap.S().Errorf("Error in getting last insert id for INSERT to rules\n", err) + zap.L().Error("Error in getting last insert id for INSERT to rules", zap.Error(err)) tx.Rollback() // return an error too, we may want to wrap them return lastInsertId, nil, err } @@ -122,14 +122,14 @@ func (r *ruleDB) EditRuleTx(ctx context.Context, rule string, id string) (string //} stmt, err := r.Prepare(`UPDATE rules SET updated_by=$1, updated_at=$2, data=$3 WHERE id=$4;`) if err != nil { - zap.S().Errorf("Error in preparing statement for UPDATE to rules\n", err) + zap.L().Error("Error in preparing statement for UPDATE to rules", zap.Error(err)) // tx.Rollback() return groupName, nil, err } defer stmt.Close() if _, err := stmt.Exec(userEmail, updatedAt, rule, idInt); err != nil { - zap.S().Errorf("Error in Executing prepared statement for UPDATE to rules\n", err) + zap.L().Error("Error in Executing prepared statement for UPDATE to rules", zap.Error(err)) // tx.Rollback() // return an error too, we may want to wrap them return groupName, nil, err } @@ -158,7 +158,7 @@ func (r *ruleDB) DeleteRuleTx(ctx context.Context, id string) (string, Tx, error defer stmt.Close() if _, err := stmt.Exec(idInt); err !=
nil { - zap.S().Errorf("Error in Executing prepared statement for DELETE to rules\n", err) + zap.L().Error("Error in Executing prepared statement for DELETE to rules", zap.Error(err)) // tx.Rollback() return groupName, nil, err } @@ -175,7 +175,7 @@ func (r *ruleDB) GetStoredRules(ctx context.Context) ([]StoredRule, error) { err := r.Select(&rules, query) if err != nil { - zap.S().Debug("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } @@ -193,10 +193,10 @@ func (r *ruleDB) GetStoredRule(ctx context.Context, id string) (*StoredRule, err query := fmt.Sprintf("SELECT id, created_at, created_by, updated_at, updated_by, data FROM rules WHERE id=%d", intId) err = r.Get(rule, query) - // zap.S().Info(query) + // zap.L().Info(query) if err != nil { - zap.S().Error("Error in processing sql query: ", err) + zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index 95181eade6..d5c6e74dd3 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -125,7 +125,7 @@ func NewManager(o *ManagerOptions) (*Manager, error) { func (m *Manager) Start() { if err := m.initiate(); err != nil { - zap.S().Errorf("failed to initialize alerting rules manager: %v", err) + zap.L().Error("failed to initialize alerting rules manager", zap.Error(err)) } m.run() } @@ -154,40 +154,40 @@ func (m *Manager) initiate() error { if len(errs) > 0 { if errs[0].Error() == "failed to load json" { - zap.S().Info("failed to load rule in json format, trying yaml now:", rec.Data) + zap.L().Info("failed to load rule in json format, trying yaml now:", zap.String("name", taskName)) // see if rule is stored in yaml format parsedRule, errs = parsePostableRule([]byte(rec.Data), "yaml") if parsedRule == nil { - zap.S().Errorf("failed to parse and initialize yaml rule:", errs) + zap.L().Error("failed to 
parse and initialize yaml rule", zap.String("name", taskName), zap.Error(errs[0])) // just one rule is being parsed so expect just one error loadErrors = append(loadErrors, errs[0]) continue } else { // rule stored in yaml, so migrate it to json - zap.S().Info("msg:", "migrating rule from JSON to yaml", "\t rule:", rec.Data, "\t parsed rule:", parsedRule) + zap.L().Info("migrating rule from JSON to yaml", zap.String("name", taskName)) ruleJSON, err := json.Marshal(parsedRule) if err == nil { taskName, _, err := m.ruleDB.EditRuleTx(context.Background(), string(ruleJSON), fmt.Sprintf("%d", rec.Id)) if err != nil { - zap.S().Errorf("msg: failed to migrate rule ", "/t error:", err) + zap.L().Error("failed to migrate rule", zap.String("name", taskName), zap.Error(err)) } else { - zap.S().Info("msg:", "migrated rule from yaml to json", "/t rule:", taskName) + zap.L().Info("migrated rule from yaml to json", zap.String("name", taskName)) } } } } else { - zap.S().Errorf("failed to parse and initialize rule:", errs) + zap.L().Error("failed to parse and initialize rule", zap.String("name", taskName), zap.Error(errs[0])) // just one rule is being parsed so expect just one error - loadErrors = append(loadErrors, errs[0]) + loadErrors = append(loadErrors, errs[0]) continue } } if !parsedRule.Disabled { err := m.addTask(parsedRule, taskName) if err != nil { - zap.S().Errorf("failed to load the rule definition (%s): %v", taskName, err) + zap.L().Error("failed to load the rule definition", zap.String("name", taskName), zap.Error(err)) } } } @@ -213,13 +213,13 @@ func (m *Manager) Stop() { m.mtx.Lock() defer m.mtx.Unlock() - zap.S().Info("msg: ", "Stopping rule manager...") + zap.L().Info("Stopping rule manager...") for _, t := range m.tasks { t.Stop() } - zap.S().Info("msg: ", "Rule manager stopped") + zap.L().Info("Rule manager stopped") } // EditRuleDefinition writes the rule definition to the @@ -230,7 +230,7 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string)
error currentRule, err := m.GetRule(ctx, id) if err != nil { - zap.S().Errorf("msg: ", "failed to get the rule from rule db", "\t ruleid: ", id) + zap.L().Error("failed to get the rule from rule db", zap.String("id", id), zap.Error(err)) return err } @@ -243,7 +243,7 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error } if len(errs) > 0 { - zap.S().Errorf("failed to parse rules:", errs) + zap.L().Error("failed to parse rules", zap.Errors("errors", errs)) // just one rule is being parsed so expect just one error return errs[0] } @@ -264,13 +264,13 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error if !checkIfTraceOrLogQB(¤tRule.PostableRule) { err = m.updateFeatureUsage(parsedRule, 1) if err != nil { - zap.S().Errorf("error updating feature usage: %v", err) + zap.L().Error("error updating feature usage", zap.Error(err)) } // update feature usage if the new rule is not a trace or log query builder and the current rule is } else if !checkIfTraceOrLogQB(parsedRule) { err = m.updateFeatureUsage(¤tRule.PostableRule, -1) if err != nil { - zap.S().Errorf("error updating feature usage: %v", err) + zap.L().Error("error updating feature usage", zap.Error(err)) } } @@ -281,12 +281,12 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error { m.mtx.Lock() defer m.mtx.Unlock() - zap.S().Debugf("msg:", "editing a rule task", "\t task name:", taskName) + zap.L().Debug("editing a rule task", zap.String("name", taskName)) newTask, err := m.prepareTask(false, rule, taskName) if err != nil { - zap.S().Errorf("msg:", "loading tasks failed", "\t err:", err) + zap.L().Error("loading tasks failed", zap.Error(err)) return errors.New("error preparing rule with given parameters, previous rule set restored") } @@ -294,7 +294,7 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error { // it to finish the current iteration. Then copy it into the new group. 
oldTask, ok := m.tasks[taskName] if !ok { - zap.S().Warnf("msg:", "rule task not found, a new task will be created ", "\t task name:", taskName) + zap.L().Warn("rule task not found, a new task will be created", zap.String("name", taskName)) } delete(m.tasks, taskName) @@ -319,14 +319,14 @@ func (m *Manager) DeleteRule(ctx context.Context, id string) error { idInt, err := strconv.Atoi(id) if err != nil { - zap.S().Errorf("msg: ", "delete rule received an rule id in invalid format, must be a number", "\t ruleid:", id) + zap.L().Error("delete rule received an rule id in invalid format, must be a number", zap.String("id", id), zap.Error(err)) return fmt.Errorf("delete rule received an rule id in invalid format, must be a number") } // update feature usage rule, err := m.GetRule(ctx, id) if err != nil { - zap.S().Errorf("msg: ", "failed to get the rule from rule db", "\t ruleid: ", id) + zap.L().Error("failed to get the rule from rule db", zap.String("id", id), zap.Error(err)) return err } @@ -336,13 +336,13 @@ func (m *Manager) DeleteRule(ctx context.Context, id string) error { } if _, _, err := m.ruleDB.DeleteRuleTx(ctx, id); err != nil { - zap.S().Errorf("msg: ", "failed to delete the rule from rule db", "\t ruleid: ", id) + zap.L().Error("failed to delete the rule from rule db", zap.String("id", id), zap.Error(err)) return err } err = m.updateFeatureUsage(&rule.PostableRule, -1) if err != nil { - zap.S().Errorf("error updating feature usage: %v", err) + zap.L().Error("error updating feature usage", zap.Error(err)) } return nil @@ -351,16 +351,16 @@ func (m *Manager) DeleteRule(ctx context.Context, id string) error { func (m *Manager) deleteTask(taskName string) { m.mtx.Lock() defer m.mtx.Unlock() - zap.S().Debugf("msg:", "deleting a rule task", "\t task name:", taskName) + zap.L().Debug("deleting a rule task", zap.String("name", taskName)) oldg, ok := m.tasks[taskName] if ok { oldg.Stop() delete(m.tasks, taskName) delete(m.rules, ruleIdFromTaskName(taskName)) - 
zap.S().Debugf("msg:", "rule task deleted", "\t task name:", taskName) + zap.L().Debug("rule task deleted", zap.String("name", taskName)) } else { - zap.S().Info("msg: ", "rule not found for deletion", "\t name:", taskName) + zap.L().Info("rule not found for deletion", zap.String("name", taskName)) } } @@ -376,7 +376,7 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule } if len(errs) > 0 { - zap.S().Errorf("failed to parse rules:", errs) + zap.L().Error("failed to parse rules", zap.Errors("errors", errs)) // just one rule is being parsed so expect just one error return nil, errs[0] } @@ -400,7 +400,7 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule // update feature usage err = m.updateFeatureUsage(parsedRule, 1) if err != nil { - zap.S().Errorf("error updating feature usage: %v", err) + zap.L().Error("error updating feature usage", zap.Error(err)) } gettableRule := &GettableRule{ Id: fmt.Sprintf("%d", lastInsertId), @@ -438,10 +438,10 @@ func (m *Manager) checkFeatureUsage(parsedRule *PostableRule) error { if err != nil { switch err.(type) { case model.ErrFeatureUnavailable: - zap.S().Errorf("feature unavailable", zap.String("featureKey", model.QueryBuilderAlerts), zap.Error(err)) + zap.L().Error("feature unavailable", zap.String("featureKey", model.QueryBuilderAlerts), zap.Error(err)) return model.BadRequest(err) default: - zap.S().Errorf("feature check failed", zap.String("featureKey", model.QueryBuilderAlerts), zap.Error(err)) + zap.L().Error("feature check failed", zap.String("featureKey", model.QueryBuilderAlerts), zap.Error(err)) return model.BadRequest(err) } } @@ -466,11 +466,11 @@ func (m *Manager) addTask(rule *PostableRule, taskName string) error { m.mtx.Lock() defer m.mtx.Unlock() - zap.S().Debugf("msg:", "adding a new rule task", "\t task name:", taskName) + zap.L().Debug("adding a new rule task", zap.String("name", taskName)) newTask, err := m.prepareTask(false, rule, taskName) if 
err != nil { - zap.S().Errorf("msg:", "creating rule task failed", "\t name:", taskName, "\t err", err) + zap.L().Error("creating rule task failed", zap.String("name", taskName), zap.Error(err)) return errors.New("error loading rules, previous rule set restored") } @@ -504,7 +504,7 @@ func (m *Manager) prepareTask(acquireLock bool, r *PostableRule, taskName string var task Task if r.Alert == "" { - zap.S().Errorf("msg:", "task load failed, at least one rule must be set", "\t task name:", taskName) + zap.L().Error("task load failed, at least one rule must be set", zap.String("name", taskName)) return task, fmt.Errorf("task load failed, at least one rule must be set") } @@ -686,7 +686,7 @@ func (m *Manager) ListRuleStates(ctx context.Context) (*GettableRules, error) { ruleResponse := &GettableRule{} if err := json.Unmarshal([]byte(s.Data), ruleResponse); err != nil { // Parse []byte to go struct pointer - zap.S().Errorf("msg:", "invalid rule data", "\t err:", err) + zap.L().Error("failed to unmarshal rule from db", zap.Int("id", s.Id), zap.Error(err)) continue } @@ -779,28 +779,28 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string) // retrieve rule from DB storedJSON, err := m.ruleDB.GetStoredRule(ctx, ruleId) if err != nil { - zap.S().Errorf("msg:", "failed to get stored rule with given id", "\t error:", err) + zap.L().Error("failed to get stored rule with given id", zap.String("id", ruleId), zap.Error(err)) return nil, err } // storedRule holds the current stored rule from DB storedRule := PostableRule{} if err := json.Unmarshal([]byte(storedJSON.Data), &storedRule); err != nil { - zap.S().Errorf("msg:", "failed to get unmarshal stored rule with given id", "\t error:", err) + zap.L().Error("failed to unmarshal stored rule with given id", zap.String("id", ruleId), zap.Error(err)) return nil, err } // patchedRule is combo of stored rule and patch received in the request patchedRule, errs := parseIntoRule(storedRule, []byte(ruleStr), 
"json") if len(errs) > 0 { - zap.S().Errorf("failed to parse rules:", errs) + zap.L().Error("failed to parse rules", zap.Errors("errors", errs)) // just one rule is being parsed so expect just one error return nil, errs[0] } // deploy or un-deploy task according to patched (new) rule state if err := m.syncRuleStateWithTask(taskName, patchedRule); err != nil { - zap.S().Errorf("failed to sync stored rule state with the task") + zap.L().Error("failed to sync stored rule state with the task", zap.String("taskName", taskName), zap.Error(err)) return nil, err } @@ -816,7 +816,7 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string) // restore task state from the stored rule if err := m.syncRuleStateWithTask(taskName, &storedRule); err != nil { - zap.S().Errorf("msg: ", "failed to restore rule after patch failure", "\t error:", err) + zap.L().Error("failed to restore rule after patch failure", zap.String("taskName", taskName), zap.Error(err)) } return nil, err @@ -846,7 +846,7 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m parsedRule, errs := ParsePostableRule([]byte(ruleStr)) if len(errs) > 0 { - zap.S().Errorf("msg: failed to parse rule from request:", "\t error: ", errs) + zap.L().Error("failed to parse rule from request", zap.Errors("errors", errs)) return 0, newApiErrorBadData(errs[0]) } @@ -882,7 +882,7 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m ) if err != nil { - zap.S().Errorf("msg: failed to prepare a new threshold rule for test:", "\t error: ", err) + zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err)) return 0, newApiErrorBadData(err) } @@ -899,7 +899,7 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m ) if err != nil { - zap.S().Errorf("msg: failed to prepare a new promql rule for test:", "\t error: ", err) + zap.L().Error("failed to prepare a new promql rule 
for test", zap.String("name", rule.Name()), zap.Error(err)) return 0, newApiErrorBadData(err) } } else { @@ -911,10 +911,13 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m count, err := rule.Eval(ctx, ts, m.opts.Queriers) if err != nil { - zap.S().Warn("msg:", "Evaluating rule failed", "\t rule:", rule, "\t err: ", err) + zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err)) return 0, newApiErrorInternal(fmt.Errorf("rule evaluation failed")) } - alertsFound := count.(int) + alertsFound, ok := count.(int) + if !ok { + return 0, newApiErrorInternal(fmt.Errorf("something went wrong")) + } rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), m.prepareNotifyFunc()) return alertsFound, nil diff --git a/pkg/query-service/rules/promRule.go b/pkg/query-service/rules/promRule.go index 1a4a89e3d2..8f829e0ad3 100644 --- a/pkg/query-service/rules/promRule.go +++ b/pkg/query-service/rules/promRule.go @@ -94,7 +94,7 @@ func NewPromRule( return nil, err } - zap.S().Info("msg:", "creating new alerting rule", "\t name:", p.name, "\t condition:", p.ruleCondition.String(), "\t query:", query) + zap.L().Info("creating new alerting rule", zap.String("name", p.name), zap.String("condition", p.ruleCondition.String()), zap.String("query", query)) return &p, nil } @@ -339,7 +339,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( if err != nil { return nil, err } - zap.S().Info("rule:", r.Name(), "\t evaluating promql query: ", q) + zap.L().Info("evaluating promql query", zap.String("name", r.Name()), zap.String("query", q)) res, err := queriers.PqlEngine.RunAlertQuery(ctx, q, start, end, interval) if err != nil { r.SetHealth(HealthBad) @@ -368,7 +368,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( if !shouldAlert { continue } - zap.S().Debugf("rule: %s, alerting for series: %v", r.Name(), series) + zap.L().Debug("alerting for series", 
zap.String("name", r.Name()), zap.Any("series", series)) thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit) threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit) @@ -435,7 +435,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( } } - zap.S().Debugf("For rule: %s, found %d alerts", r.Name(), len(alerts)) + zap.L().Debug("found alerts for rule", zap.Int("count", len(alerts)), zap.String("name", r.Name())) // alerts[h] is ready, add or update active list now for h, a := range alerts { // Check whether we already have alerting state for the identifying label set. diff --git a/pkg/query-service/rules/promRuleTask.go b/pkg/query-service/rules/promRuleTask.go index d4a853d844..af38488f7c 100644 --- a/pkg/query-service/rules/promRuleTask.go +++ b/pkg/query-service/rules/promRuleTask.go @@ -40,7 +40,7 @@ type PromRuleTask struct { // newPromRuleTask holds rules that have promql condition // and evalutes the rule at a given frequency func newPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc) *PromRuleTask { - zap.S().Info("Initiating a new rule group:", name, "\t frequency:", frequency) + zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency)) if time.Now() == time.Now().Add(frequency) { frequency = DefaultFrequency @@ -312,7 +312,7 @@ func (g *PromRuleTask) CopyState(fromTask Task) error { // Eval runs a single evaluation cycle in which all rules are evaluated sequentially. 
func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) { - zap.S().Info("promql rule task:", g.name, "\t eval started at:", ts) + zap.L().Info("promql rule task", zap.String("name", g.name), zap.Time("eval started at", ts)) for i, rule := range g.rules { if rule == nil { continue @@ -340,7 +340,7 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) { rule.SetHealth(HealthBad) rule.SetLastError(err) - zap.S().Warn("msg", "Evaluating rule failed", "rule", rule, "err", err) + zap.L().Warn("Evaluating rule failed", zap.String("ruleid", rule.ID()), zap.Error(err)) // Canceled queries are intentional termination of queries. This normally // happens on shutdown and thus we skip logging of any errors here. diff --git a/pkg/query-service/rules/ruleTask.go b/pkg/query-service/rules/ruleTask.go index b2f6f09921..edf3957a6f 100644 --- a/pkg/query-service/rules/ruleTask.go +++ b/pkg/query-service/rules/ruleTask.go @@ -25,10 +25,8 @@ type RuleTask struct { evaluationTime time.Duration lastEvaluation time.Time - markStale bool - done chan struct{} - terminated chan struct{} - managerDone chan struct{} + done chan struct{} + terminated chan struct{} pause bool notify NotifyFunc @@ -42,7 +40,7 @@ func newRuleTask(name, file string, frequency time.Duration, rules []Rule, opts if time.Now() == time.Now().Add(frequency) { frequency = DefaultFrequency } - zap.S().Info("msg:", "initiating a new rule task", "\t name:", name, "\t frequency:", frequency) + zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency)) return &RuleTask{ name: name, @@ -91,7 +89,7 @@ func (g *RuleTask) Run(ctx context.Context) { // Wait an initial amount to have consistently slotted intervals. 
evalTimestamp := g.EvalTimestamp(time.Now().UnixNano()).Add(g.frequency) - zap.S().Debugf("group:", g.name, "\t group run to begin at: ", evalTimestamp) + zap.L().Debug("group run to begin at", zap.Time("evalTimestamp", evalTimestamp)) select { case <-time.After(time.Until(evalTimestamp)): case <-g.done: @@ -294,7 +292,7 @@ func (g *RuleTask) CopyState(fromTask Task) error { // Eval runs a single evaluation cycle in which all rules are evaluated sequentially. func (g *RuleTask) Eval(ctx context.Context, ts time.Time) { - zap.S().Debugf("msg:", "rule task eval started", "\t name:", g.name, "\t start time:", ts) + zap.L().Debug("rule task eval started", zap.String("name", g.name), zap.Time("start time", ts)) for i, rule := range g.rules { if rule == nil { @@ -330,7 +328,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) { rule.SetHealth(HealthBad) rule.SetLastError(err) - zap.S().Warn("msg:", "Evaluating rule failed", "\t rule:", rule, "\t err: ", err) + zap.L().Warn("Evaluating rule failed", zap.String("ruleid", rule.ID()), zap.Error(err)) // Canceled queries are intentional termination of queries. This normally // happens on shutdown and thus we skip logging of any errors here. 
diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go index f358d80393..c05c61c57b 100644 --- a/pkg/query-service/rules/thresholdRule.go +++ b/pkg/query-service/rules/thresholdRule.go @@ -135,7 +135,7 @@ func NewThresholdRule( } t.queryBuilderV4 = queryBuilder.NewQueryBuilder(builderOptsV4, featureFlags) - zap.S().Info("msg:", "creating new alerting rule", "\t name:", t.name, "\t condition:", t.ruleCondition.String(), "\t generatorURL:", t.GeneratorURL()) + zap.L().Info("creating new ThresholdRule", zap.String("name", t.name), zap.String("id", t.id)) return &t, nil } @@ -386,7 +386,7 @@ func (r *ThresholdRule) ForEachActiveAlert(f func(*Alert)) { } func (r *ThresholdRule) SendAlerts(ctx context.Context, ts time.Time, resendDelay time.Duration, interval time.Duration, notifyFunc NotifyFunc) { - zap.S().Info("msg:", "sending alerts", "\t rule:", r.Name()) + zap.L().Info("sending alerts", zap.String("rule", r.Name())) alerts := []*Alert{} r.ForEachActiveAlert(func(alert *Alert) { if r.opts.SendAlways || alert.needsSending(ts, resendDelay) { @@ -400,7 +400,7 @@ func (r *ThresholdRule) SendAlerts(ctx context.Context, ts time.Time, resendDela anew := *alert alerts = append(alerts, &anew) } else { - zap.S().Debugf("msg: skipping send alert due to resend delay", "\t rule: ", r.Name(), "\t alert:", alert.Labels) + zap.L().Debug("skipping send alert due to resend delay", zap.String("rule", r.Name()), zap.Any("alert", alert.Labels)) } }) notifyFunc(ctx, "", alerts...) 
@@ -416,12 +416,12 @@ func (r *ThresholdRule) Unit() string { func (r *ThresholdRule) CheckCondition(v float64) bool { if math.IsNaN(v) { - zap.S().Debugf("msg:", "found NaN in rule condition", "\t rule name:", r.Name()) + zap.L().Debug("found NaN in rule condition", zap.String("rule", r.Name())) return false } if r.ruleCondition.Target == nil { - zap.S().Debugf("msg:", "found null target in rule condition", "\t rulename:", r.Name()) + zap.L().Debug("found null target in rule condition", zap.String("rule", r.Name())) return false } @@ -429,7 +429,7 @@ func (r *ThresholdRule) CheckCondition(v float64) bool { value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit())) - zap.S().Debugf("Checking condition for rule: %s, Converter=%s, Value=%f, Target=%f, CompareOp=%s", r.Name(), unitConverter.Name(), v, value.F, r.ruleCondition.CompareOp) + zap.L().Info("Checking condition for rule", zap.String("rule", r.Name()), zap.String("converter", unitConverter.Name()), zap.Float64("value", v), zap.Float64("target", value.F), zap.String("compareOp", string(r.ruleCondition.CompareOp))) switch r.ruleCondition.CompareOp { case ValueIsEq: return v == value.F @@ -496,7 +496,7 @@ func (r *ThresholdRule) shouldSkipFirstRecord() bool { func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, query string) (Vector, error) { rows, err := db.Query(ctx, query) if err != nil { - zap.S().Errorf("rule:", r.Name(), "\t failed to get alert query result") + zap.L().Error("failed to get alert query result", zap.String("rule", r.Name()), zap.Error(err)) return nil, err } @@ -604,7 +604,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer lblsOrig.Set(columnNames[i], fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())) } default: - zap.S().Errorf("ruleId:", r.ID(), "\t error: invalid var found in query result", v, columnNames[i]) + zap.L().Error("invalid var 
found in query result", zap.String("ruleId", r.ID()), zap.Any("value", v), zap.Any("column", columnNames[i])) } } @@ -710,11 +710,11 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer } } - zap.S().Debugf("ruleid:", r.ID(), "\t resultmap(potential alerts):", len(resultMap)) + zap.L().Debug("resultmap(potential alerts)", zap.String("ruleid", r.ID()), zap.Int("count", len(resultMap))) // if the data is missing for `For` duration then we should send alert if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(r.Condition().AbsentFor*time.Minute).Before(time.Now()) { - zap.S().Debugf("ruleid:", r.ID(), "\t msg: no data found for rule condition") + zap.L().Info("no data found for rule condition", zap.String("ruleid", r.ID())) lbls := labels.NewBuilder(labels.Labels{}) if !r.lastTimestampWithDatapoints.IsZero() { lbls.Set("lastSeen", r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat)) @@ -734,7 +734,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer } } if len(result) != 0 { - zap.S().Infof("For rule %s, with ClickHouseQuery %s, found %d alerts", r.ID(), query, len(result)) + zap.L().Info("found alerts", zap.String("ruleid", r.ID()), zap.String("query", query), zap.Int("count", len(result))) } return result, nil } @@ -979,7 +979,7 @@ func (r *ThresholdRule) prepareClickhouseQueries(ts time.Time) (map[string]strin } if r.ruleCondition.QueryType() != v3.QueryTypeClickHouseSQL { - zap.S().Debugf("ruleid:", r.ID(), "\t msg: unsupported query type in prepareClickhouseQueries()") + zap.L().Error("unsupported query type in prepareClickhouseQueries", zap.String("ruleid", r.ID())) return nil, fmt.Errorf("failed to prepare clickhouse queries") } @@ -995,18 +995,17 @@ func (r *ThresholdRule) prepareClickhouseQueries(ts time.Time) (map[string]strin tmpl := template.New("clickhouse-query") tmpl, err := tmpl.Parse(chQuery.Query) if err != nil { - zap.S().Errorf("ruleid:", r.ID(), "\t msg: 
failed to parse clickhouse query to populate vars", err) + zap.L().Error("failed to parse clickhouse query to populate vars", zap.String("ruleid", r.ID()), zap.Error(err)) r.SetHealth(HealthBad) return nil, err } var query bytes.Buffer err = tmpl.Execute(&query, params.Variables) if err != nil { - zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to populate clickhouse query", err) + zap.L().Error("failed to populate clickhouse query", zap.String("ruleid", r.ID()), zap.Error(err)) r.SetHealth(HealthBad) return nil, err } - zap.S().Debugf("ruleid:", r.ID(), "\t query:", query.String()) queries[name] = query.String() } return queries, nil @@ -1023,13 +1022,13 @@ func (r *ThresholdRule) GetSelectedQuery() string { if r.ruleCondition.QueryType() == v3.QueryTypeBuilder { queries, err = r.prepareBuilderQueries(time.Now(), nil) if err != nil { - zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare metric queries", zap.Error(err)) + zap.L().Error("failed to prepare metric queries", zap.String("ruleid", r.ID()), zap.Error(err)) return "" } } else if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL { queries, err = r.prepareClickhouseQueries(time.Now()) if err != nil { - zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare clickhouse queries", zap.Error(err)) + zap.L().Error("failed to prepare clickhouse queries", zap.String("ruleid", r.ID()), zap.Error(err)) return "" } } @@ -1078,7 +1077,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c queries, err = r.prepareBuilderQueries(ts, ch) if err != nil { - zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare metric queries", zap.Error(err)) + zap.L().Error("failed to prepare metric queries", zap.String("ruleid", r.ID()), zap.Error(err)) return nil, fmt.Errorf("failed to prepare metric queries") } @@ -1087,7 +1086,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c queries, err = r.prepareClickhouseQueries(ts) if err != nil { - 
zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare clickhouse queries", zap.Error(err)) + zap.L().Error("failed to prepare clickhouse queries", zap.String("ruleid", r.ID()), zap.Error(err)) return nil, fmt.Errorf("failed to prepare clickhouse queries") } @@ -1099,16 +1098,16 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c return nil, fmt.Errorf("no queries could be built with the rule config") } - zap.S().Debugf("ruleid:", r.ID(), "\t runQueries:", queries) + zap.L().Info("prepared queries", zap.String("ruleid", r.ID()), zap.Any("queries", queries)) queryLabel := r.GetSelectedQuery() - zap.S().Debugf("ruleId: ", r.ID(), "\t result query label:", queryLabel) + zap.L().Debug("Selected query lable for rule", zap.String("ruleid", r.ID()), zap.String("label", queryLabel)) if queryString, ok := queries[queryLabel]; ok { return r.runChQuery(ctx, ch, queryString) } - zap.S().Errorf("ruleId: ", r.ID(), "\t invalid query label:", queryLabel, "\t queries:", queries) + zap.L().Error("invalid query label", zap.String("ruleid", r.ID()), zap.String("label", queryLabel), zap.Any("queries", queries)) return nil, fmt.Errorf("this is unexpected, invalid query label") } @@ -1137,7 +1136,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie if err != nil { r.SetHealth(HealthBad) r.SetLastError(err) - zap.S().Debugf("ruleid:", r.ID(), "\t failure in buildAndRunQuery:", err) + zap.L().Error("failure in buildAndRunQuery", zap.String("ruleid", r.ID()), zap.Error(err)) return nil, err } @@ -1156,7 +1155,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie value := valueFormatter.Format(smpl.V, r.Unit()) thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit) threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit) - zap.S().Debugf("Alert template data for rule %s: Formatter=%s, Value=%s, Threshold=%s", r.Name(), valueFormatter.Name(), value, 
threshold) + zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold)) tmplData := AlertTemplateData(l, value, threshold) // Inject some convenience variables that are easier to remember for users @@ -1177,7 +1176,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie result, err := tmpl.Expand() if err != nil { result = fmt.Sprintf("", err) - zap.S().Errorf("msg:", "Expanding alert template failed", "\t err", err, "\t data", tmplData) + zap.L().Error("Expanding alert template failed", zap.Error(err), zap.Any("data", tmplData)) } return result } @@ -1222,7 +1221,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie resultFPs[h] = struct{}{} if _, ok := alerts[h]; ok { - zap.S().Errorf("ruleId: ", r.ID(), "\t msg:", "the alert query returns duplicate records:", alerts[h]) + zap.L().Error("the alert query returns duplicate records", zap.String("ruleid", r.ID()), zap.Any("alert", alerts[h])) err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels") // We have already acquired the lock above hence using SetHealth and // SetLastError will deadlock. 
@@ -1242,7 +1241,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie } } - zap.S().Info("rule:", r.Name(), "\t alerts found: ", len(alerts)) + zap.L().Info("alerts found", zap.String("name", r.Name()), zap.Int("count", len(alerts))) // alerts[h] is ready, add or update active list now for h, a := range alerts { diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go index 9202a32168..4c23cbd092 100644 --- a/pkg/query-service/telemetry/telemetry.go +++ b/pkg/query-service/telemetry/telemetry.go @@ -467,7 +467,7 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma } } - // zap.S().Info(data) + // zap.L().Info(data) properties := analytics.NewProperties() properties.Set("version", version.GetVersion()) properties.Set("deploymentType", getDeploymentType()) diff --git a/pkg/query-service/tests/docker.go b/pkg/query-service/tests/docker.go index a710161a0e..c65a627512 100644 --- a/pkg/query-service/tests/docker.go +++ b/pkg/query-service/tests/docker.go @@ -13,7 +13,6 @@ import ( "log" minio "github.com/minio/minio-go/v6" - "go.uber.org/zap" ) const ( @@ -36,7 +35,7 @@ func init() { } else if goArch == "amd64" { composeFile = "./test-deploy/docker-compose.yaml" } else { - zap.S().Fatalf("Unsupported architecture: %s", goArch) + log.Fatalf("Unsupported architecture: %s", goArch) } } diff --git a/pkg/query-service/utils/format.go b/pkg/query-service/utils/format.go index bc15a8a1e9..0a614e2987 100644 --- a/pkg/query-service/utils/format.go +++ b/pkg/query-service/utils/format.go @@ -183,11 +183,11 @@ func ClickHouseFormattedValue(v interface{}) string { case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64, bool: return strings.Join(strings.Fields(fmt.Sprint(x)), ",") default: - zap.S().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0]))) + zap.L().Error("invalid type for formatted value", zap.Any("type", 
reflect.TypeOf(x[0]))) return "" } default: - zap.S().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x))) + zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x))) return "" } } diff --git a/pkg/query-service/utils/time.go b/pkg/query-service/utils/time.go index 69b49e42ac..274b032cdb 100644 --- a/pkg/query-service/utils/time.go +++ b/pkg/query-service/utils/time.go @@ -15,6 +15,6 @@ func Elapsed(funcName string, args ...interface{}) func() { } argsStr = argsStr[:len(argsStr)-2] return func() { - zap.S().Infof("func %s took %v with args %v", funcName, time.Since(start), string(argsStr)) + zap.L().Info("Elapsed time", zap.String("func_name", funcName), zap.Duration("duration", time.Since(start)), zap.String("args", argsStr)) } } diff --git a/pkg/query-service/version/version.go b/pkg/query-service/version/version.go index 577fe6789c..68c37a4e0e 100644 --- a/pkg/query-service/version/version.go +++ b/pkg/query-service/version/version.go @@ -3,8 +3,6 @@ package version import ( "fmt" "runtime" - - "go.uber.org/zap" ) // These fields are set during an official build @@ -40,7 +38,7 @@ Copyright 2022 SigNoz // PrintVersion prints version and other helpful information. 
func PrintVersion() { - zap.S().Infof("\n%s\n", BuildDetails()) + fmt.Println(BuildDetails()) } func GetVersion() string { From e1679790f7d81d39b25e3ffd8f2f4a8c1244b4ef Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Wed, 27 Mar 2024 01:01:24 +0530 Subject: [PATCH 23/53] fix: log chips not forming making filtering not work (#4749) * fix: log chips not forming making filtering not work * fix: remove console log --- frontend/src/components/Logs/ListLogView/index.tsx | 1 - frontend/src/hooks/queryBuilder/useTag.ts | 9 ++++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index 5577579b64..2b828d663c 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -48,7 +48,6 @@ function LogGeneralField({ fieldValue, linesPerRow = 1, }: LogFieldProps): JSX.Element { - console.log('fieldKey:', fieldKey, linesPerRow); const html = useMemo( () => ({ __html: convert.toHtml(dompurify.sanitize(fieldValue)), diff --git a/frontend/src/hooks/queryBuilder/useTag.ts b/frontend/src/hooks/queryBuilder/useTag.ts index a3bbd4af05..419aaaedc9 100644 --- a/frontend/src/hooks/queryBuilder/useTag.ts +++ b/frontend/src/hooks/queryBuilder/useTag.ts @@ -75,10 +75,13 @@ export const useTag = ( (value: string): void => { const { tagKey } = getTagToken(value); const parts = tagKey.split('-'); - // this is done to ensure that `hello-world` also gets converted to `body CONTAINS hello-world` - const id = parts[parts.length - 1]; - const key = parts.slice(0, -1).join('-'); + let id = parts[parts.length - 1]; + let key = parts.slice(0, -1).join('-'); + if (parts.length === 1) { + id = ''; + [key] = parts; + } if (id === 'custom') { const customValue = whereClauseConfig From ad1b01f225aeef839e5dc066f8720159a7392002 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: 
Wed, 27 Mar 2024 10:23:57 +0530 Subject: [PATCH 24/53] feat: [SIG-566]: Added message to alert user about their past due - subscription status (#4724) * feat: [SIG-566]: Added message to alert user about their past due - subscription status * feat: [SIG-566]: Added message string to billings.json * feat: [SIG-566]: Added strings to billings.json * feat: [SIG-566]: updated test cases * feat: [SIG-566]: updated message text * feat: [SIG-566]: code fix * feat: [SIG-566]: code fix --- frontend/public/locales/en/billings.json | 14 ++++ frontend/src/api/billing/getUsage.ts | 1 + .../BillingContainer.test.tsx | 28 ++++---- .../BillingContainer/BillingContainer.tsx | 67 ++++++++++++++----- 4 files changed, 79 insertions(+), 31 deletions(-) create mode 100644 frontend/public/locales/en/billings.json diff --git a/frontend/public/locales/en/billings.json b/frontend/public/locales/en/billings.json new file mode 100644 index 0000000000..fb706e002f --- /dev/null +++ b/frontend/public/locales/en/billings.json @@ -0,0 +1,14 @@ +{ + "days_remaining": "days remaining in your billing period.", + "billing": "Billing", + "manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.", + "enterprise_cloud": "Enterprise Cloud", + "enterprise": "Enterprise", + "card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.", + "upgrade_plan": "Upgrade Plan", + "manage_billing": "Manage Billing", + "upgrade_now_text": "Upgrade now to have uninterrupted access", + "billing_start_info": "Your billing will start only after the trial period", + "checkout_plans": "Check out features in paid plans", + "here": "here" +} diff --git a/frontend/src/api/billing/getUsage.ts b/frontend/src/api/billing/getUsage.ts index 1cb5be5640..da7b6ebd63 100644 --- a/frontend/src/api/billing/getUsage.ts +++ b/frontend/src/api/billing/getUsage.ts @@ -13,6 +13,7 @@ export interface 
UsageResponsePayloadProps { billTotal: number; }; discount: number; + subscriptionStatus?: string; } const getUsage = async ( diff --git a/frontend/src/container/BillingContainer/BillingContainer.test.tsx b/frontend/src/container/BillingContainer/BillingContainer.test.tsx index cd447e5d60..1988df313b 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.test.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.test.tsx @@ -56,14 +56,14 @@ describe('BillingContainer', () => { expect(cost).toBeInTheDocument(); const manageBilling = screen.getByRole('button', { - name: /manage billing/i, + name: 'manage_billing', }); expect(manageBilling).toBeInTheDocument(); const dollar = screen.getByText(/\$0/i); expect(dollar).toBeInTheDocument(); - const currentBill = screen.getByText('Billing'); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); }); @@ -75,7 +75,7 @@ describe('BillingContainer', () => { const freeTrailText = await screen.findByText('Free Trial'); expect(freeTrailText).toBeInTheDocument(); - const currentBill = screen.getByText('Billing'); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); const dollar0 = await screen.findByText(/\$0/i); @@ -85,18 +85,14 @@ describe('BillingContainer', () => { ); expect(onTrail).toBeInTheDocument(); - const numberOfDayRemaining = await screen.findByText( - /1 days remaining in your billing period./i, - ); + const numberOfDayRemaining = await screen.findByText(/1 days_remaining/i); expect(numberOfDayRemaining).toBeInTheDocument(); const upgradeButton = await screen.findAllByRole('button', { - name: /upgrade/i, + name: /upgrade_plan/i, }); expect(upgradeButton[1]).toBeInTheDocument(); expect(upgradeButton.length).toBe(2); - const checkPaidPlan = await screen.findByText( - /Check out features in paid plans/i, - ); + const checkPaidPlan = await screen.findByText(/checkout_plans/i); expect(checkPaidPlan).toBeInTheDocument(); 
const link = screen.getByRole('link', { name: /here/i }); @@ -114,7 +110,7 @@ describe('BillingContainer', () => { render(); }); - const currentBill = screen.getByText('Billing'); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); const dollar0 = await screen.findByText(/\$0/i); @@ -126,17 +122,17 @@ describe('BillingContainer', () => { expect(onTrail).toBeInTheDocument(); const receivedCardDetails = await screen.findByText( - /We have received your card details, your billing will only start after the end of your free trial period./i, + /card_details_recieved_and_billing_info/i, ); expect(receivedCardDetails).toBeInTheDocument(); const manageBillingButton = await screen.findByRole('button', { - name: /manage billing/i, + name: /manage_billing/i, }); expect(manageBillingButton).toBeInTheDocument(); const dayRemainingInBillingPeriod = await screen.findByText( - /1 days remaining in your billing period./i, + /1 days_remaining/i, ); expect(dayRemainingInBillingPeriod).toBeInTheDocument(); }); @@ -156,7 +152,7 @@ describe('BillingContainer', () => { const billingPeriod = await findByText(billingPeriodText); expect(billingPeriod).toBeInTheDocument(); - const currentBill = screen.getByText('Billing'); + const currentBill = screen.getByText('billing'); expect(currentBill).toBeInTheDocument(); const dollar0 = await screen.findByText(/\$1,278.3/i); @@ -181,7 +177,7 @@ describe('BillingContainer', () => { ); render(); const dayRemainingInBillingPeriod = await screen.findByText( - /11 days remaining in your billing period./i, + /11 days_remaining/i, ); expect(dayRemainingInBillingPeriod).toBeInTheDocument(); }); diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index b31f9c4745..fe784a0c57 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -17,7 +17,7 @@ 
import { } from 'antd'; import { ColumnsType } from 'antd/es/table'; import updateCreditCardApi from 'api/billing/checkout'; -import getUsage from 'api/billing/getUsage'; +import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage'; import manageCreditCardApi from 'api/billing/manage'; import Spinner from 'components/Spinner'; import { SOMETHING_WENT_WRONG } from 'constants/api'; @@ -28,6 +28,7 @@ import useLicense from 'hooks/useLicense'; import { useNotifications } from 'hooks/useNotifications'; import { pick } from 'lodash-es'; import { useCallback, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; import { useMutation, useQuery } from 'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; @@ -49,6 +50,11 @@ interface DataType { cost: string; } +enum SubscriptionStatus { + PastDue = 'past_due', + Active = 'active', +} + const renderSkeletonInput = (): JSX.Element => ( = [ }, ]; +// eslint-disable-next-line sonarjs/cognitive-complexity export default function BillingContainer(): JSX.Element { - const daysRemainingStr = 'days remaining in your billing period.'; + const { t } = useTranslation(['billings']); + const daysRemainingStr = t('days_remaining'); const [headerText, setHeaderText] = useState(''); const [billAmount, setBillAmount] = useState(0); const [activeLicense, setActiveLicense] = useState(null); const [daysRemaining, setDaysRemaining] = useState(0); const [isFreeTrial, setIsFreeTrial] = useState(false); const [data, setData] = useState([]); - const [apiResponse, setApiResponse] = useState({}); + const [apiResponse, setApiResponse] = useState< + Partial + >({}); const { trackEvent } = useAnalytics(); @@ -186,6 +196,9 @@ export default function BillingContainer(): JSX.Element { [licensesData?.payload?.onTrial], ); + const isSubscriptionPastDue = + apiResponse.subscriptionStatus === SubscriptionStatus.PastDue; + const { isLoading, isFetching: 
isFetchingBillingData } = useQuery( [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], { @@ -342,14 +355,27 @@ export default function BillingContainer(): JSX.Element { [apiResponse, billAmount, isLoading, isFetchingBillingData], ); + const { Text } = Typography; + const subscriptionPastDueMessage = (): JSX.Element => ( + + {`We were not able to process payments for your account. Please update your card details `} + + {t('here')} + + {` if your payment information has changed. Email us at `} + cloud-support@signoz.io + {` otherwise. Be sure to provide this information immediately to avoid interruption to your service.`} + + ); + return (
- Billing + {t('billing')} - Manage your billing information, invoices, and monitor costs. + {t('manage_billing_and_costs')} @@ -361,7 +387,7 @@ export default function BillingContainer(): JSX.Element { - {isCloudUserVal ? 'Enterprise Cloud' : 'Enterprise'}{' '} + {isCloudUserVal ? t('enterprise_cloud') : t('enterprise')}{' '} {isFreeTrial ? Free Trial : ''} {!isLoading && !isFetchingBillingData ? ( @@ -378,8 +404,8 @@ export default function BillingContainer(): JSX.Element { onClick={handleBilling} > {isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription - ? 'Upgrade Plan' - : 'Manage Billing'} + ? t('upgrade_plan') + : t('manage_billing')} @@ -389,8 +415,7 @@ export default function BillingContainer(): JSX.Element { ellipsis style={{ fontWeight: '300', color: '#49aa19', fontSize: 12 }} > - We have received your card details, your billing will only start after - the end of your free trial period. + {t('card_details_recieved_and_billing_info')} )} @@ -404,6 +429,18 @@ export default function BillingContainer(): JSX.Element { ) : ( )} + + {isSubscriptionPastDue && + (!isLoading && !isFetchingBillingData ? 
( + + ) : ( + + ))} @@ -434,16 +471,16 @@ export default function BillingContainer(): JSX.Element { - Upgrade now to have uninterrupted access + {t('upgrade_now_text')} - Your billing will start only after the trial period + {t('Your billing will start only after the trial period')} - Check out features in paid plans   + {t('checkout_plans')}   - here + {t('here')} @@ -464,7 +501,7 @@ export default function BillingContainer(): JSX.Element { loading={isLoadingBilling || isLoadingManageBilling} onClick={handleBilling} > - Upgrade Plan + {t('upgrade_plan')} From dbd4363ff87b5e986f73839512b7a2e966f406b4 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Wed, 27 Mar 2024 11:55:28 +0530 Subject: [PATCH 25/53] feat: [SIG-573]: Fixed billing page issues (#4744) * feat: [SIG-573]: Fixed billing page issues * feat: [SIG-573]: Fixed jest test case --- .../BillingContainer/BillingContainer.tsx | 19 +++++++----- .../BillingUsageGraph/BillingUsageGraph.tsx | 31 +++++++++++++------ .../src/lib/uPlotLib/plugins/tooltipPlugin.ts | 14 ++++++++- 3 files changed, 46 insertions(+), 18 deletions(-) diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index fe784a0c57..9b45801356 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -26,7 +26,7 @@ import useAnalytics from 'hooks/analytics/useAnalytics'; import useAxiosError from 'hooks/useAxiosError'; import useLicense from 'hooks/useLicense'; import { useNotifications } from 'hooks/useNotifications'; -import { pick } from 'lodash-es'; +import { isEmpty, pick } from 'lodash-es'; import { useCallback, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useMutation, useQuery } from 'react-query'; @@ -149,6 +149,9 @@ export default function BillingContainer(): JSX.Element { 
const processUsageData = useCallback( (data: any): void => { + if (isEmpty(data?.payload)) { + return; + } const { details: { breakdown = [], billTotal }, billingPeriodStart, @@ -420,12 +423,14 @@ export default function BillingContainer(): JSX.Element { )} {!isLoading && !isFetchingBillingData ? ( - + headerText && ( + + ) ) : ( )} diff --git a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx index fa6ce813a6..be77ebba95 100644 --- a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx +++ b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx @@ -3,9 +3,7 @@ import '../../../lib/uPlotLib/uPlotLib.styles.scss'; import { Color } from '@signozhq/design-tokens'; import { Card, Flex, Typography } from 'antd'; -import { getComponentForPanelType } from 'constants/panelTypes'; -import { PANEL_TYPES } from 'constants/queryBuilder'; -import { PropsTypePropsMap } from 'container/GridPanelSwitch/types'; +import Uplot from 'components/Uplot'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useResizeObserver } from 'hooks/useDimensions'; import tooltipPlugin from 'lib/uPlotLib/plugins/tooltipPlugin'; @@ -14,7 +12,7 @@ import getRenderer from 'lib/uPlotLib/utils/getRenderer'; import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; import { getXAxisScale } from 'lib/uPlotLib/utils/getXAxisScale'; import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale'; -import { FC, useMemo, useRef } from 'react'; +import { useMemo, useRef } from 'react'; import uPlot from 'uplot'; import { @@ -43,6 +41,21 @@ const paths = ( return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip); }; +const calculateStartEndTime = ( + data: any, +): { startTime: number; endTime: number } => { + const timestamps: number[] = []; + data?.details?.breakdown?.forEach((breakdown: any) => { + 
breakdown?.dayWiseBreakdown?.breakdown.forEach((entry: any) => { + timestamps.push(entry?.timestamp); + }); + }); + const billingTime = [data?.billingPeriodStart, data?.billingPeriodEnd]; + const startTime: number = Math.min(...timestamps, ...billingTime); + const endTime: number = Math.max(...timestamps, ...billingTime); + return { startTime, endTime }; +}; + export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element { const { data, billAmount } = props; const graphCompatibleData = useMemo( @@ -54,11 +67,9 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element { const isDarkMode = useIsDarkMode(); const containerDimensions = useResizeObserver(graphRef); - const { billingPeriodStart: startTime, billingPeriodEnd: endTime } = data; - - const Component = getComponentForPanelType(PANEL_TYPES.BAR) as FC< - PropsTypePropsMap[PANEL_TYPES] - >; + const { startTime, endTime } = useMemo(() => calculateStartEndTime(data), [ + data, + ]); const getGraphSeries = (color: string, label: string): any => ({ drawStyle: 'bars', @@ -183,7 +194,7 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
- +
); diff --git a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts index 4ec3677dfb..b06e5bff63 100644 --- a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts +++ b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts @@ -3,6 +3,7 @@ import { themeColors } from 'constants/theme'; import dayjs from 'dayjs'; import customParseFormat from 'dayjs/plugin/customParseFormat'; import getLabelName from 'lib/getLabelName'; +import { get } from 'lodash-es'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { placement } from '../placement'; @@ -68,7 +69,18 @@ const generateTooltipContent = ( const dataIngested = quantity[idx]; const label = getLabelName(metric, queryName || '', legend || ''); - const color = generateColor(label, themeColors.chartcolors); + let color = generateColor(label, themeColors.chartcolors); + + // in case of billing graph pick colors from the series options + if (isBillingUsageGraphs) { + let clr; + series.forEach((item) => { + if (item.label === label) { + clr = get(item, '_fill'); + } + }); + color = clr ?? 
color; + } let tooltipItemLabel = label; From a30b75a2a8be6f81f877953027d43989358e48c4 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Wed, 27 Mar 2024 18:46:05 +0530 Subject: [PATCH 26/53] feat: show environments in a separate dropdown (#4717) * feat: show environments in a separate dropdown --- .../ResourceAttributesFilter.styles.scss | 20 ++ .../ResourceAttributesFilter.tsx | 172 +++++++++++++----- .../components/QueryChip/QueryChip.tsx | 5 +- .../ResourceAttributesFilter/styles.ts | 7 +- .../useResourceAttribute/ResourceProvider.tsx | 37 +++- .../src/hooks/useResourceAttribute/types.ts | 1 + .../src/hooks/useResourceAttribute/utils.ts | 48 +++++ frontend/src/pages/Services/Metrics.test.tsx | 4 +- 8 files changed, 237 insertions(+), 57 deletions(-) create mode 100644 frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.styles.scss diff --git a/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.styles.scss b/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.styles.scss new file mode 100644 index 0000000000..9d10445703 --- /dev/null +++ b/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.styles.scss @@ -0,0 +1,20 @@ +.resourceAttributesFilter-container { + display: flex; + align-items: center; + justify-content: stretch; + flex-wrap: wrap; + gap: 8px; + margin-bottom: 16px; + + .resource-attributes-selector { + flex: 1; + } + + .environment-selector { + min-width: 200px; + } + + .ant-form-item { + margin-bottom: 0; + } +} diff --git a/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.tsx b/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.tsx index a61a0ce0ee..4211291742 100644 --- a/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.tsx +++ b/frontend/src/container/ResourceAttributesFilter/ResourceAttributesFilter.tsx @@ -1,10 +1,17 @@ +import './ResourceAttributesFilter.styles.scss'; + import { CloseCircleFilled 
} from '@ant-design/icons'; import { Button, Select, Spin } from 'antd'; import useResourceAttribute, { isResourceEmpty, } from 'hooks/useResourceAttribute'; -import { convertMetricKeyToTrace } from 'hooks/useResourceAttribute/utils'; -import { ReactNode, useMemo } from 'react'; +import { + convertMetricKeyToTrace, + getEnvironmentTagKeys, + getEnvironmentTagValues, +} from 'hooks/useResourceAttribute/utils'; +import { ReactNode, useEffect, useMemo, useState } from 'react'; +import { SelectOption } from 'types/common/select'; import { popupContainer } from 'utils/selectPopupContainer'; import { v4 as uuid } from 'uuid'; @@ -22,60 +29,129 @@ function ResourceAttributesFilter({ handleClearAll, handleFocus, handleChange, + handleEnvironmentChange, selectedQuery, optionsData, loading, } = useResourceAttribute(); - const isEmpty = useMemo( - () => isResourceEmpty(queries, staging, selectedQuery), - [queries, selectedQuery, staging], + const [environments, setEnvironments] = useState< + SelectOption[] + >([]); + + const [selectedEnvironments, setSelectedEnvironments] = useState([]); + + const queriesExcludingEnvironment = useMemo( + () => + queries.filter( + (query) => query.tagKey !== 'resource_deployment_environment', + ), + [queries], ); - return ( - -
- {queries.map((query) => ( - - ))} - {staging.map((query, idx) => ( - - {idx === 0 ? convertMetricKeyToTrace(query) : query} - - ))} -
- + {environments.map((opt) => ( + + {opt.label} + + ))} + +
+ +
+ +
+ {queriesExcludingEnvironment.map((query) => ( + + ))} + {staging.map((query, idx) => ( + + {idx === 0 ? convertMetricKeyToTrace(query) : query} + + ))} +
+ - - -
- -
- -
- - - + {viewsData?.data?.data?.map((view) => { + const extraData = + view.extraData !== '' ? JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + return ( + +
+ {' '} + {view.name} +
+
+ ); + })} + - - - +
+ +
+ +
+ + + + + + + +
-
+ )} + + >; } -ExplorerOptions.defaultProps = { isLoading: false }; +ExplorerOptions.defaultProps = { + isLoading: false, + isExplorerOptionHidden: false, + setIsExplorerOptionHidden: undefined, +}; export default ExplorerOptions; diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss new file mode 100644 index 0000000000..e092229bb9 --- /dev/null +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.styles.scss @@ -0,0 +1,55 @@ +.explorer-option-droppable-container { + position: fixed; + bottom: 0; + width: -webkit-fill-available; + height: 24px; + display: flex; + justify-content: center; + border-radius: 10px 10px 0px 0px; + // box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + // backdrop-filter: blur(20px); + + .explorer-actions-btn { + display: flex; + gap: 8px; + margin-right: 8px; + + .action-btn { + display: flex; + justify-content: center; + align-items: center; + border-radius: 10px 10px 0px 0px; + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + height: 24px !important; + border: none; + } + } + + .explorer-show-btn { + border-radius: 10px 10px 0px 0px; + border: 1px solid var(--bg-slate-400); + background: rgba(22, 24, 29, 0.40); + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + align-self: center; + padding: 8px 12px; + height: 24px !important; + + .menu-bar { + border-radius: 50px; + background: var(--bg-slate-200); + height: 4px; + width: 50px; + } + } +} + +.lightMode { + .explorer-option-droppable-container { + + .explorer-show-btn { + background: var(--bg-vanilla-400); + } + } +} \ No newline at end of file diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx new file mode 100644 index 0000000000..33bef7c984 --- /dev/null +++ 
b/frontend/src/container/ExplorerOptions/ExplorerOptionsDroppableArea.tsx @@ -0,0 +1,83 @@ +/* eslint-disable no-nested-ternary */ +import './ExplorerOptionsDroppableArea.styles.scss'; + +import { useDroppable } from '@dnd-kit/core'; +import { Color } from '@signozhq/design-tokens'; +import { Button, Tooltip } from 'antd'; +import { Disc3, X } from 'lucide-react'; +import { Dispatch, SetStateAction } from 'react'; +import { DataSource } from 'types/common/queryBuilder'; + +import { setExplorerToolBarVisibility } from './utils'; + +interface DroppableAreaProps { + isQueryUpdated: boolean; + isExplorerOptionHidden?: boolean; + sourcepage: DataSource; + setIsExplorerOptionHidden?: Dispatch>; + handleClearSelect: () => void; + onUpdateQueryHandler: () => void; +} + +function ExplorerOptionsDroppableArea({ + isQueryUpdated, + isExplorerOptionHidden, + sourcepage, + setIsExplorerOptionHidden, + handleClearSelect, + onUpdateQueryHandler, +}: DroppableAreaProps): JSX.Element { + const { setNodeRef } = useDroppable({ + id: 'explorer-options-droppable', + }); + + const handleShowExplorerOption = (): void => { + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(false); + setExplorerToolBarVisibility(true, sourcepage); + } + }; + + return ( +
+ {isExplorerOptionHidden && ( + <> + {isQueryUpdated && ( +
+ +
+ )} + + + )} +
+ ); +} + +ExplorerOptionsDroppableArea.defaultProps = { + isExplorerOptionHidden: undefined, + setIsExplorerOptionHidden: undefined, +}; + +export default ExplorerOptionsDroppableArea; diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts index e3ac710609..d94e64161e 100644 --- a/frontend/src/container/ExplorerOptions/utils.ts +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -1,5 +1,6 @@ import { Color } from '@signozhq/design-tokens'; import { showErrorNotification } from 'components/ExplorerCard/utils'; +import { LOCALSTORAGE } from 'constants/localStorage'; import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; @@ -67,3 +68,54 @@ export const generateRGBAFromHex = (hex: string, opacity: number): string => hex.slice(3, 5), 16, )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; + +export const getExplorerToolBarVisibility = (dataSource: string): boolean => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar === null) { + const parsedShowExplorerToolbar: { + [DataSource.LOGS]: boolean; + [DataSource.TRACES]: boolean; + [DataSource.METRICS]: boolean; + } = { + [DataSource.METRICS]: true, + [DataSource.TRACES]: true, + [DataSource.LOGS]: true, + }; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return true; + } + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar || '{}'); + return parsedShowExplorerToolbar[dataSource]; + } catch (error) { + console.error(error); + return false; + } +}; + +export const setExplorerToolBarVisibility = ( + value: boolean, + dataSource: string, +): void => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar) { 
+ const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar); + parsedShowExplorerToolbar[dataSource] = value; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return; + } + } catch (error) { + console.error(error); + } +}; diff --git a/frontend/src/container/LogsExplorerViews/index.tsx b/frontend/src/container/LogsExplorerViews/index.tsx index a12fd80997..45b33d01af 100644 --- a/frontend/src/container/LogsExplorerViews/index.tsx +++ b/frontend/src/container/LogsExplorerViews/index.tsx @@ -14,7 +14,7 @@ import { PANEL_TYPES, } from 'constants/queryBuilder'; import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config'; -import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions'; +import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper'; import GoToTop from 'container/GoToTop'; import LogsExplorerChart from 'container/LogsExplorerChart'; import LogsExplorerList from 'container/LogsExplorerList'; @@ -634,7 +634,7 @@ function LogsExplorerViews({ - - From f8e8132b58ab3ec4dce9c1149651e9825b95a6bf Mon Sep 17 00:00:00 2001 From: Prashant Shahi Date: Wed, 27 Mar 2024 23:23:15 +0545 Subject: [PATCH 30/53] =?UTF-8?q?chore(signoz):=20=F0=9F=93=8C=20pin=20ver?= =?UTF-8?q?sions:=20SigNoz=200.42.0,=20SigNoz=20OtelCollector=200.88.17?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Prashant Shahi --- .../clickhouse-setup/docker-compose.yaml | 8 +-- .../clickhouse-setup/docker-compose-core.yaml | 4 +- .../clickhouse-setup/docker-compose.yaml | 8 +-- go.mod | 16 +++--- go.sum | 52 ++++++++++--------- .../tests/test-deploy/docker-compose.yaml | 4 +- 6 files changed, 47 insertions(+), 45 deletions(-) diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 6c1bb497c6..b27244a88b 100644 --- 
a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.41.1 + image: signoz/query-service:0.42.0 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.41.1 + image: signoz/frontend:0.42.0 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.15 + image: signoz/signoz-otel-collector:0.88.17 command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.15 + image: signoz/signoz-schema-migrator:0.88.17 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index f595b86e64..92adebe461 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.17} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.15 + image: signoz/signoz-otel-collector:0.88.17 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 43e6eccb19..a8b2af5b13 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.41.1} + image: signoz/query-service:${DOCKER_TAG:-0.42.0} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.41.1} + image: signoz/frontend:${DOCKER_TAG:-0.42.0} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.17} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.15} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.17} container_name: signoz-otel-collector command: [ diff --git a/go.mod b/go.mod index 213be786b1..1563caae79 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/ClickHouse/clickhouse-go/v2 v2.20.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.88.15 + 
github.com/SigNoz/signoz-otel-collector v0.88.17 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 @@ -58,7 +58,7 @@ require ( go.opentelemetry.io/collector/exporter v0.88.0 go.opentelemetry.io/collector/extension v0.88.0 go.opentelemetry.io/collector/otelcol v0.88.0 - go.opentelemetry.io/collector/pdata v1.0.0-rcv0017 + go.opentelemetry.io/collector/pdata v1.3.0 go.opentelemetry.io/collector/processor v0.88.0 go.opentelemetry.io/collector/receiver v0.88.0 go.opentelemetry.io/collector/service v0.88.0 @@ -69,9 +69,9 @@ require ( golang.org/x/crypto v0.19.0 golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 golang.org/x/net v0.21.0 - golang.org/x/oauth2 v0.13.0 - google.golang.org/grpc v1.59.0 - google.golang.org/protobuf v1.31.0 + golang.org/x/oauth2 v0.16.0 + google.golang.org/grpc v1.62.0 + google.golang.org/protobuf v1.33.0 gopkg.in/segmentio/analytics-go.v3 v3.1.0 gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 @@ -194,9 +194,9 @@ require ( golang.org/x/text v0.14.0 // indirect golang.org/x/time v0.3.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/square/go-jose.v2 v2.6.0 // indirect k8s.io/klog/v2 v2.100.1 // indirect diff --git a/go.sum b/go.sum index 625e71e882..08debabfed 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb 
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.9.79-0.1 h1:RjsOw7oXVKx7IDA+/sRXW2x5pnw60/tT9MMuEz3+8DU= github.com/SigNoz/prometheus v1.9.79-0.1/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.15 h1:JUi9wzlj7WonPiXD4fak7yv/JMgd39sYFBGKTJIvP2Q= -github.com/SigNoz/signoz-otel-collector v0.88.15/go.mod h1:Dst94AfUCw8+w2R32FvOwTpjzL//ZaY3tIPGpyJ4iqw= +github.com/SigNoz/signoz-otel-collector v0.88.17 h1:XN3PwlI94LUOTXjXCPaVgQnGNdXnz7p40GKVWhOjlkQ= +github.com/SigNoz/signoz-otel-collector v0.88.17/go.mod h1:PThU+A6SgzEotT3ngKN4WVGWW0+eS7F1a2Rnq11aZZA= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= @@ -170,8 +170,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa h1:jQCWAUqqlij9Pgj2i/PB79y4KOPYVyFYdROxgaCwdTQ= +github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa/go.mod h1:x/1Gn8zydmfq8dk6e9PdstVsDgu9RuyIIJqAaF//0IM= github.com/coreos/go-oidc/v3 v3.4.0 h1:xz7elHb/LDwm/ERpwHd+5nb7wFHL32rsr6bBOgaeu6g= 
github.com/coreos/go-oidc/v3 v3.4.0/go.mod h1:eHUXhZtXPQLgEaDrOVTgwbgmz1xGOkJNye6h3zkD2Pw= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -215,11 +215,11 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= -github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM= -github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g= +github.com/envoyproxy/go-control-plane v0.12.0 h1:4X+VP1GHd1Mhj6IB5mMeGbLCleqxjletLK6K0rbxyZI= +github.com/envoyproxy/go-control-plane v0.12.0/go.mod h1:ZBTaoJ23lqITozF0M6G4/IragXCQKCnYbmlmtHvwRG0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= -github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= +github.com/envoyproxy/protoc-gen-validate v1.0.4 h1:gVPz/FMfvh57HdSJQyvBtF00j8JU4zdyUgIUNhlgg0A= +github.com/envoyproxy/protoc-gen-validate v1.0.4/go.mod h1:qys6tmnRsYrQqIhm2bvKZH4Blx/1gTIZ2UKVY1M+Yew= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= @@ -297,8 +297,8 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq github.com/golang-jwt/jwt/v4 v4.5.0 
h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= -github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= +github.com/golang/glog v1.2.0 h1:uCdmnmatrKCgMBlM4rMuJZWOkPDqdbZPnrMXDY4gI68= +github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -875,8 +875,8 @@ go.opentelemetry.io/collector/featuregate v1.0.0-rcv0017 h1:DtJQalPXMWQqT6jd2LZ1 go.opentelemetry.io/collector/featuregate v1.0.0-rcv0017/go.mod h1:fLmJMf1AoHttkF8p5oJAc4o5ZpHu8yO5XYJ7gbLCLzo= go.opentelemetry.io/collector/otelcol v0.88.0 h1:f2eRVLJY66w9WFj5iT1Tg6Qxtlljagov9v8TPStuK2g= go.opentelemetry.io/collector/otelcol v0.88.0/go.mod h1:F85TtMPt+ySe29HD6DOyvsMFCV3onaB3VJzky7qrtzQ= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0017 h1:AgALhc2VenoA5l1DvTdg7mkzaBGqoTSuMkAtjsttBFo= -go.opentelemetry.io/collector/pdata v1.0.0-rcv0017/go.mod h1:Rv9fOclA5AtM/JGm0d4jBOIAo1+jBA13UT5Bx0ovXi4= +go.opentelemetry.io/collector/pdata v1.3.0 h1:JRYN7tVHYFwmtQhIYbxWeiKSa2L1nCohyAs8sYqKFZo= +go.opentelemetry.io/collector/pdata v1.3.0/go.mod h1:t7W0Undtes53HODPdSujPLTnfSR5fzT+WpL+RTaaayo= go.opentelemetry.io/collector/processor v0.88.0 h1:5BUZaH+RhTpgTVqBZCrBnN/vl0M1CtwQsZ8ek4iH1lc= go.opentelemetry.io/collector/processor v0.88.0/go.mod h1:2T5KxgBQxXuuyMu9dh+PIBxQ/geCFYcdnjmlWZx8o3E= go.opentelemetry.io/collector/receiver v0.88.0 
h1:MPvVAFOfjl0+Ylka7so8QoK8T2Za2471rv5t3sqbbSY= @@ -1067,8 +1067,8 @@ golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY= -golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0= +golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= +golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1196,6 +1196,7 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= @@ -1317,8 +1318,9 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= 
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1398,12 +1400,12 @@ google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= -google.golang.org/genproto v0.0.0-20231002182017-d307bd883b97 h1:SeZZZx0cP0fqUyA+oRzP9k7cSwJlvDFiROO72uwD6i0= -google.golang.org/genproto v0.0.0-20231002182017-d307bd883b97/go.mod h1:t1VqOqqvce95G3hIDCT5FeO3YUc6Q4Oe24L/+rNMxRk= -google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 h1:W18sezcAYs+3tDZX4F80yctqa12jcP1PUS2gQu1zTPU= -google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97/go.mod h1:iargEX0SFPm3xcfMI0d1domjg0ZF4Aa0p2awqyxhvF0= -google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a h1:a2MQQVoTo96JC9PMGtGBymLp7+/RzpFc2yX/9WfFg1c= -google.golang.org/genproto/googleapis/rpc 
v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:4cYg8o5yUbm77w8ZX00LhMVNl/YVBFJRYWDc0uYWMs0= +google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ= +google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80/go.mod h1:cc8bqMqtv9gMOr0zHg2Vzff5ULhhL2IXP4sbcn32Dro= +google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 h1:Lj5rbfG876hIAYFjqiJnPHfhXbv+nzTWfm04Fg/XSVU= +google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1437,8 +1439,8 @@ google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11 google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= -google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/grpc v1.62.0 h1:HQKZ/fa1bXkX1oFOvSjmZEUL8wLSaZTjCcLAlmZRtdk= +google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod 
h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1455,8 +1457,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 679fb7f401..463c051f63 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.17} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: signoz/signoz-otel-collector:0.88.15 + image: signoz/signoz-otel-collector:0.88.17 container_name: signoz-otel-collector command: [ From 
5959963b9d643ea39b8a5a1e5c264d9a5b14c923 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Thu, 28 Mar 2024 16:34:09 +0530 Subject: [PATCH 31/53] fix: [SIG-575]: no data in new trace explorer page specific scenario (#4748) Co-authored-by: Vishal Sharma --- .../src/container/OptionsMenu/constants.ts | 45 +++++++++++++++++++ .../container/OptionsMenu/useOptionsMenu.ts | 25 ++++++++--- 2 files changed, 64 insertions(+), 6 deletions(-) diff --git a/frontend/src/container/OptionsMenu/constants.ts b/frontend/src/container/OptionsMenu/constants.ts index b1e5463686..2db02f85b8 100644 --- a/frontend/src/container/OptionsMenu/constants.ts +++ b/frontend/src/container/OptionsMenu/constants.ts @@ -1,3 +1,5 @@ +import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; + import { OptionsQuery } from './types'; export const URL_OPTIONS = 'options'; @@ -7,3 +9,46 @@ export const defaultOptionsQuery: OptionsQuery = { maxLines: 2, format: 'list', }; + +export const defaultTraceSelectedColumns = [ + { + key: 'serviceName', + dataType: DataTypes.String, + type: 'tag', + isColumn: true, + isJSON: false, + id: 'serviceName--string--tag--true', + }, + { + key: 'name', + dataType: DataTypes.String, + type: 'tag', + isColumn: true, + isJSON: false, + id: 'name--string--tag--true', + }, + { + key: 'durationNano', + dataType: DataTypes.Float64, + type: 'tag', + isColumn: true, + isJSON: false, + id: 'durationNano--float64--tag--true', + }, + { + key: 'httpMethod', + dataType: DataTypes.String, + type: 'tag', + isColumn: true, + isJSON: false, + id: 'httpMethod--string--tag--true', + }, + { + key: 'responseStatusCode', + dataType: DataTypes.String, + type: 'tag', + isColumn: true, + isJSON: false, + id: 'responseStatusCode--string--tag--true', + }, +]; diff --git a/frontend/src/container/OptionsMenu/useOptionsMenu.ts b/frontend/src/container/OptionsMenu/useOptionsMenu.ts index be2ae00b37..97fbbbb006 100644 --- a/frontend/src/container/OptionsMenu/useOptionsMenu.ts +++ 
b/frontend/src/container/OptionsMenu/useOptionsMenu.ts @@ -16,7 +16,11 @@ import { } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { DataSource } from 'types/common/queryBuilder'; -import { defaultOptionsQuery, URL_OPTIONS } from './constants'; +import { + defaultOptionsQuery, + defaultTraceSelectedColumns, + URL_OPTIONS, +} from './constants'; import { InitialOptions, OptionsMenuConfig, OptionsQuery } from './types'; import { getOptionsFromKeys } from './utils'; @@ -124,20 +128,29 @@ const useOptionsMenu = ({ { queryKey: [debouncedSearchText, isFocused], enabled: isFocused }, ); - const searchedAttributeKeys = useMemo( - () => searchedAttributesData?.payload?.attributeKeys || [], - [searchedAttributesData?.payload?.attributeKeys], - ); + const searchedAttributeKeys = useMemo(() => { + if (searchedAttributesData?.payload?.attributeKeys?.length) { + return searchedAttributesData.payload.attributeKeys; + } + if (dataSource === DataSource.TRACES) { + return defaultTraceSelectedColumns; + } + + return []; + }, [dataSource, searchedAttributesData?.payload?.attributeKeys]); const initialOptionsQuery: OptionsQuery = useMemo( () => ({ ...defaultOptionsQuery, ...initialOptions, + // eslint-disable-next-line no-nested-ternary selectColumns: initialOptions?.selectColumns ? initialSelectedColumns + : dataSource === DataSource.TRACES + ? 
defaultTraceSelectedColumns : defaultOptionsQuery.selectColumns, }), - [initialOptions, initialSelectedColumns], + [dataSource, initialOptions, initialSelectedColumns], ); const selectedColumnKeys = useMemo( From e25b54f86abe872984d094ad5017c05bacf92747 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Thu, 28 Mar 2024 16:46:16 +0530 Subject: [PATCH 32/53] fix: 404 resource not found issues (#4757) --- frontend/src/api/SAML/listAllDomain.ts | 2 +- frontend/src/api/queryBuilder/getAggregateAttribute.ts | 2 +- frontend/src/api/queryBuilder/getAttributeKeys.ts | 2 +- frontend/src/api/saveView/deleteView.ts | 2 +- frontend/src/api/saveView/getAllViews.ts | 2 +- frontend/src/api/saveView/saveView.ts | 2 +- frontend/src/api/saveView/updateView.ts | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/frontend/src/api/SAML/listAllDomain.ts b/frontend/src/api/SAML/listAllDomain.ts index dea73e4311..41620f7d3e 100644 --- a/frontend/src/api/SAML/listAllDomain.ts +++ b/frontend/src/api/SAML/listAllDomain.ts @@ -8,7 +8,7 @@ const listAllDomain = async ( props: Props, ): Promise | ErrorResponse> => { try { - const response = await axios.get(`orgs/${props.orgId}/domains`); + const response = await axios.get(`/orgs/${props.orgId}/domains`); return { statusCode: 200, diff --git a/frontend/src/api/queryBuilder/getAggregateAttribute.ts b/frontend/src/api/queryBuilder/getAggregateAttribute.ts index e493bb460a..f13c3da4a8 100644 --- a/frontend/src/api/queryBuilder/getAggregateAttribute.ts +++ b/frontend/src/api/queryBuilder/getAggregateAttribute.ts @@ -24,7 +24,7 @@ export const getAggregateAttribute = async ({ const response: AxiosResponse<{ data: IQueryAutocompleteResponse; }> = await ApiV3Instance.get( - `autocomplete/aggregate_attributes?${createQueryParams({ + `/autocomplete/aggregate_attributes?${createQueryParams({ aggregateOperator, searchText, dataSource, diff --git a/frontend/src/api/queryBuilder/getAttributeKeys.ts 
b/frontend/src/api/queryBuilder/getAttributeKeys.ts index 99edc630c8..9cc127bb71 100644 --- a/frontend/src/api/queryBuilder/getAttributeKeys.ts +++ b/frontend/src/api/queryBuilder/getAttributeKeys.ts @@ -25,7 +25,7 @@ export const getAggregateKeys = async ({ const response: AxiosResponse<{ data: IQueryAutocompleteResponse; }> = await ApiV3Instance.get( - `autocomplete/attribute_keys?${createQueryParams({ + `/autocomplete/attribute_keys?${createQueryParams({ aggregateOperator, searchText, dataSource, diff --git a/frontend/src/api/saveView/deleteView.ts b/frontend/src/api/saveView/deleteView.ts index e58e731d10..9317c8331a 100644 --- a/frontend/src/api/saveView/deleteView.ts +++ b/frontend/src/api/saveView/deleteView.ts @@ -2,4 +2,4 @@ import axios from 'api'; import { DeleteViewPayloadProps } from 'types/api/saveViews/types'; export const deleteView = (uuid: string): Promise => - axios.delete(`explorer/views/${uuid}`); + axios.delete(`/explorer/views/${uuid}`); diff --git a/frontend/src/api/saveView/getAllViews.ts b/frontend/src/api/saveView/getAllViews.ts index bdafb96b61..4a54d6af0d 100644 --- a/frontend/src/api/saveView/getAllViews.ts +++ b/frontend/src/api/saveView/getAllViews.ts @@ -6,4 +6,4 @@ import { DataSource } from 'types/common/queryBuilder'; export const getAllViews = ( sourcepage: DataSource, ): Promise> => - axios.get(`explorer/views?sourcePage=${sourcepage}`); + axios.get(`/explorer/views?sourcePage=${sourcepage}`); diff --git a/frontend/src/api/saveView/saveView.ts b/frontend/src/api/saveView/saveView.ts index a0c7ba5bf4..60a552f0bb 100644 --- a/frontend/src/api/saveView/saveView.ts +++ b/frontend/src/api/saveView/saveView.ts @@ -8,7 +8,7 @@ export const saveView = ({ viewName, extraData, }: SaveViewProps): Promise> => - axios.post('explorer/views', { + axios.post('/explorer/views', { name: viewName, sourcePage, compositeQuery, diff --git a/frontend/src/api/saveView/updateView.ts b/frontend/src/api/saveView/updateView.ts index 6ee745ffc2..b48b73f275 
100644 --- a/frontend/src/api/saveView/updateView.ts +++ b/frontend/src/api/saveView/updateView.ts @@ -11,7 +11,7 @@ export const updateView = ({ sourcePage, viewKey, }: UpdateViewProps): Promise => - axios.put(`explorer/views/${viewKey}`, { + axios.put(`/explorer/views/${viewKey}`, { name: viewName, compositeQuery, extraData, From 6014bb76b6e80707a4a6dc6ff22094966330ca02 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Thu, 28 Mar 2024 16:51:29 +0530 Subject: [PATCH 33/53] feat: support drag select in chart - alerts page (#4618) * feat: support drag select in chart - alerts page * feat: handle back navigation after drag select --- .../FormAlertRules/ChartPreview/index.tsx | 62 ++++++++++++++++++- 1 file changed, 60 insertions(+), 2 deletions(-) diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index 59dd78e96b..6e8c167c29 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -1,6 +1,7 @@ import { InfoCircleOutlined } from '@ant-design/icons'; import Spinner from 'components/Spinner'; import { DEFAULT_ENTITY_VERSION } from 'constants/app'; +import { QueryParams } from 'constants/query'; import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; import GridPanelSwitch from 'container/GridPanelSwitch'; import { getFormatNameByOptionId } from 'container/NewWidget/RightContainer/alertFomatCategories'; @@ -10,11 +11,17 @@ import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useResizeObserver } from 'hooks/useDimensions'; +import useUrlQuery from 'hooks/useUrlQuery'; +import GetMinMax from 'lib/getMinMax'; +import getTimeString from 'lib/getTimeString'; +import history from 'lib/history'; import { getUPlotChartOptions } from 
'lib/uPlotLib/getUplotChartOptions'; import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; -import { useEffect, useMemo, useRef, useState } from 'react'; +import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { useSelector } from 'react-redux'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { UpdateTimeInterval } from 'store/actions'; import { AppState } from 'store/reducers'; import { AlertDef } from 'types/api/alerts/def'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; @@ -54,6 +61,7 @@ function ChartPreview({ yAxisUnit, }: ChartPreviewProps): JSX.Element | null { const { t } = useTranslation('alerts'); + const dispatch = useDispatch(); const threshold = alertDef?.condition.target || 0; const [minTimeScale, setMinTimeScale] = useState(); const [maxTimeScale, setMaxTimeScale] = useState(); @@ -63,6 +71,30 @@ function ChartPreview({ GlobalReducer >((state) => state.globalTime); + const handleBackNavigation = (): void => { + const searchParams = new URLSearchParams(window.location.search); + const startTime = searchParams.get(QueryParams.startTime); + const endTime = searchParams.get(QueryParams.endTime); + + if (startTime && endTime && startTime !== endTime) { + dispatch( + UpdateTimeInterval('custom', [ + parseInt(getTimeString(startTime), 10), + parseInt(getTimeString(endTime), 10), + ]), + ); + } + }; + + useEffect(() => { + window.addEventListener('popstate', handleBackNavigation); + + return (): void => { + window.removeEventListener('popstate', handleBackNavigation); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const canQuery = useMemo((): boolean => { if (!query || query == null) { return false; @@ -131,10 +163,34 @@ function ChartPreview({ const containerDimensions = useResizeObserver(graphRef); const isDarkMode = useIsDarkMode(); + const urlQuery = 
useUrlQuery(); + const location = useLocation(); const optionName = getFormatNameByOptionId(alertDef?.condition.targetUnit || '') || ''; + const onDragSelect = useCallback( + (start: number, end: number): void => { + const startTimestamp = Math.trunc(start); + const endTimestamp = Math.trunc(end); + + if (startTimestamp !== endTimestamp) { + dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); + } + + const { maxTime, minTime } = GetMinMax('custom', [ + startTimestamp, + endTimestamp, + ]); + + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + }, + [dispatch, location.pathname, urlQuery], + ); + const options = useMemo( () => getUPlotChartOptions({ @@ -145,6 +201,7 @@ function ChartPreview({ minTimeScale, maxTimeScale, isDarkMode, + onDragSelect, thresholds: [ { index: '0', // no impact @@ -174,6 +231,7 @@ function ChartPreview({ minTimeScale, maxTimeScale, isDarkMode, + onDragSelect, threshold, t, optionName, From 9f30bba9a8e50f2403a2f7503f29bd2b01f172f7 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Thu, 28 Mar 2024 16:55:59 +0530 Subject: [PATCH 34/53] feat: add support to pin attributes in logs details view (#4692) * feat: add support to pin attributes in logs details view * feat: add safety checks * feat: update styles * feat: update styles * feat: move json parsing in try catch block --- frontend/src/constants/localStorage.ts | 1 + .../LogDetailedView/Overview.styles.scss | 13 +++ .../LogDetailedView/TableView.styles.scss | 37 ++++++- .../container/LogDetailedView/TableView.tsx | 101 +++++++++++++++++- .../ResourceAttributesFilter.styles.scss | 13 +++ .../ResourceAttributesFilter/styles.ts | 1 - 6 files changed, 158 insertions(+), 8 deletions(-) diff --git a/frontend/src/constants/localStorage.ts b/frontend/src/constants/localStorage.ts index 0ba6cac302..85f46ab892 100644 --- 
a/frontend/src/constants/localStorage.ts +++ b/frontend/src/constants/localStorage.ts @@ -17,4 +17,5 @@ export enum LOCALSTORAGE { IS_IDENTIFIED_USER = 'IS_IDENTIFIED_USER', DASHBOARD_VARIABLES = 'DASHBOARD_VARIABLES', SHOW_EXPLORER_TOOLBAR = 'SHOW_EXPLORER_TOOLBAR', + PINNED_ATTRIBUTES = 'PINNED_ATTRIBUTES', } diff --git a/frontend/src/container/LogDetailedView/Overview.styles.scss b/frontend/src/container/LogDetailedView/Overview.styles.scss index bc63db442c..8e1726a0a9 100644 --- a/frontend/src/container/LogDetailedView/Overview.styles.scss +++ b/frontend/src/container/LogDetailedView/Overview.styles.scss @@ -53,6 +53,19 @@ background: rgba(171, 189, 255, 0.04); padding: 8px; + + .ant-collapse-extra { + display: flex; + align-items: center; + + .action-btn { + display: flex; + + .ant-btn { + background: rgba(113, 144, 249, 0.08); + } + } + } } .ant-collapse-content { diff --git a/frontend/src/container/LogDetailedView/TableView.styles.scss b/frontend/src/container/LogDetailedView/TableView.styles.scss index 89101bf7b2..d9cbdcabbb 100644 --- a/frontend/src/container/LogDetailedView/TableView.styles.scss +++ b/frontend/src/container/LogDetailedView/TableView.styles.scss @@ -5,12 +5,13 @@ .ant-table-row:hover { .ant-table-cell { .value-field { - display: flex; - justify-content: space-between; - align-items: center; .action-btn { display: flex; - gap: 4px; + position: absolute; + top: 50%; + right: 16px; + transform: translateY(-50%); + gap: 8px; } } } @@ -28,6 +29,30 @@ } } + .attribute-pin { + cursor: pointer; + + padding: 0; + vertical-align: middle; + text-align: center; + + .log-attribute-pin { + padding: 8px; + + display: flex; + justify-content: center; + align-items: center; + + .pin-attribute-icon { + border: none; + + &.pinned svg { + fill: var(--bg-robin-500); + } + } + } + } + .value-field-container { background: rgba(22, 25, 34, 0.4); @@ -70,6 +95,10 @@ .value-field-container { background: var(--bg-vanilla-300); + &.attribute-pin { + background: 
var(--bg-vanilla-100); + } + .action-btn { .filter-btn { background: var(--bg-vanilla-300); diff --git a/frontend/src/container/LogDetailedView/TableView.tsx b/frontend/src/container/LogDetailedView/TableView.tsx index 593519404d..a69fba6441 100644 --- a/frontend/src/container/LogDetailedView/TableView.tsx +++ b/frontend/src/container/LogDetailedView/TableView.tsx @@ -1,22 +1,29 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ import './TableView.styles.scss'; import { LinkOutlined } from '@ant-design/icons'; import { Color } from '@signozhq/design-tokens'; import { Button, Space, Spin, Tooltip, Tree, Typography } from 'antd'; import { ColumnsType } from 'antd/es/table'; +import getLocalStorageApi from 'api/browser/localstorage/get'; +import setLocalStorageApi from 'api/browser/localstorage/set'; +import cx from 'classnames'; import AddToQueryHOC, { AddToQueryHOCProps, } from 'components/Logs/AddToQueryHOC'; import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC'; import { ResizeTable } from 'components/ResizeTable'; +import { LOCALSTORAGE } from 'constants/localStorage'; import { OPERATORS } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; +import { useIsDarkMode } from 'hooks/useDarkMode'; import history from 'lib/history'; import { fieldSearchFilter } from 'lib/logs/fieldSearch'; import { removeJSONStringifyQuotes } from 'lib/removeJSONStringifyQuotes'; import { isEmpty } from 'lodash-es'; -import { ArrowDownToDot, ArrowUpFromDot } from 'lucide-react'; -import { useMemo, useState } from 'react'; +import { ArrowDownToDot, ArrowUpFromDot, Pin } from 'lucide-react'; +import { useEffect, useMemo, useState } from 'react'; import { useDispatch } from 'react-redux'; import { generatePath } from 'react-router-dom'; import { Dispatch } from 'redux'; @@ -57,6 +64,28 @@ function TableView({ const dispatch = useDispatch>(); const [isfilterInLoading, setIsFilterInLoading] 
= useState(false); const [isfilterOutLoading, setIsFilterOutLoading] = useState(false); + const isDarkMode = useIsDarkMode(); + + const [pinnedAttributes, setPinnedAttributes] = useState< + Record + >({}); + + useEffect(() => { + const pinnedAttributesFromLocalStorage = getLocalStorageApi( + LOCALSTORAGE.PINNED_ATTRIBUTES, + ); + + if (pinnedAttributesFromLocalStorage) { + try { + const parsedPinnedAttributes = JSON.parse(pinnedAttributesFromLocalStorage); + setPinnedAttributes(parsedPinnedAttributes); + } catch (e) { + console.error('Error parsing pinned attributes from local storgage'); + } + } else { + setPinnedAttributes({}); + } + }, []); const flattenLogData: Record | null = useMemo( () => (logData ? flattenObject(logData) : null), @@ -74,6 +103,19 @@ function TableView({ } }; + const togglePinAttribute = (record: DataType): void => { + if (record) { + const newPinnedAttributes = { ...pinnedAttributes }; + newPinnedAttributes[record.key] = !newPinnedAttributes[record.key]; + setPinnedAttributes(newPinnedAttributes); + + setLocalStorageApi( + LOCALSTORAGE.PINNED_ATTRIBUTES, + JSON.stringify(newPinnedAttributes), + ); + } + }; + const onClickHandler = ( operator: string, fieldKey: string, @@ -138,6 +180,37 @@ function TableView({ } const columns: ColumnsType = [ + { + title: '', + dataIndex: 'pin', + key: 'pin', + width: 5, + align: 'left', + className: 'attribute-pin value-field-container', + render: (fieldData: Record, record): JSX.Element => { + let pinColor = isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500; + + if (pinnedAttributes[record?.key]) { + pinColor = Color.BG_ROBIN_500; + } + + return ( +
+
{ + togglePinAttribute(record); + }} + > + +
+
+ ); + }, + }, { title: 'Field', dataIndex: 'field', @@ -264,12 +337,34 @@ function TableView({ }, }, ]; + function sortPinnedAttributes( + data: Record[], + sortingObj: Record, + ): Record[] { + const sortingKeys = Object.keys(sortingObj); + return data.sort((a, b) => { + const aKey = a.key; + const bKey = b.key; + const aSortIndex = sortingKeys.indexOf(aKey); + const bSortIndex = sortingKeys.indexOf(bKey); + + if (sortingObj[aKey] && !sortingObj[bKey]) { + return -1; + } + if (!sortingObj[aKey] && sortingObj[bKey]) { + return 1; + } + return aSortIndex - bSortIndex; + }); + } + + const sortedAttributes = sortPinnedAttributes(dataSource, pinnedAttributes); return ( Date: Thu, 28 Mar 2024 16:58:35 +0530 Subject: [PATCH 35/53] fix: update devtool property to eval-source-map (#4760) --- frontend/webpack.config.js | 2 +- frontend/webpack.config.prod.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/webpack.config.js b/frontend/webpack.config.js index 65883594bb..9a9bd39807 100644 --- a/frontend/webpack.config.js +++ b/frontend/webpack.config.js @@ -64,7 +64,7 @@ if (process.env.BUNDLE_ANALYSER === 'true') { */ const config = { mode: 'development', - devtool: 'source-map', + devtool: 'eval-source-map', entry: resolve(__dirname, './src/index.tsx'), devServer: { historyApiFallback: { diff --git a/frontend/webpack.config.prod.js b/frontend/webpack.config.prod.js index cf5816f24d..22e33d7976 100644 --- a/frontend/webpack.config.prod.js +++ b/frontend/webpack.config.prod.js @@ -79,7 +79,7 @@ if (process.env.BUNDLE_ANALYSER === 'true') { const config = { mode: 'production', - devtool: 'source-map', + devtool: 'eval-source-map', entry: resolve(__dirname, './src/index.tsx'), output: { path: resolve(__dirname, './build'), From 990fc83269cda3ec16b0ec763c62a5b31a1a46af Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com> Date: Thu, 28 Mar 2024 19:57:07 +0530 Subject: [PATCH 36/53] Feat/integrations v0 mongo 
and nginx (#4763) * feat: flesh out pre-requisites for collecting mongodb logs and metrics * chore: remove stale pipelines in bundled integrations * chore: clean up 'collect metrics' step for mongodb * feat: add instructions for collecting and parsing mongodb logs * feat: add metrics and logs attributes to mongodb data collected list * feat: nginx logs collection instructions and some other cleanup * feat: add list of parsed log attributes to data collected list for nginx * chore: do not run pipeline population integration test if no built-in integration has a pipeline --- .../mongo/assets/pipelines/log-parser.json | 33 --- .../mongo/config/collect-logs.md | 117 +++++++++ ...e-otel-collector.md => collect-metrics.md} | 62 +++-- .../mongo/config/prerequisites.md | 53 ++-- .../mongo/integration.json | 226 +++++++++++++++--- .../builtin_integrations/mongo/overview.md | 5 +- .../nginx/assets/pipelines/log-parser.json | 62 ----- .../nginx/config/collect-logs.md | 139 +++++++++++ .../nginx/config/configure-otel-collector.md | 1 - .../nginx/config/prepare-nginx.md | 1 - .../nginx/config/prerequisites.md | 19 ++ .../nginx/integration.json | 77 +++--- .../builtin_integrations/nginx/overview.md | 2 +- .../postgres/config/collect-logs.md | 2 +- .../redis/assets/pipelines/log-parser.json | 33 --- .../redis/config/collect-logs.md | 2 +- .../redis/integration.json | 4 +- .../integration/signoz_integrations_test.go | 7 +- 18 files changed, 617 insertions(+), 228 deletions(-) delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json create mode 100644 pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md rename pkg/query-service/app/integrations/builtin_integrations/mongo/config/{configure-otel-collector.md => collect-metrics.md} (57%) delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json create mode 100644 
pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md create mode 100644 pkg/query-service/app/integrations/builtin_integrations/nginx/config/prerequisites.md delete mode 100644 pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json deleted file mode 100644 index e75db3ec5d..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id": "parse-default-mongo-access-log", - "name": "Parse default mongo access log", - "alias": "parse-default-mongo-access-log", - "description": "Parse standard mongo access log", - "enabled": true, - "filter": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "mongo" - } - ] - }, - "config": [ - { - "type": "grok_parser", - "id": "parse-body-grok", - "enabled": true, - "orderId": 1, - "name": "Parse Body", - "parse_to": "attributes", - "pattern": "%{GREEDYDATA}", - "parse_from": "body" - } - ] -} \ No newline at end of file diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md new file mode 100644 index 0000000000..fa55ca9a63 --- /dev/null +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md @@ -0,0 +1,117 @@ +### Collect MongoDB Logs + +You can configure MongoDB logs collection by providing the required 
collector config to your collector. + +#### Create collector config file + +Save the following config for collecting MongoDB logs in a file named `mongodb-logs-collection-config.yaml` + +```yaml +receivers: + filelog/mongodb: + include: ["${env:MONGODB_LOG_FILE}"] + operators: + # Parse structured mongodb logs + # For more details, see https://www.mongodb.com/docs/manual/reference/log-messages/#structured-logging + - type: json_parser + if: body matches '^\\s*{\\s*".*}\\s*$' + parse_from: body + parse_to: attributes + timestamp: + parse_from: attributes.t.$$date + layout: '2006-01-02T15:04:05.000-07:00' + layout_type: gotime + severity: + parse_from: attributes.s + overwrite_text: true + mapping: + debug: + - D1 + - D2 + - D3 + - D4 + - D5 + info: I + warn: W + error: E + fatal: F + - type: flatten + if: attributes.attr != nil + field: attributes.attr + - type: move + if: attributes.msg != nil + from: attributes.msg + to: body + - type: move + if: attributes.c != nil + from: attributes.c + to: attributes.component + - type: move + if: attributes.id != nil + from: attributes.id + to: attributes.mongo_log_id + - type: remove + if: attributes.t != nil + field: attributes.t + - type: remove + if: attributes.s != nil + field: attributes.s + - type: add + field: attributes.source + value: mongodb + +processors: + batch: + send_batch_size: 10000 + send_batch_max_size: 11000 + timeout: 10s + +exporters: + # export to SigNoz cloud + otlp/mongodb-logs: + endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" + tls: + insecure: false + headers: + "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" + + # export to local collector + # otlp/mongodb-logs: + # endpoint: "localhost:4317" + # tls: + # insecure: true + + +service: + pipelines: + logs/mongodb: + receivers: [filelog/mongodb] + processors: [batch] + exporters: [otlp/mongodb-logs] +``` + +#### Set Environment Variables + +Set the following environment variables in your otel-collector environment: + +```bash + +# path of MongoDB 
server log file. must be accessible by the otel collector +export MONGODB_LOG_FILE=/var/log/mongodb.log + +# region specific SigNoz cloud ingestion endpoint +export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" + +# your SigNoz ingestion key +export SIGNOZ_INGESTION_KEY="signoz-ingestion-key" + +``` + +#### Use collector config file + +Make the collector config file available to your otel collector and use it by adding the following flag to the command for running your collector +```bash +--config mongodb-logs-collection-config.yaml +``` +Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. + diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-metrics.md similarity index 57% rename from pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md rename to pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-metrics.md index 35238c9e9a..dcbc7dd582 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-metrics.md @@ -1,19 +1,21 @@ -### Configure otel collector +### Collect MongoDB Metrics -#### Save collector config file +You can configure MongoDB metrics collection by providing the required collector config to your collector. 
-Save the following collector config in a file named `mongo-collector-config.yaml` +#### Create collector config file -```bash +Save the following config for collecting mongodb metrics in a file named `mongodb-metrics-collection-config.yaml` + +```yaml receivers: mongodb: # - For standalone MongoDB deployments this is the hostname and port of the mongod instance # - For replica sets specify the hostnames and ports of the mongod instances that are in the replica set configuration. If the replica_set field is specified, nodes will be autodiscovered. # - For a sharded MongoDB deployment, please specify a list of the mongos hosts. hosts: - - endpoint: 127.0.0.1:27017 + - endpoint: ${env:MONGODB_ENDPOINT} # If authentication is required, the user can with clusterMonitor permissions can be provided here - username: monitoring + username: ${env:MONGODB_USERNAME} # If authentication is required, the password can be provided here. password: ${env:MONGODB_PASSWORD} collection_interval: 60s @@ -46,18 +48,19 @@ processors: hostname_sources: ["os"] exporters: - # export to local collector - otlp/local: - endpoint: "localhost:4317" - tls: - insecure: true # export to SigNoz cloud - otlp/signoz: - endpoint: "ingest.{region}.signoz.cloud:443" + otlp/mongodb: + endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" tls: insecure: false headers: - "signoz-access-token": "" + "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" + + # export to local collector + # otlp/mongodb: + # endpoint: "localhost:4317" + # tls: + # insecure: true service: pipelines: @@ -65,10 +68,37 @@ service: receivers: [mongodb] # note: remove this processor if the collector host is not running on the same host as the mongo instance processors: [resourcedetection/system] - exporters: [otlp/local] + exporters: [otlp/mongodb] + +``` + +#### Set Environment Variables + +Set the following environment variables in your otel-collector environment: + +```bash + +# MongoDB endpoint reachable from the otel collector" +export 
MONGODB_ENDPOINT="host:port" + +# username for MongoDB monitoring user +export MONGODB_USERNAME="monitoring" + +# password for MongoDB monitoring user +export MONGODB_PASSWORD="" + +# region specific SigNoz cloud ingestion endpoint +export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" + +# your SigNoz ingestion key +export SIGNOZ_INGESTION_KEY="signoz-ingestion-key" ``` #### Use collector config file -Run your collector with the added flag `--config mongo-collector-config.yaml` +Make the collector config file available to your otel collector and use it by adding the following flag to the command for running your collector +```bash +--config mongodb-metrics-collection-config.yaml +``` +Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md index 5a844d6988..5191bfb194 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md @@ -1,22 +1,41 @@ -### Prepare mongo for monitoring +## Before You Begin -- Have a running mongodb instance -- Have the monitoring user created -- Have the monitoring user granted the necessary permissions +To configure metrics and logs collection for MongoDB, you need the following. -Mongodb recommends to set up a least privilege user (LPU) with a `clusterMonitor` role in order to collect. +### Ensure MongoDB server is prepared for monitoring -Run the following command to create a user with the necessary permissions. +- **Ensure that the MongoDB server is running a supported version** + MongoDB versions 4.4+ are supported. 
+ You can use the following statement to determine server version + ```js + db.version() + ``` -```bash -use admin -db.createUser( - { - user: "monitoring", - pwd: "", - roles: ["clusterMonitor"] - } -); -``` +- **If collecting metrics, ensure that there is a MongoDB user with required permissions** + Mongodb recommends to set up a least privilege user (LPU) with a clusterMonitor role in order to collect metrics -Replace `` with a strong password and set is as env var `MONGODB_PASSWORD`. + To create a monitoring user, run: + ```js + use admin + db.createUser( + { + user: "monitoring", + pwd: "", + roles: ["clusterMonitor"] + } + ); + ``` + + +### Ensure OTEL Collector is running with access to the MongoDB server + +- **Ensure that an OTEL collector is running in your deployment environment** + If needed, please [install an OTEL Collector](https://signoz.io/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/) + If already installed, ensure that the collector version is v0.88.0 or newer. + + Also ensure that you can provide config files to the collector and that you can set environment variables and command line flags used for running it. + +- **Ensure that the OTEL collector can access the MongoDB server** + In order to collect metrics, the collector must be able to access the MongoDB server as a client using the monitoring user. + + In order to collect logs, the collector must be able to read the MongoDB server log file. 
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json b/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json index c5d0fcefcd..b9543e0757 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json @@ -18,18 +18,20 @@ "instructions": "file://config/prerequisites.md" }, { - "title": "Configure Otel Collector", - "instructions": "file://config/configure-otel-collector.md" + "title": "Collect Metrics", + "instructions": "file://config/collect-metrics.md" + }, + { + "title": "Collect Logs", + "instructions": "file://config/collect-logs.md" } ], "assets": { "logs": { - "pipelines": [ - "file://assets/pipelines/log-parser.json" - ] + "pipelines": [] }, "dashboards": [ - "file://assets/dashboards/overview.json" + "file://assets/dashboards/overview.json" ], "alerts": [] }, @@ -52,37 +54,207 @@ "data_collected": { "logs": [ { - "name": "Request Method", - "path": "attributes[\"http.request.method\"]", - "type": "string", - "description": "HTTP method" + "name": "Timestamp", + "path": "timestamp", + "type": "timestamp" }, { - "name": "Request Path", - "path": "attributes[\"url.path\"]", - "type": "string", - "description": "path requested" + "name": "Severity Text", + "path": "severity_text", + "type": "string" }, { - "name": "Response Status Code", - "path": "attributes[\"http.response.status_code\"]", - "type": "int", - "description": "HTTP response code" + "name": "Severity Number", + "path": "severity_number", + "type": "number" + }, + { + "name": "MongoDB Component", + "path": "attributes.component", + "type": "string" } ], "metrics": [ { - "name": "http.server.request.duration", - "type": "Histogram", - "unit": "s", - "description": "Duration of HTTP server requests" + "description": "The number of cache operations of the instance.", + "unit": "number", + "type": "Sum", + "name": 
"mongodb_cache_operations" }, { - "name": "http.server.active_requests", - "type": "UpDownCounter", - "unit": "{ request }", - "description": "Number of active HTTP server requests" + "description": "The number of collections.", + "unit": "number", + "type": "Sum", + "name": "mongodb_collection_count" + }, + { + "description": "The size of the collection. Data compression does not affect this value.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_data_size" + }, + { + "description": "The number of connections.", + "unit": "number", + "type": "Sum", + "name": "mongodb_connection_count" + }, + { + "description": "The number of extents.", + "unit": "number", + "type": "Sum", + "name": "mongodb_extent_count" + }, + { + "description": "The time the global lock has been held.", + "unit": "ms", + "type": "Sum", + "name": "mongodb_global_lock_time" + }, + { + "description": "The number of indexes.", + "unit": "number", + "type": "Sum", + "name": "mongodb_index_count" + }, + { + "description": "Sum of the space allocated to all indexes in the database, including free index space.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_index_size" + }, + { + "description": "The amount of memory used.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_memory_usage" + }, + { + "description": "The number of objects.", + "unit": "number", + "type": "Sum", + "name": "mongodb_object_count" + }, + { + "description": "The latency of operations.", + "unit": "us", + "type": "Gauge", + "name": "mongodb_operation_latency_time" + }, + { + "description": "The number of operations executed.", + "unit": "number", + "type": "Sum", + "name": "mongodb_operation_count" + }, + { + "description": "The number of replicated operations executed.", + "unit": "number", + "type": "Sum", + "name": "mongodb_operation_repl_count" + }, + { + "description": "The total amount of storage allocated to this collection.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_storage_size" + }, + 
{ + "description": "The number of existing databases.", + "unit": "number", + "type": "Sum", + "name": "mongodb_database_count" + }, + { + "description": "The number of times an index has been accessed.", + "unit": "number", + "type": "Sum", + "name": "mongodb_index_access_count" + }, + { + "description": "The number of document operations executed.", + "unit": "number", + "type": "Sum", + "name": "mongodb_document_operation_count" + }, + { + "description": "The number of bytes received.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_network_io_receive" + }, + { + "description": "The number of bytes transmitted.", + "unit": "Bytes", + "type": "Sum", + "name": "mongodb_network_io_transmit" + }, + { + "description": "The number of requests received by the server.", + "unit": "number", + "type": "Sum", + "name": "mongodb_network_request_count" + }, + { + "description": "The total time spent performing operations.", + "unit": "ms", + "type": "Sum", + "name": "mongodb_operation_time" + }, + { + "description": "The total number of active sessions.", + "unit": "number", + "type": "Sum", + "name": "mongodb_session_count" + }, + { + "description": "The number of open cursors maintained for clients.", + "unit": "number", + "type": "Sum", + "name": "mongodb_cursor_count" + }, + { + "description": "The number of cursors that have timed out.", + "unit": "number", + "type": "Sum", + "name": "mongodb_cursor_timeout_count" + }, + { + "description": "Number of times the lock was acquired in the specified mode.", + "unit": "number", + "type": "Sum", + "name": "mongodb_lock_acquire_count" + }, + { + "description": "Number of times the lock acquisitions encountered waits because the locks were held in a conflicting mode.", + "unit": "number", + "type": "Sum", + "name": "mongodb_lock_acquire_wait_count" + }, + { + "description": "Cumulative wait time for the lock acquisitions.", + "unit": "microseconds", + "type": "Sum", + "name": "mongodb_lock_acquire_time" + }, + {
"description": "Number of times the lock acquisitions encountered deadlocks.", + "unit": "number", + "type": "Sum", + "name": "mongodb_lock_deadlock_count" + }, + { + "description": "The health status of the server.", + "unit": "number", + "type": "Gauge", + "name": "mongodb_health" + }, + { + "description": "The amount of time that the server has been running.", + "unit": "ms", + "type": "Sum", + "name": "mongodb_uptime" } ] } -} +} \ No newline at end of file diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md index c7a84541ad..c088a9aa44 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md @@ -1,3 +1,6 @@ ### Monitor MongoDB with SigNoz -Collect key MongoDB metrics and parse your MongoDB logs +Collect key MongoDB metrics and view them with an out of the box dashboard. + +Collect and parse MongoDB logs to populate timestamp, severity, and other log attributes for better querying and aggregation. 
+ diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json deleted file mode 100644 index e9521e45ff..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "parse-default-nginx-access-log", - "name": "Parse default nginx access log", - "alias": "parse-default-nginx-access-log", - "description": "Parse standard nginx access log", - "enabled": true, - "filter": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "nginx" - } - ] - }, - "config": [ - { - "type": "grok_parser", - "id": "parse-body-grok", - "enabled": true, - "orderId": 1, - "name": "Parse Body", - "parse_to": "attributes", - "pattern": "%{IP:client.address} - %{USERNAME:enduser.id} \\[%{HTTPDATE:time.local}\\] \"((%{WORD:http.method} %{DATA:http.path}(\\?%{DATA:http.query})? %{WORD:network.protocol.name}/%{NOTSPACE:network.protocol.version})|%{DATA})\" %{INT:http.response.status_code:int} %{INT:http.request.body.bytes:int} \"%{NOTSPACE:http.referer}\" \"%{DATA:http.user.agent}\" %{INT:http.request.bytes:int} %{NUMBER:http.request.time:float} \\[%{DATA:proxy.upstream.name}?\\] \\[%{DATA:proxy.alternative.upstream.name}?\\] ((%{IP:network.peer.address}:%{INT:network.peer.port:int})|%{DATA})? (%{INT:http.response.bytes:int}|-)? (%{NUMBER:http.response.time:float}|-)? (%{NUMBER:network.peer.status.code:int}|-)? 
%{NOTSPACE:request.id}", - "parse_from": "body" - }, - { - "type": "severity_parser", - "id": "parse-sev", - "enabled": true, - "orderId": 2, - "name": "Set Severity", - "parse_from": "attributes[\"http.response.status_code\"]", - "mapping": { - "debug": [ - "1xx" - ], - "error": [ - "4xx" - ], - "fatal": [ - "5xx" - ], - "info": [ - "2xx" - ], - "trace": [ - "trace" - ], - "warn": [ - "3xx" - ] - }, - "overwrite_text": true - } - ] -} \ No newline at end of file diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md new file mode 100644 index 0000000000..b421478ab9 --- /dev/null +++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md @@ -0,0 +1,139 @@ +### Collect Nginx Logs + +You can configure Nginx logs collection by providing the required collector config to your collector. + +#### Create collector config file + +Save the following config for collecting Nginx logs in a file named `nginx-logs-collection-config.yaml` + +```yaml +receivers: + filelog/nginx-access-logs: + include: ["${env:NGINX_ACCESS_LOG_FILE}"] + operators: + # Parse the default nginx access log format. Nginx defaults to the "combined" log format + # $remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" + # For more details, see https://nginx.org/en/docs/http/ngx_http_log_module.html + - type: regex_parser + if: body matches '^(?P[0-9\\.]+) - (?P[^\\s]+) \\[(?P.+)\\] "(?P\\w+?) (?P.+?)" (?P[0-9]+) (?P[0-9]+) "(?P.+?)" "(?P.+?)"$' + parse_from: body + parse_to: attributes + regex: '^(?P[0-9\.]+) - (?P[^\s]+) \[(?P.+)\] "(?P\w+?) 
(?P.+?)" (?P[0-9]+) (?P[0-9]+) "(?P.+?)" "(?P.+?)"$' + timestamp: + parse_from: attributes.ts + layout: "02/Jan/2006:15:04:05 -0700" + layout_type: gotime + severity: + parse_from: attributes.status + overwrite_text: true + mapping: + debug: "1xx" + info: + - "2xx" + - "3xx" + warn: "4xx" + error: "5xx" + - type: remove + if: attributes.ts != nil + field: attributes.ts + - type: add + field: attributes.source + value: nginx + + filelog/nginx-error-logs: + include: ["${env:NGINX_ERROR_LOG_FILE}"] + operators: + # Parse the default nginx error log format. + # YYYY/MM/DD HH:MM:SS [LEVEL] PID#TID: *CID MESSAGE + # For more details, see https://github.com/phusion/nginx/blob/master/src/core/ngx_log.c + - type: regex_parser + if: body matches '^(?P.+?) \\[(?P\\w+)\\] (?P\\d+)#(?P\\d+). \\*(?P\\d+) (?P.+)$' + parse_from: body + parse_to: attributes + regex: '^(?P.+?) \[(?P\w+)\] (?P\d+)#(?P\d+). \*(?P\d+) (?P.+)$' + timestamp: + parse_from: attributes.ts + layout: "2006/01/02 15:04:05" + layout_type: gotime + severity: + parse_from: attributes.log_level + overwrite_text: true + mapping: + debug: "debug" + info: + - "info" + - "notice" + warn: "warn" + error: + - "error" + - "crit" + - "alert" + fatal: "emerg" + - type: remove + if: attributes.ts != nil + field: attributes.ts + - type: move + if: attributes.message != nil + from: attributes.message + to: body + - type: add + field: attributes.source + value: nginx + +processors: + batch: + send_batch_size: 10000 + send_batch_max_size: 11000 + timeout: 10s + +exporters: + # export to SigNoz cloud + otlp/nginx-logs: + endpoint: "${env:OTLP_DESTINATION_ENDPOINT}" + tls: + insecure: false + headers: + "signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}" + + # export to local collector + # otlp/nginx-logs: + # endpoint: "localhost:4317" + # tls: + # insecure: true + +service: + pipelines: + logs/nginx: + receivers: [filelog/nginx-access-logs, filelog/nginx-error-logs] + processors: [batch] + exporters: [otlp/nginx-logs] + +``` + 
+#### Set Environment Variables + +Set the following environment variables in your otel-collector environment: + +```bash + +# path of Nginx access log file. must be accessible by the otel collector +export NGINX_ACCESS_LOG_FILE=/var/log/nginx/access.log; + +# path of Nginx error log file. must be accessible by the otel collector +export NGINX_ERROR_LOG_FILE=/var/log/nginx/error.log + +# region specific SigNoz cloud ingestion endpoint +export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" + +# your SigNoz ingestion key +export SIGNOZ_INGESTION_KEY="signoz-ingestion-key" + +``` + +#### Use collector config file + +Make the collector config file available to your otel collector and use it by adding the following flag to the command for running your collector +```bash +--config nginx-logs-collection-config.yaml +``` +Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag. diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md deleted file mode 100644 index f5c22e16cb..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md +++ /dev/null @@ -1 +0,0 @@ -### Configure otel collector diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md deleted file mode 100644 index 2677d09b6b..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md +++ /dev/null @@ -1 +0,0 @@ -### Prepare nginx for observability diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prerequisites.md new file mode 100644 index 0000000000..f2ce762f21 
--- /dev/null +++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prerequisites.md @@ -0,0 +1,19 @@ +## Before You Begin + +To configure logs collection for Nginx, you need the following. + +### Ensure Nginx server is running a supported version + +Ensure that your Nginx server is running a version newer than 1.0.0 + + +### Ensure OTEL Collector is running with access to the Nginx server + +- **Ensure that an OTEL collector is running in your deployment environment** + If needed, please [install an OTEL Collector](https://signoz.io/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/) + If already installed, ensure that the collector version is v0.88.0 or newer. + + Also ensure that you can provide config files to the collector and that you can set environment variables and command line flags used for running it. + +- **Ensure that the OTEL collector can access the Nginx server** + In order to collect logs, the collector must be able to read Nginx server log files. 
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json index 558f9780d0..16f03bbed3 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json @@ -15,19 +15,17 @@ "overview": "file://overview.md", "configuration": [ { - "title": "Prepare Nginx", - "instructions": "file://config/prepare-nginx.md" + "title": "Prerequisites", + "instructions": "file://config/prerequisites.md" }, { - "title": "Configure Otel Collector", - "instructions": "file://config/configure-otel-collector.md" + "title": "Collect Logs", + "instructions": "file://config/collect-logs.md" } ], "assets": { "logs": { - "pipelines": [ - "file://assets/pipelines/log-parser.json" - ] + "pipelines": [] }, "dashboards": null, "alerts": null @@ -50,38 +48,57 @@ }, "data_collected": { "logs": [ + { + "name": "Timestamp", + "path": "timestamp", + "type": "timestamp" + }, + { + "name": "Severity Text", + "path": "severity_text", + "type": "string" + }, + { + "name": "Severity Number", + "path": "severity_number", + "type": "number" + }, + { + "name": "Body Bytes Sent", + "path": "attributes.body_bytes_sent", + "type": "string" + }, + { + "name": "Referrer", + "path": "attributes.http_referrer", + "type": "string" + }, + { + "name": "User Agent", + "path": "attributes.http_user_agent", + "type": "string" + }, { "name": "Request Method", - "path": "attributes[\"http.request.method\"]", - "type": "string", - "description": "HTTP method" + "path": "attributes.request_method", + "type": "string" }, { "name": "Request Path", - "path": "attributes[\"url.path\"]", - "type": "string", - "description": "path requested" + "path": "attributes.request_path", + "type": "string" }, { "name": "Response Status Code", - "path": "attributes[\"http.response.status_code\"]", - "type": "int", - 
"description": "HTTP response code" - } - ], - "metrics": [ - { - "name": "http.server.request.duration", - "type": "Histogram", - "unit": "s", - "description": "Duration of HTTP server requests" + "path": "attributes.status", + "type": "string" }, { - "name": "http.server.active_requests", - "type": "UpDownCounter", - "unit": "{ request }", - "description": "Number of active HTTP server requests" + "name": "Remote Address", + "path": "attributes.remote_addr", + "type": "string" } - ] + ], + "metrics": [] } -} +} \ No newline at end of file diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md index dac6354fc0..8c17af806c 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md +++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md @@ -1,3 +1,3 @@ ### Monitor Nginx with SigNoz -Parse your Nginx logs and collect key metrics. +Collect and parse Nginx logs to populate timestamp, severity, and other log attributes for better querying and aggregation. 
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md index f048ec63b0..0c199061a7 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md @@ -35,7 +35,7 @@ receivers: - LOG - NOTICE - DETAIL - warning: WARNING + warn: WARNING error: ERROR fatal: - FATAL diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json deleted file mode 100644 index d06760e0b8..0000000000 --- a/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id": "parse-default-redis-access-log", - "name": "Parse default redis access log", - "alias": "parse-default-redis-access-log", - "description": "Parse standard redis access log", - "enabled": true, - "filter": { - "op": "AND", - "items": [ - { - "key": { - "type": "tag", - "key": "source", - "dataType": "string" - }, - "op": "=", - "value": "redis" - } - ] - }, - "config": [ - { - "type": "grok_parser", - "id": "parse-body-grok", - "enabled": true, - "orderId": 1, - "name": "Parse Body", - "parse_to": "attributes", - "pattern": "%{GREEDYDATA}", - "parse_from": "body" - } - ] -} \ No newline at end of file diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md index 7be122de4c..e8b26ef710 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md @@ -29,7 +29,7 @@ receivers: info: - '-' - '*' - 
warning: '#' + warn: '#' on_error: send - type: move if: attributes.message != nil diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json index a1f27ead72..e3f5ef2e3c 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json +++ b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json @@ -28,9 +28,7 @@ ], "assets": { "logs": { - "pipelines": [ - "file://assets/pipelines/log-parser.json" - ] + "pipelines": [] }, "dashboards": [ "file://assets/dashboards/overview.json" diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go index 292e353401..d58ccaf51a 100644 --- a/pkg/query-service/tests/integration/signoz_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_integrations_test.go @@ -141,9 +141,14 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) { break } } - require.NotNil(testAvailableIntegration) + + if testAvailableIntegration == nil { + // None of the built in integrations include a pipeline right now. 
+ return + } // Installing an integration should add its pipelines to pipelines list + require.NotNil(testAvailableIntegration) require.False(testAvailableIntegration.IsInstalled) integrationsTB.RequestQSToInstallIntegration( testAvailableIntegration.Id, map[string]interface{}{}, From da4a6266c5478b7c3af12a706305c5c69ca0e20a Mon Sep 17 00:00:00 2001 From: Vishal Sharma Date: Thu, 28 Mar 2024 21:43:41 +0530 Subject: [PATCH 37/53] feat: add events API (#4761) --- ee/query-service/app/server.go | 4 +-- ee/query-service/license/manager.go | 4 +-- .../app/clickhouseReader/reader.go | 6 ++-- pkg/query-service/app/http_handler.go | 27 +++++++++++++--- pkg/query-service/app/parser.go | 13 ++++++++ pkg/query-service/app/server.go | 4 +-- pkg/query-service/auth/auth.go | 4 +-- pkg/query-service/dao/sqlite/rbac.go | 2 +- pkg/query-service/model/queryParams.go | 5 +++ pkg/query-service/telemetry/telemetry.go | 31 ++++++++++--------- 10 files changed, 68 insertions(+), 32 deletions(-) diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index c742eef01b..dfdff14939 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -445,7 +445,7 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface data["tracesUsed"] = signozTracesUsed userEmail, err := baseauth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false) } } return data, true @@ -488,7 +488,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { if _, ok := telemetry.EnabledPaths()[path]; ok { userEmail, err := baseauth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, 
false) } } diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go index 56cb685fec..d348b6d216 100644 --- a/ee/query-service/license/manager.go +++ b/ee/query-service/license/manager.go @@ -204,7 +204,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { zap.L().Error("License validation completed with error", zap.Error(reterr)) atomic.AddUint64(&lm.failedAttempts, 1) telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, - map[string]interface{}{"err": reterr.Error()}, "") + map[string]interface{}{"err": reterr.Error()}, "", true, false) } else { zap.L().Info("License validation completed with no errors") } @@ -263,7 +263,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m userEmail, err := auth.GetEmailFromJwt(ctx) if err == nil { telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, - map[string]interface{}{"err": errResponse.Err.Error()}, userEmail) + map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false) } } }() diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index a1b12d9415..1f5b2c2eb5 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -3802,7 +3802,7 @@ func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilter if lenFilters != 0 { userEmail, err := auth.GetEmailFromJwt(ctx) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, userEmail, true, false) } } @@ -3844,7 +3844,7 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC if lenFilters != 0 { userEmail, err := auth.GetEmailFromJwt(ctx) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, 
userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, userEmail, true, false) } } @@ -3936,7 +3936,7 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs if lenFilters != 0 { userEmail, err := auth.GetEmailFromJwt(ctx) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, userEmail, true, false) } } diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index c025345cef..aab6cb3393 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -401,6 +401,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) { router.HandleFunc("/api/v1/explorer/views/{viewId}", am.EditAccess(aH.deleteSavedView)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/feedback", am.OpenAccess(aH.submitFeedback)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/events", am.ViewAccess(aH.registerEvent)).Methods(http.MethodPost) + // router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet) router.HandleFunc("/api/v1/services", am.ViewAccess(aH.getServices)).Methods(http.MethodPost) router.HandleFunc("/api/v1/services/list", am.ViewAccess(aH.getServicesList)).Methods(http.MethodGet) @@ -1502,7 +1504,22 @@ func (aH *APIHandler) submitFeedback(w http.ResponseWriter, r *http.Request) { } userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_INPRODUCT_FEEDBACK, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_INPRODUCT_FEEDBACK, data, userEmail, true, false) + } +} + +func (aH *APIHandler) registerEvent(w http.ResponseWriter, r *http.Request) { + + request, err := parseRegisterEventRequest(r) + if aH.HandleError(w, err, 
http.StatusBadRequest) { + return + } + userEmail, err := auth.GetEmailFromJwt(r.Context()) + if err == nil { + telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, userEmail, true, true) + aH.WriteJSON(w, r, map[string]string{"data": "Event Processed Successfully"}) + } else { + RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) } } @@ -1585,7 +1602,7 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) { } userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data, userEmail, true, false) } if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) { @@ -2310,7 +2327,7 @@ func (aH *APIHandler) editOrg(w http.ResponseWriter, r *http.Request) { "organizationName": req.Name, } userEmail, err := auth.GetEmailFromJwt(r.Context()) - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_ORG_SETTINGS, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_ORG_SETTINGS, data, userEmail, true, false) aH.WriteJSON(w, r, map[string]string{"data": "org updated successfully"}) } @@ -3525,7 +3542,7 @@ func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParam "metricsUsed": signozMetricsUsed, "dashboardId": dashboardID, "widgetId": widgetID, - }, userEmail) + }, userEmail, true, false) } if alertMatched { var alertID string @@ -3547,7 +3564,7 @@ func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParam "logsUsed": signozLogsUsed, "metricsUsed": signozMetricsUsed, "alertId": alertID, - }, userEmail) + }, userEmail, true, false) } } } diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go index 9a9f388ab5..670f5eff25 100644 --- a/pkg/query-service/app/parser.go +++ b/pkg/query-service/app/parser.go 
@@ -66,6 +66,19 @@ func parseGetTopOperationsRequest(r *http.Request) (*model.GetTopOperationsParam return postData, nil } +func parseRegisterEventRequest(r *http.Request) (*model.RegisterEventParams, error) { + var postData *model.RegisterEventParams + err := json.NewDecoder(r.Body).Decode(&postData) + if err != nil { + return nil, err + } + if postData.EventName == "" { + return nil, errors.New("eventName param missing in query") + } + + return postData, nil +} + func parseMetricsTime(s string) (time.Time, error) { if t, err := strconv.ParseFloat(s, 64); err == nil { s, ns := math.Modf(t) diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 549e74e976..cb34b048ea 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -452,7 +452,7 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface data["tracesUsed"] = signozTracesUsed userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false) } } return data, true @@ -496,7 +496,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { if _, ok := telemetry.EnabledPaths()[path]; ok { userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, false) } } // } diff --git a/pkg/query-service/auth/auth.go b/pkg/query-service/auth/auth.go index 0a90c8c730..f0d220df81 100644 --- a/pkg/query-service/auth/auth.go +++ b/pkg/query-service/auth/auth.go @@ -89,7 +89,7 @@ func Invite(ctx context.Context, req *model.InviteRequest) (*model.InviteRespons telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER_INVITATION_SENT, 
map[string]interface{}{ "invited user email": req.Email, - }, au.Email) + }, au.Email, true, false) // send email if SMTP is enabled if os.Getenv("SMTP_ENABLED") == "true" && req.FrontendBaseUrl != "" { @@ -404,7 +404,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b } telemetry.GetInstance().IdentifyUser(user) - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER_INVITATION_ACCEPTED, nil, req.Email) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER_INVITATION_ACCEPTED, nil, req.Email, true, false) return user, nil } diff --git a/pkg/query-service/dao/sqlite/rbac.go b/pkg/query-service/dao/sqlite/rbac.go index c28a2b675c..aba9beb065 100644 --- a/pkg/query-service/dao/sqlite/rbac.go +++ b/pkg/query-service/dao/sqlite/rbac.go @@ -203,7 +203,7 @@ func (mds *ModelDaoSqlite) CreateUser(ctx context.Context, } telemetry.GetInstance().IdentifyUser(user) - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER, data, user.Email) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER, data, user.Email, true, false) return user, nil } diff --git a/pkg/query-service/model/queryParams.go b/pkg/query-service/model/queryParams.go index 754de3eae0..11020a0abf 100644 --- a/pkg/query-service/model/queryParams.go +++ b/pkg/query-service/model/queryParams.go @@ -164,6 +164,11 @@ type GetTopOperationsParams struct { Limit int `json:"limit"` } +type RegisterEventParams struct { + EventName string `json:"eventName"` + Attributes map[string]interface{} `json:"attributes"` +} + type GetUsageParams struct { StartTime string EndTime string diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go index 4c23cbd092..03b21855ba 100644 --- a/pkg/query-service/telemetry/telemetry.go +++ b/pkg/query-service/telemetry/telemetry.go @@ -197,7 +197,7 @@ func createTelemetry() { data := map[string]interface{}{} telemetry.SetTelemetryEnabled(constants.IsTelemetryEnabled()) - 
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "") + telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "", true, false) ticker := time.NewTicker(HEART_BEAT_DURATION) activeUserTicker := time.NewTicker(ACTIVE_USER_DURATION) @@ -231,7 +231,12 @@ func createTelemetry() { if (telemetry.activeUser["traces"] != 0) || (telemetry.activeUser["metrics"] != 0) || (telemetry.activeUser["logs"] != 0) { telemetry.activeUser["any"] = 1 } - telemetry.SendEvent(TELEMETRY_EVENT_ACTIVE_USER, map[string]interface{}{"traces": telemetry.activeUser["traces"], "metrics": telemetry.activeUser["metrics"], "logs": telemetry.activeUser["logs"], "any": telemetry.activeUser["any"]}, "") + telemetry.SendEvent(TELEMETRY_EVENT_ACTIVE_USER, map[string]interface{}{ + "traces": telemetry.activeUser["traces"], + "metrics": telemetry.activeUser["metrics"], + "logs": telemetry.activeUser["logs"], + "any": telemetry.activeUser["any"]}, + "", true, false) telemetry.activeUser = map[string]int8{"traces": 0, "metrics": 0, "logs": 0, "any": 0} case <-ticker.C: @@ -239,15 +244,15 @@ func createTelemetry() { tagsInfo, _ := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background(), HEART_BEAT_DURATION) if len(tagsInfo.Env) != 0 { - telemetry.SendEvent(TELEMETRY_EVENT_ENVIRONMENT, map[string]interface{}{"value": tagsInfo.Env}, "") + telemetry.SendEvent(TELEMETRY_EVENT_ENVIRONMENT, map[string]interface{}{"value": tagsInfo.Env}, "", true, false) } for language, _ := range tagsInfo.Languages { - telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": language}, "") + telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": language}, "", true, false) } for service, _ := range tagsInfo.Services { - telemetry.SendEvent(TELEMETRY_EVENT_SERVICE, map[string]interface{}{"serviceName": service}, "") + telemetry.SendEvent(TELEMETRY_EVENT_SERVICE, map[string]interface{}{"serviceName": service}, "", true, false) } totalSpans, _ := 
telemetry.reader.GetTotalSpans(context.Background()) @@ -280,7 +285,7 @@ func createTelemetry() { for key, value := range tsInfo { data[key] = value } - telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "") + telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "", true, false) alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background()) if err == nil { @@ -307,18 +312,18 @@ func createTelemetry() { } // send event only if there are dashboards or alerts or channels if dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0 { - telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "") + telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "", true, false) } } } } } if err != nil { - telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "") + telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "", true, false) } getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background()) - telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval, "") + telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval, "", true, false) } } }() @@ -426,7 +431,7 @@ func (a *Telemetry) checkEvents(event string) bool { return sendEvent } -func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEmail string, opts ...bool) { +func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEmail string, rateLimitFlag bool, viaEventsAPI bool) { // ignore telemetry for default user if userEmail == DEFAULT_CLOUD_EMAIL || a.GetUserEmail() == DEFAULT_CLOUD_EMAIL { @@ -436,10 +441,6 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma if userEmail != "" { a.SetUserEmail(userEmail) } - 
rateLimitFlag := true - if len(opts) > 0 { - rateLimitFlag = opts[0] - } if !a.isTelemetryEnabled() { return @@ -485,7 +486,7 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma // check if event is part of SAAS_EVENTS_LIST _, isSaaSEvent := SAAS_EVENTS_LIST[event] - if a.saasOperator != nil && a.GetUserEmail() != "" && isSaaSEvent { + if a.saasOperator != nil && a.GetUserEmail() != "" && (isSaaSEvent || viaEventsAPI) { a.saasOperator.Enqueue(analytics.Track{ Event: event, UserId: a.GetUserEmail(), From e6e377beff550b5505a708cedd8688055f15b0cf Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 29 Mar 2024 11:08:33 +0530 Subject: [PATCH 38/53] fix: billing graph page crash (#4764) --- .../BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx index be77ebba95..a11488595f 100644 --- a/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx +++ b/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx @@ -46,7 +46,7 @@ const calculateStartEndTime = ( ): { startTime: number; endTime: number } => { const timestamps: number[] = []; data?.details?.breakdown?.forEach((breakdown: any) => { - breakdown?.dayWiseBreakdown?.breakdown.forEach((entry: any) => { + breakdown?.dayWiseBreakdown?.breakdown?.forEach((entry: any) => { timestamps.push(entry?.timestamp); }); }); From 7c2f5352d29980fed6514b06509545d4c15e68b4 Mon Sep 17 00:00:00 2001 From: Rajat Dabade Date: Fri, 29 Mar 2024 14:41:16 +0530 Subject: [PATCH 39/53] [Refactor]: Table Grid Formula issue. 
(#4758) * refactor: change the logic to match data from another query * refactor: updated logic * refactor: clean up * refactor: updated case to handle formula * chore: nit * refactor: isEqual instead of nested loops * chore: added comments * refactor: updated logic * refactor: clean up * refactor: updated case to handle formula * chore: nit * refactor: isEqual instead of nested loops --- .../src/lib/query/createTableColumnsFromQuery.ts | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/frontend/src/lib/query/createTableColumnsFromQuery.ts b/frontend/src/lib/query/createTableColumnsFromQuery.ts index d62e0a763f..1a7afbbc22 100644 --- a/frontend/src/lib/query/createTableColumnsFromQuery.ts +++ b/frontend/src/lib/query/createTableColumnsFromQuery.ts @@ -7,7 +7,7 @@ import { import { FORMULA_REGEXP } from 'constants/regExp'; import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config'; import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces'; -import { isObject } from 'lodash-es'; +import { isEqual, isObject } from 'lodash-es'; import { ReactNode } from 'react'; import { IBuilderFormula, @@ -258,12 +258,7 @@ const findSeriaValueFromAnotherQuery = ( const localLabelEntries = Object.entries(seria.labels); if (localLabelEntries.length !== labelEntries.length) return; - const isExistLabels = localLabelEntries.find(([key, value]) => - labelEntries.find( - ([currentKey, currentValue]) => - currentKey === key && currentValue === value, - ), - ); + const isExistLabels = isEqual(localLabelEntries, labelEntries); if (isExistLabels) { value = seria; @@ -304,10 +299,9 @@ const fillRestAggregationData = ( if (targetSeria) { const isEqual = isEqualQueriesByLabel(equalQueriesByLabels, column.field); if (!isEqual) { + // This line is crucial. 
It ensures that no additional rows are added to the table for similar labels across all formulas here is how this check is applied: signoz/frontend/src/lib/query/createTableColumnsFromQuery.ts line number 370 equalQueriesByLabels.push(column.field); } - - column.data.push(parseFloat(targetSeria.values[0].value).toFixed(2)); } else { column.data.push('N/A'); } @@ -357,6 +351,7 @@ const fillDataFromSeries = ( } if (column.type !== 'field' && column.field !== queryName) { + // This code is executed only when there are multiple formulas. It checks if there are similar labels present in other formulas and, if found, adds them to the corresponding column data in the table. fillRestAggregationData( column, queryTableData, From 6eced60bf5354f9cdae5ae9e30efded3c8f48a7b Mon Sep 17 00:00:00 2001 From: Yunus M Date: Fri, 29 Mar 2024 14:53:48 +0530 Subject: [PATCH 40/53] =?UTF-8?q?feat:=20update=20time=20range=20selection?= =?UTF-8?q?=20flows=20to=20handle=20relative=20and=20absolu=E2=80=A6=20(#4?= =?UTF-8?q?742)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: update time range selection flows to handle relative and absolute times * fix: lint error * fix: lint error * feat: update logic to handle custom relative times on load and standardize relative time formats * fix: type issue * fix: handle light mode and on custom time range select * chore: update alert frequency corresponding times * chore: update copy URL * feat: update styles --- .../CustomTimePicker/CustomTimePicker.tsx | 33 ++- frontend/src/constants/query.ts | 1 + .../FormAlertRules/ChartPreview/index.tsx | 9 +- .../src/container/FormAlertRules/utils.ts | 24 +- .../GridCardLayout/GridCard/index.tsx | 8 +- .../src/container/LogsExplorerChart/index.tsx | 8 +- .../src/container/LogsSearchFilter/utils.ts | 7 +- .../WidgetGraph/WidgetGraphs.tsx | 8 +- .../src/container/ServiceApplication/types.ts | 7 +- .../container/TopNav/AutoRefresh/config.ts | 4 +- 
.../container/TopNav/AutoRefreshV2/config.ts | 4 +- .../TopNav/DateTimeSelection/config.ts | 81 ++++-- .../TopNav/DateTimeSelection/index.tsx | 10 +- .../DateTimeSelectionV2.styles.scss | 66 ++++- .../TopNav/DateTimeSelectionV2/config.ts | 152 ++++++++---- .../TopNav/DateTimeSelectionV2/index.tsx | 234 +++++++++++++++--- frontend/src/hooks/logs/useCopyLogLink.ts | 24 +- frontend/src/hooks/useQueryService.ts | 7 +- frontend/src/lib/dashboard/getQueryResults.ts | 7 +- frontend/src/lib/getMinMax.ts | 76 ++++-- frontend/src/lib/getStartEndRangeTime.ts | 7 +- frontend/src/store/actions/global.ts | 7 +- frontend/src/store/actions/trace/util.ts | 5 +- frontend/src/types/actions/globalTime.ts | 7 +- frontend/src/types/reducer/globalTime.ts | 7 +- 25 files changed, 636 insertions(+), 167 deletions(-) diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx index a29f0180b4..114db17924 100644 --- a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx +++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx @@ -5,13 +5,14 @@ import './CustomTimePicker.styles.scss'; import { Input, Popover, Tooltip, Typography } from 'antd'; import cx from 'classnames'; import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; -import { Options } from 'container/TopNav/DateTimeSelection/config'; import { FixedDurationSuggestionOptions, + Options, RelativeDurationSuggestionOptions, } from 'container/TopNav/DateTimeSelectionV2/config'; import dayjs from 'dayjs'; -import { defaultTo, noop } from 'lodash-es'; +import { isValidTimeFormat } from 'lib/getMinMax'; +import { defaultTo, isFunction, noop } from 'lodash-es'; import debounce from 'lodash-es/debounce'; import { CheckCircle, ChevronDown, Clock } from 'lucide-react'; import { @@ -33,7 +34,14 @@ interface CustomTimePickerProps { onError: (value: boolean) => void; selectedValue: string; selectedTime: string; - 
onValidCustomDateChange: ([t1, t2]: any[]) => void; + onValidCustomDateChange: ({ + time: [t1, t2], + timeStr, + }: { + time: [dayjs.Dayjs | null, dayjs.Dayjs | null]; + timeStr: string; + }) => void; + onCustomTimeStatusUpdate?: (isValid: boolean) => void; open: boolean; setOpen: Dispatch>; items: any[]; @@ -53,6 +61,7 @@ function CustomTimePicker({ open, setOpen, onValidCustomDateChange, + onCustomTimeStatusUpdate, newPopover, customDateTimeVisible, setCustomDTPickerVisible, @@ -85,6 +94,7 @@ function CustomTimePicker({ return Options[index].label; } } + for ( let index = 0; index < RelativeDurationSuggestionOptions.length; @@ -94,12 +104,17 @@ function CustomTimePicker({ return RelativeDurationSuggestionOptions[index].label; } } + for (let index = 0; index < FixedDurationSuggestionOptions.length; index++) { if (FixedDurationSuggestionOptions[index].value === selectedTime) { return FixedDurationSuggestionOptions[index].label; } } + if (isValidTimeFormat(selectedTime)) { + return selectedTime; + } + return ''; }; @@ -161,13 +176,22 @@ function CustomTimePicker({ setInputStatus('error'); onError(true); setInputErrorMessage('Please enter time less than 6 months'); + if (isFunction(onCustomTimeStatusUpdate)) { + onCustomTimeStatusUpdate(true); + } } else { - onValidCustomDateChange([minTime, currentTime]); + onValidCustomDateChange({ + time: [minTime, currentTime], + timeStr: inputValue, + }); } } else { setInputStatus('error'); onError(true); setInputErrorMessage(null); + if (isFunction(onCustomTimeStatusUpdate)) { + onCustomTimeStatusUpdate(false); + } } }, 300); @@ -320,4 +344,5 @@ CustomTimePicker.defaultProps = { setCustomDTPickerVisible: noop, onCustomDateHandler: noop, handleGoLive: noop, + onCustomTimeStatusUpdate: noop, }; diff --git a/frontend/src/constants/query.ts b/frontend/src/constants/query.ts index 31ec5fcd20..6b70ae9786 100644 --- a/frontend/src/constants/query.ts +++ b/frontend/src/constants/query.ts @@ -29,4 +29,5 @@ export enum QueryParams { 
expandedWidgetId = 'expandedWidgetId', integration = 'integration', pagination = 'pagination', + relativeTime = 'relativeTime', } diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index 6e8c167c29..2110216cf1 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -7,7 +7,10 @@ import GridPanelSwitch from 'container/GridPanelSwitch'; import { getFormatNameByOptionId } from 'container/NewWidget/RightContainer/alertFomatCategories'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useResizeObserver } from 'hooks/useDimensions'; @@ -39,7 +42,7 @@ export interface ChartPreviewProps { query: Query | null; graphType?: PANEL_TYPES; selectedTime?: timePreferenceType; - selectedInterval?: Time | TimeV2; + selectedInterval?: Time | TimeV2 | CustomTimeType; headline?: JSX.Element; alertDef?: AlertDef; userQueryKey?: string; @@ -53,7 +56,7 @@ function ChartPreview({ query, graphType = PANEL_TYPES.TIME_SERIES, selectedTime = 'GLOBAL_TIME', - selectedInterval = '5min', + selectedInterval = '5m', headline, userQueryKey, allowSelectedIntervalForStepGen = false, diff --git a/frontend/src/container/FormAlertRules/utils.ts b/frontend/src/container/FormAlertRules/utils.ts index 0d41ac5197..3015f0c426 100644 --- a/frontend/src/container/FormAlertRules/utils.ts +++ b/frontend/src/container/FormAlertRules/utils.ts @@ -12,22 +12,30 @@ import { // toChartInterval converts eval window to chart selection time 
interval export const toChartInterval = (evalWindow: string | undefined): Time => { switch (evalWindow) { + case '1m0s': + return '1m'; case '5m0s': - return '5min'; + return '5m'; case '10m0s': - return '10min'; + return '10m'; case '15m0s': - return '15min'; + return '15m'; case '30m0s': - return '30min'; + return '30m'; case '1h0m0s': - return '1hr'; + return '1h'; + case '3h0m0s': + return '3h'; case '4h0m0s': - return '4hr'; + return '4h'; + case '6h0m0s': + return '6h'; + case '12h0m0s': + return '12h'; case '24h0m0s': - return '1day'; + return '1d'; default: - return '5min'; + return '5m'; } }; diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx index 33df881c80..1633f0b947 100644 --- a/frontend/src/container/GridCardLayout/GridCard/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx @@ -1,6 +1,7 @@ import { DEFAULT_ENTITY_VERSION } from 'constants/app'; import { QueryParams } from 'constants/query'; import { PANEL_TYPES } from 'constants/queryBuilder'; +import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useStepInterval } from 'hooks/queryBuilder/useStepInterval'; import { useIsDarkMode } from 'hooks/useDarkMode'; @@ -81,8 +82,13 @@ function GridCardGraph({ const searchParams = new URLSearchParams(window.location.search); const startTime = searchParams.get(QueryParams.startTime); const endTime = searchParams.get(QueryParams.endTime); + const relativeTime = searchParams.get( + QueryParams.relativeTime, + ) as CustomTimeType; - if (startTime && endTime && startTime !== endTime) { + if (relativeTime) { + dispatch(UpdateTimeInterval(relativeTime)); + } else if (startTime && endTime && startTime !== endTime) { dispatch( UpdateTimeInterval('custom', [ parseInt(getTimeString(startTime), 10), diff --git a/frontend/src/container/LogsExplorerChart/index.tsx 
b/frontend/src/container/LogsExplorerChart/index.tsx index 2f909bea25..7f4d648529 100644 --- a/frontend/src/container/LogsExplorerChart/index.tsx +++ b/frontend/src/container/LogsExplorerChart/index.tsx @@ -2,6 +2,7 @@ import Graph from 'components/Graph'; import Spinner from 'components/Spinner'; import { QueryParams } from 'constants/query'; import { themeColors } from 'constants/theme'; +import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config'; import useUrlQuery from 'hooks/useUrlQuery'; import getChartData, { GetChartDataProps } from 'lib/getChartData'; import GetMinMax from 'lib/getMinMax'; @@ -65,8 +66,13 @@ function LogsExplorerChart({ const searchParams = new URLSearchParams(window.location.search); const startTime = searchParams.get(QueryParams.startTime); const endTime = searchParams.get(QueryParams.endTime); + const relativeTime = searchParams.get( + QueryParams.relativeTime, + ) as CustomTimeType; - if (startTime && endTime && startTime !== endTime) { + if (relativeTime) { + dispatch(UpdateTimeInterval(relativeTime)); + } else if (startTime && endTime && startTime !== endTime) { dispatch( UpdateTimeInterval('custom', [ parseInt(getTimeString(startTime), 10), diff --git a/frontend/src/container/LogsSearchFilter/utils.ts b/frontend/src/container/LogsSearchFilter/utils.ts index 390a3c14b0..91f47e6ecc 100644 --- a/frontend/src/container/LogsSearchFilter/utils.ts +++ b/frontend/src/container/LogsSearchFilter/utils.ts @@ -1,9 +1,12 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { GetMinMaxPayload } from 'lib/getMinMax'; export const getGlobalTime = ( - selectedTime: Time | TimeV2, + selectedTime: Time | TimeV2 | CustomTimeType, globalTime: GetMinMaxPayload, ): GetMinMaxPayload | undefined => { if (selectedTime === 'custom') { diff 
--git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx index aa7553af53..7245d960f9 100644 --- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx @@ -3,6 +3,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import GridPanelSwitch from 'container/GridPanelSwitch'; import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types'; import { timePreferance } from 'container/NewWidget/RightContainer/timeItems'; +import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useResizeObserver } from 'hooks/useDimensions'; @@ -97,8 +98,13 @@ function WidgetGraph({ const searchParams = new URLSearchParams(window.location.search); const startTime = searchParams.get(QueryParams.startTime); const endTime = searchParams.get(QueryParams.endTime); + const relativeTime = searchParams.get( + QueryParams.relativeTime, + ) as CustomTimeType; - if (startTime && endTime && startTime !== endTime) { + if (relativeTime) { + dispatch(UpdateTimeInterval(relativeTime)); + } else if (startTime && endTime && startTime !== endTime) { dispatch( UpdateTimeInterval('custom', [ parseInt(getTimeString(startTime), 10), diff --git a/frontend/src/container/ServiceApplication/types.ts b/frontend/src/container/ServiceApplication/types.ts index 0717538cb8..4733b3053a 100644 --- a/frontend/src/container/ServiceApplication/types.ts +++ b/frontend/src/container/ServiceApplication/types.ts @@ -1,6 +1,9 @@ import { ServiceDataProps } from 'api/metrics/getTopLevelOperations'; import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + 
CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; import { UseQueryResult } from 'react-query'; import { SuccessResponse } from 'types/api'; @@ -25,7 +28,7 @@ export interface GetQueryRangeRequestDataProps { topLevelOperations: [keyof ServiceDataProps, string[]][]; maxTime: number; minTime: number; - globalSelectedInterval: Time | TimeV2; + globalSelectedInterval: Time | TimeV2 | CustomTimeType; } export interface GetServiceListFromQueryProps { diff --git a/frontend/src/container/TopNav/AutoRefresh/config.ts b/frontend/src/container/TopNav/AutoRefresh/config.ts index 64aaca6c64..99c9db8ca5 100644 --- a/frontend/src/container/TopNav/AutoRefresh/config.ts +++ b/frontend/src/container/TopNav/AutoRefresh/config.ts @@ -1,7 +1,7 @@ import GetMinMax, { GetMinMaxPayload } from 'lib/getMinMax'; import { Time } from '../DateTimeSelection/config'; -import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; +import { CustomTimeType, Time as TimeV2 } from '../DateTimeSelectionV2/config'; export const options: IOptions[] = [ { @@ -68,7 +68,7 @@ export interface IOptions { } export const getMinMax = ( - selectedTime: Time | TimeV2, + selectedTime: Time | TimeV2 | CustomTimeType, minTime: number, maxTime: number, ): GetMinMaxPayload => diff --git a/frontend/src/container/TopNav/AutoRefreshV2/config.ts b/frontend/src/container/TopNav/AutoRefreshV2/config.ts index a84f932fbc..8baae51d01 100644 --- a/frontend/src/container/TopNav/AutoRefreshV2/config.ts +++ b/frontend/src/container/TopNav/AutoRefreshV2/config.ts @@ -1,7 +1,7 @@ import GetMinMax, { GetMinMaxPayload } from 'lib/getMinMax'; import { Time } from '../DateTimeSelection/config'; -import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; +import { CustomTimeType, Time as TimeV2 } from '../DateTimeSelectionV2/config'; export const options: IOptions[] = [ { @@ -68,7 +68,7 @@ export interface IOptions { } 
export const getMinMax = ( - selectedTime: Time | TimeV2, + selectedTime: Time | TimeV2 | CustomTimeType, minTime: number, maxTime: number, ): GetMinMaxPayload => diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts index 3618686c95..102fe00c43 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/config.ts +++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts @@ -1,16 +1,18 @@ import ROUTES from 'constants/routes'; -type FiveMin = '5min'; -type TenMin = '10min'; -type FifteenMin = '15min'; -type ThirtyMin = '30min'; -type OneMin = '1min'; -type SixHour = '6hr'; -type OneHour = '1hr'; -type FourHour = '4hr'; -type OneDay = '1day'; -type ThreeDay = '3days'; -type OneWeek = '1week'; +type FiveMin = '5m'; +type TenMin = '10m'; +type FifteenMin = '15m'; +type ThirtyMin = '30m'; +type OneMin = '1m'; +type SixHour = '6h'; +type OneHour = '1h'; +type FourHour = '4h'; +type ThreeHour = '3h'; +type TwelveHour = '12h'; +type OneDay = '1d'; +type ThreeDay = '3d'; +type OneWeek = '1w'; type Custom = 'custom'; export type Time = @@ -22,37 +24,62 @@ export type Time = | FourHour | SixHour | OneHour + | ThreeHour | Custom | OneWeek | OneDay + | TwelveHour | ThreeDay; export const Options: Option[] = [ - { value: '5min', label: 'Last 5 min' }, - { value: '15min', label: 'Last 15 min' }, - { value: '30min', label: 'Last 30 min' }, - { value: '1hr', label: 'Last 1 hour' }, - { value: '6hr', label: 'Last 6 hour' }, - { value: '1day', label: 'Last 1 day' }, - { value: '3days', label: 'Last 3 days' }, - { value: '1week', label: 'Last 1 week' }, + { value: '5m', label: 'Last 5 min' }, + { value: '15m', label: 'Last 15 min' }, + { value: '30m', label: 'Last 30 min' }, + { value: '1h', label: 'Last 1 hour' }, + { value: '6h', label: 'Last 6 hour' }, + { value: '1d', label: 'Last 1 day' }, + { value: '3d', label: 'Last 3 days' }, + { value: '1w', label: 'Last 1 week' }, { value: 'custom', label: 
'Custom' }, ]; +type TimeFrame = { + '5min': string; + '15min': string; + '30min': string; + '1hr': string; + '6hr': string; + '1day': string; + '3days': string; + '1week': string; + [key: string]: string; // Index signature to allow any string as index +}; + +export const RelativeTimeMap: TimeFrame = { + '5min': '5m', + '15min': '15m', + '30min': '30m', + '1hr': '1h', + '6hr': '6h', + '1day': '1d', + '3days': '3d', + '1week': '1w', +}; + export interface Option { value: Time; label: string; } export const RelativeDurationOptions: Option[] = [ - { value: '5min', label: 'Last 5 min' }, - { value: '15min', label: 'Last 15 min' }, - { value: '30min', label: 'Last 30 min' }, - { value: '1hr', label: 'Last 1 hour' }, - { value: '6hr', label: 'Last 6 hour' }, - { value: '1day', label: 'Last 1 day' }, - { value: '3days', label: 'Last 3 days' }, - { value: '1week', label: 'Last 1 week' }, + { value: '5m', label: 'Last 5 min' }, + { value: '15m', label: 'Last 15 min' }, + { value: '30m', label: 'Last 30 min' }, + { value: '1h', label: 'Last 1 hour' }, + { value: '6h', label: 'Last 6 hour' }, + { value: '1d', label: 'Last 1 day' }, + { value: '3d', label: 'Last 3 days' }, + { value: '1w', label: 'Last 1 week' }, ]; export const getDefaultOption = (route: string): Time => { diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx index 614e977a12..3d023959f2 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx +++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx @@ -28,7 +28,7 @@ import { GlobalReducer } from 'types/reducer/globalTime'; import AutoRefresh from '../AutoRefresh'; import CustomDateTimeModal, { DateTimeRangeType } from '../CustomDateTimeModal'; -import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; +import { CustomTimeType, Time as TimeV2 } from '../DateTimeSelectionV2/config'; import { getDefaultOption, getOptions, @@ -122,7 +122,7 @@ function 
DateTimeSelection({ const getInputLabel = ( startTime?: Dayjs, endTime?: Dayjs, - timeInterval: Time | TimeV2 = '15min', + timeInterval: Time | TimeV2 | CustomTimeType = '15m', ): string | Time => { if (startTime && endTime && timeInterval === 'custom') { const format = 'YYYY/MM/DD HH:mm'; @@ -225,7 +225,7 @@ function DateTimeSelection({ [location.pathname], ); - const onSelectHandler = (value: Time | TimeV2): void => { + const onSelectHandler = (value: Time | TimeV2 | CustomTimeType): void => { if (value !== 'custom') { updateTimeInterval(value); updateLocalStorageForRoutes(value); @@ -358,7 +358,7 @@ function DateTimeSelection({ }} selectedTime={selectedTime} onValidCustomDateChange={(dateTime): void => - onCustomDateHandler(dateTime as DateTimeRangeType) + onCustomDateHandler(dateTime.time as DateTimeRangeType) } selectedValue={getInputLabel( dayjs(minTime / 1000000), @@ -406,7 +406,7 @@ function DateTimeSelection({ interface DispatchProps { updateTimeInterval: ( - interval: Time | TimeV2, + interval: Time | TimeV2 | CustomTimeType, dateTimeRange?: [number, number], ) => (dispatch: Dispatch) => void; globalTimeLoading: () => void; diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss index bd4cc3cdb1..22efca5009 100644 --- a/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss @@ -54,9 +54,61 @@ } } } + + .share-link-btn { + height: 34px; + } + + .shareable-link-popover { + margin-left: 8px; + } } -.date-time-root { +.share-modal-content { + display: flex; + flex-direction: column; + gap: 8px; + padding: 16px; + width: 420px; + + .absolute-relative-time-toggler-container { + display: flex; + gap: 8px; + align-items: center; + } + + .absolute-relative-time-toggler { + display: flex; + gap: 4px; + align-items: center; + } + + 
.absolute-relative-time-error { + font-size: 12px; + color: var(--bg-amber-600); + } + + .share-link { + display: flex; + align-items: center; + + .share-url { + flex: 1; + border: 1px solid var(--bg-slate-400); + border-radius: 2px; + background: var(--bg-ink-300); + height: 32px; + padding: 6px 8px; + } + + .copy-url-btn { + width: 32px; + } + } +} + +.date-time-root, +.shareable-link-popover-root { .ant-popover-inner { border-radius: 4px !important; border: 1px solid var(--bg-slate-400); @@ -185,7 +237,8 @@ } } - .date-time-root { + .date-time-root, + .shareable-link-popover-root { .ant-popover-inner { border: 1px solid var(--bg-vanilla-400); background: var(--bg-vanilla-100) !important; @@ -234,4 +287,13 @@ } } } + + .share-modal-content { + .share-link { + .share-url { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + } + } + } } diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts index 6231505580..f92beb6b8d 100644 --- a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts @@ -1,24 +1,24 @@ /* eslint-disable sonarjs/no-duplicate-string */ import ROUTES from 'constants/routes'; -type FiveMin = '5min'; -type TenMin = '10min'; -type FifteenMin = '15min'; -type ThirtyMin = '30min'; -type FortyFiveMin = '45min'; -type OneMin = '1min'; -type ThreeHour = '3hr'; -type SixHour = '6hr'; -type OneHour = '1hr'; -type FourHour = '4hr'; -type TwelveHour = '12hr'; -type OneDay = '1day'; -type ThreeDay = '3days'; -type FourDay = '4days'; -type TenDay = '10days'; -type OneWeek = '1week'; -type TwoWeek = '2weeks'; -type SixWeek = '6weeks'; +type FiveMin = '5m'; +type TenMin = '10m'; +type FifteenMin = '15m'; +type ThirtyMin = '30m'; +type FortyFiveMin = '45m'; +type OneMin = '1m'; +type ThreeHour = '3h'; +type SixHour = '6h'; +type OneHour = '1h'; +type FourHour = '4h'; +type TwelveHour = 
'12h'; +type OneDay = '1d'; +type ThreeDay = '3d'; +type FourDay = '4d'; +type TenDay = '10d'; +type OneWeek = '1w'; +type TwoWeek = '2w'; +type SixWeek = '6w'; type TwoMonths = '2months'; type Custom = 'custom'; @@ -44,15 +44,19 @@ export type Time = | TwoWeek | TwoMonths; +export type TimeUnit = 'm' | 'h' | 'd' | 'w'; + +export type CustomTimeType = `${string}${TimeUnit}`; + export const Options: Option[] = [ - { value: '5min', label: 'Last 5 minutes' }, - { value: '15min', label: 'Last 15 minutes' }, - { value: '30min', label: 'Last 30 minutes' }, - { value: '1hr', label: 'Last 1 hour' }, - { value: '6hr', label: 'Last 6 hours' }, - { value: '1day', label: 'Last 1 day' }, - { value: '3days', label: 'Last 3 days' }, - { value: '1week', label: 'Last 1 week' }, + { value: '5m', label: 'Last 5 minutes' }, + { value: '15m', label: 'Last 15 minutes' }, + { value: '30m', label: 'Last 30 minutes' }, + { value: '1h', label: 'Last 1 hour' }, + { value: '6h', label: 'Last 6 hours' }, + { value: '1d', label: 'Last 1 day' }, + { value: '3d', label: 'Last 3 days' }, + { value: '1w', label: 'Last 1 week' }, { value: 'custom', label: 'Custom' }, ]; @@ -61,36 +65,92 @@ export interface Option { label: string; } +export const OLD_RELATIVE_TIME_VALUES = [ + '1min', + '10min', + '15min', + '1hr', + '30min', + '45min', + '5min', + '1day', + '3days', + '4days', + '10days', + '1week', + '2weeks', + '6weeks', + '3hr', + '4hr', + '6hr', + '12hr', +]; + export const RelativeDurationOptions: Option[] = [ - { value: '5min', label: 'Last 5 minutes' }, - { value: '15min', label: 'Last 15 minutes' }, - { value: '30min', label: 'Last 30 minutes' }, - { value: '1hr', label: 'Last 1 hour' }, - { value: '6hr', label: 'Last 6 hour' }, - { value: '1day', label: 'Last 1 day' }, - { value: '3days', label: 'Last 3 days' }, - { value: '1week', label: 'Last 1 week' }, + { value: '5m', label: 'Last 5 minutes' }, + { value: '15m', label: 'Last 15 minutes' }, + { value: '30m', label: 'Last 30 minutes' }, + 
{ value: '1h', label: 'Last 1 hour' }, + { value: '6h', label: 'Last 6 hour' }, + { value: '1d', label: 'Last 1 day' }, + { value: '3d', label: 'Last 3 days' }, + { value: '1w', label: 'Last 1 week' }, ]; export const RelativeDurationSuggestionOptions: Option[] = [ - { value: '3hr', label: '3h' }, - { value: '4days', label: '4d' }, - { value: '6weeks', label: '6w' }, - { value: '12hr', label: '12 hours' }, - { value: '10days', label: '10d' }, - { value: '2weeks', label: '2 weeks' }, + { value: '3h', label: 'Last 3 hours' }, + { value: '4d', label: 'Last 4 days' }, + { value: '6w', label: 'Last 6 weeks' }, + { value: '12h', label: 'Last 12 hours' }, + { value: '10d', label: 'Last 10 days' }, + { value: '2w', label: 'Last 2 weeks' }, { value: '2months', label: 'Last 2 months' }, - { value: '1day', label: 'today' }, + { value: '1d', label: 'today' }, ]; export const FixedDurationSuggestionOptions: Option[] = [ - { value: '45min', label: '45m' }, - { value: '12hr', label: '12 hours' }, - { value: '10days', label: '10d' }, - { value: '2weeks', label: '2 weeks' }, + { value: '45m', label: 'Last 45 mins' }, + { value: '12h', label: 'Last 12 hours' }, + { value: '10d', label: 'Last 10 days' }, + { value: '2w', label: 'Last 2 weeks' }, { value: '2months', label: 'Last 2 months' }, - { value: '1day', label: 'today' }, + { value: '1d', label: 'today' }, ]; +export const convertOldTimeToNewValidCustomTimeFormat = ( + time: string, +): CustomTimeType => { + const regex = /^(\d+)([a-zA-Z]+)/; + const match = regex.exec(time); + + if (match) { + let unit = 'm'; + + switch (match[2]) { + case 'min': + unit = 'm'; + break; + case 'hr': + unit = 'h'; + break; + case 'day': + case 'days': + unit = 'd'; + break; + case 'week': + case 'weeks': + unit = 'w'; + break; + + default: + break; + } + + return `${match[1]}${unit}` as CustomTimeType; + } + + return '30m'; +}; + export const getDefaultOption = (route: string): Time => { if (route === ROUTES.SERVICE_MAP) { return 
RelativeDurationOptions[2].value; diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx index 3ef5125ad7..3fa36610a0 100644 --- a/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx @@ -1,7 +1,8 @@ import './DateTimeSelectionV2.styles.scss'; import { SyncOutlined } from '@ant-design/icons'; -import { Button } from 'antd'; +import { Color } from '@signozhq/design-tokens'; +import { Button, Popover, Switch, Typography } from 'antd'; import getLocalStorageKey from 'api/browser/localstorage/get'; import setLocalStorageKey from 'api/browser/localstorage/set'; import CustomTimePicker from 'components/CustomTimePicker/CustomTimePicker'; @@ -26,10 +27,12 @@ import GetMinMax from 'lib/getMinMax'; import getTimeString from 'lib/getTimeString'; import history from 'lib/history'; import { isObject } from 'lodash-es'; +import { Check, Copy, Info, Send } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useQueryClient } from 'react-query'; import { connect, useSelector } from 'react-redux'; import { RouteComponentProps, withRouter } from 'react-router-dom'; +import { useCopyToClipboard } from 'react-use'; import { bindActionCreators, Dispatch } from 'redux'; import { ThunkDispatch } from 'redux-thunk'; import { GlobalTimeLoading, UpdateTimeInterval } from 'store/actions'; @@ -42,9 +45,12 @@ import { GlobalReducer } from 'types/reducer/globalTime'; import AutoRefresh from '../AutoRefreshV2'; import { DateTimeRangeType } from '../CustomDateTimeModal'; import { + convertOldTimeToNewValidCustomTimeFormat, + CustomTimeType, getDefaultOption, getOptions, LocalStorageTimeRange, + OLD_RELATIVE_TIME_VALUES, Time, TimeRange, } from './config'; @@ -66,6 +72,10 @@ function DateTimeSelection({ const searchStartTime = urlQuery.get('startTime'); const searchEndTime = urlQuery.get('endTime'); 
const queryClient = useQueryClient(); + const [enableAbsoluteTime, setEnableAbsoluteTime] = useState(false); + const [isValidteRelativeTime, setIsValidteRelativeTime] = useState(false); + const [, handleCopyToClipboard] = useCopyToClipboard(); + const [isURLCopied, setIsURLCopied] = useState(false); const { localstorageStartTime, @@ -178,7 +188,7 @@ function DateTimeSelection({ const getInputLabel = ( startTime?: Dayjs, endTime?: Dayjs, - timeInterval: Time = '15min', + timeInterval: Time | CustomTimeType = '15m', ): string | Time => { if (startTime && endTime && timeInterval === 'custom') { const format = 'DD/MM/YYYY HH:mm'; @@ -284,28 +294,38 @@ function DateTimeSelection({ [location.pathname], ); - const onSelectHandler = (value: Time): void => { + const onSelectHandler = (value: Time | CustomTimeType): void => { if (value !== 'custom') { setIsOpen(false); updateTimeInterval(value); updateLocalStorageForRoutes(value); + setIsValidteRelativeTime(true); if (refreshButtonHidden) { setRefreshButtonHidden(false); } } else { setRefreshButtonHidden(true); setCustomDTPickerVisible(true); + setIsValidteRelativeTime(false); + setEnableAbsoluteTime(false); + + return; } const { maxTime, minTime } = GetMinMax(value, getTime()); if (!isLogsExplorerPage) { - urlQuery.set(QueryParams.startTime, minTime.toString()); - urlQuery.set(QueryParams.endTime, maxTime.toString()); + urlQuery.delete('startTime'); + urlQuery.delete('endTime'); + + urlQuery.set(QueryParams.relativeTime, value); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; history.replace(generatedUrl); } + // For logs explorer - time range handling is managed in useCopyLogLink.ts:52 + if (!stagedQuery) { return; } @@ -319,18 +339,22 @@ function DateTimeSelection({ }; const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => { + // console.log('dateTimeRange', dateTimeRange); if (dateTimeRange !== null) { const [startTimeMoment, endTimeMoment] = dateTimeRange; if (startTimeMoment && 
endTimeMoment) { const startTime = startTimeMoment; const endTime = endTimeMoment; setCustomDTPickerVisible(false); + updateTimeInterval('custom', [ startTime.toDate().getTime(), endTime.toDate().getTime(), ]); + setLocalStorageKey('startTime', startTime.toString()); setLocalStorageKey('endTime', endTime.toString()); + updateLocalStorageForRoutes(JSON.stringify({ startTime, endTime })); if (!isLogsExplorerPage) { @@ -339,6 +363,7 @@ function DateTimeSelection({ startTime?.toDate().getTime().toString(), ); urlQuery.set(QueryParams.endTime, endTime?.toDate().getTime().toString()); + urlQuery.delete(QueryParams.relativeTime); const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; history.replace(generatedUrl); } @@ -346,6 +371,57 @@ function DateTimeSelection({ } }; + const onValidCustomDateHandler = (dateTimeStr: CustomTimeType): void => { + setIsOpen(false); + updateTimeInterval(dateTimeStr); + updateLocalStorageForRoutes(dateTimeStr); + + urlQuery.delete('startTime'); + urlQuery.delete('endTime'); + + setIsValidteRelativeTime(true); + + const { maxTime, minTime } = GetMinMax(dateTimeStr, getTime()); + + if (!isLogsExplorerPage) { + urlQuery.delete('startTime'); + urlQuery.delete('endTime'); + + urlQuery.set(QueryParams.relativeTime, dateTimeStr); + + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); + } + + if (!stagedQuery) { + return; + } + + // the second boolean param directs the qb about the time change so to merge the query and retain the current state + initQueryBuilderData(updateStepInterval(stagedQuery, maxTime, minTime), true); + }; + + const getCustomOrIntervalTime = ( + time: Time, + currentRoute: string, + ): Time | CustomTimeType => { + if (searchEndTime !== null && searchStartTime !== null) { + return 'custom'; + } + if ( + (localstorageEndTime === null || localstorageStartTime === null) && + time === 'custom' + ) { + return getDefaultOption(currentRoute); + } + + if 
(OLD_RELATIVE_TIME_VALUES.indexOf(time) > -1) { + return convertOldTimeToNewValidCustomTimeFormat(time); + } + + return time; + }; + // this is triggred when we change the routes and based on that we are changing the default options useEffect(() => { const metricsTimeDuration = getLocalStorageKey( @@ -365,21 +441,9 @@ function DateTimeSelection({ const currentOptions = getOptions(currentRoute); setOptions(currentOptions); - const getCustomOrIntervalTime = (time: Time): Time => { - if (searchEndTime !== null && searchStartTime !== null) { - return 'custom'; - } - if ( - (localstorageEndTime === null || localstorageStartTime === null) && - time === 'custom' - ) { - return getDefaultOption(currentRoute); - } + const updatedTime = getCustomOrIntervalTime(time, currentRoute); - return time; - }; - - const updatedTime = getCustomOrIntervalTime(time); + setIsValidteRelativeTime(updatedTime !== 'custom'); const [preStartTime = 0, preEndTime = 0] = getTime() || []; @@ -388,18 +452,113 @@ function DateTimeSelection({ updateTimeInterval(updatedTime, [preStartTime, preEndTime]); if (updatedTime !== 'custom') { - const { minTime, maxTime } = GetMinMax(updatedTime); - urlQuery.set(QueryParams.startTime, minTime.toString()); - urlQuery.set(QueryParams.endTime, maxTime.toString()); + urlQuery.delete('startTime'); + urlQuery.delete('endTime'); + + urlQuery.set(QueryParams.relativeTime, updatedTime); } else { - urlQuery.set(QueryParams.startTime, preStartTime.toString()); - urlQuery.set(QueryParams.endTime, preEndTime.toString()); + const startTime = preStartTime.toString(); + const endTime = preEndTime.toString(); + + urlQuery.set(QueryParams.startTime, startTime); + urlQuery.set(QueryParams.endTime, endTime); } + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); // eslint-disable-next-line react-hooks/exhaustive-deps }, [location.pathname, updateTimeInterval, globalTimeLoading]); + // eslint-disable-next-line 
sonarjs/cognitive-complexity + const shareModalContent = (): JSX.Element => { + let currentUrl = window.location.href; + + const startTime = urlQuery.get(QueryParams.startTime); + const endTime = urlQuery.get(QueryParams.endTime); + const isCustomTime = !!(startTime && endTime && selectedTime === 'custom'); + + if (enableAbsoluteTime || isCustomTime) { + if (selectedTime === 'custom') { + if (searchStartTime && searchEndTime) { + urlQuery.set(QueryParams.startTime, searchStartTime.toString()); + urlQuery.set(QueryParams.endTime, searchEndTime.toString()); + } + } else { + const { minTime, maxTime } = GetMinMax(selectedTime); + + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + } + + urlQuery.delete(QueryParams.relativeTime); + + currentUrl = `${window.location.origin}${ + location.pathname + }?${urlQuery.toString()}`; + } else { + urlQuery.delete(QueryParams.startTime); + urlQuery.delete(QueryParams.endTime); + + urlQuery.set(QueryParams.relativeTime, selectedTime); + currentUrl = `${window.location.origin}${ + location.pathname + }?${urlQuery.toString()}`; + } + + return ( +
+
+
+ {(selectedTime === 'custom' || !isValidteRelativeTime) && ( + + )} + { + setEnableAbsoluteTime(!enableAbsoluteTime); + }} + /> +
+ + Enable Absolute Time +
+ + {(selectedTime === 'custom' || !isValidteRelativeTime) && ( +
+ Please select / enter valid relative time to toggle. +
+ )} + +
+ + {currentUrl} + + +
+
+ ); + }; + return (
{!hasSelectedTimeError && !refreshButtonHidden && ( @@ -426,9 +585,12 @@ function DateTimeSelection({ setHasSelectedTimeError(hasError); }} selectedTime={selectedTime} - onValidCustomDateChange={(dateTime): void => - onCustomDateHandler(dateTime as DateTimeRangeType) - } + onValidCustomDateChange={(dateTime): void => { + onValidCustomDateHandler(dateTime.timeStr as CustomTimeType); + }} + onCustomTimeStatusUpdate={(isValid: boolean): void => { + setIsValidteRelativeTime(isValid); + }} selectedValue={getInputLabel( dayjs(minTime / 1000000), dayjs(maxTime / 1000000), @@ -457,6 +619,22 @@ function DateTimeSelection({
)} + + + +
@@ -468,7 +646,7 @@ interface DateTimeSelectionV2Props { } interface DispatchProps { updateTimeInterval: ( - interval: Time, + interval: Time | CustomTimeType, dateTimeRange?: [number, number], ) => (dispatch: Dispatch) => void; globalTimeLoading: () => void; diff --git a/frontend/src/hooks/logs/useCopyLogLink.ts b/frontend/src/hooks/logs/useCopyLogLink.ts index b663aa750c..8dee58c710 100644 --- a/frontend/src/hooks/logs/useCopyLogLink.ts +++ b/frontend/src/hooks/logs/useCopyLogLink.ts @@ -11,8 +11,11 @@ import { useMemo, useState, } from 'react'; +import { useSelector } from 'react-redux'; import { useLocation } from 'react-router-dom'; import { useCopyToClipboard } from 'react-use'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; import { HIGHLIGHTED_DELAY } from './configs'; import { LogTimeRange, UseCopyLogLink } from './types'; @@ -33,15 +36,30 @@ export const useCopyLogLink = (logId?: string): UseCopyLogLink => { null, ); + const { selectedTime } = useSelector( + (state) => state.globalTime, + ); + const onTimeRangeChange = useCallback( (newTimeRange: LogTimeRange | null): void => { urlQuery.set(QueryParams.timeRange, JSON.stringify(newTimeRange)); - urlQuery.set(QueryParams.startTime, newTimeRange?.start.toString() || ''); - urlQuery.set(QueryParams.endTime, newTimeRange?.end.toString() || ''); + + if (selectedTime !== 'custom') { + urlQuery.delete(QueryParams.startTime); + urlQuery.delete(QueryParams.endTime); + + urlQuery.set(QueryParams.relativeTime, selectedTime); + } else { + urlQuery.set(QueryParams.startTime, newTimeRange?.start.toString() || ''); + urlQuery.set(QueryParams.endTime, newTimeRange?.end.toString() || ''); + + urlQuery.delete(QueryParams.relativeTime); + } + const generatedUrl = `${pathname}?${urlQuery.toString()}`; history.replace(generatedUrl); }, - [pathname, urlQuery], + [pathname, urlQuery, selectedTime], ); const isActiveLog = useMemo(() => activeLogId === logId, [activeLogId, 
logId]); diff --git a/frontend/src/hooks/useQueryService.ts b/frontend/src/hooks/useQueryService.ts index c13654c56b..a5c54f2466 100644 --- a/frontend/src/hooks/useQueryService.ts +++ b/frontend/src/hooks/useQueryService.ts @@ -1,7 +1,10 @@ import getService from 'api/metrics/getService'; import { AxiosError } from 'axios'; import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { QueryKey, useQuery, @@ -27,7 +30,7 @@ export const useQueryService = ({ interface UseQueryServiceProps { minTime: number; maxTime: number; - selectedTime: Time | TimeV2; + selectedTime: Time | TimeV2 | CustomTimeType; selectedTags: Tags[]; options?: UseQueryOptions; } diff --git a/frontend/src/lib/dashboard/getQueryResults.ts b/frontend/src/lib/dashboard/getQueryResults.ts index 64b749e45c..177ae9311b 100644 --- a/frontend/src/lib/dashboard/getQueryResults.ts +++ b/frontend/src/lib/dashboard/getQueryResults.ts @@ -6,7 +6,10 @@ import { getMetricsQueryRange } from 'api/metrics/getQueryRange'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { Pagination } from 'hooks/queryPagination'; import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld'; import { isEmpty } from 'lodash-es'; @@ -67,7 +70,7 @@ export interface GetQueryResultsProps { query: Query; graphType: PANEL_TYPES; selectedTime: timePreferenceType; - globalSelectedInterval: Time | TimeV2; + globalSelectedInterval: Time | TimeV2 | CustomTimeType; variables?: Record; params?: Record; 
tableParams?: { diff --git a/frontend/src/lib/getMinMax.ts b/frontend/src/lib/getMinMax.ts index c52436063d..4a5076b066 100644 --- a/frontend/src/lib/getMinMax.ts +++ b/frontend/src/lib/getMinMax.ts @@ -1,63 +1,101 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { isString } from 'lodash-es'; import { GlobalReducer } from 'types/reducer/globalTime'; import getMinAgo from './getStartAndEndTime/getMinAgo'; +const validCustomTimeRegex = /^(\d+)([mhdw])$/; + +export const isValidTimeFormat = (time: string): boolean => + validCustomTimeRegex.test(time); + +const extractTimeAndUnit = (time: string): { time: number; unit: string } => { + // Match the pattern + const match = /^(\d+)([mhdw])$/.exec(time); + + if (match) { + return { time: parseInt(match[1], 10), unit: match[2] }; + } + + return { + time: 30, + unit: 'm', + }; +}; + +export const getMinTimeForRelativeTimes = ( + time: number, + unit: string, +): number => { + switch (unit) { + case 'm': + return getMinAgo({ minutes: 1 * time }).getTime(); + case 'h': + return getMinAgo({ minutes: 60 * time }).getTime(); + case 'd': + return getMinAgo({ minutes: 24 * 60 * time }).getTime(); + case 'w': + return getMinAgo({ minutes: 24 * 60 * 7 * time }).getTime(); + default: + return getMinAgo({ minutes: 1 }).getTime(); + } +}; + const GetMinMax = ( - interval: Time | TimeV2, + interval: Time | TimeV2 | string, dateTimeRange?: [number, number], // eslint-disable-next-line sonarjs/cognitive-complexity ): GetMinMaxPayload => { let maxTime = new Date().getTime(); let minTime = 0; - if (interval === '1min') { + if (interval === '1m') { const minTimeAgo = getMinAgo({ minutes: 1 }).getTime(); minTime = minTimeAgo; - } else if (interval === '10min') { + } else if (interval === '10m') { const minTimeAgo = getMinAgo({ minutes: 10 }).getTime(); minTime = minTimeAgo; - } else if (interval === '15min') { + } else if (interval === 
'15m') { const minTimeAgo = getMinAgo({ minutes: 15 }).getTime(); minTime = minTimeAgo; - } else if (interval === '1hr') { + } else if (interval === '1h') { const minTimeAgo = getMinAgo({ minutes: 60 }).getTime(); minTime = minTimeAgo; - } else if (interval === '30min') { + } else if (interval === '30m') { const minTimeAgo = getMinAgo({ minutes: 30 }).getTime(); minTime = minTimeAgo; - } else if (interval === '45min') { + } else if (interval === '45m') { const minTimeAgo = getMinAgo({ minutes: 45 }).getTime(); minTime = minTimeAgo; - } else if (interval === '5min') { + } else if (interval === '5m') { const minTimeAgo = getMinAgo({ minutes: 5 }).getTime(); minTime = minTimeAgo; - } else if (interval === '1day') { + } else if (interval === '1d') { // one day = 24*60(min) const minTimeAgo = getMinAgo({ minutes: 24 * 60 }).getTime(); minTime = minTimeAgo; - } else if (interval === '3days') { + } else if (interval === '3d') { // three day = one day * 3 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 3 }).getTime(); minTime = minTimeAgo; - } else if (interval === '4days') { + } else if (interval === '4d') { // four day = one day * 4 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 4 }).getTime(); minTime = minTimeAgo; - } else if (interval === '10days') { + } else if (interval === '10d') { // ten day = one day * 10 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 10 }).getTime(); minTime = minTimeAgo; - } else if (interval === '1week') { + } else if (interval === '1w') { // one week = one day * 7 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 7 }).getTime(); minTime = minTimeAgo; - } else if (interval === '2weeks') { + } else if (interval === '2w') { // two week = one day * 14 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 14 }).getTime(); minTime = minTimeAgo; - } else if (interval === '6weeks') { + } else if (interval === '6w') { // six week = one day * 42 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 42 }).getTime(); minTime = minTimeAgo; @@ -65,13 
+103,17 @@ const GetMinMax = ( // two months = one day * 60 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 60 }).getTime(); minTime = minTimeAgo; - } else if (['3hr', '4hr', '6hr', '12hr'].includes(interval)) { - const h = parseInt(interval.replace('hr', ''), 10); + } else if (['3h', '4h', '6h', '12h'].includes(interval)) { + const h = parseInt(interval.replace('h', ''), 10); const minTimeAgo = getMinAgo({ minutes: h * 60 }).getTime(); minTime = minTimeAgo; } else if (interval === 'custom') { maxTime = (dateTimeRange || [])[1] || 0; minTime = (dateTimeRange || [])[0] || 0; + } else if (isString(interval) && isValidTimeFormat(interval)) { + const { time, unit } = extractTimeAndUnit(interval); + + minTime = getMinTimeForRelativeTimes(time, unit); } else { throw new Error('invalid time type'); } diff --git a/frontend/src/lib/getStartEndRangeTime.ts b/frontend/src/lib/getStartEndRangeTime.ts index 7fd087fd54..37e057b789 100644 --- a/frontend/src/lib/getStartEndRangeTime.ts +++ b/frontend/src/lib/getStartEndRangeTime.ts @@ -1,7 +1,10 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import store from 'store'; import getMaxMinTime from './getMaxMinTime'; @@ -38,7 +41,7 @@ const getStartEndRangeTime = ({ interface GetStartEndRangeTimesProps { type?: timePreferenceType; graphType?: PANEL_TYPES | null; - interval?: Time | TimeV2; + interval?: Time | TimeV2 | CustomTimeType; } interface GetStartEndRangeTimesPayload { diff --git a/frontend/src/store/actions/global.ts b/frontend/src/store/actions/global.ts index 19c3be2b7b..149572d726 100644 --- a/frontend/src/store/actions/global.ts +++ b/frontend/src/store/actions/global.ts @@ -1,12 +1,15 @@ 
import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import GetMinMax from 'lib/getMinMax'; import { Dispatch } from 'redux'; import AppActions from 'types/actions'; import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime'; export const UpdateTimeInterval = ( - interval: Time | TimeV2, + interval: Time | TimeV2 | CustomTimeType, dateTimeRange: [number, number] = [0, 0], ): ((dispatch: Dispatch) => void) => ( dispatch: Dispatch, diff --git a/frontend/src/store/actions/trace/util.ts b/frontend/src/store/actions/trace/util.ts index 54cd819da5..df2955bbb8 100644 --- a/frontend/src/store/actions/trace/util.ts +++ b/frontend/src/store/actions/trace/util.ts @@ -88,4 +88,7 @@ export const getFilter = (data: GetFilterPayload): TraceReducer['filter'] => { }; export const stripTimestampsFromQuery = (query: string): string => - query.replace(/(\?|&)startTime=\d+/, '').replace(/&endTime=\d+/, ''); + query + .replace(/(\?|&)startTime=\d+/, '') + .replace(/&endTime=\d+/, '') + .replace(/[?&]relativeTime=[^&]+/g, ''); diff --git a/frontend/src/types/actions/globalTime.ts b/frontend/src/types/actions/globalTime.ts index 858a7c78a0..02243b7e05 100644 --- a/frontend/src/types/actions/globalTime.ts +++ b/frontend/src/types/actions/globalTime.ts @@ -1,5 +1,8 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { ResetIdStartAndEnd, SetSearchQueryString } from './logs'; @@ -14,7 +17,7 @@ export type GlobalTime = { }; interface UpdateTime extends GlobalTime { - selectedTime: Time | TimeV2; + selectedTime: Time | TimeV2 | CustomTimeType; } interface UpdateTimeInterval { diff --git 
a/frontend/src/types/reducer/globalTime.ts b/frontend/src/types/reducer/globalTime.ts index cd7fac1ea8..bc5e4e2d67 100644 --- a/frontend/src/types/reducer/globalTime.ts +++ b/frontend/src/types/reducer/globalTime.ts @@ -1,12 +1,15 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; -import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; +import { + CustomTimeType, + Time as TimeV2, +} from 'container/TopNav/DateTimeSelectionV2/config'; import { GlobalTime } from 'types/actions/globalTime'; export interface GlobalReducer { maxTime: GlobalTime['maxTime']; minTime: GlobalTime['minTime']; loading: boolean; - selectedTime: Time | TimeV2; + selectedTime: Time | TimeV2 | CustomTimeType; isAutoRefreshDisabled: boolean; selectedAutoRefreshInterval: string; } From 43ceb052d8647f75012cc90fc4d2cc6b4f3c635d Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Fri, 29 Mar 2024 16:00:22 +0530 Subject: [PATCH 41/53] feat: do not retry query range API's with i/o timeout error (#4768) * feat: do not retry query range API's with i/o timeout error * feat: do not retry query range API's with i/o timeout error --- frontend/src/api/ErrorResponseHandler.ts | 3 ++- .../src/container/GridCardLayout/GridCard/index.tsx | 10 ++++++++++ frontend/src/lib/dashboard/getQueryResults.ts | 2 +- frontend/src/types/api/index.ts | 3 ++- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/frontend/src/api/ErrorResponseHandler.ts b/frontend/src/api/ErrorResponseHandler.ts index 3f28ff418d..027418ec84 100644 --- a/frontend/src/api/ErrorResponseHandler.ts +++ b/frontend/src/api/ErrorResponseHandler.ts @@ -30,7 +30,8 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse { statusCode, payload: null, error: errorMessage, - message: null, + message: (response.data as any)?.status, + body: JSON.stringify((response.data as any).data), }; } diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx 
b/frontend/src/container/GridCardLayout/GridCard/index.tsx index 1633f0b947..d81e518222 100644 --- a/frontend/src/container/GridCardLayout/GridCard/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx @@ -152,6 +152,16 @@ function GridCardGraph({ widget?.panelTypes, widget.timePreferance, ], + retry(failureCount, error): boolean { + if ( + String(error).includes('status: error') && + String(error).includes('i/o timeout') + ) { + return false; + } + + return failureCount < 2; + }, keepPreviousData: true, enabled: queryEnabledCondition, refetchOnMount: false, diff --git a/frontend/src/lib/dashboard/getQueryResults.ts b/frontend/src/lib/dashboard/getQueryResults.ts index 177ae9311b..0290a574cd 100644 --- a/frontend/src/lib/dashboard/getQueryResults.ts +++ b/frontend/src/lib/dashboard/getQueryResults.ts @@ -34,7 +34,7 @@ export async function GetMetricQueryRange( if (response.statusCode >= 400) { throw new Error( - `API responded with ${response.statusCode} - ${response.error}`, + `API responded with ${response.statusCode} - ${response.error} status: ${response.message}, errors: ${response?.body}`, ); } diff --git a/frontend/src/types/api/index.ts b/frontend/src/types/api/index.ts index 49fdb21a4e..b7a0ed57e4 100644 --- a/frontend/src/types/api/index.ts +++ b/frontend/src/types/api/index.ts @@ -6,7 +6,8 @@ export interface ErrorResponse { statusCode: ErrorStatusCode; payload: null; error: string; - message: null; + message: string | null; + body?: string | null; } export interface SuccessResponse { From 397da5857f2076bda25e79ddc0fbf1791f57790b Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Sat, 30 Mar 2024 08:55:46 +0530 Subject: [PATCH 42/53] fix: enrich all queries with non materialized attributes (#4772) --- pkg/query-service/app/logs/v3/enrich_query.go | 6 ++++++ pkg/query-service/app/logs/v3/enrich_query_test.go | 13 +++++++------ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git 
a/pkg/query-service/app/logs/v3/enrich_query.go b/pkg/query-service/app/logs/v3/enrich_query.go index c8a5a797b2..904b348999 100644 --- a/pkg/query-service/app/logs/v3/enrich_query.go +++ b/pkg/query-service/app/logs/v3/enrich_query.go @@ -74,6 +74,12 @@ func isEnriched(field v3.AttributeKey) bool { if field.Type == v3.AttributeKeyTypeUnspecified || field.DataType == v3.AttributeKeyDataTypeUnspecified { return false } + + // try to enrich all attributes which doesn't have isColumn = true + if !field.IsColumn { + return false + } + return true } diff --git a/pkg/query-service/app/logs/v3/enrich_query_test.go b/pkg/query-service/app/logs/v3/enrich_query_test.go index 4903139610..3605fa5408 100644 --- a/pkg/query-service/app/logs/v3/enrich_query_test.go +++ b/pkg/query-service/app/logs/v3/enrich_query_test.go @@ -30,7 +30,7 @@ var testEnrichmentRequiredData = []struct { }, }, }, - EnrichmentRequired: false, + EnrichmentRequired: true, }, { Name: "attribute enrichment required", @@ -66,7 +66,7 @@ var testEnrichmentRequiredData = []struct { }, }, }, - EnrichmentRequired: false, + EnrichmentRequired: true, }, { Name: "filter enrichment required", @@ -118,7 +118,7 @@ var testEnrichmentRequiredData = []struct { }, }, }, - EnrichmentRequired: false, + EnrichmentRequired: true, }, { Name: "groupBy enrichment required", @@ -151,7 +151,7 @@ var testEnrichmentRequiredData = []struct { }, }, }, - EnrichmentRequired: false, + EnrichmentRequired: true, }, { Name: "orderBy enrichment required", @@ -200,7 +200,7 @@ var testEnrichmentRequiredData = []struct { }, }, }, - EnrichmentRequired: false, + EnrichmentRequired: true, }, } @@ -255,6 +255,7 @@ var testEnrichParamsData = []struct { Key: "response_time", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64, + IsColumn: true, }, }, Result: v3.QueryRangeParamsV3{ @@ -273,7 +274,7 @@ var testEnrichParamsData = []struct { {Key: v3.AttributeKey{Key: "user_name", Type: v3.AttributeKeyTypeTag, DataType: 
v3.AttributeKeyDataTypeString}, Value: "john", Operator: "="}, }}, GroupBy: []v3.AttributeKey{{Key: "trace_id", Type: v3.AttributeKeyTypeUnspecified, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}}, - OrderBy: []v3.OrderBy{{ColumnName: "response_time", Key: "response_time", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64}}, + OrderBy: []v3.OrderBy{{ColumnName: "response_time", Key: "response_time", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64, IsColumn: true}}, }, }, }, From a34c59762b9cce4e02610a1fc5145b2c789546bf Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Sat, 30 Mar 2024 17:57:01 +0530 Subject: [PATCH 43/53] feat: allow characters in attribute names (#4775) --- .../app/clickhouseReader/reader.go | 18 +++--- pkg/query-service/app/logs/parser_test.go | 10 ++-- .../app/logs/v3/query_builder.go | 4 +- .../app/logs/v3/query_builder_test.go | 55 ++++++++++--------- .../app/queryBuilder/query_builder_test.go | 8 +-- pkg/query-service/utils/format.go | 2 +- 6 files changed, 51 insertions(+), 46 deletions(-) diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 1f5b2c2eb5..57d7318ee4 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -3674,7 +3674,7 @@ func isSelectedField(tableStatement string, field model.LogField) bool { // in case of attributes and resources, if there is a materialized column present then it is selected // TODO: handle partial change complete eg:- index is removed but materialized column is still present name := utils.GetClickhouseColumnName(field.Type, field.DataType, field.Name) - return strings.Contains(tableStatement, fmt.Sprintf("`%s`", name)) + return strings.Contains(tableStatement, fmt.Sprintf("%s", name)) } func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError { @@ -3708,10 +3708,10 @@ func (r 
*ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda return &model.ApiError{Err: err, Typ: model.ErrorInternal} } - query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s_exists bool DEFAULT if(indexOf(%s, '%s') != 0, true, false) CODEC(ZSTD(1))", + query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s_exists` bool DEFAULT if(indexOf(%s, '%s') != 0, true, false) CODEC(ZSTD(1))", r.logsDB, table, r.cluster, - colname, + strings.TrimSuffix(colname, "`"), keyColName, field.Name, ) @@ -3733,10 +3733,10 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda if field.IndexGranularity == 0 { field.IndexGranularity = constants.DefaultLogSkipIndexGranularity } - query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD INDEX IF NOT EXISTS %s_idx (%s) TYPE %s GRANULARITY %d", + query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD INDEX IF NOT EXISTS %s_idx` (%s) TYPE %s GRANULARITY %d", r.logsDB, r.logsLocalTable, r.cluster, - colname, + strings.TrimSuffix(colname, "`"), colname, field.IndexType, field.IndexGranularity, @@ -3748,7 +3748,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda } else { // Delete the index first - query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, r.cluster, colname) + query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx`", r.logsDB, r.logsLocalTable, r.cluster, strings.TrimSuffix(colname, "`")) err := r.db.Exec(ctx, query) if err != nil { return &model.ApiError{Err: err, Typ: model.ErrorInternal} @@ -3768,11 +3768,11 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda } // drop exists column on logs table - query = "ALTER TABLE %s.%s ON CLUSTER %s DROP COLUMN IF EXISTS %s_exists " + query = "ALTER TABLE %s.%s ON CLUSTER %s DROP COLUMN IF EXISTS %s_exists` " err = r.db.Exec(ctx, 
fmt.Sprintf(query, r.logsDB, table, r.cluster, - colname, + strings.TrimSuffix(colname, "`"), ), ) if err != nil { @@ -4329,7 +4329,7 @@ func isColumn(tableStatement, attrType, field, datType string) bool { // value of attrType will be `resource` or `tag`, if `tag` change it to `attribute` name := utils.GetClickhouseColumnName(attrType, datType, field) - return strings.Contains(tableStatement, fmt.Sprintf("`%s` ", name)) + return strings.Contains(tableStatement, fmt.Sprintf("%s ", name)) } func (r *ClickHouseReader) GetLogAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error) { diff --git a/pkg/query-service/app/logs/parser_test.go b/pkg/query-service/app/logs/parser_test.go index b02284ea05..bb7dde6296 100644 --- a/pkg/query-service/app/logs/parser_test.go +++ b/pkg/query-service/app/logs/parser_test.go @@ -252,7 +252,7 @@ func TestReplaceInterestingFields(t *testing.T) { }, } - expectedTokens := []string{"attributes_int64_value[indexOf(attributes_int64_key, 'id.userid')] IN (100) ", "and attribute_int64_id_key >= 50 ", `AND body ILIKE '%searchstring%'`} + expectedTokens := []string{"attributes_int64_value[indexOf(attributes_int64_key, 'id.userid')] IN (100) ", "and `attribute_int64_id_key` >= 50 ", `AND body ILIKE '%searchstring%'`} Convey("testInterestingFields", t, func() { tokens, err := replaceInterestingFields(&allFields, queryTokens) So(err, ShouldBeNil) @@ -374,7 +374,7 @@ var generateSQLQueryTestCases = []struct { IdGt: "2BsKLKv8cZrLCn6rkOcRGkdjBdM", IdLT: "2BsKG6tRpFWjYMcWsAGKfSxoQdU", }, - SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' and id > '2BsKLKv8cZrLCn6rkOcRGkdjBdM' and id < '2BsKG6tRpFWjYMcWsAGKfSxoQdU' ) and ( attribute_int64_field1 < 100 and attribute_int64_field1 > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", + SqlFilter: "( timestamp >= 
'1657689292000' and timestamp <= '1657689294000' and id > '2BsKLKv8cZrLCn6rkOcRGkdjBdM' and id < '2BsKG6tRpFWjYMcWsAGKfSxoQdU' ) and ( `attribute_int64_field1` < 100 and `attribute_int64_field1` > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", }, { Name: "second query with only timestamp range", @@ -383,7 +383,7 @@ var generateSQLQueryTestCases = []struct { TimestampStart: uint64(1657689292000), TimestampEnd: uint64(1657689294000), }, - SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( attribute_int64_field1 < 100 and attribute_int64_field1 > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", + SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( `attribute_int64_field1` < 100 and `attribute_int64_field1` > 50 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", }, { Name: "generate case sensitive query", @@ -392,7 +392,7 @@ var generateSQLQueryTestCases = []struct { TimestampStart: uint64(1657689292000), TimestampEnd: uint64(1657689294000), }, - SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( attribute_int64_field1 < 100 and attributes_int64_value[indexOf(attributes_int64_key, 'FielD1')] > 50 and attribute_double64_Field2 > 10 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] <= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", + SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( `attribute_int64_field1` < 100 and attributes_int64_value[indexOf(attributes_int64_key, 'FielD1')] > 50 and `attribute_double64_Field2` > 10 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] 
<= 500 and attributes_int64_value[indexOf(attributes_int64_key, 'code')] >= 400 ) ", }, { Name: "Check exists and not exists", @@ -401,7 +401,7 @@ var generateSQLQueryTestCases = []struct { TimestampStart: uint64(1657689292000), TimestampEnd: uint64(1657689294000), }, - SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( has(attributes_int64_key, 'field1') and NOT has(attributes_double64_key, 'Field2') and attribute_double64_Field2 > 10 ) ", + SqlFilter: "( timestamp >= '1657689292000' and timestamp <= '1657689294000' ) and ( has(attributes_int64_key, 'field1') and NOT has(attributes_double64_key, 'Field2') and `attribute_double64_Field2` > 10 ) ", }, { Name: "Check top level key filter", diff --git a/pkg/query-service/app/logs/v3/query_builder.go b/pkg/query-service/app/logs/v3/query_builder.go index 7babe2cd0d..e01cb95d74 100644 --- a/pkg/query-service/app/logs/v3/query_builder.go +++ b/pkg/query-service/app/logs/v3/query_builder.go @@ -150,7 +150,7 @@ func GetExistsNexistsFilter(op v3.FilterOperator, item v3.FilterItem) string { if op == v3.FilterOperatorNotExists { val = false } - return fmt.Sprintf("%s_exists=%v", getClickhouseColumnName(item.Key), val) + return fmt.Sprintf("%s_exists`=%v", strings.TrimSuffix(getClickhouseColumnName(item.Key), "`"), val) } columnType := getClickhouseLogsColumnType(item.Key.Type) columnDataType := getClickhouseLogsColumnDataType(item.Key.DataType) @@ -212,7 +212,7 @@ func buildLogsTimeSeriesFilterQuery(fs *v3.FilterSet, groupBy []v3.AttributeKey, conditions = append(conditions, fmt.Sprintf("has(%s_%s_key, '%s')", columnType, columnDataType, attr.Key)) } else if attr.Type != v3.AttributeKeyTypeUnspecified { // for materialzied columns - conditions = append(conditions, fmt.Sprintf("%s_exists=true", getClickhouseColumnName(attr))) + conditions = append(conditions, fmt.Sprintf("%s_exists`=true", strings.TrimSuffix(getClickhouseColumnName(attr), "`"))) } } diff --git 
a/pkg/query-service/app/logs/v3/query_builder_test.go b/pkg/query-service/app/logs/v3/query_builder_test.go index d8c5a141b2..58a120cd44 100644 --- a/pkg/query-service/app/logs/v3/query_builder_test.go +++ b/pkg/query-service/app/logs/v3/query_builder_test.go @@ -26,17 +26,17 @@ var testGetClickhouseColumnNameData = []struct { { Name: "selected field", AttributeKey: v3.AttributeKey{Key: "servicename", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, - ExpectedColumnName: "attribute_string_servicename", + ExpectedColumnName: "`attribute_string_servicename`", }, { Name: "selected field resource", AttributeKey: v3.AttributeKey{Key: "sdk_version", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeResource, IsColumn: true}, - ExpectedColumnName: "resource_int64_sdk_version", + ExpectedColumnName: "`resource_int64_sdk_version`", }, { Name: "selected field float", AttributeKey: v3.AttributeKey{Key: "sdk_version", DataType: v3.AttributeKeyDataTypeFloat64, Type: v3.AttributeKeyTypeTag, IsColumn: true}, - ExpectedColumnName: "attribute_float64_sdk_version", + ExpectedColumnName: "`attribute_float64_sdk_version`", }, { Name: "same name as top level column", @@ -48,6 +48,11 @@ var testGetClickhouseColumnNameData = []struct { AttributeKey: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, ExpectedColumnName: "trace_id", }, + { + Name: "name with - ", + AttributeKey: v3.AttributeKey{Key: "test-attr", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + ExpectedColumnName: "`attribute_string_test-attr`", + }, } func TestGetClickhouseColumnName(t *testing.T) { @@ -131,7 +136,7 @@ var timeSeriesFilterQueryData = []struct { {Key: v3.AttributeKey{Key: "user_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "john", Operator: "="}, {Key: v3.AttributeKey{Key: 
"k8s_namespace", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "my_service", Operator: "!="}, }}, - ExpectedFilter: "attribute_string_user_name = 'john' AND resources_string_value[indexOf(resources_string_key, 'k8s_namespace')] != 'my_service'", + ExpectedFilter: "`attribute_string_user_name` = 'john' AND resources_string_value[indexOf(resources_string_key, 'k8s_namespace')] != 'my_service'", }, { Name: "Test like", @@ -194,7 +199,7 @@ var timeSeriesFilterQueryData = []struct { FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ {Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "host: \"(?P\\S+)\"", Operator: "regex"}, }}, - ExpectedFilter: "match(attribute_string_host, 'host: \"(?P\\\\S+)\"')", + ExpectedFilter: "match(`attribute_string_host`, 'host: \"(?P\\\\S+)\"')", }, { Name: "Test not regex", @@ -217,7 +222,7 @@ var timeSeriesFilterQueryData = []struct { {Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "102.", Operator: "ncontains"}, }}, GroupBy: []v3.AttributeKey{{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}}, - ExpectedFilter: "attributes_string_value[indexOf(attributes_string_key, 'host')] NOT ILIKE '%102.%' AND attribute_string_host_exists=true", + ExpectedFilter: "attributes_string_value[indexOf(attributes_string_key, 'host')] NOT ILIKE '%102.%' AND `attribute_string_host_exists`=true", }, { Name: "Wrong data", @@ -266,14 +271,14 @@ var timeSeriesFilterQueryData = []struct { FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ {Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Operator: "exists"}, }}, - ExpectedFilter: "attribute_string_method_exists=true", + ExpectedFilter: "`attribute_string_method_exists`=true", }, { 
Name: "Test nexists on materiazlied column", FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ {Key: v3.AttributeKey{Key: "status", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Operator: "nexists"}, }}, - ExpectedFilter: "attribute_int64_status_exists=false", + ExpectedFilter: "`attribute_int64_status_exists`=false", }, // add new tests } @@ -368,7 +373,7 @@ var testBuildLogsQueryData = []struct { OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "ASC"}}, }, TableName: "logs", - ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(attribute_string_name))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND attribute_string_name_exists=true group by ts order by value ASC", + ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(`attribute_string_name`))) as value from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND `attribute_string_name_exists`=true group by ts order by value ASC", }, { Name: "Test aggregate count distinct on non selected field", @@ -421,9 +426,9 @@ var testBuildLogsQueryData = []struct { }, TableName: "logs", - ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attribute_string_host$$name as `host.name`, toFloat64(count(distinct(attribute_string_method$$name))) as value" + + ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, `attribute_string_host$$name` as `host.name`, toFloat64(count(distinct(`attribute_string_method$$name`))) as value" + " from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + - "AND 
attribute_string_host$$name_exists=true AND attribute_string_method$$name_exists=true " + + "AND `attribute_string_host$$name_exists`=true AND `attribute_string_method$$name_exists`=true " + "group by `host.name`,ts " + "order by `host.name` ASC", }, @@ -449,11 +454,11 @@ var testBuildLogsQueryData = []struct { TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + - "toFloat64(count(distinct(attribute_string_name))) as value from signoz_logs.distributed_logs " + + "toFloat64(count(distinct(`attribute_string_name`))) as value from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND resources_string_value[indexOf(resources_string_key, 'x')] != 'abc' " + "AND has(attributes_string_key, 'method') " + - "AND attribute_string_name_exists=true " + + "AND `attribute_string_name_exists`=true " + "group by `method`,ts " + "order by `method` ASC", }, @@ -480,12 +485,12 @@ var testBuildLogsQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + "resources_string_value[indexOf(resources_string_key, 'x')] as `x`, " + - "toFloat64(count(distinct(attribute_string_name))) as value from signoz_logs.distributed_logs " + + "toFloat64(count(distinct(`attribute_string_name`))) as value from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND resources_string_value[indexOf(resources_string_key, 'x')] != 'abc' " + "AND has(attributes_string_key, 'method') " + "AND has(resources_string_key, 
'x') " + - "AND attribute_string_name_exists=true " + + "AND `attribute_string_name_exists`=true " + "group by `method`,`x`,ts " + "order by `method` ASC,`x` ASC", }, @@ -540,12 +545,12 @@ var testBuildLogsQueryData = []struct { TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + - "sum(attribute_float64_bytes) as value " + + "sum(`attribute_float64_bytes`) as value " + "from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' " + "AND has(attributes_string_key, 'method') " + - "AND attribute_float64_bytes_exists=true " + + "AND `attribute_float64_bytes_exists`=true " + "group by `method`,ts " + "order by `method` ASC", }, @@ -570,12 +575,12 @@ var testBuildLogsQueryData = []struct { TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + - "min(attribute_float64_bytes) as value " + + "min(`attribute_float64_bytes`) as value " + "from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' " + "AND has(attributes_string_key, 'method') " + - "AND attribute_float64_bytes_exists=true " + + "AND `attribute_float64_bytes_exists`=true " + "group by `method`,ts " + "order by `method` ASC", }, @@ -600,12 +605,12 @@ var testBuildLogsQueryData = []struct { TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + - 
"max(attribute_float64_bytes) as value " + + "max(`attribute_float64_bytes`) as value " + "from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' " + "AND has(attributes_string_key, 'method') " + - "AND attribute_float64_bytes_exists=true " + + "AND `attribute_float64_bytes_exists`=true " + "group by `method`,ts " + "order by `method` ASC", }, @@ -627,11 +632,11 @@ var testBuildLogsQueryData = []struct { TableName: "logs", ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts," + " attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`, " + - "quantile(0.05)(attribute_float64_bytes) as value " + + "quantile(0.05)(`attribute_float64_bytes`) as value " + "from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND has(attributes_string_key, 'method') " + - "AND attribute_float64_bytes_exists=true " + + "AND `attribute_float64_bytes_exists`=true " + "group by `method`,ts " + "order by `method` ASC", }, @@ -653,10 +658,10 @@ var testBuildLogsQueryData = []struct { TableName: "logs", PreferRPM: true, ExpectedQuery: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, attributes_string_value[indexOf(attributes_string_key, 'method')] as `method`" + - ", sum(attribute_float64_bytes)/1.000000 as value from signoz_logs.distributed_logs " + + ", sum(`attribute_float64_bytes`)/1.000000 as value from signoz_logs.distributed_logs " + "where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) " + "AND has(attributes_string_key, 'method') " + - "AND attribute_float64_bytes_exists=true " + + "AND `attribute_float64_bytes_exists`=true " + "group by `method`,ts order by `method` ASC", }, { diff --git 
a/pkg/query-service/app/queryBuilder/query_builder_test.go b/pkg/query-service/app/queryBuilder/query_builder_test.go index 3cec2f301e..65fe21e1d7 100644 --- a/pkg/query-service/app/queryBuilder/query_builder_test.go +++ b/pkg/query-service/app/queryBuilder/query_builder_test.go @@ -554,10 +554,10 @@ var testLogsWithFormula = []struct { }, }, ExpectedQuery: "SELECT A.`key1.1` as `key1.1`, A.`ts` as `ts`, A.value - B.value as value FROM (SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, " + - "attribute_bool_key1$$1 as `key1.1`, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702980884000000000 AND timestamp <= 1702984484000000000) AND " + - "attribute_bool_key_2 = true AND attribute_bool_key1$$1_exists=true group by `key1.1`,ts order by value DESC) as A INNER JOIN (SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), " + - "INTERVAL 60 SECOND) AS ts, attribute_bool_key1$$1 as `key1.1`, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702980884000000000 AND " + - "timestamp <= 1702984484000000000) AND attributes_bool_value[indexOf(attributes_bool_key, 'key_1')] = true AND attribute_bool_key1$$1_exists=true group by `key1.1`,ts order by value DESC) as B " + + "`attribute_bool_key1$$1` as `key1.1`, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702980884000000000 AND timestamp <= 1702984484000000000) AND " + + "`attribute_bool_key_2` = true AND `attribute_bool_key1$$1_exists`=true group by `key1.1`,ts order by value DESC) as A INNER JOIN (SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), " + + "INTERVAL 60 SECOND) AS ts, `attribute_bool_key1$$1` as `key1.1`, toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702980884000000000 AND " + + "timestamp <= 1702984484000000000) AND attributes_bool_value[indexOf(attributes_bool_key, 'key_1')] = true AND 
`attribute_bool_key1$$1_exists`=true group by `key1.1`,ts order by value DESC) as B " + "ON A.`key1.1` = B.`key1.1` AND A.`ts` = B.`ts`", }, } diff --git a/pkg/query-service/utils/format.go b/pkg/query-service/utils/format.go index 0a614e2987..aa9fc59720 100644 --- a/pkg/query-service/utils/format.go +++ b/pkg/query-service/utils/format.go @@ -243,7 +243,7 @@ func GetClickhouseColumnName(typeName string, dataType, field string) string { // if name contains . replace it with `$$` field = strings.ReplaceAll(field, ".", "$$") - colName := fmt.Sprintf("%s_%s_%s", strings.ToLower(typeName), strings.ToLower(dataType), field) + colName := fmt.Sprintf("`%s_%s_%s`", strings.ToLower(typeName), strings.ToLower(dataType), field) return colName } From 39e0ef68ca9dbdf94845ff5ae106dea23cc85461 Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com> Date: Mon, 1 Apr 2024 12:06:08 +0530 Subject: [PATCH 44/53] chore: integration instructions: add typical log file locations on macOS (#4779) --- .../builtin_integrations/mongo/config/collect-logs.md | 4 +++- .../builtin_integrations/nginx/config/collect-logs.md | 4 +++- .../builtin_integrations/postgres/config/collect-logs.md | 4 +++- .../builtin_integrations/redis/config/collect-logs.md | 4 +++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md index fa55ca9a63..86255f0df4 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/collect-logs.md @@ -97,7 +97,9 @@ Set the following environment variables in your otel-collector environment: ```bash # path of MongoDB server log file. 
must be accessible by the otel collector -export MONGODB_LOG_FILE=/var/log/mongodb.log +# typically found in /usr/local/var/log/mongodb on macOS +# mongod.conf file can also be checked for finding log file location +export MONGODB_LOG_FILE=/var/log/mongodb/mongodb.log # region specific SigNoz cloud ingestion endpoint export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md index b421478ab9..71712c503b 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/collect-logs.md @@ -117,9 +117,11 @@ Set the following environment variables in your otel-collector environment: ```bash # path of Nginx access log file. must be accessible by the otel collector -export NGINX_ACCESS_LOG_FILE=/var/log/nginx/access.log; +# typically found at /usr/local/var/log/nginx/access.log on macOS +export NGINX_ACCESS_LOG_FILE=/var/log/nginx/access.log # path of Nginx error log file. must be accessible by the otel collector +# typically found at /usr/local/var/log/nginx/error.log on macOS export NGINX_ERROR_LOG_FILE=/var/log/nginx/error.log # region specific SigNoz cloud ingestion endpoint diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md index 0c199061a7..9f20655cc3 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/collect-logs.md @@ -91,7 +91,9 @@ Set the following environment variables in your otel-collector environment: ```bash # path of Postgres server log file. 
must be accessible by the otel collector -export POSTGRESQL_LOG_FILE=/usr/local/var/log/postgres.log +# typically found in /usr/local/var/log/postgresql on macOS +# running `SELECT pg_current_logfile();` can also give you the location of postgresql log file +export POSTGRESQL_LOG_FILE=/var/log/postgresql/postgresql.log # region specific SigNoz cloud ingestion endpoint export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md b/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md index e8b26ef710..0b8e697b13 100644 --- a/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md +++ b/pkg/query-service/app/integrations/builtin_integrations/redis/config/collect-logs.md @@ -82,7 +82,9 @@ Set the following environment variables in your otel-collector environment: ```bash # path of Redis server log file. must be accessible by the otel collector -export REDIS_LOG_FILE=/var/log/redis.log +# typically found in /usr/local/var/log/redis on macOS +# log file location can also be found in the output of `redis-cli CONFIG GET : *` +export REDIS_LOG_FILE=/var/log/redis/redis-server.log # region specific SigNoz cloud ingestion endpoint export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443" From 00d74bfebb83428e973239f61422b8bc460c82f5 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Mon, 1 Apr 2024 12:40:15 +0530 Subject: [PATCH 45/53] feat: add integrations to the side-nav for cloud users (#4756) * feat: add integrations to the side-nav for cloud users * feat: change the route from integrations/installed to /integrations * feat: light mode table color * feat: increase the width of the integrations panel by 25 percent * feat: added telemetry constants and page view * feat: added telemetry events for integrations * feat: address review comments --- frontend/public/locales/en/titles.json | 2 +- 
frontend/src/AppRoutes/pageComponents.ts | 8 ------- frontend/src/AppRoutes/routes.ts | 12 ++--------- frontend/src/constants/routes.ts | 4 +--- frontend/src/container/SideNav/SideNav.tsx | 11 ++++++++++ frontend/src/container/SideNav/menuItems.tsx | 12 +++++------ .../TopNav/DateTimeSelectionV2/config.ts | 4 +--- .../IntegrationDetailContent.tsx | 10 +++++++-- .../Configure.tsx | 20 ++++++++++++++++-- .../IntegrationDetailContentTabs.styles.scss | 4 ++-- .../IntegrationDetailHeader.tsx | 14 +++++++++++++ .../IntegrationDetailPage.tsx | 2 ++ .../IntegrationsUninstallBar.tsx | 21 ++++++++++++++++++- .../Integrations/Integrations.styles.scss | 2 +- .../src/pages/Integrations/Integrations.tsx | 16 ++++++++++++-- frontend/src/pages/Integrations/utils.ts | 12 +++++++++++ .../IntegrationsModulePage/constants.tsx | 4 ++-- frontend/src/utils/permission/index.ts | 4 +--- 18 files changed, 116 insertions(+), 46 deletions(-) diff --git a/frontend/public/locales/en/titles.json b/frontend/public/locales/en/titles.json index e707c998f7..8aef9c9af6 100644 --- a/frontend/public/locales/en/titles.json +++ b/frontend/public/locales/en/titles.json @@ -48,5 +48,5 @@ "TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views", "DEFAULT": "Open source Observability Platform | SigNoz", "SHORTCUTS": "SigNoz | Shortcuts", - "INTEGRATIONS_INSTALLED": "SigNoz | Integrations" + "INTEGRATIONS": "SigNoz | Integrations" } diff --git a/frontend/src/AppRoutes/pageComponents.ts b/frontend/src/AppRoutes/pageComponents.ts index bea07a7e51..1252496c08 100644 --- a/frontend/src/AppRoutes/pageComponents.ts +++ b/frontend/src/AppRoutes/pageComponents.ts @@ -197,11 +197,3 @@ export const InstalledIntegrations = Loadable( /* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage' ), ); - -export const IntegrationsMarketPlace = Loadable( - // eslint-disable-next-line sonarjs/no-identical-functions - () => - import( - /* webpackChunkName: "IntegrationsMarketPlace" */ 'pages/IntegrationsModulePage' 
- ), -); diff --git a/frontend/src/AppRoutes/routes.ts b/frontend/src/AppRoutes/routes.ts index 360c74d8da..fed77f186e 100644 --- a/frontend/src/AppRoutes/routes.ts +++ b/frontend/src/AppRoutes/routes.ts @@ -15,7 +15,6 @@ import { ErrorDetails, IngestionSettings, InstalledIntegrations, - IntegrationsMarketPlace, LicensePage, ListAllALertsPage, LiveLogs, @@ -338,18 +337,11 @@ const routes: AppRoutes[] = [ key: 'SHORTCUTS', }, { - path: ROUTES.INTEGRATIONS_INSTALLED, + path: ROUTES.INTEGRATIONS, exact: true, component: InstalledIntegrations, isPrivate: true, - key: 'INTEGRATIONS_INSTALLED', - }, - { - path: ROUTES.INTEGRATIONS_MARKETPLACE, - exact: true, - component: IntegrationsMarketPlace, - isPrivate: true, - key: 'INTEGRATIONS_MARKETPLACE', + key: 'INTEGRATIONS', }, ]; diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts index 0b087ff8cd..cbeb672a5c 100644 --- a/frontend/src/constants/routes.ts +++ b/frontend/src/constants/routes.ts @@ -51,9 +51,7 @@ const ROUTES = { TRACES_SAVE_VIEWS: '/traces/saved-views', WORKSPACE_LOCKED: '/workspace-locked', SHORTCUTS: '/shortcuts', - INTEGRATIONS_BASE: '/integrations', - INTEGRATIONS_INSTALLED: '/integrations/installed', - INTEGRATIONS_MARKETPLACE: '/integrations/marketplace', + INTEGRATIONS: '/integrations', } as const; export default ROUTES; diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index 665a406710..6b11ae140c 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -271,6 +271,17 @@ function SideNav({ } }, [isCloudUserVal, isEnterprise, isFetching]); + useEffect(() => { + if (!isCloudUserVal) { + let updatedMenuItems = [...menuItems]; + updatedMenuItems = updatedMenuItems.filter( + (item) => item.key !== ROUTES.INTEGRATIONS, + ); + setMenuItems(updatedMenuItems); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const [isCurrentOrgSettings] = 
useComponentPermission( ['current_org_settings'], role, diff --git a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx index ed6f10b10a..38529a7f3b 100644 --- a/frontend/src/container/SideNav/menuItems.tsx +++ b/frontend/src/container/SideNav/menuItems.tsx @@ -16,6 +16,7 @@ import { ScrollText, Settings, Slack, + Unplug, // Unplug, UserPlus, } from 'lucide-react'; @@ -90,11 +91,11 @@ const menuItems: SidebarItem[] = [ label: 'Alerts', icon: , }, - // { - // key: ROUTES.INTEGRATIONS_INSTALLED, - // label: 'Integrations', - // icon: , - // }, + { + key: ROUTES.INTEGRATIONS, + label: 'Integrations', + icon: , + }, { key: ROUTES.ALL_ERROR, label: 'Exceptions', @@ -127,7 +128,6 @@ export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record = { [ROUTES.TRACES_EXPLORER]: ROUTES.TRACE, [ROUTES.TRACE_EXPLORER]: ROUTES.TRACE, [ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER, - [ROUTES.INTEGRATIONS_BASE]: ROUTES.INTEGRATIONS_INSTALLED, }; export default menuItems; diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts index f92beb6b8d..eefa31475c 100644 --- a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts @@ -199,9 +199,7 @@ export const routesToSkip = [ ROUTES.TRACES_EXPLORER, ROUTES.TRACES_SAVE_VIEWS, ROUTES.SHORTCUTS, - ROUTES.INTEGRATIONS_BASE, - ROUTES.INTEGRATIONS_INSTALLED, - ROUTES.INTEGRATIONS_MARKETPLACE, + ROUTES.INTEGRATIONS, ]; export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS]; diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx index ec81d51db6..c0b3a52f44 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx +++ 
b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx @@ -12,12 +12,13 @@ import Overview from './IntegrationDetailContentTabs/Overview'; interface IntegrationDetailContentProps { activeDetailTab: string; integrationData: IntegrationDetailedProps; + integrationId: string; } function IntegrationDetailContent( props: IntegrationDetailContentProps, ): JSX.Element { - const { activeDetailTab, integrationData } = props; + const { activeDetailTab, integrationData, integrationId } = props; const items: TabsProps['items'] = [ { key: 'overview', @@ -49,7 +50,12 @@ function IntegrationDetailContent( Configure ), - children: , + children: ( + + ), }, { key: 'dataCollected', diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx index 92a5e0c823..2984ba40fe 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx @@ -3,20 +3,36 @@ import './IntegrationDetailContentTabs.styles.scss'; import { Button, Typography } from 'antd'; import cx from 'classnames'; import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer'; -import { useState } from 'react'; +import useAnalytics from 'hooks/analytics/useAnalytics'; +import { INTEGRATION_TELEMETRY_EVENTS } from 'pages/Integrations/utils'; +import { useEffect, useState } from 'react'; interface ConfigurationProps { configuration: Array<{ title: string; instructions: string }>; + integrationId: string; } function Configure(props: ConfigurationProps): JSX.Element { // TODO Mardown renderer support once instructions are ready - const { configuration } = props; + const { configuration, integrationId } = props; const [selectedConfigStep, setSelectedConfigStep] = useState(0); const 
handleMenuClick = (index: number): void => { setSelectedConfigStep(index); }; + + const { trackEvent } = useAnalytics(); + + useEffect(() => { + trackEvent( + INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION, + { + integration: integrationId, + }, + ); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + return (
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss index 81dcb6bf59..bf542f539d 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss @@ -260,7 +260,7 @@ .logs-section { .table-row-dark { - background: rgba(255, 255, 255, 0.01); + background: var(--bg-vanilla-300); } .logs-section-table { @@ -271,7 +271,7 @@ .metrics-section { .table-row-dark { - background: rgba(255, 255, 255, 0.01); + background: var(--bg-vanilla-300); } .metrics-section-table { diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx index cab49391f5..f630f3ecc4 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx @@ -5,12 +5,14 @@ import { Button, Modal, Tooltip, Typography } from 'antd'; import installIntegration from 'api/Integrations/installIntegration'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import dayjs from 'dayjs'; +import useAnalytics from 'hooks/analytics/useAnalytics'; import { useNotifications } from 'hooks/useNotifications'; import { ArrowLeftRight, Check } from 'lucide-react'; import { useState } from 'react'; import { useMutation } from 'react-query'; import { IntegrationConnectionStatus } from 'types/api/integrations/types'; +import { INTEGRATION_TELEMETRY_EVENTS } from '../utils'; import TestConnection, { ConnectionStates } from './TestConnection'; interface IntegrationDetailHeaderProps { @@ -37,6 
+39,8 @@ function IntegrationDetailHeader( } = props; const [isModalOpen, setIsModalOpen] = useState(false); + const { trackEvent } = useAnalytics(); + const { notifications } = useNotifications(); const showModal = (): void => { @@ -120,8 +124,18 @@ function IntegrationDetailHeader( disabled={isInstallLoading} onClick={(): void => { if (connectionState === ConnectionStates.NotInstalled) { + trackEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_CONNECT, { + integration: id, + }); mutate({ integration_id: id, config: {} }); } else { + trackEvent( + INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_TEST_CONNECTION, + { + integration: id, + connectionStatus: connectionState, + }, + ); showModal(); } }} diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx index 88be0dc3a3..a0e97dfe1c 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx @@ -123,6 +123,7 @@ function IntegrationDetailPage(props: IntegrationDetailPageProps): JSX.Element { {connectionStatus !== ConnectionStates.NotInstalled && ( @@ -130,6 +131,7 @@ function IntegrationDetailPage(props: IntegrationDetailPageProps): JSX.Element { integrationTitle={defaultTo(integrationData?.title, '')} integrationId={selectedIntegration} refetchIntegrationDetails={refetch} + connectionStatus={connectionStatus} /> )} diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx index 41e985abf8..a1ad762ec6 100644 --- a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx +++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx @@ -3,23 +3,35 @@ import './IntegrationDetailPage.styles.scss'; import { 
Button, Modal, Typography } from 'antd'; import unInstallIntegration from 'api/Integrations/uninstallIntegration'; import { SOMETHING_WENT_WRONG } from 'constants/api'; +import useAnalytics from 'hooks/analytics/useAnalytics'; import { useNotifications } from 'hooks/useNotifications'; import { X } from 'lucide-react'; import { useState } from 'react'; import { useMutation } from 'react-query'; +import { INTEGRATION_TELEMETRY_EVENTS } from '../utils'; +import { ConnectionStates } from './TestConnection'; + interface IntergrationsUninstallBarProps { integrationTitle: string; integrationId: string; refetchIntegrationDetails: () => void; + connectionStatus: ConnectionStates; } function IntergrationsUninstallBar( props: IntergrationsUninstallBarProps, ): JSX.Element { - const { integrationTitle, integrationId, refetchIntegrationDetails } = props; + const { + integrationTitle, + integrationId, + refetchIntegrationDetails, + connectionStatus, + } = props; const { notifications } = useNotifications(); const [isModalOpen, setIsModalOpen] = useState(false); + const { trackEvent } = useAnalytics(); + const { mutate: uninstallIntegration, isLoading: isUninstallLoading, @@ -40,6 +52,13 @@ function IntergrationsUninstallBar( }; const handleOk = (): void => { + trackEvent( + INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_DETAIL_REMOVE_INTEGRATION, + { + integration: integrationId, + integrationStatus: connectionStatus, + }, + ); uninstallIntegration({ integration_id: integrationId, }); diff --git a/frontend/src/pages/Integrations/Integrations.styles.scss b/frontend/src/pages/Integrations/Integrations.styles.scss index 794b596407..aec8433a26 100644 --- a/frontend/src/pages/Integrations/Integrations.styles.scss +++ b/frontend/src/pages/Integrations/Integrations.styles.scss @@ -6,7 +6,7 @@ .integrations-content { width: calc(100% - 30px); - max-width: 736px; + max-width: 920px; .integrations-header { .title { diff --git a/frontend/src/pages/Integrations/Integrations.tsx 
b/frontend/src/pages/Integrations/Integrations.tsx index bda4184eab..1f1644fbc7 100644 --- a/frontend/src/pages/Integrations/Integrations.tsx +++ b/frontend/src/pages/Integrations/Integrations.tsx @@ -1,18 +1,22 @@ import './Integrations.styles.scss'; +import useAnalytics from 'hooks/analytics/useAnalytics'; import useUrlQuery from 'hooks/useUrlQuery'; -import { useCallback, useMemo, useState } from 'react'; +import { useCallback, useEffect, useMemo, useState } from 'react'; import { useHistory, useLocation } from 'react-router-dom'; import Header from './Header'; import IntegrationDetailPage from './IntegrationDetailPage/IntegrationDetailPage'; import IntegrationsList from './IntegrationsList'; +import { INTEGRATION_TELEMETRY_EVENTS } from './utils'; function Integrations(): JSX.Element { const urlQuery = useUrlQuery(); const history = useHistory(); const location = useLocation(); + const { trackPageView, trackEvent } = useAnalytics(); + const selectedIntegration = useMemo(() => urlQuery.get('integration'), [ urlQuery, ]); @@ -20,6 +24,9 @@ function Integrations(): JSX.Element { const setSelectedIntegration = useCallback( (integration: string | null) => { if (integration) { + trackEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATIONS_ITEM_LIST_CLICKED, { + integration, + }); urlQuery.set('integration', integration); } else { urlQuery.set('integration', ''); @@ -27,13 +34,18 @@ function Integrations(): JSX.Element { const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; history.push(generatedUrl); }, - [history, location.pathname, urlQuery], + [history, location.pathname, trackEvent, urlQuery], ); const [activeDetailTab, setActiveDetailTab] = useState( 'overview', ); + useEffect(() => { + trackPageView(location.pathname); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const [searchTerm, setSearchTerm] = useState(''); return (
diff --git a/frontend/src/pages/Integrations/utils.ts b/frontend/src/pages/Integrations/utils.ts index 81c70b6091..a244da4c82 100644 --- a/frontend/src/pages/Integrations/utils.ts +++ b/frontend/src/pages/Integrations/utils.ts @@ -7,3 +7,15 @@ export const handleContactSupport = (isCloudUser: boolean): void => { window.open('https://signoz.io/slack', '_blank'); } }; + +export const INTEGRATION_TELEMETRY_EVENTS = { + INTEGRATIONS_ITEM_LIST_CLICKED: 'Integrations Page: Clicked an integration', + INTEGRATIONS_DETAIL_CONNECT: + 'Integrations Detail Page: Clicked connect integration button', + INTEGRATIONS_DETAIL_TEST_CONNECTION: + 'Integrations Detail Page: Clicked test Connection button for integration', + INTEGRATIONS_DETAIL_REMOVE_INTEGRATION: + 'Integrations Detail Page: Clicked remove Integration button for integration', + INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION: + 'Integrations Detail Page: Navigated to configure an integration', +}; diff --git a/frontend/src/pages/IntegrationsModulePage/constants.tsx b/frontend/src/pages/IntegrationsModulePage/constants.tsx index d0100798a8..333c9df44c 100644 --- a/frontend/src/pages/IntegrationsModulePage/constants.tsx +++ b/frontend/src/pages/IntegrationsModulePage/constants.tsx @@ -10,6 +10,6 @@ export const installedIntegrations: TabRoutes = { Integrations
), - route: ROUTES.INTEGRATIONS_INSTALLED, - key: ROUTES.INTEGRATIONS_INSTALLED, + route: ROUTES.INTEGRATIONS, + key: ROUTES.INTEGRATIONS, }; diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts index b18be180cd..1c992965d4 100644 --- a/frontend/src/utils/permission/index.ts +++ b/frontend/src/utils/permission/index.ts @@ -96,7 +96,5 @@ export const routePermission: Record = { LOGS_BASE: [], OLD_LOGS_EXPLORER: [], SHORTCUTS: ['ADMIN', 'EDITOR', 'VIEWER'], - INTEGRATIONS_BASE: ['ADMIN', 'EDITOR', 'VIEWER'], - INTEGRATIONS_INSTALLED: ['ADMIN', 'EDITOR', 'VIEWER'], - INTEGRATIONS_MARKETPLACE: ['ADMIN', 'EDITOR', 'VIEWER'], + INTEGRATIONS: ['ADMIN', 'EDITOR', 'VIEWER'], }; From 51abe714216ed499ad2e025e01f5826d42e8d0e2 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Mon, 1 Apr 2024 13:56:59 +0530 Subject: [PATCH 46/53] fix: do not move to next step if env not selected in onboarding (#4784) --- .../common/ModuleStepsContainer/ModuleStepsContainer.tsx | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx index 272d2b5083..662daedeaa 100644 --- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx +++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx @@ -16,7 +16,7 @@ import { DataSourceType } from 'container/OnboardingContainer/Steps/DataSource/D import { hasFrameworks } from 'container/OnboardingContainer/utils/dataSourceUtils'; import useAnalytics from 'hooks/analytics/useAnalytics'; import history from 'lib/history'; -import { isEmpty } from 'lodash-es'; +import { isEmpty, isNull } from 'lodash-es'; import { useState } from 'react'; import { useOnboardingContext } from '../../context/OnboardingContext'; @@ -91,7 +91,10 @@ 
export default function ModuleStepsContainer({ name: selectedDataSourceName = '', } = selectedDataSource as DataSourceType; - if (step.id === environmentDetailsStep && selectedEnvironment === '') { + if ( + step.id === environmentDetailsStep && + (selectedEnvironment === '' || isNull(selectedEnvironment)) + ) { updateErrorDetails('Please select environment'); return false; } From 5e0e9da6c4748080a838f3e4fca9ced152bf569f Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Mon, 1 Apr 2024 14:51:40 +0530 Subject: [PATCH 47/53] fix: hotfix bug in enhance query (#4783) --- pkg/query-service/app/logs/v3/enrich_query.go | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pkg/query-service/app/logs/v3/enrich_query.go b/pkg/query-service/app/logs/v3/enrich_query.go index 904b348999..06445d164d 100644 --- a/pkg/query-service/app/logs/v3/enrich_query.go +++ b/pkg/query-service/app/logs/v3/enrich_query.go @@ -153,9 +153,12 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu return field } - // enrich with default values if metadata is not found - field.Type = v3.AttributeKeyTypeTag - field.DataType = v3.AttributeKeyDataTypeString + if field.Type == "" { + field.Type = v3.AttributeKeyTypeTag + } + if field.DataType == "" { + field.DataType = v3.AttributeKeyDataTypeString + } return field } From 8c02f8ec31a0cf08e64a178b4a05b77e51635a38 Mon Sep 17 00:00:00 2001 From: Vishal Sharma Date: Mon, 1 Apr 2024 15:06:38 +0530 Subject: [PATCH 48/53] chore: rate limit param (#4785) --- pkg/query-service/app/http_handler.go | 4 ++-- pkg/query-service/model/queryParams.go | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index aab6cb3393..7f9e6795a7 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -401,7 +401,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) 
{ router.HandleFunc("/api/v1/explorer/views/{viewId}", am.EditAccess(aH.deleteSavedView)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/feedback", am.OpenAccess(aH.submitFeedback)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/events", am.ViewAccess(aH.registerEvent)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/event", am.ViewAccess(aH.registerEvent)).Methods(http.MethodPost) // router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet) router.HandleFunc("/api/v1/services", am.ViewAccess(aH.getServices)).Methods(http.MethodPost) @@ -1516,7 +1516,7 @@ func (aH *APIHandler) registerEvent(w http.ResponseWriter, r *http.Request) { } userEmail, err := auth.GetEmailFromJwt(r.Context()) if err == nil { - telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, userEmail, true, true) + telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, userEmail, request.RateLimited, true) aH.WriteJSON(w, r, map[string]string{"data": "Event Processed Successfully"}) } else { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) diff --git a/pkg/query-service/model/queryParams.go b/pkg/query-service/model/queryParams.go index 11020a0abf..4a24dba2b6 100644 --- a/pkg/query-service/model/queryParams.go +++ b/pkg/query-service/model/queryParams.go @@ -165,8 +165,9 @@ type GetTopOperationsParams struct { } type RegisterEventParams struct { - EventName string `json:"eventName"` - Attributes map[string]interface{} `json:"attributes"` + EventName string `json:"eventName"` + Attributes map[string]interface{} `json:"attributes"` + RateLimited bool `json:"rateLimited"` } type GetUsageParams struct { From 1610b95b84d1e1bef8a70a704a9e26d14fdb666f Mon Sep 17 00:00:00 2001 From: Yunus M Date: Mon, 1 Apr 2024 19:09:16 +0530 Subject: [PATCH 49/53] =?UTF-8?q?feat:=20onboarding=20flow=20-=20enable=20?= =?UTF-8?q?users=20to=20submit=20request=20for=20a=20new=20data=E2=80=A6?= 
=?UTF-8?q?=20(#4786)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: onboarding flow - enable users to submit request for a new data source , environment * chore: request data source to be available for all modules * chore: remove hardcoded value --- frontend/src/api/common/logEvent.ts | 28 +++ .../Onboarding.styles.scss | 21 +- .../Steps/DataSource/DataSource.styles.scss | 5 + .../Steps/DataSource/DataSource.tsx | 182 +++++++++++++----- .../EnvironmentDetails/EnvironmentDetails.tsx | 111 ++++++++++- frontend/src/periscope.scss | 5 + frontend/src/types/api/events/types.ts | 9 + frontend/src/typings/window.ts | 1 + 8 files changed, 311 insertions(+), 51 deletions(-) create mode 100644 frontend/src/api/common/logEvent.ts create mode 100644 frontend/src/types/api/events/types.ts diff --git a/frontend/src/api/common/logEvent.ts b/frontend/src/api/common/logEvent.ts new file mode 100644 index 0000000000..212d382d77 --- /dev/null +++ b/frontend/src/api/common/logEvent.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { EventSuccessPayloadProps } from 'types/api/events/types'; + +const logEvent = async ( + eventName: string, + attributes: Record, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/event', { + eventName, + attributes, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default logEvent; diff --git a/frontend/src/container/OnboardingContainer/Onboarding.styles.scss b/frontend/src/container/OnboardingContainer/Onboarding.styles.scss index 4e00b629c8..018c9af352 100644 --- a/frontend/src/container/OnboardingContainer/Onboarding.styles.scss +++ 
b/frontend/src/container/OnboardingContainer/Onboarding.styles.scss @@ -58,10 +58,15 @@ box-sizing: border-box; cursor: pointer; width: 400px; + transition: 0.3s; .ant-card-body { padding: 0px; } + + &:hover { + transform: scale(1.05); + } } .moduleTitleStyle { @@ -73,6 +78,7 @@ white-space: nowrap; overflow: hidden; text-overflow: ellipsis; + text-align: center; } .moduleStyles.selected { @@ -107,8 +113,8 @@ .actionButtonsContainer { display: flex; - justify-content: space-between; align-items: center; + gap: 8px; box-sizing: border-box; align-items: center; } @@ -137,3 +143,16 @@ padding-left: 4px; } } + +.request-entity-container { + display: flex; + flex-direction: row; + justify-content: space-between; + align-items: center; + + border-radius: 4px; + border: 0.5px solid rgba(78, 116, 248, 0.2); + background: rgba(69, 104, 220, 0.1); + padding: 12px; + margin: 24px 0; +} diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.styles.scss b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.styles.scss index a991bb216d..a3d8468559 100644 --- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.styles.scss +++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.styles.scss @@ -22,6 +22,7 @@ div[class*='-setup-instructions-container'] { .form-container { display: flex; + flex-direction: column; align-items: flex-start; width: 100%; gap: 16px; @@ -36,3 +37,7 @@ div[class*='-setup-instructions-container'] { text-align: center; font-size: 12px; } + +.service-name-container { + width: 100%; +} diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx index 13296671ec..5a06cdef94 100644 --- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx @@ -2,7 +2,9 @@ /* 
eslint-disable jsx-a11y/click-events-have-key-events */ import './DataSource.styles.scss'; -import { Card, Form, Input, Select, Typography } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; +import { Button, Card, Form, Input, Select, Space, Typography } from 'antd'; +import logEvent from 'api/common/logEvent'; import cx from 'classnames'; import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext'; import { useCases } from 'container/OnboardingContainer/OnboardingContainer'; @@ -11,7 +13,10 @@ import { getSupportedFrameworks, hasFrameworks, } from 'container/OnboardingContainer/utils/dataSourceUtils'; +import { useNotifications } from 'hooks/useNotifications'; +import { Check } from 'lucide-react'; import { useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; import { popupContainer } from 'utils/selectPopupContainer'; export interface DataSourceType { @@ -23,6 +28,7 @@ export interface DataSourceType { export default function DataSource(): JSX.Element { const [form] = Form.useForm(); + const { t } = useTranslation(['common']); const { serviceName, @@ -42,6 +48,15 @@ export default function DataSource(): JSX.Element { DataSourceType[] >([]); + const requestedDataSourceName = Form.useWatch('requestedDataSourceName', form); + + const [ + isSubmittingRequestForDataSource, + setIsSubmittingRequestForDataSource, + ] = useState(false); + + const { notifications } = useNotifications(); + const [enableFrameworks, setEnableFrameworks] = useState(false); useEffect(() => { @@ -74,12 +89,49 @@ export default function DataSource(): JSX.Element { } }, [selectedModule, selectedDataSource]); + const handleRequestDataSourceSubmit = async (): Promise => { + try { + setIsSubmittingRequestForDataSource(true); + const response = await logEvent('Onboarding V2: Data Source Requested', { + module: selectedModule?.id, + dataSource: requestedDataSourceName, + }); + + if (response.statusCode === 200) { + 
notifications.success({ + message: 'Data Source Request Submitted', + }); + + form.setFieldValue('requestedDataSourceName', ''); + + setIsSubmittingRequestForDataSource(false); + } else { + notifications.error({ + message: + response.error || + t('something_went_wrong', { + ns: 'common', + }), + }); + + setIsSubmittingRequestForDataSource(false); + } + } catch (error) { + notifications.error({ + message: t('something_went_wrong', { + ns: 'common', + }), + }); + + setIsSubmittingRequestForDataSource(false); + } + }; + return (
* Select Data Source -
{supportedDataSources?.map((dataSource) => ( - {selectedModule?.id === useCases.APM.id && ( -
-
-
{ - const serviceName = form.getFieldValue('serviceName'); +
+
+ { + const serviceName = form.getFieldValue('serviceName'); - updateServiceName(serviceName); - }} - name="data-source-form" - style={{ minWidth: '300px' }} - layout="vertical" - validateTrigger="onBlur" - > - - - + updateServiceName(serviceName); + }} + name="data-source-form" + layout="vertical" + validateTrigger="onBlur" + > + {selectedModule?.id === useCases.APM.id && ( + <> + + + - {enableFrameworks && ( -
+ {enableFrameworks && ( +
+ + updateSelectedFramework(value)} - options={supportedframeworks} - /> + -
- )} - -
+ + +
+
+
- )} +
); } diff --git a/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx b/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx index f4f8381de7..02fac551df 100644 --- a/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/EnvironmentDetails/EnvironmentDetails.tsx @@ -1,8 +1,13 @@ -import { Card, Typography } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; +import { Button, Card, Form, Input, Space, Typography } from 'antd'; +import logEvent from 'api/common/logEvent'; import cx from 'classnames'; import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext'; import { useCases } from 'container/OnboardingContainer/OnboardingContainer'; -import { Server } from 'lucide-react'; +import { useNotifications } from 'hooks/useNotifications'; +import { Check, Server } from 'lucide-react'; +import { useState } from 'react'; +import { useTranslation } from 'react-i18next'; interface SupportedEnvironmentsProps { name: string; @@ -33,16 +38,78 @@ const supportedEnvironments: SupportedEnvironmentsProps[] = [ ]; export default function EnvironmentDetails(): JSX.Element { + const [form] = Form.useForm(); + const { t } = useTranslation(['common']); + const { selectedEnvironment, updateSelectedEnvironment, selectedModule, + selectedDataSource, + selectedFramework, errorDetails, updateErrorDetails, } = useOnboardingContext(); + const requestedEnvironmentName = Form.useWatch( + 'requestedEnvironmentName', + form, + ); + + const { notifications } = useNotifications(); + + const [ + isSubmittingRequestForEnvironment, + setIsSubmittingRequestForEnvironment, + ] = useState(false); + + const handleRequestedEnvironmentSubmit = async (): Promise => { + try { + setIsSubmittingRequestForEnvironment(true); + const response = await logEvent('Onboarding V2: Environment Requested', 
{ + module: selectedModule?.id, + dataSource: selectedDataSource?.id, + framework: selectedFramework, + environment: requestedEnvironmentName, + }); + + if (response.statusCode === 200) { + notifications.success({ + message: 'Environment Request Submitted', + }); + + form.setFieldValue('requestedEnvironmentName', ''); + + setIsSubmittingRequestForEnvironment(false); + } else { + notifications.error({ + message: + response.error || + t('something_went_wrong', { + ns: 'common', + }), + }); + + setIsSubmittingRequestForEnvironment(false); + } + } catch (error) { + notifications.error({ + message: t('something_went_wrong', { + ns: 'common', + }), + }); + + setIsSubmittingRequestForEnvironment(false); + } + }; + return ( - <> +
* Select Environment @@ -80,11 +147,47 @@ export default function EnvironmentDetails(): JSX.Element { })}
+
+ + Cannot find what you’re looking for? Request a data source + + +
+ + + + + + +
+
+ {errorDetails && (
{errorDetails}
)} - + ); } diff --git a/frontend/src/periscope.scss b/frontend/src/periscope.scss index d32ac10973..53c14deb53 100644 --- a/frontend/src/periscope.scss +++ b/frontend/src/periscope.scss @@ -30,6 +30,11 @@ background-color: #4566d6; box-shadow: 0 2px 0 rgba(62, 86, 245, 0.09); } + + + &:disabled { + opacity: 0.5; + } } .periscope-tab { diff --git a/frontend/src/types/api/events/types.ts b/frontend/src/types/api/events/types.ts new file mode 100644 index 0000000000..e5b8cd8bd0 --- /dev/null +++ b/frontend/src/types/api/events/types.ts @@ -0,0 +1,9 @@ +export interface EventSuccessPayloadProps { + status: string; + data: string; +} + +export interface EventRequestPayloadProps { + eventName: string; + attributes: Record; +} diff --git a/frontend/src/typings/window.ts b/frontend/src/typings/window.ts index 02197e2444..3bff939c2d 100644 --- a/frontend/src/typings/window.ts +++ b/frontend/src/typings/window.ts @@ -6,6 +6,7 @@ declare global { interface Window { store: Store; clarity: ClarityType; + Intercom: any; analytics: Record; __REDUX_DEVTOOLS_EXTENSION_COMPOSE__: typeof compose; } From 3babce3ecf329110e995d4415a9cf2551a0be47f Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Tue, 2 Apr 2024 11:31:42 +0530 Subject: [PATCH 50/53] fix: added dashboard and QB shortcuts to the sidenav (#4791) --- .../constants/shortcuts/DashboardShortcuts.ts | 5 +++-- frontend/src/constants/shortcuts/QBShortcuts.ts | 2 +- frontend/src/pages/Shortcuts/utils.ts | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/frontend/src/constants/shortcuts/DashboardShortcuts.ts b/frontend/src/constants/shortcuts/DashboardShortcuts.ts index ee861708f7..aaa81cb3c1 100644 --- a/frontend/src/constants/shortcuts/DashboardShortcuts.ts +++ b/frontend/src/constants/shortcuts/DashboardShortcuts.ts @@ -9,9 +9,10 @@ export const DashboardShortcuts = { export const DashboardShortcutsName = { SaveChanges: `${userOS === UserOperatingSystem.MACOS ? 
'cmd' : 'ctrl'}+s`, + DiscardChanges: `${userOS === UserOperatingSystem.MACOS ? 'cmd' : 'ctrl'}+d`, }; export const DashboardShortcutsDescription = { - SaveChanges: 'Save Changes', - DiscardChanges: 'Discard Changes', + SaveChanges: 'Save Changes for panel', + DiscardChanges: 'Discard Changes for panel', }; diff --git a/frontend/src/constants/shortcuts/QBShortcuts.ts b/frontend/src/constants/shortcuts/QBShortcuts.ts index 56fea081df..d1d841595a 100644 --- a/frontend/src/constants/shortcuts/QBShortcuts.ts +++ b/frontend/src/constants/shortcuts/QBShortcuts.ts @@ -13,5 +13,5 @@ export const QBShortcutsName = { }; export const QBShortcutsDescription = { - StageAndRunQuery: 'Stage and Run the query', + StageAndRunQuery: 'Stage and Run the current query', }; diff --git a/frontend/src/pages/Shortcuts/utils.ts b/frontend/src/pages/Shortcuts/utils.ts index 21dfa28767..5f03b0e86a 100644 --- a/frontend/src/pages/Shortcuts/utils.ts +++ b/frontend/src/pages/Shortcuts/utils.ts @@ -1,4 +1,9 @@ import { TableProps } from 'antd'; +import { + DashboardShortcuts, + DashboardShortcutsDescription, + DashboardShortcutsName, +} from 'constants/shortcuts/DashboardShortcuts'; import { GlobalShortcuts, GlobalShortcutsDescription, @@ -9,16 +14,25 @@ import { LogsExplorerShortcutsDescription, LogsExplorerShortcutsName, } from 'constants/shortcuts/logsExplorerShortcuts'; +import { + QBShortcuts, + QBShortcutsDescription, + QBShortcutsName, +} from 'constants/shortcuts/QBShortcuts'; // eslint-disable-next-line @typescript-eslint/naming-convention export const ALL_SHORTCUTS: Record> = { 'Global Shortcuts': GlobalShortcuts, 'Logs Explorer Shortcuts': LogsExplorerShortcuts, + 'Query Builder Shortcuts': QBShortcuts, + 'Dashboard Shortcuts': DashboardShortcuts, }; export const ALL_SHORTCUTS_LABEL: Record> = { 'Global Shortcuts': GlobalShortcutsName, 'Logs Explorer Shortcuts': LogsExplorerShortcutsName, + 'Query Builder Shortcuts': QBShortcutsName, + 'Dashboard Shortcuts': DashboardShortcutsName, }; 
export const ALL_SHORTCUTS_DESCRIPTION: Record< @@ -27,6 +41,8 @@ export const ALL_SHORTCUTS_DESCRIPTION: Record< > = { 'Global Shortcuts': GlobalShortcutsDescription, 'Logs Explorer Shortcuts': LogsExplorerShortcutsDescription, + 'Query Builder Shortcuts': QBShortcutsDescription, + 'Dashboard Shortcuts': DashboardShortcutsDescription, }; export const shortcutColumns = [ From 7a7d814288445d8a577382d575424963aa481161 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Tue, 2 Apr 2024 12:38:10 +0530 Subject: [PATCH 51/53] fix: sidenav items overlapping in small screens (#4789) --- .../src/container/SideNav/SideNav.styles.scss | 95 +++++++---- frontend/src/container/SideNav/SideNav.tsx | 148 +++++++++--------- 2 files changed, 138 insertions(+), 105 deletions(-) diff --git a/frontend/src/container/SideNav/SideNav.styles.scss b/frontend/src/container/SideNav/SideNav.styles.scss index 2cc32e12f3..b796c9727c 100644 --- a/frontend/src/container/SideNav/SideNav.styles.scss +++ b/frontend/src/container/SideNav/SideNav.styles.scss @@ -78,36 +78,65 @@ box-shadow: none !important; } } + .nav-wrapper { + height: calc(100% - 52px); - .secondary-nav-items { - border-top: 1px solid var(--bg-slate-400); - padding: 8px 0; - max-width: 100%; - position: fixed; - bottom: 0; - left: 0; - width: 240px; + .primary-nav-items { + max-height: 65%; + overflow-y: auto; + overflow-x: hidden; - transition: all 0.3s, background 0s, border 0s; - - // position: relative; - - .collapse-expand-handlers { - position: absolute; - - top: -9px; - right: -9px; - cursor: pointer; - - display: none; - - transition: display 0.3s; - - svg { - fill: var(--bg-vanilla-400); - color: var(--bg-slate-300); + &::-webkit-scrollbar { + width: 0.1rem; } } + .secondary-nav-items { + max-height: 35%; + overflow-y: auto; + overflow-x: hidden; + border-top: 1px solid var(--bg-slate-400); + padding: 8px 0; + max-width: 100%; + position: fixed; + bottom: 0; + left: 0; + width: 240px; + + transition: all 0.3s, background 0s, 
border 0s; + + &::-webkit-scrollbar { + width: 0.1rem; + } + + .collapse-expand-handlers { + position: absolute; + + top: -9px; + right: -9px; + cursor: pointer; + + display: none; + + transition: display 0.3s; + + svg { + fill: var(--bg-vanilla-400); + color: var(--bg-slate-300); + } + } + } + } + + .nav-wrapper-cloud { + height: calc(100% - 88px); + + .secondary-nav-items { + max-height: 30%; + } + + .primary-nav-items { + max-height: 70%; + } } &.collapsed { @@ -157,13 +186,15 @@ } } - .secondary-nav-items { - border-top: 1px solid var(--bg-vanilla-400); + .nav-wrapper { + .secondary-nav-items { + border-top: 1px solid var(--bg-vanilla-400); - .collapse-expand-handlers { - svg { - color: var(--bg-slate-300); - fill: var(--bg-vanilla-400); + .collapse-expand-handlers { + svg { + color: var(--bg-slate-300); + fill: var(--bg-vanilla-400); + } } } } diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index 6b11ae140c..8ff08ca85e 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -375,90 +375,92 @@ function SideNav({
)} -
- {menuItems.map((item, index) => ( - { - handleMenuItemClick(event, item); - }} - /> - ))} -
- -
- - - {licenseData && !isLicenseActive && ( - - )} - - {userManagementMenuItems.map( - (item, index): JSX.Element => ( +
+
+ {menuItems.map((item, index) => ( { - handleUserManagentMenuItemClick(item?.key as string, event); + isActive={activeMenuKey === item.key} + onClick={(event): void => { + handleMenuItemClick(event, item); }} /> - ), - )} + ))} +
- {inviteMembers && ( +
{ - if (isCtrlMetaKey(event)) { - openInNewTab(`${inviteMemberMenuItem.key}`); - } else { - history.push(`${inviteMemberMenuItem.key}`); - } - }} + key="keyboardShortcuts" + item={shortcutMenuItem} + isActive={false} + onClick={onClickShortcuts} /> - )} - {user && ( - { - handleUserManagentMenuItemClick( - userSettingsMenuItem?.key as string, - event, - ); - }} - /> - )} - -
- {collapsed ? ( - - ) : ( - + {licenseData && !isLicenseActive && ( + )} + + {userManagementMenuItems.map( + (item, index): JSX.Element => ( + { + handleUserManagentMenuItemClick(item?.key as string, event); + }} + /> + ), + )} + + {inviteMembers && ( + { + if (isCtrlMetaKey(event)) { + openInNewTab(`${inviteMemberMenuItem.key}`); + } else { + history.push(`${inviteMemberMenuItem.key}`); + } + }} + /> + )} + + {user && ( + { + handleUserManagentMenuItemClick( + userSettingsMenuItem?.key as string, + event, + ); + }} + /> + )} + +
+ {collapsed ? ( + + ) : ( + + )} +
From ec9dbb6853324e0109f5dbcfdd963a1c7c2df88c Mon Sep 17 00:00:00 2001 From: Rajat Dabade Date: Tue, 2 Apr 2024 16:40:41 +0530 Subject: [PATCH 52/53] Dashboard Clean up and list view improvement. (#4675) * refactor: initial setup * refactor: created panelWrapper to separate panel data * fix: type error * fix: the dimension issue for graphs * refactor: done with table value uplot panels * refactor: done with logs panel component * refactor: updated props for log panel component * fix: query range duplicate issue for logs * refactor: trace list view done * fix: full view support * refactor: done with edit mode for panels * refactor: type and props * refactor: reduce an extra api call on edit for list view * refactor: done with full graph visibility handler * refactor: removed commented code * refactor: removed commented code * fix: build failure * refactor: updated service layer graphs * refactor: updated top level oparation query key * refactor: added drag select * refactor: done with drag select in chart * refactor: code cleanup * refactor: legend should not need stage and run query --- .../GridCard/FullView/index.tsx | 216 +++++++---------- .../GridCard/FullView/styles.ts | 1 + .../GridCardLayout/GridCard/FullView/types.ts | 2 - .../GridCard/WidgetGraphComponent.tsx | 50 ++-- .../GridCardLayout/GridCard/index.tsx | 159 +++--------- .../GridCardLayout/GridCard/types.ts | 23 +- .../GridCardLayout/GridCardLayout.tsx | 32 ++- .../src/container/GridPanelSwitch/index.tsx | 34 +-- .../src/container/GridPanelSwitch/types.ts | 4 +- .../LogsPanelTable/LogsPanelComponent.tsx | 228 +++++++----------- .../src/container/LogsPanelTable/utils.tsx | 51 ++++ .../MetricsApplication.factory.ts | 2 + .../MetricsApplication/Tabs/DBCall.tsx | 8 +- .../MetricsApplication/Tabs/External.tsx | 21 +- .../MetricsApplication/Tabs/Overview.tsx | 86 +++---- .../Tabs/Overview/ApDex/ApDexMetrics.tsx | 2 - .../Tabs/Overview/ApDex/ApDexTraces.tsx | 1 - .../Tabs/Overview/ServiceOverview.tsx | 65 
++--- .../Tabs/Overview/TopLevelOperations.tsx | 28 ++- .../container/MetricsApplication/styles.ts | 4 +- .../src/container/MetricsApplication/types.ts | 1 + .../LeftContainer/QuerySection/index.tsx | 23 +- .../WidgetGraph/WidgetGraphContainer.tsx | 69 ++---- .../WidgetGraph/WidgetGraphs.tsx | 181 ++++---------- .../LeftContainer/WidgetGraph/index.tsx | 52 +--- .../NewWidget/LeftContainer/index.tsx | 94 ++++++-- frontend/src/container/NewWidget/index.tsx | 99 +++++--- frontend/src/container/NewWidget/types.ts | 25 +- .../PanelWrapper/ListPanelWrapper.tsx | 37 +++ .../container/PanelWrapper/PanelWrapper.tsx | 40 +++ .../PanelWrapper/TablePanelWrapper.tsx | 24 ++ .../PanelWrapper/UplotPanelWrapper.tsx | 141 +++++++++++ .../PanelWrapper/ValuePanelWrapper.tsx | 21 ++ .../src/container/PanelWrapper/constants.ts | 16 ++ .../PanelWrapper/panelWrapper.types.ts | 22 ++ .../TracesTableComponent.tsx | 99 +++----- 36 files changed, 1005 insertions(+), 956 deletions(-) create mode 100644 frontend/src/container/PanelWrapper/ListPanelWrapper.tsx create mode 100644 frontend/src/container/PanelWrapper/PanelWrapper.tsx create mode 100644 frontend/src/container/PanelWrapper/TablePanelWrapper.tsx create mode 100644 frontend/src/container/PanelWrapper/UplotPanelWrapper.tsx create mode 100644 frontend/src/container/PanelWrapper/ValuePanelWrapper.tsx create mode 100644 frontend/src/container/PanelWrapper/constants.ts create mode 100644 frontend/src/container/PanelWrapper/panelWrapper.types.ts diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx index c24a731d5e..e4f14e8e19 100644 --- a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx @@ -1,62 +1,60 @@ import './WidgetFullView.styles.scss'; -import { SyncOutlined } from '@ant-design/icons'; -import { Button } from 'antd'; +import { LoadingOutlined, 
SyncOutlined } from '@ant-design/icons'; +import { Button, Spin } from 'antd'; import cx from 'classnames'; import { ToggleGraphProps } from 'components/Graph/types'; import Spinner from 'components/Spinner'; import TimePreference from 'components/TimePreferenceDropDown'; import { DEFAULT_ENTITY_VERSION } from 'constants/app'; +import { QueryParams } from 'constants/query'; import { PANEL_TYPES } from 'constants/queryBuilder'; -import GridPanelSwitch from 'container/GridPanelSwitch'; import { timeItems, timePreferance, } from 'container/NewWidget/RightContainer/timeItems'; +import PanelWrapper from 'container/PanelWrapper/PanelWrapper'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useStepInterval } from 'hooks/queryBuilder/useStepInterval'; import { useChartMutable } from 'hooks/useChartMutable'; -import { useIsDarkMode } from 'hooks/useDarkMode'; +import useUrlQuery from 'hooks/useUrlQuery'; import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables'; -import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions'; -import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; +import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; +import GetMinMax from 'lib/getMinMax'; +import history from 'lib/history'; import { useDashboard } from 'providers/Dashboard/Dashboard'; import { useCallback, useEffect, useRef, useState } from 'react'; -import { useSelector } from 'react-redux'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { UpdateTimeInterval } from 'store/actions'; import { AppState } from 'store/reducers'; import { GlobalReducer } from 'types/reducer/globalTime'; -import uPlot from 'uplot'; +import { getGraphType } from 'utils/getGraphType'; import { getSortedSeriesData } from 'utils/getSortedSeriesData'; -import { getTimeRange } from 'utils/getTimeRange'; import { getLocalStorageGraphVisibilityState 
} from '../utils'; import { PANEL_TYPES_VS_FULL_VIEW_TABLE } from './contants'; -import GraphManager from './GraphManager'; import { GraphContainer, TimeContainer } from './styles'; import { FullViewProps } from './types'; function FullView({ widget, fullViewOptions = true, - onClickHandler, - name, version, originalName, - yAxisUnit, - onDragSelect, isDependedDataLoaded = false, onToggleModelHandler, - parentChartRef, }: FullViewProps): JSX.Element { const { selectedTime: globalSelectedTime } = useSelector< AppState, GlobalReducer >((state) => state.globalTime); + const dispatch = useDispatch(); + const urlQuery = useUrlQuery(); + const location = useLocation(); const fullViewRef = useRef(null); - const [chartOptions, setChartOptions] = useState(); - const { selectedDashboard, isDashboardLocked } = useDashboard(); const getSelectedTime = useCallback( @@ -74,24 +72,70 @@ function FullView({ const updatedQuery = useStepInterval(widget?.query); - const response = useGetQueryRange( - { - selectedTime: selectedTime.enum, - graphType: - widget.panelTypes === PANEL_TYPES.BAR - ? 
PANEL_TYPES.TIME_SERIES - : widget.panelTypes, + const [requestData, setRequestData] = useState(() => { + if (widget.panelTypes !== PANEL_TYPES.LIST) { + return { + selectedTime: selectedTime.enum, + graphType: getGraphType(widget.panelTypes), + query: updatedQuery, + globalSelectedInterval: globalSelectedTime, + variables: getDashboardVariables(selectedDashboard?.data.variables), + }; + } + updatedQuery.builder.queryData[0].pageSize = 10; + return { query: updatedQuery, + graphType: PANEL_TYPES.LIST, + selectedTime: 'GLOBAL_TIME', globalSelectedInterval: globalSelectedTime, - variables: getDashboardVariables(selectedDashboard?.data.variables), - }, + tableParams: { + pagination: { + offset: 0, + limit: updatedQuery.builder.queryData[0].limit || 0, + }, + }, + }; + }); + + useEffect(() => { + setRequestData((prev) => ({ + ...prev, + selectedTime: selectedTime.enum, + })); + }, [selectedTime]); + + const response = useGetQueryRange( + requestData, selectedDashboard?.data?.version || version || DEFAULT_ENTITY_VERSION, { - queryKey: `FullViewGetMetricsQueryRange-${selectedTime.enum}-${globalSelectedTime}-${widget.id}`, - enabled: !isDependedDataLoaded && widget.panelTypes !== PANEL_TYPES.LIST, // Internally both the list view panel has it's own query range api call, so we don't need to call it again + queryKey: [widget?.query, widget?.panelTypes, requestData, version], + enabled: !isDependedDataLoaded, + keepPreviousData: true, }, ); + const onDragSelect = useCallback( + (start: number, end: number): void => { + const startTimestamp = Math.trunc(start); + const endTimestamp = Math.trunc(end); + + if (startTimestamp !== endTimestamp) { + dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); + } + + const { maxTime, minTime } = GetMinMax('custom', [ + startTimestamp, + endTimestamp, + ]); + + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + const generatedUrl = 
`${location.pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + }, + [dispatch, location.pathname, urlQuery], + ); + const [graphsVisibilityStates, setGraphsVisibilityStates] = useState< boolean[] >(Array(response.data?.payload.data.result.length).fill(true)); @@ -118,60 +162,6 @@ function FullView({ response.data.payload.data.result = sortedSeriesData; } - const chartData = getUPlotChartData(response?.data?.payload, widget.fillSpans); - - const isDarkMode = useIsDarkMode(); - - const [minTimeScale, setMinTimeScale] = useState(); - const [maxTimeScale, setMaxTimeScale] = useState(); - - const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector< - AppState, - GlobalReducer - >((state) => state.globalTime); - - useEffect((): void => { - const { startTime, endTime } = getTimeRange(response); - - setMinTimeScale(startTime); - setMaxTimeScale(endTime); - }, [maxTime, minTime, globalSelectedInterval, response]); - - useEffect(() => { - if (!response.isFetching && fullViewRef.current) { - const width = fullViewRef.current?.clientWidth - ? fullViewRef.current.clientWidth - 45 - : 700; - - const height = fullViewRef.current?.clientWidth - ? fullViewRef.current.clientHeight - : 300; - - const newChartOptions = getUPlotChartOptions({ - id: originalName, - yAxisUnit: yAxisUnit || '', - apiResponse: response.data?.payload, - dimensions: { - height, - width, - }, - isDarkMode, - onDragSelect, - graphsVisibilityStates, - setGraphsVisibilityStates, - thresholds: widget.thresholds, - minTimeScale, - maxTimeScale, - softMax: widget.softMax === undefined ? null : widget.softMax, - softMin: widget.softMin === undefined ? 
null : widget.softMin, - panelType: widget.panelTypes, - }); - - setChartOptions(newChartOptions); - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [response.isFetching, graphsVisibilityStates, fullViewRef.current]); - useEffect(() => { graphsVisibilityStates?.forEach((e, i) => { fullViewChartRef?.current?.toggleGraph(i, e); @@ -180,7 +170,7 @@ function FullView({ const isListView = widget.panelTypes === PANEL_TYPES.LIST; - if (response.isFetching) { + if (response.isLoading && widget.panelTypes !== PANEL_TYPES.LIST) { return ; } @@ -189,6 +179,9 @@ function FullView({
{fullViewOptions && ( + {response.isFetching && ( + } /> + )} - {chartOptions && ( - - - - )} + + +
- - {canModifyChart && chartOptions && !isDashboardLocked && ( - - )}
); } diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/styles.ts b/frontend/src/container/GridCardLayout/GridCard/FullView/styles.ts index f963b31371..0133b1a49b 100644 --- a/frontend/src/container/GridCardLayout/GridCard/FullView/styles.ts +++ b/frontend/src/container/GridCardLayout/GridCard/FullView/styles.ts @@ -18,6 +18,7 @@ export const NotFoundContainer = styled.div` export const TimeContainer = styled.div` display: flex; justify-content: flex-end; + align-items: center; ${({ $panelType }): FlattenSimpleInterpolation => $panelType === PANEL_TYPES.TABLE ? css` diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/types.ts b/frontend/src/container/GridCardLayout/GridCard/FullView/types.ts index 6b2e750ae9..7440278fd8 100644 --- a/frontend/src/container/GridCardLayout/GridCard/FullView/types.ts +++ b/frontend/src/container/GridCardLayout/GridCard/FullView/types.ts @@ -53,10 +53,8 @@ export interface FullViewProps { version?: string; originalName: string; yAxisUnit?: string; - onDragSelect: (start: number, end: number) => void; isDependedDataLoaded?: boolean; onToggleModelHandler?: GraphManagerProps['onToggleModelHandler']; - parentChartRef: GraphManagerProps['lineChartRef']; } export interface GraphManagerProps extends UplotProps { diff --git a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx index 3dddc46884..505a1864bc 100644 --- a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.tsx @@ -6,7 +6,7 @@ import { ToggleGraphProps } from 'components/Graph/types'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { QueryParams } from 'constants/query'; import { PANEL_TYPES } from 'constants/queryBuilder'; -import GridPanelSwitch from 'container/GridPanelSwitch'; +import PanelWrapper from 'container/PanelWrapper/PanelWrapper'; import 
{ useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard'; import { useNotifications } from 'hooks/useNotifications'; import useUrlQuery from 'hooks/useUrlQuery'; @@ -33,23 +33,20 @@ import FullView from './FullView'; import { Modal } from './styles'; import { WidgetGraphComponentProps } from './types'; import { getLocalStorageGraphVisibilityState } from './utils'; +// import { getLocalStorageGraphVisibilityState } from './utils'; function WidgetGraphComponent({ widget, queryResponse, errorMessage, - name, version, threshold, headerMenuList, isWarning, - data, - options, - graphVisibiltyState, + isFetchingResponse, + setRequestData, onClickHandler, onDragSelect, - setGraphVisibility, - isFetchingResponse, }: WidgetGraphComponentProps): JSX.Element { const [deleteModal, setDeleteModal] = useState(false); const [hovered, setHovered] = useState(false); @@ -61,12 +58,15 @@ function WidgetGraphComponent({ const isFullViewOpen = params.get(QueryParams.expandedWidgetId) === widget.id; const lineChartRef = useRef(); + const [graphVisibility, setGraphVisibility] = useState( + Array(queryResponse.data?.payload?.data.result.length || 0).fill(true), + ); const graphRef = useRef(null); useEffect(() => { if (!lineChartRef.current) return; - graphVisibiltyState.forEach((state, index) => { + graphVisibility.forEach((state, index) => { lineChartRef.current?.toggleGraph(index, state); }); // eslint-disable-next-line react-hooks/exhaustive-deps @@ -210,7 +210,7 @@ function WidgetGraphComponent({ graphVisibilityStates: localStoredVisibilityState, } = getLocalStorageGraphVisibilityState({ apiResponse: queryResponse.data.payload.data.result, - name, + name: widget.id, }); setGraphVisibility(localStoredVisibilityState); } @@ -252,7 +252,7 @@ function WidgetGraphComponent({ onBlur={(): void => { setHovered(false); }} - id={name} + id={widget.id} > @@ -305,26 +303,22 @@ function WidgetGraphComponent({ isFetchingResponse={isFetchingResponse} />
- {queryResponse.isLoading && } + {queryResponse.isLoading && widget.panelTypes !== PANEL_TYPES.LIST && ( + + )} {(queryResponse.isSuccess || widget.panelTypes === PANEL_TYPES.LIST) && (
-
)} diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx index d81e518222..363cba7f76 100644 --- a/frontend/src/container/GridCardLayout/GridCard/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx @@ -4,80 +4,43 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useStepInterval } from 'hooks/queryBuilder/useStepInterval'; -import { useIsDarkMode } from 'hooks/useDarkMode'; -import { useResizeObserver } from 'hooks/useDimensions'; import { useIntersectionObserver } from 'hooks/useIntersectionObserver'; -import useUrlQuery from 'hooks/useUrlQuery'; import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables'; -import GetMinMax from 'lib/getMinMax'; +import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; import getTimeString from 'lib/getTimeString'; -import history from 'lib/history'; -import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions'; -import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; import isEmpty from 'lodash-es/isEmpty'; -import _noop from 'lodash-es/noop'; import { useDashboard } from 'providers/Dashboard/Dashboard'; -import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { memo, useEffect, useRef, useState } from 'react'; import { useDispatch, useSelector } from 'react-redux'; -import { useLocation } from 'react-router-dom'; import { UpdateTimeInterval } from 'store/actions'; import { AppState } from 'store/reducers'; import { GlobalReducer } from 'types/reducer/globalTime'; import { getGraphType } from 'utils/getGraphType'; import { getSortedSeriesData } from 'utils/getSortedSeriesData'; -import { getTimeRange } from 'utils/getTimeRange'; import EmptyWidget from '../EmptyWidget'; 
import { MenuItemKeys } from '../WidgetHeader/contants'; import { GridCardGraphProps } from './types'; -import { getLocalStorageGraphVisibilityState } from './utils'; import WidgetGraphComponent from './WidgetGraphComponent'; function GridCardGraph({ widget, - name, - onClickHandler = _noop, headerMenuList = [MenuItemKeys.View], isQueryEnabled, threshold, variables, - fillSpans = false, version, + onClickHandler, + onDragSelect, }: GridCardGraphProps): JSX.Element { const dispatch = useDispatch(); const [errorMessage, setErrorMessage] = useState(); const { toScrollWidgetId, setToScrollWidgetId } = useDashboard(); - const [minTimeScale, setMinTimeScale] = useState(); - const [maxTimeScale, setMaxTimeScale] = useState(); - const urlQuery = useUrlQuery(); - const location = useLocation(); const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector< AppState, GlobalReducer >((state) => state.globalTime); - const onDragSelect = useCallback( - (start: number, end: number): void => { - const startTimestamp = Math.trunc(start); - const endTimestamp = Math.trunc(end); - - if (startTimestamp !== endTimestamp) { - dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); - } - - const { maxTime, minTime } = GetMinMax('custom', [ - startTimestamp, - endTimestamp, - ]); - - urlQuery.set(QueryParams.startTime, minTime.toString()); - urlQuery.set(QueryParams.endTime, maxTime.toString()); - const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; - history.push(generatedUrl); - }, - [dispatch, location.pathname, urlQuery], - ); - const handleBackNavigation = (): void => { const searchParams = new URLSearchParams(window.location.search); const startTime = searchParams.get(QueryParams.startTime); @@ -127,19 +90,39 @@ function GridCardGraph({ const isEmptyWidget = widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget); - const queryEnabledCondition = - isVisible && - !isEmptyWidget && - isQueryEnabled && - widget.panelTypes !== 
PANEL_TYPES.LIST; + const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled; + + const [requestData, setRequestData] = useState(() => { + if (widget.panelTypes !== PANEL_TYPES.LIST) { + return { + selectedTime: widget?.timePreferance, + graphType: getGraphType(widget.panelTypes), + query: updatedQuery, + globalSelectedInterval, + variables: getDashboardVariables(variables), + }; + } + updatedQuery.builder.queryData[0].pageSize = 10; + return { + query: updatedQuery, + graphType: PANEL_TYPES.LIST, + selectedTime: 'GLOBAL_TIME', + globalSelectedInterval, + tableParams: { + pagination: { + offset: 0, + limit: updatedQuery.builder.queryData[0].limit || 0, + }, + }, + }; + }); const queryResponse = useGetQueryRange( { - selectedTime: widget?.timePreferance, - graphType: getGraphType(widget.panelTypes), - query: updatedQuery, - globalSelectedInterval, + ...requestData, variables: getDashboardVariables(variables), + selectedTime: 'GLOBAL_TIME', + globalSelectedInterval, }, version || DEFAULT_ENTITY_VERSION, { @@ -151,6 +134,7 @@ function GridCardGraph({ widget?.query, widget?.panelTypes, widget.timePreferance, + requestData, ], retry(failureCount, error): boolean { if ( @@ -173,15 +157,6 @@ function GridCardGraph({ const isEmptyLayout = widget?.id === PANEL_TYPES.EMPTY_WIDGET; - const containerDimensions = useResizeObserver(graphRef); - - useEffect((): void => { - const { startTime, endTime } = getTimeRange(queryResponse); - - setMinTimeScale(startTime); - setMaxTimeScale(endTime); - }, [maxTime, minTime, globalSelectedInterval, queryResponse]); - if (queryResponse.data && widget.panelTypes === PANEL_TYPES.BAR) { const sortedSeriesData = getSortedSeriesData( queryResponse.data?.payload.data.result, @@ -189,89 +164,29 @@ function GridCardGraph({ queryResponse.data.payload.data.result = sortedSeriesData; } - const chartData = getUPlotChartData(queryResponse?.data?.payload, fillSpans); - - const isDarkMode = useIsDarkMode(); - const menuList = 
widget.panelTypes === PANEL_TYPES.TABLE || widget.panelTypes === PANEL_TYPES.LIST ? headerMenuList.filter((menu) => menu !== MenuItemKeys.CreateAlerts) : headerMenuList; - const [graphVisibility, setGraphVisibility] = useState( - Array(queryResponse.data?.payload?.data.result.length || 0).fill(true), - ); - - useEffect(() => { - const { - graphVisibilityStates: localStoredVisibilityState, - } = getLocalStorageGraphVisibilityState({ - apiResponse: queryResponse.data?.payload.data.result || [], - name, - }); - setGraphVisibility(localStoredVisibilityState); - }, [name, queryResponse.data?.payload.data.result]); - - const options = useMemo( - () => - getUPlotChartOptions({ - id: widget?.id, - apiResponse: queryResponse.data?.payload, - dimensions: containerDimensions, - isDarkMode, - onDragSelect, - yAxisUnit: widget?.yAxisUnit, - onClickHandler, - thresholds: widget.thresholds, - minTimeScale, - maxTimeScale, - softMax: widget.softMax === undefined ? null : widget.softMax, - softMin: widget.softMin === undefined ? null : widget.softMin, - graphsVisibilityStates: graphVisibility, - setGraphsVisibilityStates: setGraphVisibility, - panelType: widget.panelTypes, - }), - [ - widget?.id, - widget?.yAxisUnit, - widget.thresholds, - widget.softMax, - widget.softMin, - queryResponse.data?.payload, - containerDimensions, - isDarkMode, - onDragSelect, - onClickHandler, - minTimeScale, - maxTimeScale, - graphVisibility, - setGraphVisibility, - widget.panelTypes, - ], - ); - return (
{isEmptyLayout ? ( ) : ( )}
diff --git a/frontend/src/container/GridCardLayout/GridCard/types.ts b/frontend/src/container/GridCardLayout/GridCard/types.ts index 9e527c143d..1235a26440 100644 --- a/frontend/src/container/GridCardLayout/GridCard/types.ts +++ b/frontend/src/container/GridCardLayout/GridCard/types.ts @@ -1,9 +1,9 @@ import { ToggleGraphProps } from 'components/Graph/types'; -import { UplotProps } from 'components/Uplot/Uplot'; +import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin'; import { Dispatch, MutableRefObject, ReactNode, SetStateAction } from 'react'; import { UseQueryResult } from 'react-query'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { SuccessResponse } from 'types/api'; import { Dashboard, Widgets } from 'types/api/dashboard/getAll'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import uPlot from 'uplot'; @@ -16,35 +16,32 @@ export interface GraphVisibilityLegendEntryProps { legendEntry: LegendEntryProps[]; } -export interface WidgetGraphComponentProps extends UplotProps { +export interface WidgetGraphComponentProps { widget: Widgets; queryResponse: UseQueryResult< - SuccessResponse | ErrorResponse + SuccessResponse, + Error >; errorMessage: string | undefined; - name: string; version?: string; - onDragSelect: (start: number, end: number) => void; - onClickHandler?: OnClickPluginOpts['onClick']; threshold?: ReactNode; headerMenuList: MenuItemKeys[]; isWarning: boolean; - graphVisibiltyState: boolean[]; - setGraphVisibility: Dispatch>; isFetchingResponse: boolean; + setRequestData?: Dispatch>; + onClickHandler?: OnClickPluginOpts['onClick']; + onDragSelect: (start: number, end: number) => void; } export interface GridCardGraphProps { widget: Widgets; - name: string; - onDragSelect?: (start: number, end: number) => void; - onClickHandler?: OnClickPluginOpts['onClick']; threshold?: ReactNode; headerMenuList?: 
WidgetGraphComponentProps['headerMenuList']; + onClickHandler?: OnClickPluginOpts['onClick']; isQueryEnabled: boolean; variables?: Dashboard['data']['variables']; - fillSpans?: boolean; version?: string; + onDragSelect: (start: number, end: number) => void; } export interface GetGraphVisibilityStateOnLegendClickProps { diff --git a/frontend/src/container/GridCardLayout/GridCardLayout.tsx b/frontend/src/container/GridCardLayout/GridCardLayout.tsx index 16ae0eeaa5..b84e88b292 100644 --- a/frontend/src/container/GridCardLayout/GridCardLayout.tsx +++ b/frontend/src/container/GridCardLayout/GridCardLayout.tsx @@ -3,20 +3,25 @@ import './GridCardLayout.styles.scss'; import { PlusOutlined } from '@ant-design/icons'; import { Tooltip } from 'antd'; import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { QueryParams } from 'constants/query'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { themeColors } from 'constants/theme'; import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard'; import useComponentPermission from 'hooks/useComponentPermission'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useNotifications } from 'hooks/useNotifications'; +import useUrlQuery from 'hooks/useUrlQuery'; +import history from 'lib/history'; import isEqual from 'lodash-es/isEqual'; import { FullscreenIcon } from 'lucide-react'; import { useDashboard } from 'providers/Dashboard/Dashboard'; -import { useEffect, useState } from 'react'; +import { useCallback, useEffect, useState } from 'react'; import { FullScreen, useFullScreenHandle } from 'react-full-screen'; import { Layout } from 'react-grid-layout'; import { useTranslation } from 'react-i18next'; -import { useSelector } from 'react-redux'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { UpdateTimeInterval } from 'store/actions'; import { AppState } from 'store/reducers'; import { Dashboard, Widgets } from 
'types/api/dashboard/getAll'; import AppReducer from 'types/reducer/app'; @@ -45,6 +50,8 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element { } = useDashboard(); const { data } = selectedDashboard || {}; const handle = useFullScreenHandle(); + const { pathname } = useLocation(); + const dispatch = useDispatch(); const { widgets, variables } = data || {}; @@ -61,6 +68,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element { const updateDashboardMutation = useUpdateDashboard(); const { notifications } = useNotifications(); + const urlQuery = useUrlQuery(); let permissions: ComponentTypes[] = ['save_layout', 'add_panel']; @@ -126,6 +134,23 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element { } }; + const onDragSelect = useCallback( + (start: number, end: number) => { + const startTimestamp = Math.trunc(start); + const endTimestamp = Math.trunc(end); + + urlQuery.set(QueryParams.startTime, startTimestamp.toString()); + urlQuery.set(QueryParams.endTime, endTimestamp.toString()); + const generatedUrl = `${pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); + + if (startTimestamp !== endTimestamp) { + dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); + } + }, + [dispatch, pathname, urlQuery], + ); + useEffect(() => { if ( dashboardLayout && @@ -200,11 +225,10 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element { > diff --git a/frontend/src/container/GridPanelSwitch/index.tsx b/frontend/src/container/GridPanelSwitch/index.tsx index 18d4e7a63a..d8e526fecd 100644 --- a/frontend/src/container/GridPanelSwitch/index.tsx +++ b/frontend/src/container/GridPanelSwitch/index.tsx @@ -1,10 +1,8 @@ import { ToggleGraphProps } from 'components/Graph/types'; -import { DEFAULT_ENTITY_VERSION } from 'constants/app'; import { getComponentForPanelType } from 'constants/panelTypes'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { 
GRID_TABLE_CONFIG } from 'container/GridTableComponent/config'; import { FC, forwardRef, memo, useMemo } from 'react'; -import { DataSource } from 'types/common/queryBuilder'; import { GridPanelSwitchProps, PropsTypePropsMap } from './types'; @@ -21,10 +19,7 @@ const GridPanelSwitch = forwardRef< query, options, thresholds, - selectedLogFields, - selectedTracesFields, dataSource, - selectedTime, }, ref, ): JSX.Element | null => { @@ -46,20 +41,7 @@ const GridPanelSwitch = forwardRef< query, thresholds, }, - [PANEL_TYPES.LIST]: - dataSource === DataSource.LOGS - ? { - selectedLogsFields: selectedLogFields || [], - query, - version: DEFAULT_ENTITY_VERSION, // As we don't support for Metrics, defaulting to v3 - selectedTime, - } - : { - selectedTracesFields: selectedTracesFields || [], - query, - version: DEFAULT_ENTITY_VERSION, // As we don't support for Metrics, defaulting to v3 - selectedTime, - }, + [PANEL_TYPES.LIST]: null, [PANEL_TYPES.TRACE]: null, [PANEL_TYPES.BAR]: { data, @@ -70,19 +52,7 @@ const GridPanelSwitch = forwardRef< }; return result; - }, [ - data, - options, - ref, - yAxisUnit, - thresholds, - panelData, - query, - dataSource, - selectedLogFields, - selectedTime, - selectedTracesFields, - ]); + }, [data, options, ref, yAxisUnit, thresholds, panelData, query]); const Component = getComponentForPanelType(panelType, dataSource) as FC< PropsTypePropsMap[typeof panelType] diff --git a/frontend/src/container/GridPanelSwitch/types.ts b/frontend/src/container/GridPanelSwitch/types.ts index ea437bf9ff..e587d59717 100644 --- a/frontend/src/container/GridPanelSwitch/types.ts +++ b/frontend/src/container/GridPanelSwitch/types.ts @@ -2,9 +2,7 @@ import { StaticLineProps, ToggleGraphProps } from 'components/Graph/types'; import { UplotProps } from 'components/Uplot/Uplot'; import { GridTableComponentProps } from 'container/GridTableComponent/types'; import { GridValueComponentProps } from 'container/GridValueComponent/types'; -import { LogsPanelComponentProps } 
from 'container/LogsPanelTable/LogsPanelComponent'; import { timePreferance } from 'container/NewWidget/RightContainer/timeItems'; -import { TracesTableComponentProps } from 'container/TracesTableComponent/TracesTableComponent'; import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin'; import { ForwardedRef } from 'react'; import { Widgets } from 'types/api/dashboard/getAll'; @@ -40,7 +38,7 @@ export type PropsTypePropsMap = { [PANEL_TYPES.VALUE]: GridValueComponentProps; [PANEL_TYPES.TABLE]: GridTableComponentProps; [PANEL_TYPES.TRACE]: null; - [PANEL_TYPES.LIST]: LogsPanelComponentProps | TracesTableComponentProps; + [PANEL_TYPES.LIST]: null; [PANEL_TYPES.BAR]: UplotProps & { ref: ForwardedRef; }; diff --git a/frontend/src/container/LogsPanelTable/LogsPanelComponent.tsx b/frontend/src/container/LogsPanelTable/LogsPanelComponent.tsx index 618f732595..59f78499b3 100644 --- a/frontend/src/container/LogsPanelTable/LogsPanelComponent.tsx +++ b/frontend/src/container/LogsPanelTable/LogsPanelComponent.tsx @@ -4,82 +4,53 @@ import { Table } from 'antd'; import LogDetail from 'components/LogDetail'; import { VIEW_TYPES } from 'components/LogDetail/constants'; import { SOMETHING_WENT_WRONG } from 'constants/api'; -import { DEFAULT_ENTITY_VERSION } from 'constants/app'; -import { OPERATORS, PANEL_TYPES } from 'constants/queryBuilder'; -import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import { PANEL_TYPES } from 'constants/queryBuilder'; import Controls from 'container/Controls'; -import { timePreferance } from 'container/NewWidget/RightContainer/timeItems'; import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs'; import { tableStyles } from 'container/TracesExplorer/ListView/styles'; import { useActiveLog } from 'hooks/logs/useActiveLog'; -import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { Pagination } from 'hooks/queryPagination'; import { useLogsData } from 'hooks/useLogsData'; -import { 
getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables'; import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; import { FlatLogData } from 'lib/logs/flatLogData'; import { RowData } from 'lib/query/createTableColumnsFromQuery'; -import { useDashboard } from 'providers/Dashboard/Dashboard'; import { + Dispatch, HTMLAttributes, + SetStateAction, useCallback, useEffect, useMemo, useState, } from 'react'; -import { useSelector } from 'react-redux'; -import { AppState } from 'store/reducers'; +import { UseQueryResult } from 'react-query'; +import { SuccessResponse } from 'types/api'; import { Widgets } from 'types/api/dashboard/getAll'; import { ILog } from 'types/api/logs/log'; -import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; -import { Query } from 'types/api/queryBuilder/queryBuilderData'; -import { GlobalReducer } from 'types/reducer/globalTime'; -import { v4 as uuid } from 'uuid'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; -import { getLogPanelColumnsList } from './utils'; +import { getLogPanelColumnsList, getNextOrPreviousItems } from './utils'; function LogsPanelComponent({ - selectedLogsFields, - query, - selectedTime, + widget, + setRequestData, + queryResponse, }: LogsPanelComponentProps): JSX.Element { - const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector< - AppState, - GlobalReducer - >((state) => state.globalTime); - const [pagination, setPagination] = useState({ offset: 0, - limit: query.builder.queryData[0].limit || 0, - }); - - const [requestData, setRequestData] = useState(() => { - const updatedQuery = { ...query }; - updatedQuery.builder.queryData[0].pageSize = 10; - return { - query: updatedQuery, - graphType: PANEL_TYPES.LIST, - selectedTime: 'GLOBAL_TIME', - globalSelectedInterval: globalSelectedTime, - tableParams: { - pagination, - }, - }; + limit: widget.query.builder.queryData[0].limit || 0, }); useEffect(() => { - 
setRequestData({ - ...requestData, - globalSelectedInterval: globalSelectedTime, + setRequestData((prev) => ({ + ...prev, tableParams: { pagination, }, - }); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [pagination]); + })); + }, [pagination, setRequestData]); const [pageSize, setPageSize] = useState(10); - const { selectedDashboard } = useDashboard(); const handleChangePageSize = (value: number): void => { setPagination({ @@ -88,53 +59,35 @@ function LogsPanelComponent({ offset: value, }); setPageSize(value); - const newQueryData = { ...requestData.query }; - newQueryData.builder.queryData[0].pageSize = value; - const newRequestData = { - ...requestData, - query: newQueryData, - tableParams: { - pagination, - }, - }; - setRequestData(newRequestData); + setRequestData((prev) => { + const newQueryData = { ...prev.query }; + newQueryData.builder.queryData[0].pageSize = value; + return { + ...prev, + query: newQueryData, + tableParams: { + pagination: { + limit: 0, + offset: value, + }, + }, + }; + }); }; - const { data, isFetching, isError } = useGetQueryRange( - { - ...requestData, - globalSelectedInterval: globalSelectedTime, - selectedTime: selectedTime?.enum || 'GLOBAL_TIME', - variables: getDashboardVariables(selectedDashboard?.data.variables), - }, - DEFAULT_ENTITY_VERSION, - { - queryKey: [ - REACT_QUERY_KEY.GET_QUERY_RANGE, - globalSelectedTime, - maxTime, - minTime, - requestData, - pagination, - selectedDashboard?.data.variables, - ], - enabled: !!requestData.query && !!selectedLogsFields?.length, - }, - ); - - const columns = getLogPanelColumnsList(selectedLogsFields); + const columns = getLogPanelColumnsList(widget.selectedLogFields); const dataLength = - data?.payload?.data?.newResult?.data?.result[0]?.list?.length; + queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length; const totalCount = useMemo(() => dataLength || 0, [dataLength]); const [firstLog, setFirstLog] = useState(); const [lastLog, setLastLog] = 
useState(); const { logs } = useLogsData({ - result: data?.payload.data.newResult.data.result, + result: queryResponse.data?.payload.data.newResult.data.result, panelType: PANEL_TYPES.LIST, - stagedQuery: query, + stagedQuery: widget.query, }); useEffect(() => { @@ -167,92 +120,86 @@ function LogsPanelComponent({ ); const isOrderByTimeStamp = - query.builder.queryData[0].orderBy.length > 0 && - query.builder.queryData[0].orderBy[0].columnName === 'timestamp'; + widget.query.builder.queryData[0].orderBy.length > 0 && + widget.query.builder.queryData[0].orderBy[0].columnName === 'timestamp'; const handlePreviousPagination = (): void => { if (isOrderByTimeStamp) { - setRequestData({ - ...requestData, + setRequestData((prev) => ({ + ...prev, query: { - ...requestData.query, + ...prev.query, builder: { - ...requestData.query.builder, + ...prev.query.builder, queryData: [ { - ...requestData.query.builder.queryData[0], + ...prev.query.builder.queryData[0], filters: { - ...requestData.query.builder.queryData[0].filters, + ...prev.query.builder.queryData[0].filters, items: [ - { - id: uuid(), - key: { - key: 'id', - type: '', - dataType: DataTypes.String, - isColumn: true, - }, - op: OPERATORS['>'], - value: firstLog?.id || '', - }, + ...getNextOrPreviousItems( + prev.query.builder.queryData[0].filters.items, + 'PREV', + firstLog, + ), ], }, + limit: 0, + offset: 0, }, ], }, }, - }); - return; + })); + } + if (!isOrderByTimeStamp) { + setPagination({ + ...pagination, + limit: 0, + offset: pagination.offset - pageSize, + }); } - setPagination({ - ...pagination, - limit: 0, - offset: pagination.offset - pageSize, - }); }; const handleNextPagination = (): void => { if (isOrderByTimeStamp) { - setRequestData({ - ...requestData, + setRequestData((prev) => ({ + ...prev, query: { - ...requestData.query, + ...prev.query, builder: { - ...requestData.query.builder, + ...prev.query.builder, queryData: [ { - ...requestData.query.builder.queryData[0], + 
...prev.query.builder.queryData[0], filters: { - ...requestData.query.builder.queryData[0].filters, + ...prev.query.builder.queryData[0].filters, items: [ - { - id: uuid(), - key: { - key: 'id', - type: '', - dataType: DataTypes.String, - isColumn: true, - }, - op: OPERATORS['<'], - value: lastLog?.id || '', - }, + ...getNextOrPreviousItems( + prev.query.builder.queryData[0].filters.items, + 'NEXT', + lastLog, + ), ], }, + limit: 0, + offset: 0, }, ], }, }, - }); - return; + })); + } + if (!isOrderByTimeStamp) { + setPagination({ + ...pagination, + limit: 0, + offset: pagination.offset + pageSize, + }); } - setPagination({ - ...pagination, - limit: 0, - offset: pagination.offset + pageSize, - }); }; - if (isError) { + if (queryResponse.isError) { return
{SOMETHING_WENT_WRONG}
; } @@ -265,19 +212,19 @@ function LogsPanelComponent({ tableLayout="fixed" scroll={{ x: `calc(50vw - 10px)` }} sticky - loading={isFetching} + loading={queryResponse.isFetching} style={tableStyles} dataSource={flattenLogData} columns={columns} onRow={handleRow} />
- {!query.builder.queryData[0].limit && ( + {!widget.query.builder.queryData[0].limit && (
>; + queryResponse: UseQueryResult< + SuccessResponse, + Error + >; + widget: Widgets; }; export default LogsPanelComponent; diff --git a/frontend/src/container/LogsPanelTable/utils.tsx b/frontend/src/container/LogsPanelTable/utils.tsx index 46701e763b..a95442b7cc 100644 --- a/frontend/src/container/LogsPanelTable/utils.tsx +++ b/frontend/src/container/LogsPanelTable/utils.tsx @@ -1,10 +1,15 @@ import { ColumnsType } from 'antd/es/table'; import { Typography } from 'antd/lib'; +import { OPERATORS } from 'constants/queryBuilder'; // import Typography from 'antd/es/typography/Typography'; import { RowData } from 'lib/query/createTableColumnsFromQuery'; import { ReactNode } from 'react'; import { Widgets } from 'types/api/dashboard/getAll'; import { IField } from 'types/api/logs/fields'; +import { ILog } from 'types/api/logs/log'; +import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData'; +import { v4 as uuid } from 'uuid'; export const getLogPanelColumnsList = ( selectedLogFields: Widgets['selectedLogFields'], @@ -36,3 +41,49 @@ export const getLogPanelColumnsList = ( return [...initialColumns, ...columns]; }; + +export const getNextOrPreviousItems = ( + items: TagFilterItem[], + direction: 'NEXT' | 'PREV', + log?: ILog, +): TagFilterItem[] => { + const nextItem = { + id: uuid(), + key: { + key: 'id', + type: '', + dataType: DataTypes.String, + isColumn: true, + }, + op: OPERATORS['<'], + value: log?.id || '', + }; + const prevItem = { + id: uuid(), + key: { + key: 'id', + type: '', + dataType: DataTypes.String, + isColumn: true, + }, + op: OPERATORS['>'], + value: log?.id || '', + }; + let index = items.findIndex((item) => item.op === OPERATORS['<']); + if (index === -1) { + index = items.findIndex((item) => item.op === OPERATORS['>']); + } + if (index === -1) { + if (direction === 'NEXT') { + return [...items, nextItem]; + } + return [...items, prevItem]; + } + const 
newItems = [...items]; + if (direction === 'NEXT') { + newItems[index] = nextItem; + } else { + newItems[index] = prevItem; + } + return newItems; +}; diff --git a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts index b974972d70..672bc11812 100644 --- a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts +++ b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts @@ -8,6 +8,7 @@ export const getWidgetQueryBuilder = ({ title = '', panelTypes, yAxisUnit = '', + fillSpans = false, id, }: GetWidgetQueryBuilderProps): Widgets => ({ description: '', @@ -24,4 +25,5 @@ export const getWidgetQueryBuilder = ({ softMin: null, selectedLogFields: [], selectedTracesFields: [], + fillSpans, }); diff --git a/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx b/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx index 0d84d45136..54c2115927 100644 --- a/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/DBCall.tsx @@ -70,6 +70,7 @@ function DBCall(): JSX.Element { panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: 'reqps', id: SERVICE_CHART_ID.dbCallsRPS, + fillSpans: false, }), [servicename, tagFilterItems], ); @@ -89,7 +90,8 @@ function DBCall(): JSX.Element { title: GraphTitle.DATABASE_CALLS_AVG_DURATION, panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: 'ms', - id: SERVICE_CHART_ID.dbCallsAvgDuration, + id: GraphTitle.DATABASE_CALLS_AVG_DURATION, + fillSpans: true, }), [servicename, tagFilterItems], ); @@ -112,8 +114,6 @@ function DBCall(): JSX.Element { { onGraphClickHandler(setSelectedTimeStamp)( @@ -147,8 +147,6 @@ function DBCall(): JSX.Element { { diff --git a/frontend/src/container/MetricsApplication/Tabs/External.tsx b/frontend/src/container/MetricsApplication/Tabs/External.tsx index 707675ec1d..62ce71b6b4 100644 --- 
a/frontend/src/container/MetricsApplication/Tabs/External.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/External.tsx @@ -18,7 +18,7 @@ import { useParams } from 'react-router-dom'; import { EQueryType } from 'types/common/dashboard'; import { v4 as uuid } from 'uuid'; -import { GraphTitle, legend, MENU_ITEMS, SERVICE_CHART_ID } from '../constant'; +import { GraphTitle, legend, MENU_ITEMS } from '../constant'; import { getWidgetQueryBuilder } from '../MetricsApplication.factory'; import { Card, GraphContainer, Row } from '../styles'; import { Button } from './styles'; @@ -60,7 +60,7 @@ function External(): JSX.Element { title: GraphTitle.EXTERNAL_CALL_ERROR_PERCENTAGE, panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: '%', - id: SERVICE_CHART_ID.externalCallErrorPercentage, + id: GraphTitle.EXTERNAL_CALL_ERROR_PERCENTAGE, }), [servicename, tagFilterItems], ); @@ -86,7 +86,8 @@ function External(): JSX.Element { title: GraphTitle.EXTERNAL_CALL_DURATION, panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: 'ms', - id: SERVICE_CHART_ID.externalCallDuration, + id: GraphTitle.EXTERNAL_CALL_DURATION, + fillSpans: true, }), [servicename, tagFilterItems], ); @@ -108,7 +109,8 @@ function External(): JSX.Element { title: GraphTitle.EXTERNAL_CALL_RPS_BY_ADDRESS, panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: 'reqps', - id: SERVICE_CHART_ID.externalCallRPSByAddress, + id: GraphTitle.EXTERNAL_CALL_RPS_BY_ADDRESS, + fillSpans: true, }), [servicename, tagFilterItems], ); @@ -130,7 +132,8 @@ function External(): JSX.Element { title: GraphTitle.EXTERNAL_CALL_DURATION_BY_ADDRESS, panelTypes: PANEL_TYPES.TIME_SERIES, yAxisUnit: 'ms', - id: SERVICE_CHART_ID.externalCallDurationByAddress, + id: GraphTitle.EXTERNAL_CALL_DURATION_BY_ADDRESS, + fillSpans: true, }), [servicename, tagFilterItems], ); @@ -155,9 +158,7 @@ function External(): JSX.Element { { onGraphClickHandler(setSelectedTimeStamp)( @@ -192,8 +193,6 @@ function External(): JSX.Element { { @@ -230,8 +229,6 @@ function 
External(): JSX.Element { => @@ -267,10 +264,8 @@ function External(): JSX.Element { { onGraphClickHandler(setSelectedTimeStamp)( xValue, diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx index 1fde8d0919..cfaf339d2e 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview.tsx @@ -19,13 +19,10 @@ import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin'; import { defaultTo } from 'lodash-es'; import { useCallback, useMemo, useState } from 'react'; import { useQuery } from 'react-query'; -import { useDispatch, useSelector } from 'react-redux'; +import { useDispatch } from 'react-redux'; import { useLocation, useParams } from 'react-router-dom'; import { UpdateTimeInterval } from 'store/actions'; -import { AppState } from 'store/reducers'; import { EQueryType } from 'types/common/dashboard'; -import { GlobalReducer } from 'types/reducer/globalTime'; -import { Tags } from 'types/reducer/trace'; import { v4 as uuid } from 'uuid'; import { GraphTitle, SERVICE_CHART_ID } from '../constant'; @@ -49,9 +46,6 @@ import { } from './util'; function Application(): JSX.Element { - const { maxTime, minTime } = useSelector( - (state) => state.globalTime, - ); const { servicename: encodedServiceName } = useParams(); const servicename = decodeURIComponent(encodedServiceName); const [selectedTimeStamp, setSelectedTimeStamp] = useState(0); @@ -59,10 +53,6 @@ function Application(): JSX.Element { const { queries } = useResourceAttribute(); const urlQuery = useUrlQuery(); - const selectedTags = useMemo( - () => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [], - [queries], - ); const isSpanMetricEnabled = useFeatureFlag(FeatureKeys.USE_SPAN_METRICS) ?.active; @@ -94,7 +84,7 @@ function Application(): JSX.Element { isLoading: topLevelOperationsIsLoading, isError: topLevelOperationsIsError, } = 
useQuery({ - queryKey: [servicename, minTime, maxTime, selectedTags], + queryKey: [servicename], queryFn: getTopLevelOperations, }); @@ -116,49 +106,41 @@ function Application(): JSX.Element { [servicename, topLevelOperations], ); - const operationPerSecWidget = useMemo( - () => - getWidgetQueryBuilder({ - query: { - queryType: EQueryType.QUERY_BUILDER, - promql: [], - builder: operationPerSec({ - servicename, - tagFilterItems, - topLevelOperations: topLevelOperationsRoute, - }), - clickhouse_sql: [], - id: uuid(), - }, - title: GraphTitle.RATE_PER_OPS, - panelTypes: PANEL_TYPES.TIME_SERIES, - yAxisUnit: 'ops', - id: SERVICE_CHART_ID.rps, + const operationPerSecWidget = getWidgetQueryBuilder({ + query: { + queryType: EQueryType.QUERY_BUILDER, + promql: [], + builder: operationPerSec({ + servicename, + tagFilterItems, + topLevelOperations: topLevelOperationsRoute, }), - [servicename, tagFilterItems, topLevelOperationsRoute], - ); + clickhouse_sql: [], + id: uuid(), + }, + title: GraphTitle.RATE_PER_OPS, + panelTypes: PANEL_TYPES.TIME_SERIES, + yAxisUnit: 'ops', + id: SERVICE_CHART_ID.rps, + }); - const errorPercentageWidget = useMemo( - () => - getWidgetQueryBuilder({ - query: { - queryType: EQueryType.QUERY_BUILDER, - promql: [], - builder: errorPercentage({ - servicename, - tagFilterItems, - topLevelOperations: topLevelOperationsRoute, - }), - clickhouse_sql: [], - id: uuid(), - }, - title: GraphTitle.ERROR_PERCENTAGE, - panelTypes: PANEL_TYPES.TIME_SERIES, - yAxisUnit: '%', - id: SERVICE_CHART_ID.errorPercentage, + const errorPercentageWidget = getWidgetQueryBuilder({ + query: { + queryType: EQueryType.QUERY_BUILDER, + promql: [], + builder: errorPercentage({ + servicename, + tagFilterItems, + topLevelOperations: topLevelOperationsRoute, }), - [servicename, tagFilterItems, topLevelOperationsRoute], - ); + clickhouse_sql: [], + id: uuid(), + }, + title: GraphTitle.ERROR_PERCENTAGE, + panelTypes: PANEL_TYPES.TIME_SERIES, + yAxisUnit: '%', + id: 
SERVICE_CHART_ID.errorPercentage, + }); const onDragSelect = useCallback( (start: number, end: number) => { diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetrics.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetrics.tsx index abfaf52d58..dcd01270ee 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetrics.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetrics.tsx @@ -89,8 +89,6 @@ function ApDexMetrics({ return ( - getWidgetQueryBuilder({ - query: { - queryType: EQueryType.QUERY_BUILDER, - promql: [], - builder: latency({ - servicename, - tagFilterItems, - isSpanMetricEnable, - topLevelOperationsRoute, - }), - clickhouse_sql: [], - id: uuid(), - }, - title: GraphTitle.LATENCY, - panelTypes: PANEL_TYPES.TIME_SERIES, - yAxisUnit: 'ns', - id: SERVICE_CHART_ID.latency, + const latencyWidget = getWidgetQueryBuilder({ + query: { + queryType: EQueryType.QUERY_BUILDER, + promql: [], + builder: latency({ + servicename, + tagFilterItems, + isSpanMetricEnable, + topLevelOperationsRoute, }), - [servicename, isSpanMetricEnable, topLevelOperationsRoute, tagFilterItems], - ); + clickhouse_sql: [], + id: uuid(), + }, + title: GraphTitle.LATENCY, + panelTypes: PANEL_TYPES.TIME_SERIES, + yAxisUnit: 'ns', + id: SERVICE_CHART_ID.latency, + }); const isQueryEnabled = !topLevelOperationsIsLoading && topLevelOperationsRoute.length > 0; @@ -88,15 +85,23 @@ function ServiceOverview({ - + {topLevelOperationsIsLoading && ( + + )} + {!topLevelOperationsIsLoading && ( + + )} diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/TopLevelOperations.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview/TopLevelOperations.tsx index 1b0903f4fa..9238e82231 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview/TopLevelOperations.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview/TopLevelOperations.tsx @@ -1,4 +1,4 @@ -import { 
Typography } from 'antd'; +import { Skeleton, Typography } from 'antd'; import axios from 'axios'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { ENTITY_VERSION_V4 } from 'constants/app'; @@ -27,15 +27,23 @@ function TopLevelOperation({ ) : ( - + {topLevelOperationsIsLoading && ( + + )} + {!topLevelOperationsIsLoading && ( + + )} )} diff --git a/frontend/src/container/MetricsApplication/styles.ts b/frontend/src/container/MetricsApplication/styles.ts index 2e1ab9c8bf..8e396891e7 100644 --- a/frontend/src/container/MetricsApplication/styles.ts +++ b/frontend/src/container/MetricsApplication/styles.ts @@ -13,7 +13,7 @@ export const Card = styled(CardComponent)` } .ant-card-body { - height: calc(100% - 40px); + height: 100%; padding: 0; } `; @@ -40,7 +40,7 @@ export const ColErrorContainer = styled(ColComponent)` export const GraphContainer = styled.div` min-height: calc(40vh - 40px); - height: calc(100% - 40px); + height: 100%; `; export const GraphTitle = styled(Typography)` diff --git a/frontend/src/container/MetricsApplication/types.ts b/frontend/src/container/MetricsApplication/types.ts index 642bf0b057..d8464d1248 100644 --- a/frontend/src/container/MetricsApplication/types.ts +++ b/frontend/src/container/MetricsApplication/types.ts @@ -10,6 +10,7 @@ export interface GetWidgetQueryBuilderProps { panelTypes: Widgets['panelTypes']; yAxisUnit?: Widgets['yAxisUnit']; id?: Widgets['id']; + fillSpans?: Widgets['fillSpans']; } export interface NavigateToTraceProps { diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx index 9dfd8d0deb..7c9fca416e 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/index.tsx @@ -2,14 +2,11 @@ import './QuerySection.styles.scss'; import { Button, Tabs, Tooltip, Typography } from 'antd'; import TextToolTip from 
'components/TextToolTip'; -import { DEFAULT_ENTITY_VERSION } from 'constants/app'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { QBShortcuts } from 'constants/shortcuts/QBShortcuts'; -import { WidgetGraphProps } from 'container/NewWidget/types'; import { QueryBuilder } from 'container/QueryBuilder'; import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces'; import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; -import { useGetWidgetQueryRange } from 'hooks/queryBuilder/useGetWidgetQueryRange'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; @@ -22,9 +19,12 @@ import { getSelectedWidgetIndex, } from 'providers/Dashboard/util'; import { useCallback, useEffect, useMemo } from 'react'; +import { UseQueryResult } from 'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; +import { SuccessResponse } from 'types/api'; import { Widgets } from 'types/api/dashboard/getAll'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { EQueryType } from 'types/common/dashboard'; import AppReducer from 'types/reducer/app'; @@ -35,7 +35,7 @@ import PromQLQueryContainer from './QueryBuilder/promQL'; function QuerySection({ selectedGraph, - selectedTime, + queryResponse, }: QueryProps): JSX.Element { const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder(); const urlQuery = useUrlQuery(); @@ -51,14 +51,6 @@ function QuerySection({ const { selectedDashboard, setSelectedDashboard } = useDashboard(); - const getWidgetQueryRange = useGetWidgetQueryRange( - { - graphType: selectedGraph, - selectedTime: selectedTime.enum, - }, - selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION, - ); - const { widgets } 
= selectedDashboard?.data || {}; const getWidget = useCallback(() => { @@ -233,7 +225,7 @@ function QuerySection({