Merge pull request #3312 from SigNoz/release/v0.25.4

Release/v0.25.4
Prashant Shahi 2023-08-10 18:03:46 +05:30 committed by GitHub
commit 6363c71442
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
56 changed files with 839 additions and 429 deletions

View File

@@ -1,6 +1,7 @@
 version: "3.9"
-x-clickhouse-defaults: &clickhouse-defaults
+x-clickhouse-defaults:
+  &clickhouse-defaults
   image: clickhouse/clickhouse-server:22.8.8-alpine
   tty: true
   deploy:
@@ -16,7 +17,14 @@ x-clickhouse-defaults: &clickhouse-defaults
       max-file: "3"
   healthcheck:
     # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-    test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
+    test:
+      [
+        "CMD",
+        "wget",
+        "--spider",
+        "-q",
+        "localhost:8123/ping"
+      ]
     interval: 30s
     timeout: 5s
     retries: 3
@@ -26,7 +34,8 @@ x-clickhouse-defaults: &clickhouse-defaults
       soft: 262144
      hard: 262144
-x-clickhouse-depend: &clickhouse-depend
+x-clickhouse-depend:
+  &clickhouse-depend
   depends_on:
     - clickhouse
     # - clickhouse-2
@@ -137,8 +146,8 @@ services:
         condition: on-failure
   query-service:
-    image: signoz/query-service:0.25.3
-    command: ["-config=/root/config/prometheus.yml"]
+    image: signoz/query-service:0.25.4
+    command: [ "-config=/root/config/prometheus.yml" ]
     # ports:
     #   - "6060:6060"     # pprof port
     #   - "8080:8080"     # query-service port
@@ -156,7 +165,14 @@ services:
       - TELEMETRY_ENABLED=true
       - DEPLOYMENT_TYPE=docker-swarm
     healthcheck:
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
+      test:
+        [
+          "CMD",
+          "wget",
+          "--spider",
+          "-q",
+          "localhost:8080/api/v1/health"
+        ]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -166,7 +182,7 @@ services:
     <<: *clickhouse-depend
   frontend:
-    image: signoz/frontend:0.25.3
+    image: signoz/frontend:0.25.4
     deploy:
       restart_policy:
         condition: on-failure
@@ -179,8 +195,12 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.4
-    command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    image: signoz/signoz-otel-collector:0.79.5
+    command:
+      [
+        "--config=/etc/otel-collector-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     user: root # required for reading docker container logs
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -191,8 +211,8 @@ services:
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"     # OTLP gRPC receiver
       - "4318:4318"     # OTLP HTTP receiver
       # - "8888:8888"     # OtelCollector internal metrics
       # - "8889:8889"     # signoz spanmetrics exposed by the agent
       # - "9411:9411"     # Zipkin port
@@ -208,8 +228,12 @@ services:
     <<: *clickhouse-depend
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.4
-    command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    image: signoz/signoz-otel-collector:0.79.5
+    command:
+      [
+        "--config=/etc/otel-collector-metrics-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
     # ports:
@@ -222,9 +246,22 @@ services:
         condition: on-failure
     <<: *clickhouse-depend
+  logspout:
+    image: "gliderlabs/logspout:v3.2.14"
+    volumes:
+      - /etc/hostname:/etc/host_hostname:ro
+      - /var/run/docker.sock:/var/run/docker.sock
+    command: syslog+tcp://otel-collector:2255
+    depends_on:
+      - otel-collector
+    deploy:
+      mode: global
+      restart_policy:
+        condition: on-failure
   hotrod:
     image: jaegertracing/example-hotrod:1.30
-    command: ["all"]
+    command: [ "all" ]
     environment:
       - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
     logging:

View File

@@ -1,29 +1,21 @@
 receivers:
-  filelog/dockercontainers:
-    include: [ "/var/lib/docker/containers/*/*.log" ]
-    start_at: end
-    include_file_path: true
-    include_file_name: false
+  tcplog/docker:
+    listen_address: "0.0.0.0:2255"
     operators:
-      - type: json_parser
-        id: parser-docker
-        output: extract_metadata_from_filepath
-        timestamp:
-          parse_from: attributes.time
-          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
-      - type: regex_parser
-        id: extract_metadata_from_filepath
-        regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
-        parse_from: attributes["log.file.path"]
-        output: parse_body
-      - type: move
-        id: parse_body
-        from: attributes.log
-        to: body
-        output: time
-      - type: remove
-        id: time
-        field: attributes.time
+      - type: regex_parser
+        regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
+        timestamp:
+          parse_from: attributes.timestamp
+          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
+      - type: move
+        from: attributes["body"]
+        to: body
+      - type: remove
+        field: attributes.timestamp
+      # please remove names from below if you want to collect logs from them
+      - type: filter
+        id: signoz_logs_filter
+        expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
   opencensus:
     endpoint: 0.0.0.0:55678
   otlp/spanmetrics:
@@ -69,6 +61,40 @@ receivers:
       job_name: otel-collector
 processors:
+  logstransform/internal:
+    operators:
+      - type: trace_parser
+        if: '"trace_id" in attributes or "span_id" in attributes'
+        trace_id:
+          parse_from: attributes.trace_id
+        span_id:
+          parse_from: attributes.span_id
+        output: remove_trace_id
+      - type: trace_parser
+        if: '"traceId" in attributes or "spanId" in attributes'
+        trace_id:
+          parse_from: attributes.traceId
+        span_id:
+          parse_from: attributes.spanId
+        output: remove_traceId
+      - id: remove_traceId
+        type: remove
+        if: '"traceId" in attributes'
+        field: attributes.traceId
+        output: remove_spanId
+      - id: remove_spanId
+        type: remove
+        if: '"spanId" in attributes'
+        field: attributes.spanId
+      - id: remove_trace_id
+        type: remove
+        if: '"trace_id" in attributes'
+        field: attributes.trace_id
+        output: remove_span_id
+      - id: remove_span_id
+        type: remove
+        if: '"span_id" in attributes'
+        field: attributes.span_id
   batch:
     send_batch_size: 10000
     send_batch_max_size: 11000
@@ -166,6 +192,6 @@ service:
       receivers: [otlp/spanmetrics]
       exporters: [prometheus]
     logs:
-      receivers: [otlp, filelog/dockercontainers]
-      processors: [batch]
+      receivers: [otlp, tcplog/docker]
+      processors: [logstransform/internal, batch]
       exporters: [clickhouselogsexporter]

View File

@@ -3,7 +3,7 @@ version: "2.4"
 services:
   clickhouse:
     image: clickhouse/clickhouse-server:22.8.8-alpine
-    container_name: clickhouse
+    container_name: signoz-clickhouse
     # ports:
     #   - "9000:9000"
     #   - "8123:8123"
@@ -20,13 +20,20 @@ services:
         max-file: "3"
     healthcheck:
       # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
+      test:
+        [
+          "CMD",
+          "wget",
+          "--spider",
+          "-q",
+          "localhost:8123/ping"
+        ]
       interval: 30s
       timeout: 5s
       retries: 3
   alertmanager:
-    container_name: alertmanager
+    container_name: signoz-alertmanager
     image: signoz/alertmanager:0.23.1
     volumes:
       - ./data/alertmanager:/data
@@ -40,9 +47,13 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
-    container_name: otel-collector
-    image: signoz/signoz-otel-collector:0.79.4
-    command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    container_name: signoz-otel-collector
+    image: signoz/signoz-otel-collector:0.79.5
+    command:
+      [
+        "--config=/etc/otel-collector-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     # user: root # required for reading docker container logs
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -50,8 +61,8 @@ services:
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"     # OTLP gRPC receiver
       - "4318:4318"     # OTLP HTTP receiver
       # - "8888:8888"     # OtelCollector internal metrics
       # - "8889:8889"     # signoz spanmetrics exposed by the agent
       # - "9411:9411"     # Zipkin port
@@ -66,9 +77,13 @@ services:
         condition: service_healthy
   otel-collector-metrics:
-    container_name: otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.79.4
-    command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    container_name: signoz-otel-collector-metrics
+    image: signoz/signoz-otel-collector:0.79.5
+    command:
+      [
+        "--config=/etc/otel-collector-metrics-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
     # ports:
@@ -81,6 +96,17 @@ services:
       clickhouse:
         condition: service_healthy
+  logspout:
+    image: "gliderlabs/logspout:v3.2.14"
+    container_name: signoz-logspout
+    volumes:
+      - /etc/hostname:/etc/host_hostname:ro
+      - /var/run/docker.sock:/var/run/docker.sock
+    command: syslog+tcp://otel-collector:2255
+    depends_on:
+      - otel-collector
+    restart: on-failure
   hotrod:
     image: jaegertracing/example-hotrod:1.30
     container_name: hotrod
@@ -88,7 +114,7 @@ services:
       options:
         max-size: 50m
         max-file: "3"
-    command: ["all"]
+    command: [ "all" ]
     environment:
       - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

View File

@@ -9,7 +9,7 @@ services:
       args:
         LDFLAGS: ""
         TARGETPLATFORM: "${LOCAL_GOOS}/${LOCAL_GOARCH}"
-    container_name: query-service
+    container_name: signoz-query-service
     environment:
       - ClickHouseUrl=tcp://clickhouse:9000
       - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
@@ -22,13 +22,20 @@ services:
       - ./prometheus.yml:/root/config/prometheus.yml
       - ../dashboards:/root/config/dashboards
       - ./data/signoz/:/var/lib/signoz/
-    command: ["-config=/root/config/prometheus.yml"]
+    command: [ "-config=/root/config/prometheus.yml" ]
     ports:
       - "6060:6060"
       - "8080:8080"
     restart: on-failure
     healthcheck:
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
+      test:
+        [
+          "CMD",
+          "wget",
+          "--spider",
+          "-q",
+          "localhost:8080/api/v1/health"
+        ]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -43,7 +50,7 @@ services:
       args:
        TARGETOS: "${LOCAL_GOOS}"
        TARGETPLATFORM: "${LOCAL_GOARCH}"
-    container_name: frontend
+    container_name: signoz-frontend
     environment:
       - FRONTEND_API_ENDPOINT=http://query-service:8080
     restart: on-failure

View File

@@ -1,6 +1,7 @@
 version: "2.4"
-x-clickhouse-defaults: &clickhouse-defaults
+x-clickhouse-defaults:
+  &clickhouse-defaults
   restart: on-failure
   image: clickhouse/clickhouse-server:22.8.8-alpine
   tty: true
@@ -14,7 +15,14 @@ x-clickhouse-defaults: &clickhouse-defaults
       max-file: "3"
   healthcheck:
     # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
-    test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
+    test:
+      [
+        "CMD",
+        "wget",
+        "--spider",
+        "-q",
+        "localhost:8123/ping"
+      ]
     interval: 30s
     timeout: 5s
     retries: 3
@@ -24,7 +32,8 @@ x-clickhouse-defaults: &clickhouse-defaults
       soft: 262144
      hard: 262144
-x-clickhouse-depend: &clickhouse-depend
+x-clickhouse-depend:
+  &clickhouse-depend
   depends_on:
     clickhouse:
       condition: service_healthy
@@ -37,7 +46,7 @@ services:
   zookeeper-1:
     image: bitnami/zookeeper:3.7.1
-    container_name: zookeeper-1
+    container_name: signoz-zookeeper-1
     hostname: zookeeper-1
     user: root
     ports:
@@ -54,7 +63,7 @@ services:
   # zookeeper-2:
   #   image: bitnami/zookeeper:3.7.0
-  #   container_name: zookeeper-2
+  #   container_name: signoz-zookeeper-2
   #   hostname: zookeeper-2
   #   user: root
   #   ports:
@@ -71,7 +80,7 @@ services:
   # zookeeper-3:
   #   image: bitnami/zookeeper:3.7.0
-  #   container_name: zookeeper-3
+  #   container_name: signoz-zookeeper-3
   #   hostname: zookeeper-3
   #   user: root
   #   ports:
@@ -88,7 +97,7 @@ services:
   clickhouse:
     <<: *clickhouse-defaults
-    container_name: clickhouse
+    container_name: signoz-clickhouse
     hostname: clickhouse
     ports:
       - "9000:9000"
@@ -105,7 +114,7 @@ services:
   # clickhouse-2:
   #   <<: *clickhouse-defaults
-  #   container_name: clickhouse-2
+  #   container_name: signoz-clickhouse-2
   #   hostname: clickhouse-2
   #   ports:
   #     - "9001:9000"
@@ -120,10 +129,10 @@ services:
   #     - ./data/clickhouse-2/:/var/lib/clickhouse/
   #     - ./user_scripts:/var/lib/clickhouse/user_scripts/
   # clickhouse-3:
   #   <<: *clickhouse-defaults
-  #   container_name: clickhouse-3
+  #   container_name: signoz-clickhouse-3
   #   hostname: clickhouse-3
   #   ports:
   #     - "9002:9000"
@@ -140,6 +149,7 @@ services:
   alertmanager:
     image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.1}
+    container_name: signoz-alertmanager
     volumes:
       - ./data/alertmanager:/data
     depends_on:
@@ -153,9 +163,9 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.25.3}
-    container_name: query-service
-    command: ["-config=/root/config/prometheus.yml"]
+    image: signoz/query-service:${DOCKER_TAG:-0.25.4}
+    container_name: signoz-query-service
+    command: [ "-config=/root/config/prometheus.yml" ]
     # ports:
     #   - "6060:6060"     # pprof port
     #   - "8080:8080"     # query-service port
@@ -174,15 +184,22 @@ services:
       - DEPLOYMENT_TYPE=docker-standalone-amd
     restart: on-failure
     healthcheck:
-      test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
+      test:
+        [
+          "CMD",
+          "wget",
+          "--spider",
+          "-q",
+          "localhost:8080/api/v1/health"
+        ]
       interval: 30s
       timeout: 5s
       retries: 3
     <<: *clickhouse-depend
   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.25.3}
-    container_name: frontend
+    image: signoz/frontend:${DOCKER_TAG:-0.25.4}
+    container_name: signoz-frontend
     restart: on-failure
     depends_on:
       - alertmanager
@@ -193,8 +210,13 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.4}
-    command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
+    container_name: signoz-otel-collector
+    command:
+      [
+        "--config=/etc/otel-collector-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     user: root # required for reading docker container logs
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -205,8 +227,8 @@ services:
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
     ports:
       # - "1777:1777"     # pprof extension
       - "4317:4317"     # OTLP gRPC receiver
       - "4318:4318"     # OTLP HTTP receiver
       # - "8888:8888"     # OtelCollector internal metrics
       # - "8889:8889"     # signoz spanmetrics exposed by the agent
       # - "9411:9411"     # Zipkin port
@@ -219,8 +241,13 @@ services:
     <<: *clickhouse-depend
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.4}
-    command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
+    container_name: signoz-otel-collector-metrics
+    command:
+      [
+        "--config=/etc/otel-collector-metrics-config.yaml",
+        "--feature-gates=-pkg.translator.prometheus.NormalizeName"
+      ]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
     # ports:
@@ -231,6 +258,17 @@ services:
     restart: on-failure
     <<: *clickhouse-depend
+  logspout:
+    image: "gliderlabs/logspout:v3.2.14"
+    container_name: signoz-logspout
+    volumes:
+      - /etc/hostname:/etc/host_hostname:ro
+      - /var/run/docker.sock:/var/run/docker.sock
+    command: syslog+tcp://otel-collector:2255
+    depends_on:
+      - otel-collector
+    restart: on-failure
   hotrod:
     image: jaegertracing/example-hotrod:1.30
     container_name: hotrod
@@ -238,7 +276,7 @@ services:
       options:
         max-size: 50m
         max-file: "3"
-    command: ["all"]
+    command: [ "all" ]
     environment:
       - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

View File

@@ -1,29 +1,21 @@
 receivers:
-  filelog/dockercontainers:
-    include: [ "/var/lib/docker/containers/*/*.log" ]
-    start_at: end
-    include_file_path: true
-    include_file_name: false
+  tcplog/docker:
+    listen_address: "0.0.0.0:2255"
     operators:
-      - type: json_parser
-        id: parser-docker
-        output: extract_metadata_from_filepath
-        timestamp:
-          parse_from: attributes.time
-          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
-      - type: regex_parser
-        id: extract_metadata_from_filepath
-        regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
-        parse_from: attributes["log.file.path"]
-        output: parse_body
-      - type: move
-        id: parse_body
-        from: attributes.log
-        to: body
-        output: time
-      - type: remove
-        id: time
-        field: attributes.time
+      - type: regex_parser
+        regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
+        timestamp:
+          parse_from: attributes.timestamp
+          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
+      - type: move
+        from: attributes["body"]
+        to: body
+      - type: remove
+        field: attributes.timestamp
+      # please remove names from below if you want to collect logs from them
+      - type: filter
+        id: signoz_logs_filter
+        expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
   opencensus:
     endpoint: 0.0.0.0:55678
   otlp/spanmetrics:
@@ -205,6 +197,6 @@ service:
       receivers: [otlp/spanmetrics]
       exporters: [prometheus]
     logs:
-      receivers: [otlp, filelog/dockercontainers]
+      receivers: [otlp, tcplog/docker]
       processors: [logstransform/internal, batch]
       exporters: [clickhouselogsexporter]
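The logs pipeline above keeps `logstransform/internal` ahead of `batch`; its operators (shown in full in the swarm collector config earlier in this diff) promote `trace_id`/`span_id` or `traceId`/`spanId` attributes into the log record's trace context and then drop the original attributes. A rough Go sketch of that behaviour, using a hypothetical helper rather than collector code:

```go
package main

import "fmt"

// normalizeTraceContext mimics the trace_parser + remove operators: it accepts
// either snake_case or camelCase attribute names, returns the extracted IDs,
// and strips the attributes so the values are not stored twice.
func normalizeTraceContext(attrs map[string]string) (traceID, spanID string) {
	for _, key := range []string{"trace_id", "traceId"} {
		if v, ok := attrs[key]; ok {
			traceID = v
			delete(attrs, key)
		}
	}
	for _, key := range []string{"span_id", "spanId"} {
		if v, ok := attrs[key]; ok {
			spanID = v
			delete(attrs, key)
		}
	}
	return traceID, spanID
}

func main() {
	attrs := map[string]string{"traceId": "abc123", "spanId": "def456", "level": "info"}
	traceID, spanID := normalizeTraceContext(attrs)
	fmt.Println(traceID, spanID, attrs) // abc123 def456 map[level:info]
}
```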

View File

@@ -36,9 +36,9 @@ is_mac() {
     [[ $OSTYPE == darwin* ]]
 }
-# is_arm64(){
-#     [[ `uname -m` == 'arm64' ]]
-# }
+is_arm64(){
+    [[ `uname -m` == 'arm64' || `uname -m` == 'aarch64' ]]
+}
 check_os() {
     if is_mac; then
@@ -48,6 +48,16 @@ check_os() {
         return
     fi
+    if is_arm64; then
+        arch="arm64"
+        arch_official="aarch64"
+    else
+        arch="amd64"
+        arch_official="x86_64"
+    fi
+    platform=$(uname -s | tr '[:upper:]' '[:lower:]')
     os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')"
     case "$os_name" in
@@ -143,7 +153,7 @@ install_docker() {
         $apt_cmd install software-properties-common gnupg-agent
         curl -fsSL "https://download.docker.com/linux/$os/gpg" | $sudo_cmd apt-key add -
         $sudo_cmd add-apt-repository \
-            "deb [arch=amd64] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
+            "deb [arch=$arch] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
         $apt_cmd update
         echo "Installing docker"
         $apt_cmd install docker-ce docker-ce-cli containerd.io
@@ -178,12 +188,20 @@ install_docker() {
 }
+compose_version () {
+    local compose_version
+    compose_version="$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)"
+    echo "${compose_version:-v2.18.1}"
+}
 install_docker_compose() {
     if [[ $package_manager == "apt-get" || $package_manager == "zypper" || $package_manager == "yum" ]]; then
         if [[ ! -f /usr/bin/docker-compose ]];then
             echo "++++++++++++++++++++++++"
             echo "Installing docker-compose"
-            $sudo_cmd curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+            compose_url="https://github.com/docker/compose/releases/download/$(compose_version)/docker-compose-$platform-$arch_official"
+            echo "Downloading docker-compose from $compose_url"
+            $sudo_cmd curl -L "$compose_url" -o /usr/local/bin/docker-compose
             $sudo_cmd chmod +x /usr/local/bin/docker-compose
             $sudo_cmd ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
             echo "docker-compose installed!"

View File

@@ -2,6 +2,7 @@ package api
 import (
     "net/http"
+    "time"
     "github.com/gorilla/mux"
     "go.signoz.io/signoz/ee/query-service/dao"
@@ -20,6 +21,9 @@ type APIHandlerOptions struct {
     SkipConfig        *basemodel.SkipConfig
     PreferDelta       bool
     PreferSpanMetrics bool
+    MaxIdleConns      int
+    MaxOpenConns      int
+    DialTimeout       time.Duration
     AppDao            dao.ModelDao
     RulesManager      *rules.Manager
     FeatureFlags      baseint.FeatureLookup
@@ -40,6 +44,9 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
         SkipConfig:        opts.SkipConfig,
         PerferDelta:       opts.PreferDelta,
         PreferSpanMetrics: opts.PreferSpanMetrics,
+        MaxIdleConns:      opts.MaxIdleConns,
+        MaxOpenConns:      opts.MaxOpenConns,
+        DialTimeout:       opts.DialTimeout,
         AppDao:            opts.AppDao,
         RuleManager:       opts.RulesManager,
         FeatureFlags:      opts.FeatureFlags,

View File

@@ -1,6 +1,8 @@
 package db
 import (
+    "time"
     "github.com/ClickHouse/clickhouse-go/v2"
     "github.com/jmoiron/sqlx"
@@ -15,8 +17,15 @@ type ClickhouseReader struct {
     *basechr.ClickHouseReader
 }
-func NewDataConnector(localDB *sqlx.DB, promConfigPath string, lm interfaces.FeatureLookup) *ClickhouseReader {
-    ch := basechr.NewReader(localDB, promConfigPath, lm)
+func NewDataConnector(
+    localDB *sqlx.DB,
+    promConfigPath string,
+    lm interfaces.FeatureLookup,
+    maxIdleConns int,
+    maxOpenConns int,
+    dialTimeout time.Duration,
+) *ClickhouseReader {
+    ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout)
     return &ClickhouseReader{
         conn:  ch.GetConn(),
         appdb: localDB,
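`NewDataConnector` now threads the pool settings through to `basechr.NewReader`. The sketch below shows how `max-idle-conns`, `max-open-conns` and `dial-timeout` typically map onto `clickhouse-go/v2` connection options; it is an illustration of what the three parameters control, not the reader's actual implementation:

```go
package main

import (
	"context"
	"log"
	"time"

	"github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	// Example values matching the new query-service flag defaults.
	maxIdleConns := 50
	maxOpenConns := 100
	dialTimeout := 5 * time.Second

	conn, err := clickhouse.Open(&clickhouse.Options{
		Addr:         []string{"clickhouse:9000"}, // address is illustrative
		DialTimeout:  dialTimeout,                 // max time to establish a connection
		MaxIdleConns: maxIdleConns,                // connections kept ready in the pool
		MaxOpenConns: maxOpenConns,                // hard cap on concurrent connections
	})
	if err != nil {
		log.Fatal(err)
	}
	if err := conn.Ping(context.Background()); err != nil {
		log.Fatal(err)
	}
}
```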

View File

@@ -59,6 +59,9 @@ type ServerOptions struct {
     RuleRepoURL       string
     PreferDelta       bool
     PreferSpanMetrics bool
+    MaxIdleConns      int
+    MaxOpenConns      int
+    DialTimeout       time.Duration
 }
 // Server runs HTTP api service
@@ -122,7 +125,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
     storage := os.Getenv("STORAGE")
     if storage == "clickhouse" {
         zap.S().Info("Using ClickHouse as datastore ...")
-        qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath, lm)
+        qb := db.NewDataConnector(
+            localDB,
+            serverOptions.PromConfigPath,
+            lm,
+            serverOptions.MaxIdleConns,
+            serverOptions.MaxOpenConns,
+            serverOptions.DialTimeout,
+        )
         go qb.Start(readerReady)
         reader = qb
     } else {
@@ -184,6 +194,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
         SkipConfig:        skipConfig,
         PreferDelta:       serverOptions.PreferDelta,
         PreferSpanMetrics: serverOptions.PreferSpanMetrics,
+        MaxIdleConns:      serverOptions.MaxIdleConns,
+        MaxOpenConns:      serverOptions.MaxOpenConns,
+        DialTimeout:       serverOptions.DialTimeout,
         AppDao:            modelDao,
         RulesManager:      rm,
         FeatureFlags:      lm,

View File

@@ -86,11 +86,18 @@ func main() {
     var preferDelta bool
     var preferSpanMetrics bool
+    var maxIdleConns int
+    var maxOpenConns int
+    var dialTimeout time.Duration
     flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
     flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
     flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
     flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
     flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
+    flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
+    flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
+    flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
     flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
     flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
     flag.Parse()
@@ -111,6 +118,9 @@ func main() {
         PrivateHostPort:   baseconst.PrivateHostPort,
         DisableRules:      disableRules,
         RuleRepoURL:       ruleRepoURL,
+        MaxIdleConns:      maxIdleConns,
+        MaxOpenConns:      maxOpenConns,
+        DialTimeout:       dialTimeout,
     }
     // Read the jwt secret key

View File

@@ -10,6 +10,7 @@ export type LogsTableViewProps = {
     logs: ILog[];
     fields: IField[];
     linesPerRow: number;
+    onClickExpand?: (log: ILog) => void;
 };
 export type UseTableViewResult = {

View File

@@ -4,7 +4,7 @@ import {
     QuestionCircleFilled,
     QuestionCircleOutlined,
 } from '@ant-design/icons';
-import { Dropdown, Space } from 'antd';
+import { Space } from 'antd';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useMemo, useState } from 'react';
 import { useSelector } from 'react-redux';
@@ -13,6 +13,7 @@ import { ConfigProps } from 'types/api/dynamicConfigs/getDynamicConfigs';
 import AppReducer from 'types/reducer/app';
 import HelpToolTip from './Config';
+import { ConfigDropdown } from './styles';
 function DynamicConfigDropdown({
     frontendId,
@@ -53,19 +54,17 @@ function DynamicConfigDropdown({
     const DropDownIcon = isHelpDropDownOpen ? CaretUpFilled : CaretDownFilled;
     return (
-        <Dropdown
+        <ConfigDropdown
             onOpenChange={onToggleHandler}
             trigger={['click']}
             menu={menu}
             open={isHelpDropDownOpen}
         >
             <Space align="center">
-                <Icon
-                    style={{ fontSize: 26, color: 'white', paddingTop: 26, cursor: 'pointer' }}
-                />
+                <Icon style={{ fontSize: 26, color: 'white', paddingTop: 26 }} />
                 <DropDownIcon style={{ color: 'white' }} />
             </Space>
-        </Dropdown>
+        </ConfigDropdown>
     );
 }

View File

@@ -0,0 +1,6 @@
+import { Dropdown } from 'antd';
+import styled from 'styled-components';
+
+export const ConfigDropdown = styled(Dropdown)`
+    cursor: pointer;
+`;

View File

@@ -53,9 +53,7 @@ function WidgetGraphComponent({
     setLayout,
     onDragSelect,
     onClickHandler,
-    allowClone = true,
-    allowDelete = true,
-    allowEdit = true,
+    headerMenuList,
 }: WidgetGraphComponentProps): JSX.Element {
     const [deleteModal, setDeleteModal] = useState(false);
     const [modal, setModal] = useState<boolean>(false);
@@ -281,9 +279,7 @@ function WidgetGraphComponent({
             onClone={onCloneHandler}
             queryResponse={queryResponse}
             errorMessage={errorMessage}
-            allowClone={allowClone}
-            allowDelete={allowDelete}
-            allowEdit={allowEdit}
+            headerMenuList={headerMenuList}
           />
         </div>
       )}
@@ -297,7 +293,7 @@ function WidgetGraphComponent({
         yAxisUnit={yAxisUnit}
         onClickHandler={onClickHandler}
         onDragSelect={onDragSelect}
-        panelData={[]}
+        panelData={queryResponse.data?.payload?.data.newResult.data.result || []}
         query={widget.query}
         ref={lineChartRef}
       />
@@ -313,9 +309,6 @@ WidgetGraphComponent.defaultProps = {
     setLayout: undefined,
     onDragSelect: undefined,
     onClickHandler: undefined,
-    allowDelete: true,
-    allowClone: true,
-    allowEdit: true,
 };
 const mapDispatchToProps = (

View File

@@ -15,6 +15,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
 import { getSelectedDashboardVariable } from 'utils/dashboard/selectedDashboard';
 import EmptyWidget from '../EmptyWidget';
+import { MenuItemKeys } from '../WidgetHeader/contants';
 import { GridCardGraphProps } from './types';
 import WidgetGraphComponent from './WidgetGraphComponent';
@@ -26,9 +27,7 @@ function GridCardGraph({
     setLayout,
     onDragSelect,
     onClickHandler,
-    allowDelete,
-    allowClone,
-    allowEdit,
+    headerMenuList = [MenuItemKeys.View],
     isQueryEnabled,
 }: GridCardGraphProps): JSX.Element {
     const { isAddWidget } = useSelector<AppState, DashboardReducer>(
@@ -121,9 +120,7 @@ function GridCardGraph({
           yAxisUnit={yAxisUnit}
           layout={layout}
           setLayout={setLayout}
-          allowClone={allowClone}
-          allowDelete={allowDelete}
-          allowEdit={allowEdit}
+          headerMenuList={headerMenuList}
         />
       )}
     </span>
@@ -145,9 +142,7 @@ function GridCardGraph({
         yAxisUnit={yAxisUnit}
         layout={layout}
         setLayout={setLayout}
-        allowClone={allowClone}
-        allowDelete={allowDelete}
-        allowEdit={allowEdit}
+        headerMenuList={headerMenuList}
         onClickHandler={onClickHandler}
       />
     ) : (
@@ -170,9 +165,7 @@ function GridCardGraph({
       name={name}
       yAxisUnit={yAxisUnit}
       onDragSelect={onDragSelect}
-      allowClone={allowClone}
-      allowDelete={allowDelete}
-      allowEdit={allowEdit}
+      headerMenuList={headerMenuList}
       onClickHandler={onClickHandler}
     />
   )}
@@ -185,10 +178,8 @@ function GridCardGraph({
 GridCardGraph.defaultProps = {
     onDragSelect: undefined,
     onClickHandler: undefined,
-    allowDelete: true,
-    allowClone: true,
-    allowEdit: true,
     isQueryEnabled: true,
+    headerMenuList: [MenuItemKeys.View],
 };
 export default memo(GridCardGraph);

View File

@@ -10,6 +10,7 @@ import { Widgets } from 'types/api/dashboard/getAll';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import { LayoutProps } from '..';
+import { MenuItemKeys } from '../WidgetHeader/contants';
 import { LegendEntryProps } from './FullView/types';
 export interface GraphVisibilityLegendEntryProps {
@@ -38,25 +39,19 @@ export interface WidgetGraphComponentProps extends DispatchProps {
     setLayout?: Dispatch<SetStateAction<LayoutProps[]>>;
     onDragSelect?: (start: number, end: number) => void;
     onClickHandler?: GraphOnClickHandler;
-    allowDelete?: boolean;
-    allowClone?: boolean;
-    allowEdit?: boolean;
+    headerMenuList: MenuItemKeys[];
 }
 export interface GridCardGraphProps {
     widget: Widgets;
     name: string;
     yAxisUnit: string | undefined;
-    // eslint-disable-next-line react/require-default-props
     layout?: Layout[];
-    // eslint-disable-next-line react/require-default-props
     setLayout?: Dispatch<SetStateAction<LayoutProps[]>>;
     onDragSelect?: (start: number, end: number) => void;
     onClickHandler?: GraphOnClickHandler;
-    allowDelete?: boolean;
-    allowClone?: boolean;
-    allowEdit?: boolean;
-    isQueryEnabled?: boolean;
+    headerMenuList?: WidgetGraphComponentProps['headerMenuList'];
+    isQueryEnabled: boolean;
 }
 export interface GetGraphVisibilityStateOnLegendClickProps {

View File

@@ -3,6 +3,7 @@ export enum MenuItemKeys {
     Edit = 'edit',
     Delete = 'delete',
     Clone = 'clone',
+    CreateAlerts = 'createAlerts',
 }
 export const MENUITEM_KEYS_VS_LABELS = {
@@ -10,4 +11,5 @@ export const MENUITEM_KEYS_VS_LABELS = {
     [MenuItemKeys.Edit]: 'Edit',
     [MenuItemKeys.Delete]: 'Delete',
     [MenuItemKeys.Clone]: 'Clone',
+    [MenuItemKeys.CreateAlerts]: 'Create Alerts',
 };

View File

@@ -7,9 +7,9 @@ import {
     FullscreenOutlined,
 } from '@ant-design/icons';
 import { Dropdown, MenuProps, Tooltip, Typography } from 'antd';
-import { MenuItemType } from 'antd/es/menu/hooks/useItems';
 import Spinner from 'components/Spinner';
 import { queryParamNamesMap } from 'constants/queryBuilderQueryNames';
+import ROUTES from 'constants/routes';
 import useComponentPermission from 'hooks/useComponentPermission';
 import history from 'lib/history';
 import { useCallback, useMemo, useState } from 'react';
@@ -33,7 +33,7 @@ import {
     HeaderContainer,
     HeaderContentContainer,
 } from './styles';
-import { KeyMethodMappingProps, MenuItem, TWidgetOptions } from './types';
+import { MenuItem } from './types';
 import { generateMenuList, isTWidgetOptions } from './utils';
 interface IWidgetHeaderProps {
@@ -47,10 +47,9 @@ interface IWidgetHeaderProps {
         SuccessResponse<MetricRangePayloadProps> | ErrorResponse
     >;
     errorMessage: string | undefined;
-    allowDelete?: boolean;
-    allowClone?: boolean;
-    allowEdit?: boolean;
+    headerMenuList?: MenuItemKeys[];
 }
 function WidgetHeader({
     title,
     widget,
@@ -60,9 +59,7 @@ function WidgetHeader({
     parentHover,
     queryResponse,
     errorMessage,
-    allowClone = true,
-    allowDelete = true,
-    allowEdit = true,
+    headerMenuList,
 }: IWidgetHeaderProps): JSX.Element {
     const [localHover, setLocalHover] = useState(false);
     const [isOpen, setIsOpen] = useState<boolean>(false);
@@ -78,32 +75,30 @@ function WidgetHeader({
         );
     }, [widget.id, widget.panelTypes, widget.query]);
-    const keyMethodMapping: KeyMethodMappingProps<TWidgetOptions> = useMemo(
+    const onCreateAlertsHandler = useCallback(() => {
+        history.push(
+            `${ROUTES.ALERTS_NEW}?${
+                queryParamNamesMap.compositeQuery
+            }=${encodeURIComponent(JSON.stringify(widget.query))}`,
+        );
+    }, [widget]);
+    const keyMethodMapping = useMemo(
         () => ({
-            view: {
-                key: MenuItemKeys.View,
-                method: onView,
-            },
-            edit: {
-                key: MenuItemKeys.Edit,
-                method: onEditHandler,
-            },
-            delete: {
-                key: MenuItemKeys.Delete,
-                method: onDelete,
-            },
-            clone: {
-                key: MenuItemKeys.Clone,
-                method: onClone,
-            },
+            [MenuItemKeys.View]: onView,
+            [MenuItemKeys.Edit]: onEditHandler,
+            [MenuItemKeys.Delete]: onDelete,
+            [MenuItemKeys.Clone]: onClone,
+            [MenuItemKeys.CreateAlerts]: onCreateAlertsHandler,
         }),
-        [onDelete, onEditHandler, onView, onClone],
+        [onDelete, onEditHandler, onView, onClone, onCreateAlertsHandler],
     );
     const onMenuItemSelectHandler: MenuProps['onClick'] = useCallback(
         ({ key }: { key: string }): void => {
            if (isTWidgetOptions(key)) {
-               const functionToCall = keyMethodMapping[key]?.method;
+               const functionToCall = keyMethodMapping[key];
                if (functionToCall) {
                    functionToCall();
                    setIsOpen(false);
@@ -125,46 +120,43 @@ function WidgetHeader({
                 key: MenuItemKeys.View,
                 icon: <FullscreenOutlined />,
                 label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.View],
-                isVisible: true,
+                isVisible: headerMenuList?.includes(MenuItemKeys.View) || false,
                 disabled: queryResponse.isLoading,
             },
             {
                 key: MenuItemKeys.Edit,
                 icon: <EditFilled />,
                 label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Edit],
-                isVisible: allowEdit,
+                isVisible: headerMenuList?.includes(MenuItemKeys.Edit) || false,
                 disabled: !editWidget,
             },
             {
                 key: MenuItemKeys.Clone,
                 icon: <CopyOutlined />,
                 label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Clone],
-                isVisible: allowClone,
+                isVisible: headerMenuList?.includes(MenuItemKeys.Clone) || false,
                 disabled: !editWidget,
             },
             {
                 key: MenuItemKeys.Delete,
                 icon: <DeleteOutlined />,
                 label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Delete],
-                isVisible: allowDelete,
+                isVisible: headerMenuList?.includes(MenuItemKeys.Delete) || false,
                 disabled: !deleteWidget,
                 danger: true,
             },
+            {
+                key: MenuItemKeys.CreateAlerts,
+                icon: <DeleteOutlined />,
+                label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.CreateAlerts],
+                isVisible: headerMenuList?.includes(MenuItemKeys.CreateAlerts) || false,
+                disabled: false,
+            },
         ],
-        [
-            allowEdit,
-            allowClone,
-            allowDelete,
-            queryResponse.isLoading,
-            deleteWidget,
-            editWidget,
-        ],
+        [queryResponse.isLoading, headerMenuList, editWidget, deleteWidget],
     );
-    const menuList: MenuItemType[] = useMemo(
-        (): MenuItemType[] => generateMenuList(actions, keyMethodMapping),
-        [actions, keyMethodMapping],
-    );
+    const updatedMenuList = useMemo(() => generateMenuList(actions), [actions]);
     const onClickHandler = useCallback(() => {
         setIsOpen((open) => !open);
@@ -172,10 +164,10 @@ function WidgetHeader({
     const menu = useMemo(
         () => ({
-            items: menuList,
+            items: updatedMenuList,
             onClick: onMenuItemSelectHandler,
         }),
-        [menuList, onMenuItemSelectHandler],
+        [updatedMenuList, onMenuItemSelectHandler],
     );
     return (
@@ -219,9 +211,7 @@ function WidgetHeader({
 WidgetHeader.defaultProps = {
     onDelete: undefined,
     onClone: undefined,
-    allowDelete: true,
-    allowClone: true,
-    allowEdit: true,
+    headerMenuList: [MenuItemKeys.View],
 };
 export default WidgetHeader;

View File

@@ -3,23 +3,10 @@ import { ReactNode } from 'react';
 import { MenuItemKeys } from './contants';
 export interface MenuItem {
-    key: TWidgetOptions;
+    key: MenuItemKeys;
     icon: ReactNode;
     label: string;
     isVisible: boolean;
     disabled: boolean;
     danger?: boolean;
 }
-export type TWidgetOptions =
-    | MenuItemKeys.View
-    | MenuItemKeys.Edit
-    | MenuItemKeys.Delete
-    | MenuItemKeys.Clone;
-export type KeyMethodMappingProps<T extends TWidgetOptions> = {
-    [K in T]: {
-        key: TWidgetOptions;
-        method?: VoidFunction;
-    };
-};

View File

@@ -1,24 +1,22 @@
 import { MenuItemType } from 'antd/es/menu/hooks/useItems';
 import { MenuItemKeys } from './contants';
-import { KeyMethodMappingProps, MenuItem, TWidgetOptions } from './types';
+import { MenuItem } from './types';
-export const generateMenuList = (
-    actions: MenuItem[],
-    keyMethodMapping: KeyMethodMappingProps<TWidgetOptions>,
-): MenuItemType[] =>
+export const generateMenuList = (actions: MenuItem[]): MenuItemType[] =>
     actions
         .filter((action: MenuItem) => action.isVisible)
         .map(({ key, icon: Icon, label, disabled, ...rest }) => ({
-            key: keyMethodMapping[key].key,
+            key,
             icon: Icon,
             label,
             disabled,
             ...rest,
         }));
-export const isTWidgetOptions = (value: string): value is TWidgetOptions =>
+export const isTWidgetOptions = (value: string): value is MenuItemKeys =>
     value === MenuItemKeys.View ||
     value === MenuItemKeys.Edit ||
     value === MenuItemKeys.Delete ||
-    value === MenuItemKeys.Clone;
+    value === MenuItemKeys.Clone ||
+    value === MenuItemKeys.CreateAlerts;

View File

@@ -0,0 +1,8 @@
+import { MenuItemKeys } from 'container/GridGraphLayout/WidgetHeader/contants';
+
+export const headerMenuList = [
+    MenuItemKeys.View,
+    MenuItemKeys.Clone,
+    MenuItemKeys.Delete,
+    MenuItemKeys.Edit,
+];

View File

@@ -29,6 +29,7 @@ import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
 import AppReducer from 'types/reducer/app';
 import DashboardReducer from 'types/reducer/dashboards';
+import { headerMenuList } from './config';
 import Graph from './Graph';
 import GraphLayoutContainer from './GraphLayout';
 import { UpdateDashboard } from './utils';
@@ -49,6 +50,7 @@ export const getPreLayouts = (
         yAxisUnit={widget?.yAxisUnit}
         layout={layout}
         setLayout={setLayout}
+        headerMenuList={headerMenuList}
       />
     );
   },
@@ -233,6 +235,7 @@ function GridGraph(props: Props): JSX.Element {
           layout={layout}
           setLayout={setLayout}
           onDragSelect={onDragSelect}
+          headerMenuList={headerMenuList}
         />
       ),
     };

View File

@@ -3,7 +3,7 @@ import {
     CaretUpFilled,
     LogoutOutlined,
 } from '@ant-design/icons';
-import { Button, Divider, Dropdown, MenuProps, Space, Typography } from 'antd';
+import { Button, Divider, MenuProps, Space, Typography } from 'antd';
 import { Logout } from 'api/utils';
 import ROUTES from 'constants/routes';
 import Config from 'container/ConfigDropdown';
@@ -33,6 +33,7 @@ import {
     LogoutContainer,
     NavLinkWrapper,
     ToggleButton,
+    UserDropdown,
 } from './styles';
 function HeaderContainer(): JSX.Element {
@@ -133,7 +134,7 @@ function HeaderContainer(): JSX.Element {
             unCheckedChildren="🌞"
           />
-          <Dropdown
+          <UserDropdown
             onOpenChange={onToggleHandler(setIsUserDropDownOpen)}
             trigger={['click']}
             menu={menu}
@@ -145,7 +146,7 @@ function HeaderContainer(): JSX.Element {
               {!isUserDropDownOpen ? <CaretDownFilled /> : <CaretUpFilled />}
             </IconContainer>
           </Space>
-          </Dropdown>
+          </UserDropdown>
         </Space>
       </Container>
     </Header>

View File

@@ -1,4 +1,4 @@
-import { Avatar, Layout, Switch, Typography } from 'antd';
+import { Avatar, Dropdown, Layout, Switch, Typography } from 'antd';
 import styled from 'styled-components';
 export const Header = styled(Layout.Header)`
@@ -82,3 +82,7 @@ export const NavLinkWrapper = styled.div`
 export const AvatarWrapper = styled(Avatar)`
     background-color: rgba(255, 255, 255, 0.25);
 `;
+
+export const UserDropdown = styled(Dropdown)`
+    cursor: pointer;
+`;

View File

@@ -1,5 +1,6 @@
 import { ExclamationCircleOutlined } from '@ant-design/icons';
 import { Modal } from 'antd';
+import { useCallback } from 'react';
 import { connect } from 'react-redux';
 import { bindActionCreators, Dispatch } from 'redux';
 import { ThunkDispatch } from 'redux-thunk';
@@ -9,11 +10,11 @@ import AppActions from 'types/actions';
 import { Data } from '../index';
 import { TableLinkText } from './styles';
-const { confirm } = Modal;
 function DeleteButton({ deleteDashboard, id }: DeleteButtonProps): JSX.Element {
-    const openConfirmationDialog = (): void => {
-        confirm({
+    const [modal, contextHolder] = Modal.useModal();
+    const openConfirmationDialog = useCallback((): void => {
+        modal.confirm({
             title: 'Do you really want to delete this dashboard?',
             icon: <ExclamationCircleOutlined style={{ color: '#e42b35' }} />,
             onOk() {
@@ -25,12 +26,16 @@ function DeleteButton({ deleteDashboard, id }: DeleteButtonProps): JSX.Element {
             okButtonProps: { danger: true },
             centered: true,
         });
-    };
+    }, [id, modal, deleteDashboard]);
     return (
-        <TableLinkText type="danger" onClick={openConfirmationDialog}>
-            Delete
-        </TableLinkText>
+        <>
+            <TableLinkText type="danger" onClick={openConfirmationDialog}>
+                Delete
+            </TableLinkText>
+            {contextHolder}
+        </>
     );
 }

View File

@@ -1,3 +1,4 @@
+import { initialFilters } from 'constants/queryBuilder';
 import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
 import { ILog } from 'types/api/logs/log';
 import {
@@ -28,7 +29,7 @@ export const getRequestData = ({
     if (!query) return null;
     const paginateData = getPaginationQueryData({
-        currentStagedQueryData: stagedQueryData,
+        filters: stagedQueryData?.filters || initialFilters,
         listItemId: log ? log.id : null,
         orderByTimestamp,
         page,

View File

@@ -3,7 +3,9 @@ import TabLabel from 'components/TabLabel';
 import { QueryParams } from 'constants/query';
 import {
     initialAutocompleteData,
+    initialFilters,
     initialQueriesMap,
+    initialQueryBuilderFormValues,
     PANEL_TYPES,
 } from 'constants/queryBuilder';
 import { queryParamNamesMap } from 'constants/queryBuilderQueryNames';
@@ -36,6 +38,7 @@ import {
     IBuilderQuery,
     OrderByPayload,
     Query,
+    TagFilter,
 } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource, StringOperators } from 'types/common/queryBuilder';
 import { GlobalReducer } from 'types/reducer/globalTime';
@@ -75,19 +78,19 @@ function LogsExplorerViews(): JSX.Element {
     const handleAxisError = useAxiosError();
-    const currentStagedQueryData = useMemo(() => {
-        if (!stagedQuery || stagedQuery.builder.queryData.length !== 1) return null;
-        return stagedQuery.builder.queryData[0];
+    const listQuery = useMemo(() => {
+        if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
+        return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
     }, [stagedQuery]);
     const orderByTimestamp: OrderByPayload | null = useMemo(() => {
-        const timestampOrderBy = currentStagedQueryData?.orderBy.find(
+        const timestampOrderBy = listQuery?.orderBy.find(
             (item) => item.columnName === 'timestamp',
         );
         return timestampOrderBy || null;
-    }, [currentStagedQueryData]);
+    }, [listQuery]);
     const isMultipleQueries = useMemo(
         () =>
@@ -106,17 +109,17 @@ function LogsExplorerViews(): JSX.Element {
     }, [currentQuery]);
     const isLimit: boolean = useMemo(() => {
-        if (!currentStagedQueryData) return false;
-        if (!currentStagedQueryData.limit) return false;
-        return logs.length >= currentStagedQueryData.limit;
-    }, [logs.length, currentStagedQueryData]);
+        if (!listQuery) return false;
+        if (!listQuery.limit) return false;
+        return logs.length >= listQuery.limit;
+    }, [logs.length, listQuery]);
     const listChartQuery = useMemo(() => {
-        if (!stagedQuery || !currentStagedQueryData) return null;
+        if (!stagedQuery || !listQuery) return null;
         const modifiedQueryData: IBuilderQuery = {
-            ...currentStagedQueryData,
+            ...listQuery,
             aggregateOperator: StringOperators.COUNT,
         };
@@ -132,7 +135,7 @@ function LogsExplorerViews(): JSX.Element {
         };
         return modifiedQuery;
-    }, [stagedQuery, currentStagedQueryData]);
+    }, [stagedQuery, listQuery]);
     const exportDefaultQuery = useMemo(
         () =>
@@ -147,6 +150,9 @@ function LogsExplorerViews(): JSX.Element {
     const listChartData = useGetExplorerQueryRange(
         listChartQuery,
         PANEL_TYPES.TIME_SERIES,
+        {
+            enabled: !!listChartQuery && panelType === PANEL_TYPES.LIST,
+        },
     );
     const { data, isFetching, isError } = useGetExplorerQueryRange(
@@ -205,52 +211,66 @@ function LogsExplorerViews(): JSX.Element {
     const getRequestData = useCallback(
         (
             query: Query | null,
-            params: { page: number; log: ILog | null; pageSize: number },
+            params: {
+                page: number;
+                log: ILog | null;
+                pageSize: number;
+                filters: TagFilter;
+            },
         ): Query | null => {
             if (!query) return null;
             const paginateData = getPaginationQueryData({
-                currentStagedQueryData,
+                filters: params.filters,
                 listItemId: params.log ? params.log.id : null,
                 orderByTimestamp,
                 page: params.page,
                 pageSize: params.pageSize,
             });
+            const queryData: IBuilderQuery[] =
+                query.builder.queryData.length > 1
+                    ? query.builder.queryData
+                    : [
+                          {
+                              ...(listQuery || initialQueryBuilderFormValues),
+                              ...paginateData,
+                          },
+                      ];
             const data: Query = {
                 ...query,
                 builder: {
                     ...query.builder,
-                    queryData: query.builder.queryData.map((item) => ({
+                    queryData,
...item,
...paginateData,
pageSize: params.pageSize,
})),
}, },
}; };
return data; return data;
}, },
[currentStagedQueryData, orderByTimestamp], [orderByTimestamp, listQuery],
); );
const handleEndReached = useCallback( const handleEndReached = useCallback(
(index: number) => { (index: number) => {
if (!listQuery) return;
if (isLimit) return; if (isLimit) return;
if (logs.length < pageSize) return; if (logs.length < pageSize) return;
const { limit, filters } = listQuery;
const lastLog = logs[index]; const lastLog = logs[index];
const limit = currentStagedQueryData?.limit; const nextLogsLength = logs.length + pageSize;
const nextLogsLenth = logs.length + pageSize;
const nextPageSize = const nextPageSize =
limit && nextLogsLenth >= limit ? limit - logs.length : pageSize; limit && nextLogsLength >= limit ? limit - logs.length : pageSize;
if (!stagedQuery) return; if (!stagedQuery) return;
const newRequestData = getRequestData(stagedQuery, { const newRequestData = getRequestData(stagedQuery, {
filters,
page: page + 1, page: page + 1,
log: orderByTimestamp ? lastLog : null, log: orderByTimestamp ? lastLog : null,
pageSize: nextPageSize, pageSize: nextPageSize,
@ -263,7 +283,7 @@ function LogsExplorerViews(): JSX.Element {
[ [
isLimit, isLimit,
logs, logs,
currentStagedQueryData?.limit, listQuery,
pageSize, pageSize,
stagedQuery, stagedQuery,
getRequestData, getRequestData,
@ -367,11 +387,13 @@ function LogsExplorerViews(): JSX.Element {
currentMinTimeRef.current !== minTime currentMinTimeRef.current !== minTime
) { ) {
const newRequestData = getRequestData(stagedQuery, { const newRequestData = getRequestData(stagedQuery, {
filters: listQuery?.filters || initialFilters,
page: 1, page: 1,
log: null, log: null,
pageSize: pageSize:
timeRange?.pageSize && activeLogId ? timeRange?.pageSize : pageSize, timeRange?.pageSize && activeLogId ? timeRange?.pageSize : pageSize,
}); });
setLogs([]); setLogs([]);
setPage(1); setPage(1);
setRequestData(newRequestData); setRequestData(newRequestData);
@ -385,11 +407,13 @@ function LogsExplorerViews(): JSX.Element {
stagedQuery, stagedQuery,
requestData, requestData,
getRequestData, getRequestData,
listQuery,
pageSize, pageSize,
minTime, minTime,
timeRange, timeRange,
activeLogId, activeLogId,
onTimeRangeChange, onTimeRangeChange,
panelType,
]); ]);
const tabsItems: TabsProps['items'] = useMemo( const tabsItems: TabsProps['items'] = useMemo(
@ -407,7 +431,7 @@ function LogsExplorerViews(): JSX.Element {
children: ( children: (
<LogsExplorerList <LogsExplorerList
isLoading={isFetching} isLoading={isFetching}
currentStagedQueryData={currentStagedQueryData} currentStagedQueryData={listQuery}
logs={logs} logs={logs}
onEndReached={handleEndReached} onEndReached={handleEndReached}
/> />
@ -435,7 +459,7 @@ function LogsExplorerViews(): JSX.Element {
isMultipleQueries, isMultipleQueries,
isGroupByExist, isGroupByExist,
isFetching, isFetching,
currentStagedQueryData, listQuery,
logs, logs,
handleEndReached, handleEndReached,
data, data,
@ -463,10 +487,14 @@ function LogsExplorerViews(): JSX.Element {
(queryData) => queryData.groupBy.length > 0, (queryData) => queryData.groupBy.length > 0,
); );
return isGroupByExist const firstPayloadQuery = data.payload.data.result.find(
? data.payload.data.result (item) => item.queryName === listQuery?.queryName,
: [data.payload.data.result[0]]; );
}, [stagedQuery, data, panelType, listChartData]);
const firstPayloadQueryArray = firstPayloadQuery ? [firstPayloadQuery] : [];
return isGroupByExist ? data.payload.data.result : firstPayloadQueryArray;
}, [stagedQuery, panelType, data, listChartData, listQuery]);
return ( return (
<> <>
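
The LogsExplorerViews changes above replace `currentStagedQueryData` (which required exactly one staged query) with `listQuery`, the first non-disabled builder query, and thread its `filters` and `limit` into pagination. A condensed sketch of that selection logic, assuming a simplified query shape in place of the real `IBuilderQuery`:

```ts
// Simplified stand-in for IBuilderQuery; the real type also carries filters,
// orderBy, aggregation settings and more.
interface BuilderQueryLike {
	queryName: string;
	disabled: boolean;
	limit: number | null;
}

// Mirrors the new listQuery memo: instead of bailing out unless exactly one
// query is staged, pick the first enabled query (or null if none).
function pickListQuery(queryData: BuilderQueryLike[]): BuilderQueryLike | null {
	if (queryData.length < 1) return null;
	return queryData.find((item) => !item.disabled) || null;
}

// With query A disabled, query B drives the list view, and its limit/filters
// feed pagination and the list chart query.
const listQuery = pickListQuery([
	{ queryName: 'A', disabled: true, limit: null },
	{ queryName: 'B', disabled: false, limit: 100 },
]);
// listQuery?.queryName === 'B'
```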

View File

@ -5,6 +5,7 @@ import RawLogView from 'components/Logs/RawLogView';
import LogsTableView from 'components/Logs/TableView'; import LogsTableView from 'components/Logs/TableView';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
import { contentStyle } from 'container/Trace/Search/config'; import { contentStyle } from 'container/Trace/Search/config';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import useFontFaceObserver from 'hooks/useFontObserver'; import useFontFaceObserver from 'hooks/useFontObserver';
import { memo, useCallback, useMemo } from 'react'; import { memo, useCallback, useMemo } from 'react';
import { useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
@ -26,6 +27,8 @@ type LogsTableProps = {
function LogsTable(props: LogsTableProps): JSX.Element { function LogsTable(props: LogsTableProps): JSX.Element {
const { viewMode, linesPerRow } = props; const { viewMode, linesPerRow } = props;
const { onSetActiveLog } = useActiveLog();
useFontFaceObserver( useFontFaceObserver(
[ [
{ {
@ -72,7 +75,12 @@ function LogsTable(props: LogsTableProps): JSX.Element {
const renderContent = useMemo(() => { const renderContent = useMemo(() => {
if (viewMode === 'table') { if (viewMode === 'table') {
return ( return (
<LogsTableView logs={logs} fields={selected} linesPerRow={linesPerRow} /> <LogsTableView
onClickExpand={onSetActiveLog}
logs={logs}
fields={selected}
linesPerRow={linesPerRow}
/>
); );
} }
@ -85,7 +93,7 @@ function LogsTable(props: LogsTableProps): JSX.Element {
/> />
</Card> </Card>
); );
}, [getItemContent, linesPerRow, logs, selected, viewMode]); }, [getItemContent, linesPerRow, logs, onSetActiveLog, selected, viewMode]);
if (isLoading) { if (isLoading) {
return <Spinner height={20} tip="Getting Logs" />; return <Spinner height={20} tip="Getting Logs" />;

View File

@ -117,9 +117,6 @@ function DBCall(): JSX.Element {
'database_call_rps', 'database_call_rps',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>
@ -153,9 +150,6 @@ function DBCall(): JSX.Element {
'database_call_avg_duration', 'database_call_avg_duration',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>

View File

@ -156,9 +156,6 @@ function External(): JSX.Element {
'external_call_error_percentage', 'external_call_error_percentage',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>
@ -194,9 +191,6 @@ function External(): JSX.Element {
'external_call_duration', 'external_call_duration',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>
@ -233,9 +227,6 @@ function External(): JSX.Element {
'external_call_rps_by_address', 'external_call_rps_by_address',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>
@ -271,9 +262,6 @@ function External(): JSX.Element {
'external_call_duration_by_address', 'external_call_duration_by_address',
); );
}} }}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
</GraphContainer> </GraphContainer>
</Card> </Card>

View File

@ -75,9 +75,6 @@ function ServiceOverview({
widget={latencyWidget} widget={latencyWidget}
yAxisUnit="ns" yAxisUnit="ns"
onClickHandler={handleGraphClick('Service')} onClickHandler={handleGraphClick('Service')}
allowClone={false}
allowDelete={false}
allowEdit={false}
isQueryEnabled={isQueryEnabled} isQueryEnabled={isQueryEnabled}
/> />
</GraphContainer> </GraphContainer>

View File

@ -39,9 +39,6 @@ function TopLevelOperation({
onClickHandler={handleGraphClick(opName)} onClickHandler={handleGraphClick(opName)}
yAxisUnit={yAxisUnit} yAxisUnit={yAxisUnit}
onDragSelect={onDragSelect} onDragSelect={onDragSelect}
allowClone={false}
allowDelete={false}
allowEdit={false}
/> />
)} )}
</GraphContainer> </GraphContainer>

View File

@ -19,6 +19,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
import { IServiceName } from '../types'; import { IServiceName } from '../types';
import { title } from './config';
import ColumnWithLink from './TableRenderer/ColumnWithLink'; import ColumnWithLink from './TableRenderer/ColumnWithLink';
import { getTableColumnRenderer } from './TableRenderer/TableColumnRenderer'; import { getTableColumnRenderer } from './TableRenderer/TableColumnRenderer';
@ -108,6 +109,7 @@ function TopOperationMetrics(): JSX.Element {
return ( return (
<QueryTable <QueryTable
title={title}
query={updatedQuery} query={updatedQuery}
queryTableData={queryTableData} queryTableData={queryTableData}
loading={isLoading} loading={isLoading}

View File

@ -5,12 +5,13 @@ import { changeHistoryColumns } from '../../PipelineListsView/config';
import { HistoryTableWrapper } from '../../styles'; import { HistoryTableWrapper } from '../../styles';
import { historyPagination } from '../config'; import { historyPagination } from '../config';
function ChangeHistory({ piplineData }: ChangeHistoryProps): JSX.Element { function ChangeHistory({ pipelineData }: ChangeHistoryProps): JSX.Element {
return ( return (
<HistoryTableWrapper> <HistoryTableWrapper>
<Table <Table
columns={changeHistoryColumns} columns={changeHistoryColumns}
dataSource={piplineData?.history ?? []} dataSource={pipelineData?.history ?? []}
rowKey="id"
pagination={historyPagination} pagination={historyPagination}
/> />
</HistoryTableWrapper> </HistoryTableWrapper>
@ -18,7 +19,7 @@ function ChangeHistory({ piplineData }: ChangeHistoryProps): JSX.Element {
} }
interface ChangeHistoryProps { interface ChangeHistoryProps {
piplineData: Pipeline; pipelineData: Pipeline;
} }
export default ChangeHistory; export default ChangeHistory;

View File

@ -11,13 +11,13 @@ function CreatePipelineButton({
setActionType, setActionType,
isActionMode, isActionMode,
setActionMode, setActionMode,
piplineData, pipelineData,
}: CreatePipelineButtonProps): JSX.Element { }: CreatePipelineButtonProps): JSX.Element {
const { t } = useTranslation(['pipeline']); const { t } = useTranslation(['pipeline']);
const isAddNewPipelineVisible = useMemo( const isAddNewPipelineVisible = useMemo(
() => checkDataLength(piplineData?.pipelines), () => checkDataLength(pipelineData?.pipelines),
[piplineData?.pipelines], [pipelineData?.pipelines],
); );
const isDisabled = isActionMode === ActionMode.Editing; const isDisabled = isActionMode === ActionMode.Editing;
@ -56,7 +56,7 @@ interface CreatePipelineButtonProps {
setActionType: (actionType: string) => void; setActionType: (actionType: string) => void;
isActionMode: string; isActionMode: string;
setActionMode: (actionMode: string) => void; setActionMode: (actionMode: string) => void;
piplineData: Pipeline; pipelineData: Pipeline;
} }
export default CreatePipelineButton; export default CreatePipelineButton;

View File

@ -7,7 +7,7 @@ import PipelinesSearchSection from './PipelinesSearchSection';
function PipelinePageLayout({ function PipelinePageLayout({
refetchPipelineLists, refetchPipelineLists,
piplineData, pipelineData,
}: PipelinePageLayoutProps): JSX.Element { }: PipelinePageLayoutProps): JSX.Element {
const [isActionType, setActionType] = useState<string>(); const [isActionType, setActionType] = useState<string>();
const [isActionMode, setActionMode] = useState<string>('viewing-mode'); const [isActionMode, setActionMode] = useState<string>('viewing-mode');
@ -19,7 +19,7 @@ function PipelinePageLayout({
setActionType={setActionType} setActionType={setActionType}
setActionMode={setActionMode} setActionMode={setActionMode}
isActionMode={isActionMode} isActionMode={isActionMode}
piplineData={piplineData} pipelineData={pipelineData}
/> />
<PipelinesSearchSection setPipelineSearchValue={setPipelineSearchValue} /> <PipelinesSearchSection setPipelineSearchValue={setPipelineSearchValue} />
<PipelineListsView <PipelineListsView
@ -27,7 +27,7 @@ function PipelinePageLayout({
setActionType={setActionType} setActionType={setActionType}
setActionMode={setActionMode} setActionMode={setActionMode}
isActionMode={isActionMode} isActionMode={isActionMode}
piplineData={piplineData} pipelineData={pipelineData}
refetchPipelineLists={refetchPipelineLists} refetchPipelineLists={refetchPipelineLists}
pipelineSearchValue={pipelineSearchValue} pipelineSearchValue={pipelineSearchValue}
/> />
@ -37,7 +37,7 @@ function PipelinePageLayout({
interface PipelinePageLayoutProps { interface PipelinePageLayoutProps {
refetchPipelineLists: VoidFunction; refetchPipelineLists: VoidFunction;
piplineData: Pipeline; pipelineData: Pipeline;
} }
export default PipelinePageLayout; export default PipelinePageLayout;

View File

@ -4,21 +4,21 @@ import { ModeAndConfigWrapper } from './styles';
function ModeAndConfiguration({ function ModeAndConfiguration({
isActionMode, isActionMode,
verison, version,
}: ModeAndConfigurationType): JSX.Element { }: ModeAndConfigurationType): JSX.Element {
const actionMode = isActionMode === ActionMode.Editing; const actionMode = isActionMode === ActionMode.Editing;
return ( return (
<ModeAndConfigWrapper> <ModeAndConfigWrapper>
Mode: <span>{actionMode ? 'Editing' : 'Viewing'}</span> Mode: <span>{actionMode ? 'Editing' : 'Viewing'}</span>
<div>Configuration Version: {verison}</div> <div>Configuration Version: {version}</div>
</ModeAndConfigWrapper> </ModeAndConfigWrapper>
); );
} }
export interface ModeAndConfigurationType { export interface ModeAndConfigurationType {
isActionMode: string; isActionMode: string;
verison: string | number; version: string | number;
} }
export default ModeAndConfiguration; export default ModeAndConfiguration;

View File

@ -47,7 +47,7 @@ function PipelineListsView({
setActionType, setActionType,
isActionMode, isActionMode,
setActionMode, setActionMode,
piplineData, pipelineData,
refetchPipelineLists, refetchPipelineLists,
pipelineSearchValue, pipelineSearchValue,
}: PipelineListsViewProps): JSX.Element { }: PipelineListsViewProps): JSX.Element {
@ -55,10 +55,10 @@ function PipelineListsView({
const [modal, contextHolder] = Modal.useModal(); const [modal, contextHolder] = Modal.useModal();
const { notifications } = useNotifications(); const { notifications } = useNotifications();
const [prevPipelineData, setPrevPipelineData] = useState<Array<PipelineData>>( const [prevPipelineData, setPrevPipelineData] = useState<Array<PipelineData>>(
cloneDeep(piplineData?.pipelines), cloneDeep(pipelineData?.pipelines),
); );
const [currPipelineData, setCurrPipelineData] = useState<Array<PipelineData>>( const [currPipelineData, setCurrPipelineData] = useState<Array<PipelineData>>(
cloneDeep(piplineData?.pipelines), cloneDeep(pipelineData?.pipelines),
); );
const [ const [
expandedPipelineData, expandedPipelineData,
@ -77,14 +77,14 @@ function PipelineListsView({
const isEditingActionMode = isActionMode === ActionMode.Editing; const isEditingActionMode = isActionMode === ActionMode.Editing;
useEffect(() => { useEffect(() => {
if (pipelineSearchValue === '') setCurrPipelineData(piplineData?.pipelines); if (pipelineSearchValue === '') setCurrPipelineData(pipelineData?.pipelines);
if (pipelineSearchValue !== '') { if (pipelineSearchValue !== '') {
const filterData = piplineData?.pipelines.filter((data: PipelineData) => const filterData = pipelineData?.pipelines.filter((data: PipelineData) =>
getDataOnSearch(data as never, pipelineSearchValue), getDataOnSearch(data as never, pipelineSearchValue),
); );
setCurrPipelineData(filterData); setCurrPipelineData(filterData);
} }
}, [pipelineSearchValue, piplineData?.pipelines]); }, [pipelineSearchValue, pipelineData?.pipelines]);
const handleAlert = useCallback( const handleAlert = useCallback(
({ title, descrition, buttontext, onCancel, onOk }: AlertMessage) => { ({ title, descrition, buttontext, onCancel, onOk }: AlertMessage) => {
@ -414,7 +414,7 @@ function PipelineListsView({
<Container> <Container>
<ModeAndConfiguration <ModeAndConfiguration
isActionMode={isActionMode} isActionMode={isActionMode}
verison={piplineData?.version} version={pipelineData?.version}
/> />
<DndProvider backend={HTML5Backend}> <DndProvider backend={HTML5Backend}>
<Table <Table
@ -445,7 +445,7 @@ interface PipelineListsViewProps {
setActionType: (actionType?: ActionType) => void; setActionType: (actionType?: ActionType) => void;
isActionMode: string; isActionMode: string;
setActionMode: (actionMode: ActionMode) => void; setActionMode: (actionMode: ActionMode) => void;
piplineData: Pipeline; pipelineData: Pipeline;
refetchPipelineLists: VoidFunction; refetchPipelineLists: VoidFunction;
pipelineSearchValue: string; pipelineSearchValue: string;
} }

View File

@ -1,6 +1,6 @@
import { Pipeline, PipelineData } from 'types/api/pipeline/def'; import { Pipeline, PipelineData } from 'types/api/pipeline/def';
export const configurationVerison = '1.0'; export const configurationVersion = '1.0';
export const pipelineMockData: Array<PipelineData> = [ export const pipelineMockData: Array<PipelineData> = [
{ {

View File

@ -18,7 +18,7 @@ describe('PipelinePage container test', () => {
setActionType={jest.fn()} setActionType={jest.fn()}
isActionMode="viewing-mode" isActionMode="viewing-mode"
setActionMode={jest.fn()} setActionMode={jest.fn()}
piplineData={pipelineApiResponseMockData} pipelineData={pipelineApiResponseMockData}
/> />
</I18nextProvider> </I18nextProvider>
</Provider> </Provider>

View File

@ -49,7 +49,7 @@ describe('PipelinePage container test', () => {
<Provider store={store}> <Provider store={store}>
<I18nextProvider i18n={i18n}> <I18nextProvider i18n={i18n}>
<PipelinePageLayout <PipelinePageLayout
piplineData={pipelinedata} pipelineData={pipelinedata}
refetchPipelineLists={refetchPipelineLists} refetchPipelineLists={refetchPipelineLists}
/> />
</I18nextProvider> </I18nextProvider>

View File

@ -20,7 +20,7 @@ export function QueryTable({
renderActionCell, renderActionCell,
renderColumnCell, renderColumnCell,
}), }),
[query, queryTableData, renderColumnCell, renderActionCell], [query, queryTableData, renderActionCell, renderColumnCell],
); );
const tableColumns = modifyColumns ? modifyColumns(columns) : columns; const tableColumns = modifyColumns ? modifyColumns(columns) : columns;

View File

@ -0,0 +1,22 @@
import { EventListener, EventSourceEventMap } from 'event-source-polyfill';
import { useEventSource } from 'providers/EventSource';
import { useEffect } from 'react';
export const useEventSourceEvent = (
eventName: keyof EventSourceEventMap,
listener: EventListener,
): void => {
const { eventSourceInstance } = useEventSource();
useEffect(() => {
if (eventSourceInstance) {
eventSourceInstance.addEventListener(eventName, listener);
}
return (): void => {
if (eventSourceInstance) {
eventSourceInstance.removeEventListener(eventName, listener);
}
};
}, [eventName, eventSourceInstance, listener]);
};
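
The new hook above attaches a listener to the shared EventSource instance and detaches it on cleanup. A hypothetical usage sketch: the component, handler, and import path are illustrative; only the `useEventSourceEvent('message', listener)` call shape comes from the diff:

```tsx
import { useCallback, useState } from 'react';
// Import path is assumed for illustration; the diff does not show the
// hook's file location.
import { useEventSourceEvent } from 'hooks/useEventSourceEvent';

function LiveTailCounter(): JSX.Element {
	const [received, setReceived] = useState(0);

	// Keep the listener referentially stable so the hook's effect does not
	// detach and re-attach it on every render.
	const handleMessage = useCallback((): void => {
		setReceived((prev) => prev + 1);
	}, []);

	useEventSourceEvent('message', handleMessage);

	return <span>{received} live log events received</span>;
}

export default LiveTailCounter;
```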

View File

@ -1,4 +1,3 @@
import { initialFilters } from 'constants/queryBuilder';
import { FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; import { FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { import {
IBuilderQuery, IBuilderQuery,
@ -8,7 +7,7 @@ import {
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
type SetupPaginationQueryDataParams = { type SetupPaginationQueryDataParams = {
currentStagedQueryData: IBuilderQuery | null; filters: IBuilderQuery['filters'];
listItemId: string | null; listItemId: string | null;
orderByTimestamp: OrderByPayload | null; orderByTimestamp: OrderByPayload | null;
page: number; page: number;
@ -17,20 +16,15 @@ type SetupPaginationQueryDataParams = {
type SetupPaginationQueryData = ( type SetupPaginationQueryData = (
params: SetupPaginationQueryDataParams, params: SetupPaginationQueryDataParams,
) => Pick<IBuilderQuery, 'filters' | 'offset'>; ) => Partial<IBuilderQuery>;
export const getPaginationQueryData: SetupPaginationQueryData = ({ export const getPaginationQueryData: SetupPaginationQueryData = ({
currentStagedQueryData, filters,
listItemId, listItemId,
orderByTimestamp, orderByTimestamp,
page, page,
pageSize, pageSize,
}) => { }) => {
if (!currentStagedQueryData) {
return { limit: null, filters: initialFilters };
}
const filters = currentStagedQueryData.filters || initialFilters;
const offset = (page - 1) * pageSize; const offset = (page - 1) * pageSize;
const queryProps = { const queryProps = {
@ -69,5 +63,5 @@ export const getPaginationQueryData: SetupPaginationQueryData = ({
...queryProps, ...queryProps,
}; };
return { ...currentStagedQueryData, ...chunkOfQueryData }; return chunkOfQueryData;
}; };
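
With the helper reworked as above, callers now hand in the `filters` they want paginated instead of the whole staged query, and get back only a partial chunk to spread onto a builder query. A condensed sketch of the new calling convention, assuming simplified types and a stand-in name (`getPaginationChunk`) so it is not mistaken for the real implementation, which also rewrites filters around `listItemId` and `orderByTimestamp`:

```ts
// Simplified stand-ins for TagFilter / IBuilderQuery fields.
interface TagFilter {
	items: unknown[];
	op: string;
}

interface PaginationParams {
	filters: TagFilter;
	listItemId: string | null;
	orderByTimestamp: { columnName: string; order: string } | null;
	page: number;
	pageSize: number;
}

// Condensed behaviour only: offset-based paging plus the caller's filters.
function getPaginationChunk({
	filters,
	page,
	pageSize,
}: PaginationParams): { filters: TagFilter; offset: number; pageSize: number } {
	const offset = (page - 1) * pageSize;
	return { filters, offset, pageSize };
}

// Caller side, as in getRequestData / LogsExplorerViews: fall back to
// initialFilters when no list query is staged, then spread the chunk.
const initialFilters: TagFilter = { items: [], op: 'AND' };
const paginateData = getPaginationChunk({
	filters: initialFilters,
	listItemId: null,
	orderByTimestamp: { columnName: 'timestamp', order: 'desc' },
	page: 2,
	pageSize: 100,
});
const nextQueryData = { ...paginateData };
```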

View File

@ -28,14 +28,13 @@ export type RowData = {
[key: string]: string | number; [key: string]: string | number;
}; };
type DynamicColumn = { export type DynamicColumn = {
query: IBuilderQuery | IBuilderFormula; query: IBuilderQuery | IBuilderFormula;
field: string; field: string;
dataIndex: string; dataIndex: string;
title: string; title: string;
data: (string | number)[]; data: (string | number)[];
type: 'field' | 'operator' | 'formula'; type: 'field' | 'operator' | 'formula';
// sortable: boolean;
}; };
type DynamicColumns = DynamicColumn[]; type DynamicColumns = DynamicColumn[];
@ -91,37 +90,13 @@ const getQueryByName = <T extends keyof QueryBuilderData>(
return currentQuery as T extends 'queryData' ? IBuilderQuery : IBuilderFormula; return currentQuery as T extends 'queryData' ? IBuilderQuery : IBuilderFormula;
}; };
const addListLabels = ( const addLabels = (
query: IBuilderQuery | IBuilderFormula, query: IBuilderQuery | IBuilderFormula,
label: ListItemKey,
dynamicColumns: DynamicColumns,
): void => {
if (isValueExist('dataIndex', label, dynamicColumns)) return;
const fieldObj: DynamicColumn = {
query,
field: 'label',
dataIndex: label as string,
title: label as string,
data: [],
type: 'field',
// sortable: isNumber,
};
dynamicColumns.push(fieldObj);
};
const addSeriaLabels = (
label: string, label: string,
dynamicColumns: DynamicColumns, dynamicColumns: DynamicColumns,
query: IBuilderQuery | IBuilderFormula,
): void => { ): void => {
if (isValueExist('dataIndex', label, dynamicColumns)) return; if (isValueExist('dataIndex', label, dynamicColumns)) return;
// const labelValue = labels[label];
// const isNumber = !Number.isNaN(parseFloat(String(labelValue)));
const fieldObj: DynamicColumn = { const fieldObj: DynamicColumn = {
query, query,
field: label as string, field: label as string,
@ -129,7 +104,6 @@ const addSeriaLabels = (
title: label, title: label,
data: [], data: [],
type: 'field', type: 'field',
// sortable: isNumber,
}; };
dynamicColumns.push(fieldObj); dynamicColumns.push(fieldObj);
@ -155,7 +129,6 @@ const addOperatorFormulaColumns = (
title: customLabel || formulaLabel, title: customLabel || formulaLabel,
data: [], data: [],
type: 'formula', type: 'formula',
// sortable: isNumber,
}; };
dynamicColumns.push(formulaColumn); dynamicColumns.push(formulaColumn);
@ -181,7 +154,6 @@ const addOperatorFormulaColumns = (
title: customLabel || operatorLabel, title: customLabel || operatorLabel,
data: [], data: [],
type: 'operator', type: 'operator',
// sortable: isNumber,
}; };
dynamicColumns.push(operatorColumn); dynamicColumns.push(operatorColumn);
@ -224,7 +196,7 @@ const getDynamicColumns: GetDynamicColumns = (queryTableData, query) => {
if (list) { if (list) {
list.forEach((listItem) => { list.forEach((listItem) => {
Object.keys(listItem.data).forEach((label) => { Object.keys(listItem.data).forEach((label) => {
addListLabels(currentStagedQuery, label as ListItemKey, dynamicColumns); addLabels(currentStagedQuery, label, dynamicColumns);
}); });
}); });
} }
@ -245,7 +217,7 @@ const getDynamicColumns: GetDynamicColumns = (queryTableData, query) => {
Object.keys(seria.labels).forEach((label) => { Object.keys(seria.labels).forEach((label) => {
if (label === currentQuery?.queryName) return; if (label === currentQuery?.queryName) return;
addSeriaLabels(label as string, dynamicColumns, currentStagedQuery); addLabels(currentStagedQuery, label, dynamicColumns);
}); });
}); });
} }
@ -486,10 +458,6 @@ const generateTableColumns = (
title: item.title, title: item.title,
width: QUERY_TABLE_CONFIG.width, width: QUERY_TABLE_CONFIG.width,
render: renderColumnCell && renderColumnCell[item.dataIndex], render: renderColumnCell && renderColumnCell[item.dataIndex],
// sorter: item.sortable
// ? (a: RowData, b: RowData): number =>
// (a[item.key] as number) - (b[item.key] as number)
// : false,
}; };
return [...acc, column]; return [...acc, column];
@ -504,10 +472,14 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
renderActionCell, renderActionCell,
renderColumnCell, renderColumnCell,
}) => { }) => {
const dynamicColumns = getDynamicColumns(queryTableData, query); const sortedQueryTableData = queryTableData.sort((a, b) =>
a.queryName < b.queryName ? -1 : 1,
);
const dynamicColumns = getDynamicColumns(sortedQueryTableData, query);
const { filledDynamicColumns, rowsLength } = fillColumnsData( const { filledDynamicColumns, rowsLength } = fillColumnsData(
queryTableData, sortedQueryTableData,
dynamicColumns, dynamicColumns,
); );

View File

@ -8,14 +8,30 @@ import { useNotifications } from 'hooks/useNotifications';
import { useEffect, useMemo } from 'react'; import { useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query'; import { useQuery } from 'react-query';
import { SuccessResponse } from 'types/api';
import { Pipeline } from 'types/api/pipeline/def'; import { Pipeline } from 'types/api/pipeline/def';
const pipelineRefetchInterval = (
pipelineResponse: SuccessResponse<Pipeline> | undefined,
): number | false => {
// Refetch pipeline data periodically if deployment of
// its latest changes is not complete yet.
const latestVersion = pipelineResponse?.payload?.history?.[0];
const isLatestDeploymentFinished = ['DEPLOYED', 'FAILED'].includes(
latestVersion?.deployStatus || '',
);
if (latestVersion && !isLatestDeploymentFinished) {
return 3000;
}
return false;
};
function Pipelines(): JSX.Element { function Pipelines(): JSX.Element {
const { t } = useTranslation('common'); const { t } = useTranslation('common');
const { notifications } = useNotifications(); const { notifications } = useNotifications();
const { const {
isLoading, isLoading,
data: piplineData, data: pipelineData,
isError, isError,
refetch: refetchPipelineLists, refetch: refetchPipelineLists,
} = useQuery(['version', 'latest', 'pipeline'], { } = useQuery(['version', 'latest', 'pipeline'], {
@ -23,6 +39,7 @@ function Pipelines(): JSX.Element {
getPipeline({ getPipeline({
version: 'latest', version: 'latest',
}), }),
refetchInterval: pipelineRefetchInterval,
}); });
const tabItems: TabsProps['items'] = useMemo( const tabItems: TabsProps['items'] = useMemo(
@ -33,26 +50,28 @@ function Pipelines(): JSX.Element {
children: ( children: (
<PipelinePage <PipelinePage
refetchPipelineLists={refetchPipelineLists} refetchPipelineLists={refetchPipelineLists}
piplineData={piplineData?.payload as Pipeline} pipelineData={pipelineData?.payload as Pipeline}
/> />
), ),
}, },
{ {
key: 'change-history', key: 'change-history',
label: `Change History`, label: `Change History`,
children: <ChangeHistory piplineData={piplineData?.payload as Pipeline} />, children: (
<ChangeHistory pipelineData={pipelineData?.payload as Pipeline} />
),
}, },
], ],
[piplineData?.payload, refetchPipelineLists], [pipelineData?.payload, refetchPipelineLists],
); );
useEffect(() => { useEffect(() => {
if (piplineData?.error && isError) { if (pipelineData?.error && isError) {
notifications.error({ notifications.error({
message: piplineData?.error || t('something_went_wrong'), message: pipelineData?.error || t('something_went_wrong'),
}); });
} }
}, [isError, notifications, piplineData?.error, t]); }, [isError, notifications, pipelineData?.error, t]);
if (isLoading) { if (isLoading) {
return <Spinner height="75vh" tip="Loading Pipelines..." />; return <Spinner height="75vh" tip="Loading Pipelines..." />;

View File

@ -0,0 +1,124 @@
import { apiV3 } from 'api/apiV1';
import { ENVIRONMENT } from 'constants/env';
import { EventListener, EventSourcePolyfill } from 'event-source-polyfill';
import {
createContext,
PropsWithChildren,
useCallback,
useContext,
useMemo,
useRef,
useState,
} from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppReducer from 'types/reducer/app';
interface IEventSourceContext {
eventSourceInstance: EventSourcePolyfill | null;
isConnectionOpen: boolean;
isConnectionLoading: boolean;
isConnectionError: string;
handleStartOpenConnection: (url?: string) => void;
handleCloseConnection: () => void;
}
const EventSourceContext = createContext<IEventSourceContext>({
eventSourceInstance: null,
isConnectionOpen: false,
isConnectionLoading: false,
isConnectionError: '',
handleStartOpenConnection: () => {},
handleCloseConnection: () => {},
});
export function EventSourceProvider({
children,
}: PropsWithChildren): JSX.Element {
const [isConnectionOpen, setIsConnectionOpen] = useState<boolean>(false);
const [isConnectionLoading, setIsConnectionLoading] = useState<boolean>(false);
const [isConnectionError, setIsConnectionError] = useState<string>('');
const { user } = useSelector<AppState, AppReducer>((state) => state.app);
const eventSourceRef = useRef<EventSourcePolyfill | null>(null);
const handleCloseConnection = useCallback(() => {
if (!eventSourceRef.current) return;
eventSourceRef.current.close();
setIsConnectionOpen(false);
setIsConnectionLoading(false);
}, []);
const handleOpenConnection: EventListener = useCallback(() => {
setIsConnectionLoading(false);
setIsConnectionOpen(true);
}, []);
const handleErrorConnection: EventListener = useCallback(() => {
if (!eventSourceRef.current) return;
handleCloseConnection();
eventSourceRef.current.removeEventListener('error', handleErrorConnection);
eventSourceRef.current.removeEventListener('open', handleOpenConnection);
}, [handleCloseConnection, handleOpenConnection]);
const handleStartOpenConnection = useCallback(
(url?: string) => {
const eventSourceUrl = url || `${ENVIRONMENT.baseURL}${apiV3}logs/livetail`;
const TIMEOUT_IN_MS = 10 * 60 * 1000;
eventSourceRef.current = new EventSourcePolyfill(eventSourceUrl, {
headers: {
Authorization: `Bearer ${user?.accessJwt}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
});
setIsConnectionLoading(true);
setIsConnectionError('');
eventSourceRef.current.addEventListener('error', handleErrorConnection);
eventSourceRef.current.addEventListener('open', handleOpenConnection);
},
[handleErrorConnection, handleOpenConnection, user?.accessJwt],
);
const contextValue = useMemo(
() => ({
eventSourceInstance: eventSourceRef.current,
isConnectionError,
isConnectionLoading,
isConnectionOpen,
handleStartOpenConnection,
handleCloseConnection,
}),
[
isConnectionError,
isConnectionLoading,
isConnectionOpen,
handleStartOpenConnection,
handleCloseConnection,
],
);
return (
<EventSourceContext.Provider value={contextValue}>
{children}
</EventSourceContext.Provider>
);
}
export const useEventSource = (): IEventSourceContext => {
const context = useContext(EventSourceContext);
if (!context) {
throw new Error('Should be used inside the context');
}
return context;
};
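
Putting the provider and hook together: a hedged usage sketch in which the `LiveTailControls` component and `App` wrapper are illustrative, while `EventSourceProvider`, `useEventSource`, and the `providers/EventSource` import path come from the diff:

```tsx
import { EventSourceProvider, useEventSource } from 'providers/EventSource';

// Illustrative consumer: start/stop the live-tail connection exposed by the
// provider. The default URL (v3 logs livetail) is built inside
// handleStartOpenConnection when no argument is passed.
function LiveTailControls(): JSX.Element {
	const {
		isConnectionOpen,
		isConnectionLoading,
		handleStartOpenConnection,
		handleCloseConnection,
	} = useEventSource();

	return (
		<div>
			<button
				type="button"
				disabled={isConnectionOpen || isConnectionLoading}
				onClick={(): void => handleStartOpenConnection()}
			>
				Start live tail
			</button>
			<button
				type="button"
				disabled={!isConnectionOpen}
				onClick={handleCloseConnection}
			>
				Stop
			</button>
		</div>
	);
}

// The provider must wrap every consumer of useEventSource / useEventSourceEvent.
export function App(): JSX.Element {
	return (
		<EventSourceProvider>
			<LiveTailControls />
		</EventSourceProvider>
	);
}
```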

View File

@ -6,6 +6,7 @@ import (
"time" "time"
"github.com/ClickHouse/clickhouse-go/v2" "github.com/ClickHouse/clickhouse-go/v2"
"go.uber.org/zap"
) )
type Encoding string type Encoding string
@ -58,6 +59,9 @@ type namespaceConfig struct {
namespace string namespace string
Enabled bool Enabled bool
Datasource string Datasource string
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
TraceDB string TraceDB string
OperationsTable string OperationsTable string
IndexTable string IndexTable string
@ -88,8 +92,14 @@ type Connector func(cfg *namespaceConfig) (clickhouse.Conn, error)
func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) { func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) {
ctx := context.Background() ctx := context.Background()
dsnURL, err := url.Parse(cfg.Datasource) dsnURL, err := url.Parse(cfg.Datasource)
if err != nil {
return nil, err
}
options := &clickhouse.Options{ options := &clickhouse.Options{
Addr: []string{dsnURL.Host}, Addr: []string{dsnURL.Host},
MaxOpenConns: cfg.MaxOpenConns,
MaxIdleConns: cfg.MaxIdleConns,
DialTimeout: cfg.DialTimeout,
} }
if dsnURL.Query().Get("username") != "" { if dsnURL.Query().Get("username") != "" {
auth := clickhouse.Auth{ auth := clickhouse.Auth{
@ -98,6 +108,7 @@ func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) {
} }
options.Auth = auth options.Auth = auth
} }
zap.S().Infof("Connecting to Clickhouse at %s, MaxIdleConns: %d, MaxOpenConns: %d, DialTimeout: %s", dsnURL.Host, options.MaxIdleConns, options.MaxOpenConns, options.DialTimeout)
db, err := clickhouse.Open(options) db, err := clickhouse.Open(options)
if err != nil { if err != nil {
return nil, err return nil, err
@ -118,7 +129,14 @@ type Options struct {
} }
// NewOptions creates a new Options struct. // NewOptions creates a new Options struct.
func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...string) *Options { func NewOptions(
datasource string,
maxIdleConns int,
maxOpenConns int,
dialTimeout time.Duration,
primaryNamespace string,
otherNamespaces ...string,
) *Options {
if datasource == "" { if datasource == "" {
datasource = defaultDatasource datasource = defaultDatasource
@ -129,6 +147,9 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
namespace: primaryNamespace, namespace: primaryNamespace,
Enabled: true, Enabled: true,
Datasource: datasource, Datasource: datasource,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
TraceDB: defaultTraceDB, TraceDB: defaultTraceDB,
OperationsTable: defaultOperationsTable, OperationsTable: defaultOperationsTable,
IndexTable: defaultIndexTable, IndexTable: defaultIndexTable,

View File

@ -119,10 +119,17 @@ type ClickHouseReader struct {
} }
// NewTraceReader returns a TraceReader for the database // NewTraceReader returns a TraceReader for the database
func NewReader(localDB *sqlx.DB, configFile string, featureFlag interfaces.FeatureLookup) *ClickHouseReader { func NewReader(
localDB *sqlx.DB,
configFile string,
featureFlag interfaces.FeatureLookup,
maxIdleConns int,
maxOpenConns int,
dialTimeout time.Duration,
) *ClickHouseReader {
datasource := os.Getenv("ClickHouseUrl") datasource := os.Getenv("ClickHouseUrl")
options := NewOptions(datasource, primaryNamespace, archiveNamespace) options := NewOptions(datasource, maxIdleConns, maxOpenConns, dialTimeout, primaryNamespace, archiveNamespace)
db, err := initialize(options) db, err := initialize(options)
if err != nil { if err != nil {

View File

@ -77,6 +77,10 @@ type APIHandler struct {
preferDelta bool preferDelta bool
preferSpanMetrics bool preferSpanMetrics bool
maxIdleConns int
maxOpenConns int
dialTimeout time.Duration
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
// SetupCompleted indicates if SigNoz is ready for general use. // SetupCompleted indicates if SigNoz is ready for general use.
@ -94,6 +98,11 @@ type APIHandlerOpts struct {
PerferDelta bool PerferDelta bool
PreferSpanMetrics bool PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
// dao layer to perform crud on app objects like dashboard, alerts etc // dao layer to perform crud on app objects like dashboard, alerts etc
AppDao dao.ModelDao AppDao dao.ModelDao
@ -121,6 +130,9 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
skipConfig: opts.SkipConfig, skipConfig: opts.SkipConfig,
preferDelta: opts.PerferDelta, preferDelta: opts.PerferDelta,
preferSpanMetrics: opts.PreferSpanMetrics, preferSpanMetrics: opts.PreferSpanMetrics,
maxIdleConns: opts.MaxIdleConns,
maxOpenConns: opts.MaxOpenConns,
dialTimeout: opts.DialTimeout,
alertManager: alertManager, alertManager: alertManager,
ruleManager: opts.RuleManager, ruleManager: opts.RuleManager,
featureFlags: opts.FeatureFlags, featureFlags: opts.FeatureFlags,

View File

@ -51,6 +51,9 @@ type ServerOptions struct {
RuleRepoURL string RuleRepoURL string
PreferDelta bool PreferDelta bool
PreferSpanMetrics bool PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
} }
// Server runs HTTP, Mux and a grpc server // Server runs HTTP, Mux and a grpc server
@ -103,7 +106,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
storage := os.Getenv("STORAGE") storage := os.Getenv("STORAGE")
if storage == "clickhouse" { if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...") zap.S().Info("Using ClickHouse as datastore ...")
clickhouseReader := clickhouseReader.NewReader(localDB, serverOptions.PromConfigPath, fm) clickhouseReader := clickhouseReader.NewReader(
localDB,
serverOptions.PromConfigPath,
fm,
serverOptions.MaxIdleConns,
serverOptions.MaxOpenConns,
serverOptions.DialTimeout,
)
go clickhouseReader.Start(readerReady) go clickhouseReader.Start(readerReady)
reader = clickhouseReader reader = clickhouseReader
} else { } else {
@ -136,6 +146,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
SkipConfig: skipConfig, SkipConfig: skipConfig,
PerferDelta: serverOptions.PreferDelta, PerferDelta: serverOptions.PreferDelta,
PreferSpanMetrics: serverOptions.PreferSpanMetrics, PreferSpanMetrics: serverOptions.PreferSpanMetrics,
MaxIdleConns: serverOptions.MaxIdleConns,
MaxOpenConns: serverOptions.MaxOpenConns,
DialTimeout: serverOptions.DialTimeout,
AppDao: dao.DB(), AppDao: dao.DB(),
RuleManager: rm, RuleManager: rm,
FeatureFlags: fm, FeatureFlags: fm,

View File

@ -153,6 +153,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet, keys map[string]v3.AttributeKey) (
columnName := getColumnName(item.Key, keys) columnName := getColumnName(item.Key, keys)
var fmtVal string var fmtVal string
key := enrichKeyWithMetadata(item.Key, keys) key := enrichKeyWithMetadata(item.Key, keys)
item.Operator = v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists { if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists {
var err error var err error
val, err = utils.ValidateAndCastValue(val, key.DataType) val, err = utils.ValidateAndCastValue(val, key.DataType)

View File

@ -6,6 +6,7 @@ import (
"os" "os"
"os/signal" "os/signal"
"syscall" "syscall"
"time"
"go.signoz.io/signoz/pkg/query-service/app" "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/auth" "go.signoz.io/signoz/pkg/query-service/auth"
@ -37,11 +38,18 @@ func main() {
var preferDelta bool var preferDelta bool
var preferSpanMetrics bool var preferSpanMetrics bool
var maxIdleConns int
var maxOpenConns int
var dialTimeout time.Duration
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)") flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)") flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)") flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
flag.Parse() flag.Parse()
@ -61,6 +69,9 @@ func main() {
PrivateHostPort: constants.PrivateHostPort, PrivateHostPort: constants.PrivateHostPort,
DisableRules: disableRules, DisableRules: disableRules,
RuleRepoURL: ruleRepoURL, RuleRepoURL: ruleRepoURL,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
} }
// Read the jwt secret key // Read the jwt secret key

View File

@ -1,6 +1,7 @@
version: "2.4" version: "2.4"
x-clickhouse-defaults: &clickhouse-defaults x-clickhouse-defaults:
&clickhouse-defaults
restart: on-failure restart: on-failure
image: clickhouse/clickhouse-server:22.8.8-alpine image: clickhouse/clickhouse-server:22.8.8-alpine
tty: true tty: true
@ -14,7 +15,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3" max-file: "3"
healthcheck: healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"] test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8123/ping"
]
interval: 30s interval: 30s
timeout: 5s timeout: 5s
retries: 3 retries: 3
@ -24,7 +32,8 @@ x-clickhouse-defaults: &clickhouse-defaults
soft: 262144 soft: 262144
hard: 262144 hard: 262144
x-clickhouse-depends: &clickhouse-depends x-clickhouse-depends:
&clickhouse-depends
depends_on: depends_on:
clickhouse: clickhouse:
condition: service_healthy condition: service_healthy
@ -36,6 +45,7 @@ x-clickhouse-depends: &clickhouse-depends
services: services:
zookeeper-1: zookeeper-1:
image: bitnami/zookeeper:3.7.1 image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
user: root user: root
ports: ports:
- "2181:2181" - "2181:2181"
@ -52,6 +62,7 @@ services:
# zookeeper-2: # zookeeper-2:
# image: bitnami/zookeeper:3.7.0 # image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
# user: root # user: root
# ports: # ports:
# - "2182:2181" # - "2182:2181"
@ -67,6 +78,7 @@ services:
# zookeeper-3: # zookeeper-3:
# image: bitnami/zookeeper:3.7.0 # image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-3
# user: root # user: root
# ports: # ports:
# - "2183:2181" # - "2183:2181"
@ -82,7 +94,7 @@ services:
clickhouse: clickhouse:
<<: *clickhouse-defaults <<: *clickhouse-defaults
container_name: clickhouse container_name: signoz-clickhouse
hostname: clickhouse hostname: clickhouse
ports: ports:
- "9000:9000" - "9000:9000"
@ -95,10 +107,9 @@ services:
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/ - ./data/clickhouse/:/var/lib/clickhouse/
# clickhouse-2: # clickhouse-2:
# <<: *clickhouse-defaults # <<: *clickhouse-defaults
# container_name: clickhouse-2 # container_name: signoz-clickhouse-2
# hostname: clickhouse-2 # hostname: clickhouse-2
# ports: # ports:
# - "9001:9000" # - "9001:9000"
@ -113,7 +124,7 @@ services:
# clickhouse-3: # clickhouse-3:
# <<: *clickhouse-defaults # <<: *clickhouse-defaults
# container_name: clickhouse-3 # container_name: signoz-clickhouse-3
# hostname: clickhouse-3 # hostname: clickhouse-3
# ports: # ports:
# - "9002:9000" # - "9002:9000"
@ -128,6 +139,7 @@ services:
alertmanager: alertmanager:
image: signoz/alertmanager:0.23.1 image: signoz/alertmanager:0.23.1
container_name: signoz-alertmanager
volumes: volumes:
- ./data/alertmanager:/data - ./data/alertmanager:/data
depends_on: depends_on:
@ -138,12 +150,12 @@ services:
- --queryService.url=http://query-service:8085 - --queryService.url=http://query-service:8085
- --storage.path=/data - --storage.path=/data
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service: query-service:
image: signoz/query-service:latest image: signoz/query-service:latest
container_name: query-service container_name: signoz-query-service
command: ["-config=/root/config/prometheus.yml"] command: [ "-config=/root/config/prometheus.yml" ]
# ports: # ports:
# - "6060:6060" # pprof port # - "6060:6060" # pprof port
# - "8080:8080" # query-service port # - "8080:8080" # query-service port
@ -162,15 +174,27 @@ services:
- DEPLOYMENT_TYPE=docker-standalone-amd - DEPLOYMENT_TYPE=docker-standalone-amd
restart: on-failure restart: on-failure
healthcheck: healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"] test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s interval: 30s
timeout: 5s timeout: 5s
retries: 3 retries: 3
<<: *clickhouse-depends <<: *clickhouse-depends
otel-collector: otel-collector:
image: signoz/signoz-otel-collector:0.79.4 image: signoz/signoz-otel-collector:0.79.5
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] container_name: signoz-otel-collector
command:
[
"--config=/etc/otel-collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs user: root # required for reading docker container logs
volumes: volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@ -181,8 +205,8 @@ services:
- LOW_CARDINAL_EXCEPTION_GROUPING=false - LOW_CARDINAL_EXCEPTION_GROUPING=false
ports: ports:
# - "1777:1777" # pprof extension # - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver - "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver - "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics # - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent # - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port # - "9411:9411" # Zipkin port
@ -195,8 +219,13 @@ services:
<<: *clickhouse-depends <<: *clickhouse-depends
otel-collector-metrics: otel-collector-metrics:
image: signoz/signoz-otel-collector:0.79.4 image: signoz/signoz-otel-collector:0.79.5
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] container_name: signoz-otel-collector-metrics
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
volumes: volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
# ports: # ports:
@ -207,16 +236,27 @@ services:
restart: on-failure restart: on-failure
<<: *clickhouse-depends <<: *clickhouse-depends
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
restart: on-failure
hotrod: hotrod:
image: jaegertracing/example-hotrod:1.30 image: jaegertracing/example-hotrod:1.30
container_name: hotrod container_name: hotrod
logging: logging:
options: options:
max-size: 50m max-size: 50m
max-file: "3" max-file: "3"
command: ["all"] command: [ "all" ]
environment: environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
load-hotrod: load-hotrod:
image: "signoz/locust:1.2.3" image: "signoz/locust:1.2.3"

View File

@ -1,29 +1,21 @@
receivers: receivers:
filelog/dockercontainers: tcplog/docker:
include: [ "/var/lib/docker/containers/*/*.log" ] listen_address: "0.0.0.0:2255"
start_at: end
include_file_path: true
include_file_name: false
operators: operators:
- type: json_parser - type: regex_parser
id: parser-docker regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
output: extract_metadata_from_filepath timestamp:
timestamp: parse_from: attributes.timestamp
parse_from: attributes.time layout: '%Y-%m-%dT%H:%M:%S.%LZ'
layout: '%Y-%m-%dT%H:%M:%S.%LZ' - type: move
- type: regex_parser from: attributes["body"]
id: extract_metadata_from_filepath to: body
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$' - type: remove
parse_from: attributes["log.file.path"] field: attributes.timestamp
output: parse_body # please remove names from below if you want to collect logs from them
- type: move - type: filter
id: parse_body id: signoz_logs_filter
from: attributes.log expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
to: body
output: time
- type: remove
id: time
field: attributes.time
opencensus: opencensus:
endpoint: 0.0.0.0:55678 endpoint: 0.0.0.0:55678
otlp/spanmetrics: otlp/spanmetrics:
@ -157,6 +149,6 @@ service:
receivers: [otlp/spanmetrics] receivers: [otlp/spanmetrics]
exporters: [prometheus] exporters: [prometheus]
logs: logs:
receivers: [otlp, filelog/dockercontainers] receivers: [otlp, tcplog/docker]
processors: [batch] processors: [batch]
exporters: [clickhouselogsexporter] exporters: [clickhouselogsexporter]