Mirror of https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz (synced 2025-08-14 13:25:54 +08:00)
Commit 6363c71442
@@ -1,6 +1,7 @@
version: "3.9"

x-clickhouse-defaults: &clickhouse-defaults
x-clickhouse-defaults:
&clickhouse-defaults
image: clickhouse/clickhouse-server:22.8.8-alpine
tty: true
deploy:
@@ -16,7 +17,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@@ -26,7 +34,8 @@ x-clickhouse-defaults: &clickhouse-defaults
soft: 262144
hard: 262144

x-clickhouse-depend: &clickhouse-depend
x-clickhouse-depend:
&clickhouse-depend
depends_on:
- clickhouse
# - clickhouse-2
@@ -137,8 +146,8 @@ services:
condition: on-failure

query-service:
image: signoz/query-service:0.25.3
command: ["-config=/root/config/prometheus.yml"]
image: signoz/query-service:0.25.4
command: [ "-config=/root/config/prometheus.yml" ]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@@ -156,7 +165,14 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
@@ -166,7 +182,7 @@ services:
<<: *clickhouse-depend

frontend:
image: signoz/frontend:0.25.3
image: signoz/frontend:0.25.4
deploy:
restart_policy:
condition: on-failure
@@ -179,8 +195,12 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

otel-collector:
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.79.5
command:
[
"--config=/etc/otel-collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -191,8 +211,8 @@ services:
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port
@@ -208,8 +228,12 @@ services:
<<: *clickhouse-depend

otel-collector-metrics:
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.79.5
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
# ports:
@@ -222,9 +246,22 @@ services:
condition: on-failure
<<: *clickhouse-depend

logspout:
image: "gliderlabs/logspout:v3.2.14"
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
deploy:
mode: global
restart_policy:
condition: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
command: ["all"]
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
logging:
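The hunks above only touch formatting, image tags, and the new logspout service in the Swarm stack file. A hedged usage sketch, not part of the commit, of how such a stack is typically brought up; the file path and the stack name "signoz" are assumptions (the stack name matters because the log filter further below matches the Swarm-style signoz_ container prefix):

# Sketch only: deploy/update the Swarm stack described by the compose file above.
# Path and stack name are assumptions, not taken from the commit.
docker swarm init   # once per manager node, if the node is not already part of a swarm
docker stack deploy -c deploy/docker-swarm/clickhouse-setup/docker-compose.yaml signoz
docker stack services signoz   # verify query-service, frontend, otel-collector, logspout are up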
@@ -1,29 +1,21 @@
receivers:
filelog/dockercontainers:
include: [ "/var/lib/docker/containers/*/*.log" ]
start_at: end
include_file_path: true
include_file_name: false
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: json_parser
id: parser-docker
output: extract_metadata_from_filepath
timestamp:
parse_from: attributes.time
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: regex_parser
id: extract_metadata_from_filepath
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
parse_from: attributes["log.file.path"]
output: parse_body
- type: move
id: parse_body
from: attributes.log
to: body
output: time
- type: remove
id: time
field: attributes.time
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
otlp/spanmetrics:
@@ -69,6 +61,40 @@ receivers:
job_name: otel-collector

processors:
logstransform/internal:
operators:
- type: trace_parser
if: '"trace_id" in attributes or "span_id" in attributes'
trace_id:
parse_from: attributes.trace_id
span_id:
parse_from: attributes.span_id
output: remove_trace_id
- type: trace_parser
if: '"traceId" in attributes or "spanId" in attributes'
trace_id:
parse_from: attributes.traceId
span_id:
parse_from: attributes.spanId
output: remove_traceId
- id: remove_traceId
type: remove
if: '"traceId" in attributes'
field: attributes.traceId
output: remove_spanId
- id: remove_spanId
type: remove
if: '"spanId" in attributes'
field: attributes.spanId
- id: remove_trace_id
type: remove
if: '"trace_id" in attributes'
field: attributes.trace_id
output: remove_span_id
- id: remove_span_id
type: remove
if: '"span_id" in attributes'
field: attributes.span_id
batch:
send_batch_size: 10000
send_batch_max_size: 11000
@@ -166,6 +192,6 @@ service:
receivers: [otlp/spanmetrics]
exporters: [prometheus]
logs:
receivers: [otlp, filelog/dockercontainers]
processors: [batch]
receivers: [otlp, tcplog/docker]
processors: [logstransform/internal, batch]
exporters: [clickhouselogsexporter]
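The change above swaps the filelog/dockercontainers receiver for a tcplog/docker receiver that is fed by the new logspout service (command: syslog+tcp://otel-collector:2255). A hedged sanity check, not part of the commit, assuming netcat is available on a host that can reach the collector and using a made-up container name so the line survives the signoz_ filter:

# Sketch only: push one syslog-style line at the tcplog/docker receiver on port 2255.
# The line is shaped to match the regex_parser above:
#   <pri>version timestamp container_id container_name pid - - body
echo '<14>1 2023-06-15T10:00:00.000Z 4f2d1a9c7b myapp 42 - - hello from myapp' | nc otel-collector 2255
# Records whose container_name matches ^signoz_(...) are dropped by the signoz_logs_filter
# operator, so a name like "myapp" should appear in the Logs view while SigNoz's own
# containers stay filtered out.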
@@ -3,7 +3,7 @@ version: "2.4"
services:
clickhouse:
image: clickhouse/clickhouse-server:22.8.8-alpine
container_name: clickhouse
container_name: signoz-clickhouse
# ports:
# - "9000:9000"
# - "8123:8123"
@@ -20,13 +20,20 @@ services:
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3

alertmanager:
container_name: alertmanager
container_name: signoz-alertmanager
image: signoz/alertmanager:0.23.1
volumes:
- ./data/alertmanager:/data
@@ -40,9 +47,13 @@ services:

# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: otel-collector
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.79.5
command:
[
"--config=/etc/otel-collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
# user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -50,8 +61,8 @@ services:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port
@@ -66,9 +77,13 @@ services:
condition: service_healthy

otel-collector-metrics:
container_name: otel-collector-metrics
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
container_name: signoz-otel-collector-metrics
image: signoz/signoz-otel-collector:0.79.5
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
# ports:
@@ -81,6 +96,17 @@ services:
clickhouse:
condition: service_healthy

logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
@@ -88,7 +114,7 @@ services:
options:
max-size: 50m
max-file: "3"
command: ["all"]
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
@@ -9,7 +9,7 @@ services:
args:
LDFLAGS: ""
TARGETPLATFORM: "${LOCAL_GOOS}/${LOCAL_GOARCH}"
container_name: query-service
container_name: signoz-query-service
environment:
- ClickHouseUrl=tcp://clickhouse:9000
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
@@ -22,13 +22,20 @@ services:
- ./prometheus.yml:/root/config/prometheus.yml
- ../dashboards:/root/config/dashboards
- ./data/signoz/:/var/lib/signoz/
command: ["-config=/root/config/prometheus.yml"]
command: [ "-config=/root/config/prometheus.yml" ]
ports:
- "6060:6060"
- "8080:8080"
restart: on-failure
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
@@ -43,7 +50,7 @@ services:
args:
TARGETOS: "${LOCAL_GOOS}"
TARGETPLATFORM: "${LOCAL_GOARCH}"
container_name: frontend
container_name: signoz-frontend
environment:
- FRONTEND_API_ENDPOINT=http://query-service:8080
restart: on-failure
@@ -1,6 +1,7 @@
version: "2.4"

x-clickhouse-defaults: &clickhouse-defaults
x-clickhouse-defaults:
&clickhouse-defaults
restart: on-failure
image: clickhouse/clickhouse-server:22.8.8-alpine
tty: true
@@ -14,7 +15,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@@ -24,7 +32,8 @@ x-clickhouse-defaults: &clickhouse-defaults
soft: 262144
hard: 262144

x-clickhouse-depend: &clickhouse-depend
x-clickhouse-depend:
&clickhouse-depend
depends_on:
clickhouse:
condition: service_healthy
@@ -37,7 +46,7 @@ services:

zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: zookeeper-1
container_name: signoz-zookeeper-1
hostname: zookeeper-1
user: root
ports:
@@ -54,7 +63,7 @@ services:

# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-2
# container_name: signoz-zookeeper-2
# hostname: zookeeper-2
# user: root
# ports:
@@ -71,7 +80,7 @@ services:

# zookeeper-3:
# image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-3
# container_name: signoz-zookeeper-3
# hostname: zookeeper-3
# user: root
# ports:
@@ -88,7 +97,7 @@ services:

clickhouse:
<<: *clickhouse-defaults
container_name: clickhouse
container_name: signoz-clickhouse
hostname: clickhouse
ports:
- "9000:9000"
@@ -105,7 +114,7 @@ services:

# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: clickhouse-2
# container_name: signoz-clickhouse-2
# hostname: clickhouse-2
# ports:
# - "9001:9000"
@@ -120,10 +129,10 @@ services:
# - ./data/clickhouse-2/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/

# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: clickhouse-3
# container_name: signoz-clickhouse-3
# hostname: clickhouse-3
# ports:
# - "9002:9000"
@@ -140,6 +149,7 @@ services:

alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.1}
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
depends_on:
@@ -153,9 +163,9 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

query-service:
image: signoz/query-service:${DOCKER_TAG:-0.25.3}
container_name: query-service
command: ["-config=/root/config/prometheus.yml"]
image: signoz/query-service:${DOCKER_TAG:-0.25.4}
container_name: signoz-query-service
command: [ "-config=/root/config/prometheus.yml" ]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@@ -174,15 +184,22 @@ services:
- DEPLOYMENT_TYPE=docker-standalone-amd
restart: on-failure
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
<<: *clickhouse-depend

frontend:
image: signoz/frontend:${DOCKER_TAG:-0.25.3}
container_name: frontend
image: signoz/frontend:${DOCKER_TAG:-0.25.4}
container_name: signoz-frontend
restart: on-failure
depends_on:
- alertmanager
@@ -193,8 +210,13 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.4}
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
container_name: signoz-otel-collector
command:
[
"--config=/etc/otel-collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -205,8 +227,8 @@ services:
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port
@@ -219,8 +241,13 @@ services:
<<: *clickhouse-depend

otel-collector-metrics:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.4}
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.5}
container_name: signoz-otel-collector-metrics
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
# ports:
@@ -231,6 +258,17 @@ services:
restart: on-failure
<<: *clickhouse-depend

logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
@@ -238,7 +276,7 @@ services:
options:
max-size: 50m
max-file: "3"
command: ["all"]
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
@@ -1,29 +1,21 @@
receivers:
filelog/dockercontainers:
include: [ "/var/lib/docker/containers/*/*.log" ]
start_at: end
include_file_path: true
include_file_name: false
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: json_parser
id: parser-docker
output: extract_metadata_from_filepath
timestamp:
parse_from: attributes.time
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: regex_parser
id: extract_metadata_from_filepath
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
parse_from: attributes["log.file.path"]
output: parse_body
- type: move
id: parse_body
from: attributes.log
to: body
output: time
- type: remove
id: time
field: attributes.time
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
otlp/spanmetrics:
@@ -205,6 +197,6 @@ service:
receivers: [otlp/spanmetrics]
exporters: [prometheus]
logs:
receivers: [otlp, filelog/dockercontainers]
receivers: [otlp, tcplog/docker]
processors: [logstransform/internal, batch]
exporters: [clickhouselogsexporter]
@@ -36,9 +36,9 @@ is_mac() {
[[ $OSTYPE == darwin* ]]
}

# is_arm64(){
# [[ `uname -m` == 'arm64' ]]
# }
is_arm64(){
[[ `uname -m` == 'arm64' || `uname -m` == 'aarch64' ]]
}

check_os() {
if is_mac; then
@@ -48,6 +48,16 @@ check_os() {
return
fi

if is_arm64; then
arch="arm64"
arch_official="aarch64"
else
arch="amd64"
arch_official="x86_64"
fi

platform=$(uname -s | tr '[:upper:]' '[:lower:]')

os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')"

case "$os_name" in
@@ -143,7 +153,7 @@ install_docker() {
$apt_cmd install software-properties-common gnupg-agent
curl -fsSL "https://download.docker.com/linux/$os/gpg" | $sudo_cmd apt-key add -
$sudo_cmd add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
"deb [arch=$arch] https://download.docker.com/linux/$os $(lsb_release -cs) stable"
$apt_cmd update
echo "Installing docker"
$apt_cmd install docker-ce docker-ce-cli containerd.io
@@ -178,12 +188,20 @@ install_docker() {

}

compose_version () {
local compose_version
compose_version="$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)"
echo "${compose_version:-v2.18.1}"
}

install_docker_compose() {
if [[ $package_manager == "apt-get" || $package_manager == "zypper" || $package_manager == "yum" ]]; then
if [[ ! -f /usr/bin/docker-compose ]];then
echo "++++++++++++++++++++++++"
echo "Installing docker-compose"
$sudo_cmd curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
compose_url="https://github.com/docker/compose/releases/download/$(compose_version)/docker-compose-$platform-$arch_official"
echo "Downloading docker-compose from $compose_url"
$sudo_cmd curl -L "$compose_url" -o /usr/local/bin/docker-compose
$sudo_cmd chmod +x /usr/local/bin/docker-compose
$sudo_cmd ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
echo "docker-compose installed!"
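The install script now derives arch and arch_official in check_os() and resolves the latest docker-compose tag instead of pinning 1.26.0. A hedged trace, not part of the commit, of what the new download URL expands to on a 64-bit Intel Linux host, with v2.18.1 as the fallback when the GitHub API call returns nothing:

# Sketch only: how the pieces assembled above expand on an x86_64 Linux box.
platform=linux          # from: uname -s | tr '[:upper:]' '[:lower:]'
arch_official=x86_64    # from the new is_arm64 branch in check_os()
compose_url="https://github.com/docker/compose/releases/download/$(compose_version)/docker-compose-$platform-$arch_official"
# e.g. https://github.com/docker/compose/releases/download/v2.18.1/docker-compose-linux-x86_64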
@@ -2,6 +2,7 @@ package api

import (
"net/http"
"time"

"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/dao"
@@ -20,6 +21,9 @@ type APIHandlerOptions struct {
SkipConfig *basemodel.SkipConfig
PreferDelta bool
PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
AppDao dao.ModelDao
RulesManager *rules.Manager
FeatureFlags baseint.FeatureLookup
@@ -40,6 +44,9 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
SkipConfig: opts.SkipConfig,
PerferDelta: opts.PreferDelta,
PreferSpanMetrics: opts.PreferSpanMetrics,
MaxIdleConns: opts.MaxIdleConns,
MaxOpenConns: opts.MaxOpenConns,
DialTimeout: opts.DialTimeout,
AppDao: opts.AppDao,
RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
@@ -1,6 +1,8 @@
package db

import (
"time"

"github.com/ClickHouse/clickhouse-go/v2"

"github.com/jmoiron/sqlx"
@@ -15,8 +17,15 @@ type ClickhouseReader struct {
*basechr.ClickHouseReader
}

func NewDataConnector(localDB *sqlx.DB, promConfigPath string, lm interfaces.FeatureLookup) *ClickhouseReader {
ch := basechr.NewReader(localDB, promConfigPath, lm)
func NewDataConnector(
localDB *sqlx.DB,
promConfigPath string,
lm interfaces.FeatureLookup,
maxIdleConns int,
maxOpenConns int,
dialTimeout time.Duration,
) *ClickhouseReader {
ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout)
return &ClickhouseReader{
conn: ch.GetConn(),
appdb: localDB,
@@ -59,6 +59,9 @@ type ServerOptions struct {
RuleRepoURL string
PreferDelta bool
PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
}

// Server runs HTTP api service
@@ -122,7 +125,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath, lm)
qb := db.NewDataConnector(
localDB,
serverOptions.PromConfigPath,
lm,
serverOptions.MaxIdleConns,
serverOptions.MaxOpenConns,
serverOptions.DialTimeout,
)
go qb.Start(readerReady)
reader = qb
} else {
@@ -184,6 +194,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
SkipConfig: skipConfig,
PreferDelta: serverOptions.PreferDelta,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
MaxIdleConns: serverOptions.MaxIdleConns,
MaxOpenConns: serverOptions.MaxOpenConns,
DialTimeout: serverOptions.DialTimeout,
AppDao: modelDao,
RulesManager: rm,
FeatureFlags: lm,
@@ -86,11 +86,18 @@ func main() {
var preferDelta bool
var preferSpanMetrics bool

var maxIdleConns int
var maxOpenConns int
var dialTimeout time.Duration

flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
flag.Parse()
@@ -111,6 +118,9 @@ func main() {
PrivateHostPort: baseconst.PrivateHostPort,
DisableRules: disableRules,
RuleRepoURL: ruleRepoURL,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
}

// Read the jwt secret key
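The three new flags above are wired into ServerOptions and forwarded down to the ClickHouse reader. A hedged invocation sketch, not part of the commit (the binary path is an assumption; the values are the defaults shown in the flag definitions):

# Sketch only: overriding the new ClickHouse connection-pool flags on query-service.
./query-service -config=/root/config/prometheus.yml \
    -max-idle-conns=50 \
    -max-open-conns=100 \
    -dial-timeout=5s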
@@ -10,6 +10,7 @@ export type LogsTableViewProps = {
logs: ILog[];
fields: IField[];
linesPerRow: number;
onClickExpand?: (log: ILog) => void;
};

export type UseTableViewResult = {
@@ -4,7 +4,7 @@ import {
QuestionCircleFilled,
QuestionCircleOutlined,
} from '@ant-design/icons';
import { Dropdown, Space } from 'antd';
import { Space } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
@@ -13,6 +13,7 @@ import { ConfigProps } from 'types/api/dynamicConfigs/getDynamicConfigs';
import AppReducer from 'types/reducer/app';

import HelpToolTip from './Config';
import { ConfigDropdown } from './styles';

function DynamicConfigDropdown({
frontendId,
@@ -53,19 +54,17 @@ function DynamicConfigDropdown({
const DropDownIcon = isHelpDropDownOpen ? CaretUpFilled : CaretDownFilled;

return (
<Dropdown
<ConfigDropdown
onOpenChange={onToggleHandler}
trigger={['click']}
menu={menu}
open={isHelpDropDownOpen}
>
<Space align="center">
<Icon
style={{ fontSize: 26, color: 'white', paddingTop: 26, cursor: 'pointer' }}
/>
<Icon style={{ fontSize: 26, color: 'white', paddingTop: 26 }} />
<DropDownIcon style={{ color: 'white' }} />
</Space>
</Dropdown>
</ConfigDropdown>
);
}
frontend/src/container/ConfigDropdown/styles.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
import { Dropdown } from 'antd';
import styled from 'styled-components';

export const ConfigDropdown = styled(Dropdown)`
cursor: pointer;
`;
@@ -53,9 +53,7 @@ function WidgetGraphComponent({
setLayout,
onDragSelect,
onClickHandler,
allowClone = true,
allowDelete = true,
allowEdit = true,
headerMenuList,
}: WidgetGraphComponentProps): JSX.Element {
const [deleteModal, setDeleteModal] = useState(false);
const [modal, setModal] = useState<boolean>(false);
@@ -281,9 +279,7 @@ function WidgetGraphComponent({
onClone={onCloneHandler}
queryResponse={queryResponse}
errorMessage={errorMessage}
allowClone={allowClone}
allowDelete={allowDelete}
allowEdit={allowEdit}
headerMenuList={headerMenuList}
/>
</div>
)}
@@ -297,7 +293,7 @@ function WidgetGraphComponent({
yAxisUnit={yAxisUnit}
onClickHandler={onClickHandler}
onDragSelect={onDragSelect}
panelData={[]}
panelData={queryResponse.data?.payload?.data.newResult.data.result || []}
query={widget.query}
ref={lineChartRef}
/>
@@ -313,9 +309,6 @@ WidgetGraphComponent.defaultProps = {
setLayout: undefined,
onDragSelect: undefined,
onClickHandler: undefined,
allowDelete: true,
allowClone: true,
allowEdit: true,
};

const mapDispatchToProps = (
@@ -15,6 +15,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { getSelectedDashboardVariable } from 'utils/dashboard/selectedDashboard';

import EmptyWidget from '../EmptyWidget';
import { MenuItemKeys } from '../WidgetHeader/contants';
import { GridCardGraphProps } from './types';
import WidgetGraphComponent from './WidgetGraphComponent';

@@ -26,9 +27,7 @@ function GridCardGraph({
setLayout,
onDragSelect,
onClickHandler,
allowDelete,
allowClone,
allowEdit,
headerMenuList = [MenuItemKeys.View],
isQueryEnabled,
}: GridCardGraphProps): JSX.Element {
const { isAddWidget } = useSelector<AppState, DashboardReducer>(
@@ -121,9 +120,7 @@ function GridCardGraph({
yAxisUnit={yAxisUnit}
layout={layout}
setLayout={setLayout}
allowClone={allowClone}
allowDelete={allowDelete}
allowEdit={allowEdit}
headerMenuList={headerMenuList}
/>
)}
</span>
@@ -145,9 +142,7 @@ function GridCardGraph({
yAxisUnit={yAxisUnit}
layout={layout}
setLayout={setLayout}
allowClone={allowClone}
allowDelete={allowDelete}
allowEdit={allowEdit}
headerMenuList={headerMenuList}
onClickHandler={onClickHandler}
/>
) : (
@@ -170,9 +165,7 @@ function GridCardGraph({
name={name}
yAxisUnit={yAxisUnit}
onDragSelect={onDragSelect}
allowClone={allowClone}
allowDelete={allowDelete}
allowEdit={allowEdit}
headerMenuList={headerMenuList}
onClickHandler={onClickHandler}
/>
)}
@@ -185,10 +178,8 @@ function GridCardGraph({
GridCardGraph.defaultProps = {
onDragSelect: undefined,
onClickHandler: undefined,
allowDelete: true,
allowClone: true,
allowEdit: true,
isQueryEnabled: true,
headerMenuList: [MenuItemKeys.View],
};

export default memo(GridCardGraph);
@@ -10,6 +10,7 @@ import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

import { LayoutProps } from '..';
import { MenuItemKeys } from '../WidgetHeader/contants';
import { LegendEntryProps } from './FullView/types';

export interface GraphVisibilityLegendEntryProps {
@@ -38,25 +39,19 @@ export interface WidgetGraphComponentProps extends DispatchProps {
setLayout?: Dispatch<SetStateAction<LayoutProps[]>>;
onDragSelect?: (start: number, end: number) => void;
onClickHandler?: GraphOnClickHandler;
allowDelete?: boolean;
allowClone?: boolean;
allowEdit?: boolean;
headerMenuList: MenuItemKeys[];
}

export interface GridCardGraphProps {
widget: Widgets;
name: string;
yAxisUnit: string | undefined;
// eslint-disable-next-line react/require-default-props
layout?: Layout[];
// eslint-disable-next-line react/require-default-props
setLayout?: Dispatch<SetStateAction<LayoutProps[]>>;
onDragSelect?: (start: number, end: number) => void;
onClickHandler?: GraphOnClickHandler;
allowDelete?: boolean;
allowClone?: boolean;
allowEdit?: boolean;
isQueryEnabled?: boolean;
headerMenuList?: WidgetGraphComponentProps['headerMenuList'];
isQueryEnabled: boolean;
}

export interface GetGraphVisibilityStateOnLegendClickProps {
@@ -3,6 +3,7 @@ export enum MenuItemKeys {
Edit = 'edit',
Delete = 'delete',
Clone = 'clone',
CreateAlerts = 'createAlerts',
}

export const MENUITEM_KEYS_VS_LABELS = {
@@ -10,4 +11,5 @@ export const MENUITEM_KEYS_VS_LABELS = {
[MenuItemKeys.Edit]: 'Edit',
[MenuItemKeys.Delete]: 'Delete',
[MenuItemKeys.Clone]: 'Clone',
[MenuItemKeys.CreateAlerts]: 'Create Alerts',
};
@@ -7,9 +7,9 @@ import {
FullscreenOutlined,
} from '@ant-design/icons';
import { Dropdown, MenuProps, Tooltip, Typography } from 'antd';
import { MenuItemType } from 'antd/es/menu/hooks/useItems';
import Spinner from 'components/Spinner';
import { queryParamNamesMap } from 'constants/queryBuilderQueryNames';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import history from 'lib/history';
import { useCallback, useMemo, useState } from 'react';
@@ -33,7 +33,7 @@ import {
HeaderContainer,
HeaderContentContainer,
} from './styles';
import { KeyMethodMappingProps, MenuItem, TWidgetOptions } from './types';
import { MenuItem } from './types';
import { generateMenuList, isTWidgetOptions } from './utils';

interface IWidgetHeaderProps {
@@ -47,10 +47,9 @@ interface IWidgetHeaderProps {
SuccessResponse<MetricRangePayloadProps> | ErrorResponse
>;
errorMessage: string | undefined;
allowDelete?: boolean;
allowClone?: boolean;
allowEdit?: boolean;
headerMenuList?: MenuItemKeys[];
}

function WidgetHeader({
title,
widget,
@@ -60,9 +59,7 @@ function WidgetHeader({
parentHover,
queryResponse,
errorMessage,
allowClone = true,
allowDelete = true,
allowEdit = true,
headerMenuList,
}: IWidgetHeaderProps): JSX.Element {
const [localHover, setLocalHover] = useState(false);
const [isOpen, setIsOpen] = useState<boolean>(false);
@@ -78,32 +75,30 @@ function WidgetHeader({
);
}, [widget.id, widget.panelTypes, widget.query]);

const keyMethodMapping: KeyMethodMappingProps<TWidgetOptions> = useMemo(
const onCreateAlertsHandler = useCallback(() => {
history.push(
`${ROUTES.ALERTS_NEW}?${
queryParamNamesMap.compositeQuery
}=${encodeURIComponent(JSON.stringify(widget.query))}`,
);
}, [widget]);

const keyMethodMapping = useMemo(
() => ({
view: {
key: MenuItemKeys.View,
method: onView,
},
edit: {
key: MenuItemKeys.Edit,
method: onEditHandler,
},
delete: {
key: MenuItemKeys.Delete,
method: onDelete,
},
clone: {
key: MenuItemKeys.Clone,
method: onClone,
},
[MenuItemKeys.View]: onView,
[MenuItemKeys.Edit]: onEditHandler,
[MenuItemKeys.Delete]: onDelete,
[MenuItemKeys.Clone]: onClone,
[MenuItemKeys.CreateAlerts]: onCreateAlertsHandler,
}),
[onDelete, onEditHandler, onView, onClone],
[onDelete, onEditHandler, onView, onClone, onCreateAlertsHandler],
);

const onMenuItemSelectHandler: MenuProps['onClick'] = useCallback(
({ key }: { key: string }): void => {
if (isTWidgetOptions(key)) {
const functionToCall = keyMethodMapping[key]?.method;
const functionToCall = keyMethodMapping[key];

if (functionToCall) {
functionToCall();
setIsOpen(false);
@@ -125,46 +120,43 @@ function WidgetHeader({
key: MenuItemKeys.View,
icon: <FullscreenOutlined />,
label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.View],
isVisible: true,
isVisible: headerMenuList?.includes(MenuItemKeys.View) || false,
disabled: queryResponse.isLoading,
},
{
key: MenuItemKeys.Edit,
icon: <EditFilled />,
label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Edit],
isVisible: allowEdit,
isVisible: headerMenuList?.includes(MenuItemKeys.Edit) || false,
disabled: !editWidget,
},
{
key: MenuItemKeys.Clone,
icon: <CopyOutlined />,
label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Clone],
isVisible: allowClone,
isVisible: headerMenuList?.includes(MenuItemKeys.Clone) || false,
disabled: !editWidget,
},
{
key: MenuItemKeys.Delete,
icon: <DeleteOutlined />,
label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.Delete],
isVisible: allowDelete,
isVisible: headerMenuList?.includes(MenuItemKeys.Delete) || false,
disabled: !deleteWidget,
danger: true,
},
{
key: MenuItemKeys.CreateAlerts,
icon: <DeleteOutlined />,
label: MENUITEM_KEYS_VS_LABELS[MenuItemKeys.CreateAlerts],
isVisible: headerMenuList?.includes(MenuItemKeys.CreateAlerts) || false,
disabled: false,
},
],
[
allowEdit,
allowClone,
allowDelete,
queryResponse.isLoading,
deleteWidget,
editWidget,
],
[queryResponse.isLoading, headerMenuList, editWidget, deleteWidget],
);

const menuList: MenuItemType[] = useMemo(
(): MenuItemType[] => generateMenuList(actions, keyMethodMapping),
[actions, keyMethodMapping],
);
const updatedMenuList = useMemo(() => generateMenuList(actions), [actions]);

const onClickHandler = useCallback(() => {
setIsOpen((open) => !open);
@@ -172,10 +164,10 @@ function WidgetHeader({

const menu = useMemo(
() => ({
items: menuList,
items: updatedMenuList,
onClick: onMenuItemSelectHandler,
}),
[menuList, onMenuItemSelectHandler],
[updatedMenuList, onMenuItemSelectHandler],
);

return (
@@ -219,9 +211,7 @@ function WidgetHeader({
WidgetHeader.defaultProps = {
onDelete: undefined,
onClone: undefined,
allowDelete: true,
allowClone: true,
allowEdit: true,
headerMenuList: [MenuItemKeys.View],
};

export default WidgetHeader;
@@ -3,23 +3,10 @@ import { ReactNode } from 'react';
import { MenuItemKeys } from './contants';

export interface MenuItem {
key: TWidgetOptions;
key: MenuItemKeys;
icon: ReactNode;
label: string;
isVisible: boolean;
disabled: boolean;
danger?: boolean;
}

export type TWidgetOptions =
| MenuItemKeys.View
| MenuItemKeys.Edit
| MenuItemKeys.Delete
| MenuItemKeys.Clone;

export type KeyMethodMappingProps<T extends TWidgetOptions> = {
[K in T]: {
key: TWidgetOptions;
method?: VoidFunction;
};
};
@@ -1,24 +1,22 @@
import { MenuItemType } from 'antd/es/menu/hooks/useItems';

import { MenuItemKeys } from './contants';
import { KeyMethodMappingProps, MenuItem, TWidgetOptions } from './types';
import { MenuItem } from './types';

export const generateMenuList = (
actions: MenuItem[],
keyMethodMapping: KeyMethodMappingProps<TWidgetOptions>,
): MenuItemType[] =>
export const generateMenuList = (actions: MenuItem[]): MenuItemType[] =>
actions
.filter((action: MenuItem) => action.isVisible)
.map(({ key, icon: Icon, label, disabled, ...rest }) => ({
key: keyMethodMapping[key].key,
key,
icon: Icon,
label,
disabled,
...rest,
}));

export const isTWidgetOptions = (value: string): value is TWidgetOptions =>
export const isTWidgetOptions = (value: string): value is MenuItemKeys =>
value === MenuItemKeys.View ||
value === MenuItemKeys.Edit ||
value === MenuItemKeys.Delete ||
value === MenuItemKeys.Clone;
value === MenuItemKeys.Clone ||
value === MenuItemKeys.CreateAlerts;
frontend/src/container/GridGraphLayout/config.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import { MenuItemKeys } from 'container/GridGraphLayout/WidgetHeader/contants';

export const headerMenuList = [
MenuItemKeys.View,
MenuItemKeys.Clone,
MenuItemKeys.Delete,
MenuItemKeys.Edit,
];
@@ -29,6 +29,7 @@ import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import AppReducer from 'types/reducer/app';
import DashboardReducer from 'types/reducer/dashboards';

import { headerMenuList } from './config';
import Graph from './Graph';
import GraphLayoutContainer from './GraphLayout';
import { UpdateDashboard } from './utils';
@@ -49,6 +50,7 @@ export const getPreLayouts = (
yAxisUnit={widget?.yAxisUnit}
layout={layout}
setLayout={setLayout}
headerMenuList={headerMenuList}
/>
);
},
@@ -233,6 +235,7 @@ function GridGraph(props: Props): JSX.Element {
layout={layout}
setLayout={setLayout}
onDragSelect={onDragSelect}
headerMenuList={headerMenuList}
/>
),
};
@@ -3,7 +3,7 @@ import {
CaretUpFilled,
LogoutOutlined,
} from '@ant-design/icons';
import { Button, Divider, Dropdown, MenuProps, Space, Typography } from 'antd';
import { Button, Divider, MenuProps, Space, Typography } from 'antd';
import { Logout } from 'api/utils';
import ROUTES from 'constants/routes';
import Config from 'container/ConfigDropdown';
@@ -33,6 +33,7 @@ import {
LogoutContainer,
NavLinkWrapper,
ToggleButton,
UserDropdown,
} from './styles';

function HeaderContainer(): JSX.Element {
@@ -133,7 +134,7 @@ function HeaderContainer(): JSX.Element {
unCheckedChildren="🌞"
/>

<Dropdown
<UserDropdown
onOpenChange={onToggleHandler(setIsUserDropDownOpen)}
trigger={['click']}
menu={menu}
@@ -145,7 +146,7 @@ function HeaderContainer(): JSX.Element {
{!isUserDropDownOpen ? <CaretDownFilled /> : <CaretUpFilled />}
</IconContainer>
</Space>
</Dropdown>
</UserDropdown>
</Space>
</Container>
</Header>
@@ -1,4 +1,4 @@
import { Avatar, Layout, Switch, Typography } from 'antd';
import { Avatar, Dropdown, Layout, Switch, Typography } from 'antd';
import styled from 'styled-components';

export const Header = styled(Layout.Header)`
@@ -82,3 +82,7 @@ export const NavLinkWrapper = styled.div`
export const AvatarWrapper = styled(Avatar)`
background-color: rgba(255, 255, 255, 0.25);
`;

export const UserDropdown = styled(Dropdown)`
cursor: pointer;
`;
@@ -1,5 +1,6 @@
import { ExclamationCircleOutlined } from '@ant-design/icons';
import { Modal } from 'antd';
import { useCallback } from 'react';
import { connect } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
@@ -9,11 +10,11 @@ import AppActions from 'types/actions';
import { Data } from '../index';
import { TableLinkText } from './styles';

const { confirm } = Modal;

function DeleteButton({ deleteDashboard, id }: DeleteButtonProps): JSX.Element {
const openConfirmationDialog = (): void => {
confirm({
const [modal, contextHolder] = Modal.useModal();

const openConfirmationDialog = useCallback((): void => {
modal.confirm({
title: 'Do you really want to delete this dashboard?',
icon: <ExclamationCircleOutlined style={{ color: '#e42b35' }} />,
onOk() {
@@ -25,12 +26,16 @@ function DeleteButton({ deleteDashboard, id }: DeleteButtonProps): JSX.Element {
okButtonProps: { danger: true },
centered: true,
});
};
}, [id, modal, deleteDashboard]);

return (
<TableLinkText type="danger" onClick={openConfirmationDialog}>
Delete
</TableLinkText>
<>
<TableLinkText type="danger" onClick={openConfirmationDialog}>
Delete
</TableLinkText>

{contextHolder}
</>
);
}
@@ -1,3 +1,4 @@
import { initialFilters } from 'constants/queryBuilder';
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
import { ILog } from 'types/api/logs/log';
import {
@@ -28,7 +29,7 @@ export const getRequestData = ({
if (!query) return null;

const paginateData = getPaginationQueryData({
currentStagedQueryData: stagedQueryData,
filters: stagedQueryData?.filters || initialFilters,
listItemId: log ? log.id : null,
orderByTimestamp,
page,
@@ -3,7 +3,9 @@ import TabLabel from 'components/TabLabel';
import { QueryParams } from 'constants/query';
import {
initialAutocompleteData,
initialFilters,
initialQueriesMap,
initialQueryBuilderFormValues,
PANEL_TYPES,
} from 'constants/queryBuilder';
import { queryParamNamesMap } from 'constants/queryBuilderQueryNames';
@@ -36,6 +38,7 @@ import {
IBuilderQuery,
OrderByPayload,
Query,
TagFilter,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -75,19 +78,19 @@ function LogsExplorerViews(): JSX.Element {

const handleAxisError = useAxiosError();

const currentStagedQueryData = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length !== 1) return null;
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;

return stagedQuery.builder.queryData[0];
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
}, [stagedQuery]);

const orderByTimestamp: OrderByPayload | null = useMemo(() => {
const timestampOrderBy = currentStagedQueryData?.orderBy.find(
const timestampOrderBy = listQuery?.orderBy.find(
(item) => item.columnName === 'timestamp',
);

return timestampOrderBy || null;
}, [currentStagedQueryData]);
}, [listQuery]);

const isMultipleQueries = useMemo(
() =>
@@ -106,17 +109,17 @@ function LogsExplorerViews(): JSX.Element {
}, [currentQuery]);

const isLimit: boolean = useMemo(() => {
if (!currentStagedQueryData) return false;
if (!currentStagedQueryData.limit) return false;
if (!listQuery) return false;
if (!listQuery.limit) return false;

return logs.length >= currentStagedQueryData.limit;
}, [logs.length, currentStagedQueryData]);
return logs.length >= listQuery.limit;
}, [logs.length, listQuery]);

const listChartQuery = useMemo(() => {
if (!stagedQuery || !currentStagedQueryData) return null;
if (!stagedQuery || !listQuery) return null;

const modifiedQueryData: IBuilderQuery = {
...currentStagedQueryData,
...listQuery,
aggregateOperator: StringOperators.COUNT,
};

@@ -132,7 +135,7 @@ function LogsExplorerViews(): JSX.Element {
};

return modifiedQuery;
}, [stagedQuery, currentStagedQueryData]);
}, [stagedQuery, listQuery]);

const exportDefaultQuery = useMemo(
() =>
@@ -147,6 +150,9 @@ function LogsExplorerViews(): JSX.Element {
const listChartData = useGetExplorerQueryRange(
listChartQuery,
PANEL_TYPES.TIME_SERIES,
{
enabled: !!listChartQuery && panelType === PANEL_TYPES.LIST,
},
);

const { data, isFetching, isError } = useGetExplorerQueryRange(
@@ -205,52 +211,66 @@ function LogsExplorerViews(): JSX.Element {
const getRequestData = useCallback(
(
query: Query | null,
params: { page: number; log: ILog | null; pageSize: number },
params: {
page: number;
log: ILog | null;
pageSize: number;
filters: TagFilter;
},
): Query | null => {
if (!query) return null;

const paginateData = getPaginationQueryData({
currentStagedQueryData,
filters: params.filters,
listItemId: params.log ? params.log.id : null,
orderByTimestamp,
page: params.page,
pageSize: params.pageSize,
});

const queryData: IBuilderQuery[] =
query.builder.queryData.length > 1
? query.builder.queryData
: [
{
...(listQuery || initialQueryBuilderFormValues),
...paginateData,
},
];

const data: Query = {
...query,
builder: {
...query.builder,
queryData: query.builder.queryData.map((item) => ({
...item,
...paginateData,
pageSize: params.pageSize,
})),
queryData,
},
};

return data;
},
[currentStagedQueryData, orderByTimestamp],
[orderByTimestamp, listQuery],
);

const handleEndReached = useCallback(
(index: number) => {
if (!listQuery) return;

if (isLimit) return;
if (logs.length < pageSize) return;

const { limit, filters } = listQuery;

const lastLog = logs[index];

const limit = currentStagedQueryData?.limit;

const nextLogsLenth = logs.length + pageSize;
const nextLogsLength = logs.length + pageSize;

const nextPageSize =
limit && nextLogsLenth >= limit ? limit - logs.length : pageSize;
limit && nextLogsLength >= limit ? limit - logs.length : pageSize;

if (!stagedQuery) return;

const newRequestData = getRequestData(stagedQuery, {
filters,
page: page + 1,
log: orderByTimestamp ? lastLog : null,
pageSize: nextPageSize,
@@ -263,7 +283,7 @@ function LogsExplorerViews(): JSX.Element {
[
isLimit,
logs,
currentStagedQueryData?.limit,
listQuery,
pageSize,
stagedQuery,
getRequestData,
@@ -367,11 +387,13 @@ function LogsExplorerViews(): JSX.Element {
currentMinTimeRef.current !== minTime
) {
const newRequestData = getRequestData(stagedQuery, {
filters: listQuery?.filters || initialFilters,
page: 1,
log: null,
pageSize:
timeRange?.pageSize && activeLogId ? timeRange?.pageSize : pageSize,
});

setLogs([]);
setPage(1);
setRequestData(newRequestData);
@@ -385,11 +407,13 @@ function LogsExplorerViews(): JSX.Element {
stagedQuery,
requestData,
getRequestData,
listQuery,
pageSize,
minTime,
timeRange,
activeLogId,
onTimeRangeChange,
panelType,
]);

const tabsItems: TabsProps['items'] = useMemo(
@@ -407,7 +431,7 @@ function LogsExplorerViews(): JSX.Element {
children: (
<LogsExplorerList
isLoading={isFetching}
currentStagedQueryData={currentStagedQueryData}
currentStagedQueryData={listQuery}
logs={logs}
onEndReached={handleEndReached}
/>
@@ -435,7 +459,7 @@ function LogsExplorerViews(): JSX.Element {
isMultipleQueries,
isGroupByExist,
isFetching,
currentStagedQueryData,
listQuery,
logs,
handleEndReached,
data,
@@ -463,10 +487,14 @@ function LogsExplorerViews(): JSX.Element {
(queryData) => queryData.groupBy.length > 0,
);

return isGroupByExist
? data.payload.data.result
: [data.payload.data.result[0]];
}, [stagedQuery, data, panelType, listChartData]);
const firstPayloadQuery = data.payload.data.result.find(
(item) => item.queryName === listQuery?.queryName,
);

const firstPayloadQueryArray = firstPayloadQuery ? [firstPayloadQuery] : [];

return isGroupByExist ? data.payload.data.result : firstPayloadQueryArray;
}, [stagedQuery, panelType, data, listChartData, listQuery]);

return (
<>
@@ -5,6 +5,7 @@ import RawLogView from 'components/Logs/RawLogView';
import LogsTableView from 'components/Logs/TableView';
import Spinner from 'components/Spinner';
import { contentStyle } from 'container/Trace/Search/config';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import useFontFaceObserver from 'hooks/useFontObserver';
import { memo, useCallback, useMemo } from 'react';
import { useSelector } from 'react-redux';
@@ -26,6 +27,8 @@ type LogsTableProps = {
function LogsTable(props: LogsTableProps): JSX.Element {
const { viewMode, linesPerRow } = props;

const { onSetActiveLog } = useActiveLog();

useFontFaceObserver(
[
{
@@ -72,7 +75,12 @@ function LogsTable(props: LogsTableProps): JSX.Element {
const renderContent = useMemo(() => {
if (viewMode === 'table') {
return (
<LogsTableView logs={logs} fields={selected} linesPerRow={linesPerRow} />
<LogsTableView
onClickExpand={onSetActiveLog}
logs={logs}
fields={selected}
linesPerRow={linesPerRow}
/>
);
}

@@ -85,7 +93,7 @@ function LogsTable(props: LogsTableProps): JSX.Element {
/>
</Card>
);
}, [getItemContent, linesPerRow, logs, selected, viewMode]);
}, [getItemContent, linesPerRow, logs, onSetActiveLog, selected, viewMode]);

if (isLoading) {
return <Spinner height={20} tip="Getting Logs" />;
@ -117,9 +117,6 @@ function DBCall(): JSX.Element {
'database_call_rps',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>
@ -153,9 +150,6 @@ function DBCall(): JSX.Element {
'database_call_avg_duration',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>

@ -156,9 +156,6 @@ function External(): JSX.Element {
'external_call_error_percentage',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>
@ -194,9 +191,6 @@ function External(): JSX.Element {
'external_call_duration',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>
@ -233,9 +227,6 @@ function External(): JSX.Element {
'external_call_rps_by_address',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>
@ -271,9 +262,6 @@ function External(): JSX.Element {
'external_call_duration_by_address',
);
}}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
</GraphContainer>
</Card>

@ -75,9 +75,6 @@ function ServiceOverview({
widget={latencyWidget}
yAxisUnit="ns"
onClickHandler={handleGraphClick('Service')}
allowClone={false}
allowDelete={false}
allowEdit={false}
isQueryEnabled={isQueryEnabled}
/>
</GraphContainer>

@ -39,9 +39,6 @@ function TopLevelOperation({
onClickHandler={handleGraphClick(opName)}
yAxisUnit={yAxisUnit}
onDragSelect={onDragSelect}
allowClone={false}
allowDelete={false}
allowEdit={false}
/>
)}
</GraphContainer>

@ -19,6 +19,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as uuid } from 'uuid';

import { IServiceName } from '../types';
import { title } from './config';
import ColumnWithLink from './TableRenderer/ColumnWithLink';
import { getTableColumnRenderer } from './TableRenderer/TableColumnRenderer';

@ -108,6 +109,7 @@ function TopOperationMetrics(): JSX.Element {

return (
<QueryTable
title={title}
query={updatedQuery}
queryTableData={queryTableData}
loading={isLoading}
@ -5,12 +5,13 @@ import { changeHistoryColumns } from '../../PipelineListsView/config';
import { HistoryTableWrapper } from '../../styles';
import { historyPagination } from '../config';

function ChangeHistory({ piplineData }: ChangeHistoryProps): JSX.Element {
function ChangeHistory({ pipelineData }: ChangeHistoryProps): JSX.Element {
return (
<HistoryTableWrapper>
<Table
columns={changeHistoryColumns}
dataSource={piplineData?.history ?? []}
dataSource={pipelineData?.history ?? []}
rowKey="id"
pagination={historyPagination}
/>
</HistoryTableWrapper>
@ -18,7 +19,7 @@ function ChangeHistory({ piplineData }: ChangeHistoryProps): JSX.Element {
}

interface ChangeHistoryProps {
piplineData: Pipeline;
pipelineData: Pipeline;
}

export default ChangeHistory;

@ -11,13 +11,13 @@ function CreatePipelineButton({
setActionType,
isActionMode,
setActionMode,
piplineData,
pipelineData,
}: CreatePipelineButtonProps): JSX.Element {
const { t } = useTranslation(['pipeline']);

const isAddNewPipelineVisible = useMemo(
() => checkDataLength(piplineData?.pipelines),
[piplineData?.pipelines],
() => checkDataLength(pipelineData?.pipelines),
[pipelineData?.pipelines],
);
const isDisabled = isActionMode === ActionMode.Editing;

@ -56,7 +56,7 @@ interface CreatePipelineButtonProps {
setActionType: (actionType: string) => void;
isActionMode: string;
setActionMode: (actionMode: string) => void;
piplineData: Pipeline;
pipelineData: Pipeline;
}

export default CreatePipelineButton;

@ -7,7 +7,7 @@ import PipelinesSearchSection from './PipelinesSearchSection';

function PipelinePageLayout({
refetchPipelineLists,
piplineData,
pipelineData,
}: PipelinePageLayoutProps): JSX.Element {
const [isActionType, setActionType] = useState<string>();
const [isActionMode, setActionMode] = useState<string>('viewing-mode');
@ -19,7 +19,7 @@ function PipelinePageLayout({
setActionType={setActionType}
setActionMode={setActionMode}
isActionMode={isActionMode}
piplineData={piplineData}
pipelineData={pipelineData}
/>
<PipelinesSearchSection setPipelineSearchValue={setPipelineSearchValue} />
<PipelineListsView
@ -27,7 +27,7 @@ function PipelinePageLayout({
setActionType={setActionType}
setActionMode={setActionMode}
isActionMode={isActionMode}
piplineData={piplineData}
pipelineData={pipelineData}
refetchPipelineLists={refetchPipelineLists}
pipelineSearchValue={pipelineSearchValue}
/>
@ -37,7 +37,7 @@ function PipelinePageLayout({

interface PipelinePageLayoutProps {
refetchPipelineLists: VoidFunction;
piplineData: Pipeline;
pipelineData: Pipeline;
}

export default PipelinePageLayout;

@ -4,21 +4,21 @@ import { ModeAndConfigWrapper } from './styles';

function ModeAndConfiguration({
isActionMode,
verison,
version,
}: ModeAndConfigurationType): JSX.Element {
const actionMode = isActionMode === ActionMode.Editing;

return (
<ModeAndConfigWrapper>
Mode: <span>{actionMode ? 'Editing' : 'Viewing'}</span>
<div>Configuration Version: {verison}</div>
<div>Configuration Version: {version}</div>
</ModeAndConfigWrapper>
);
}

export interface ModeAndConfigurationType {
isActionMode: string;
verison: string | number;
version: string | number;
}

export default ModeAndConfiguration;

@ -47,7 +47,7 @@ function PipelineListsView({
setActionType,
isActionMode,
setActionMode,
piplineData,
pipelineData,
refetchPipelineLists,
pipelineSearchValue,
}: PipelineListsViewProps): JSX.Element {
@ -55,10 +55,10 @@ function PipelineListsView({
const [modal, contextHolder] = Modal.useModal();
const { notifications } = useNotifications();
const [prevPipelineData, setPrevPipelineData] = useState<Array<PipelineData>>(
cloneDeep(piplineData?.pipelines),
cloneDeep(pipelineData?.pipelines),
);
const [currPipelineData, setCurrPipelineData] = useState<Array<PipelineData>>(
cloneDeep(piplineData?.pipelines),
cloneDeep(pipelineData?.pipelines),
);
const [
expandedPipelineData,
@ -77,14 +77,14 @@ function PipelineListsView({
const isEditingActionMode = isActionMode === ActionMode.Editing;

useEffect(() => {
if (pipelineSearchValue === '') setCurrPipelineData(piplineData?.pipelines);
if (pipelineSearchValue === '') setCurrPipelineData(pipelineData?.pipelines);
if (pipelineSearchValue !== '') {
const filterData = piplineData?.pipelines.filter((data: PipelineData) =>
const filterData = pipelineData?.pipelines.filter((data: PipelineData) =>
getDataOnSearch(data as never, pipelineSearchValue),
);
setCurrPipelineData(filterData);
}
}, [pipelineSearchValue, piplineData?.pipelines]);
}, [pipelineSearchValue, pipelineData?.pipelines]);

const handleAlert = useCallback(
({ title, descrition, buttontext, onCancel, onOk }: AlertMessage) => {
@ -414,7 +414,7 @@ function PipelineListsView({
<Container>
<ModeAndConfiguration
isActionMode={isActionMode}
verison={piplineData?.version}
version={pipelineData?.version}
/>
<DndProvider backend={HTML5Backend}>
<Table
@ -445,7 +445,7 @@ interface PipelineListsViewProps {
setActionType: (actionType?: ActionType) => void;
isActionMode: string;
setActionMode: (actionMode: ActionMode) => void;
piplineData: Pipeline;
pipelineData: Pipeline;
refetchPipelineLists: VoidFunction;
pipelineSearchValue: string;
}

@ -1,6 +1,6 @@
import { Pipeline, PipelineData } from 'types/api/pipeline/def';

export const configurationVerison = '1.0';
export const configurationVersion = '1.0';

export const pipelineMockData: Array<PipelineData> = [
{

@ -18,7 +18,7 @@ describe('PipelinePage container test', () => {
setActionType={jest.fn()}
isActionMode="viewing-mode"
setActionMode={jest.fn()}
piplineData={pipelineApiResponseMockData}
pipelineData={pipelineApiResponseMockData}
/>
</I18nextProvider>
</Provider>

@ -49,7 +49,7 @@ describe('PipelinePage container test', () => {
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<PipelinePageLayout
piplineData={pipelinedata}
pipelineData={pipelinedata}
refetchPipelineLists={refetchPipelineLists}
/>
</I18nextProvider>

@ -20,7 +20,7 @@ export function QueryTable({
renderActionCell,
renderColumnCell,
}),
[query, queryTableData, renderColumnCell, renderActionCell],
[query, queryTableData, renderActionCell, renderColumnCell],
);

const tableColumns = modifyColumns ? modifyColumns(columns) : columns;
frontend/src/hooks/useEventSourceEvent/index.ts (new file, 22 lines)
@ -0,0 +1,22 @@
import { EventListener, EventSourceEventMap } from 'event-source-polyfill';
import { useEventSource } from 'providers/EventSource';
import { useEffect } from 'react';

export const useEventSourceEvent = (
eventName: keyof EventSourceEventMap,
listener: EventListener,
): void => {
const { eventSourceInstance } = useEventSource();

useEffect(() => {
if (eventSourceInstance) {
eventSourceInstance.addEventListener(eventName, listener);
}

return (): void => {
if (eventSourceInstance) {
eventSourceInstance.removeEventListener(eventName, listener);
}
};
}, [eventName, eventSourceInstance, listener]);
};
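
Note: the hook above only attaches and detaches listeners on whatever connection the EventSource provider currently holds. A minimal consumer sketch, with an illustrative hook name and handler that are not part of this commit:

import { EventListener } from 'event-source-polyfill';
import { useCallback } from 'react';
import { useEventSourceEvent } from 'hooks/useEventSourceEvent';

// Hypothetical consumer: forward each live-tail payload to the caller.
function useLiveTailLogs(onLog: (raw: string) => void): void {
	// Keep the listener referentially stable so the effect inside
	// useEventSourceEvent does not re-subscribe on every render.
	const handleMessage: EventListener = useCallback(
		(event) => {
			onLog((event as MessageEvent<string>).data);
		},
		[onLog],
	);

	useEventSourceEvent('message', handleMessage);
}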
@ -1,4 +1,3 @@
import { initialFilters } from 'constants/queryBuilder';
import { FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import {
IBuilderQuery,
@ -8,7 +7,7 @@ import {
import { v4 as uuid } from 'uuid';

type SetupPaginationQueryDataParams = {
currentStagedQueryData: IBuilderQuery | null;
filters: IBuilderQuery['filters'];
listItemId: string | null;
orderByTimestamp: OrderByPayload | null;
page: number;
@ -17,20 +16,15 @@ type SetupPaginationQueryDataParams = {

type SetupPaginationQueryData = (
params: SetupPaginationQueryDataParams,
) => Pick<IBuilderQuery, 'filters' | 'offset'>;
) => Partial<IBuilderQuery>;

export const getPaginationQueryData: SetupPaginationQueryData = ({
currentStagedQueryData,
filters,
listItemId,
orderByTimestamp,
page,
pageSize,
}) => {
if (!currentStagedQueryData) {
return { limit: null, filters: initialFilters };
}

const filters = currentStagedQueryData.filters || initialFilters;
const offset = (page - 1) * pageSize;

const queryProps = {
@ -69,5 +63,5 @@ export const getPaginationQueryData: SetupPaginationQueryData = ({
...queryProps,
};

return { ...currentStagedQueryData, ...chunkOfQueryData };
return chunkOfQueryData;
};
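
For orientation, the offset arithmetic above is the whole of the paging contract. A tiny sketch with illustrative numbers, not part of this change:

// offset = (page - 1) * pageSize, as in getPaginationQueryData above.
// With pageSize 100: page 1 starts at row 0, page 3 starts at row 200.
const pageSize = 100;
const offsetForPage = (page: number): number => (page - 1) * pageSize;

console.log(offsetForPage(1)); // 0
console.log(offsetForPage(3)); // 200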
@ -28,14 +28,13 @@ export type RowData = {
[key: string]: string | number;
};

type DynamicColumn = {
export type DynamicColumn = {
query: IBuilderQuery | IBuilderFormula;
field: string;
dataIndex: string;
title: string;
data: (string | number)[];
type: 'field' | 'operator' | 'formula';
// sortable: boolean;
};

type DynamicColumns = DynamicColumn[];
@ -91,37 +90,13 @@ const getQueryByName = <T extends keyof QueryBuilderData>(
return currentQuery as T extends 'queryData' ? IBuilderQuery : IBuilderFormula;
};

const addListLabels = (
const addLabels = (
query: IBuilderQuery | IBuilderFormula,
label: ListItemKey,
dynamicColumns: DynamicColumns,
): void => {
if (isValueExist('dataIndex', label, dynamicColumns)) return;

const fieldObj: DynamicColumn = {
query,
field: 'label',
dataIndex: label as string,
title: label as string,
data: [],
type: 'field',
// sortable: isNumber,
};

dynamicColumns.push(fieldObj);
};

const addSeriaLabels = (
label: string,
dynamicColumns: DynamicColumns,
query: IBuilderQuery | IBuilderFormula,
): void => {
if (isValueExist('dataIndex', label, dynamicColumns)) return;

// const labelValue = labels[label];

// const isNumber = !Number.isNaN(parseFloat(String(labelValue)));

const fieldObj: DynamicColumn = {
query,
field: label as string,
@ -129,7 +104,6 @@ const addSeriaLabels = (
title: label,
data: [],
type: 'field',
// sortable: isNumber,
};

dynamicColumns.push(fieldObj);
@ -155,7 +129,6 @@ const addOperatorFormulaColumns = (
title: customLabel || formulaLabel,
data: [],
type: 'formula',
// sortable: isNumber,
};

dynamicColumns.push(formulaColumn);
@ -181,7 +154,6 @@ const addOperatorFormulaColumns = (
title: customLabel || operatorLabel,
data: [],
type: 'operator',
// sortable: isNumber,
};

dynamicColumns.push(operatorColumn);
@ -224,7 +196,7 @@ const getDynamicColumns: GetDynamicColumns = (queryTableData, query) => {
if (list) {
list.forEach((listItem) => {
Object.keys(listItem.data).forEach((label) => {
addListLabels(currentStagedQuery, label as ListItemKey, dynamicColumns);
addLabels(currentStagedQuery, label, dynamicColumns);
});
});
}
@ -245,7 +217,7 @@ const getDynamicColumns: GetDynamicColumns = (queryTableData, query) => {
Object.keys(seria.labels).forEach((label) => {
if (label === currentQuery?.queryName) return;

addSeriaLabels(label as string, dynamicColumns, currentStagedQuery);
addLabels(currentStagedQuery, label, dynamicColumns);
});
});
}
@ -486,10 +458,6 @@ const generateTableColumns = (
title: item.title,
width: QUERY_TABLE_CONFIG.width,
render: renderColumnCell && renderColumnCell[item.dataIndex],
// sorter: item.sortable
// ? (a: RowData, b: RowData): number =>
// (a[item.key] as number) - (b[item.key] as number)
// : false,
};

return [...acc, column];
@ -504,10 +472,14 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
renderActionCell,
renderColumnCell,
}) => {
const dynamicColumns = getDynamicColumns(queryTableData, query);
const sortedQueryTableData = queryTableData.sort((a, b) =>
a.queryName < b.queryName ? -1 : 1,
);

const dynamicColumns = getDynamicColumns(sortedQueryTableData, query);

const { filledDynamicColumns, rowsLength } = fillColumnsData(
queryTableData,
sortedQueryTableData,
dynamicColumns,
);

@ -8,14 +8,30 @@ import { useNotifications } from 'hooks/useNotifications';
import { useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { SuccessResponse } from 'types/api';
import { Pipeline } from 'types/api/pipeline/def';

const pipelineRefetchInterval = (
pipelineResponse: SuccessResponse<Pipeline> | undefined,
): number | false => {
// Refetch pipeline data periodically if deployment of
// its latest changes is not complete yet.
const latestVersion = pipelineResponse?.payload?.history?.[0];
const isLatestDeploymentFinished = ['DEPLOYED', 'FAILED'].includes(
latestVersion?.deployStatus || '',
);
if (latestVersion && !isLatestDeploymentFinished) {
return 3000;
}
return false;
};

function Pipelines(): JSX.Element {
const { t } = useTranslation('common');
const { notifications } = useNotifications();
const {
isLoading,
data: piplineData,
data: pipelineData,
isError,
refetch: refetchPipelineLists,
} = useQuery(['version', 'latest', 'pipeline'], {
@ -23,6 +39,7 @@ function Pipelines(): JSX.Element {
getPipeline({
version: 'latest',
}),
refetchInterval: pipelineRefetchInterval,
});

const tabItems: TabsProps['items'] = useMemo(
@ -33,26 +50,28 @@ function Pipelines(): JSX.Element {
children: (
<PipelinePage
refetchPipelineLists={refetchPipelineLists}
piplineData={piplineData?.payload as Pipeline}
pipelineData={pipelineData?.payload as Pipeline}
/>
),
},
{
key: 'change-history',
label: `Change History`,
children: <ChangeHistory piplineData={piplineData?.payload as Pipeline} />,
children: (
<ChangeHistory pipelineData={pipelineData?.payload as Pipeline} />
),
},
],
[piplineData?.payload, refetchPipelineLists],
[pipelineData?.payload, refetchPipelineLists],
);

useEffect(() => {
if (piplineData?.error && isError) {
if (pipelineData?.error && isError) {
notifications.error({
message: piplineData?.error || t('something_went_wrong'),
message: pipelineData?.error || t('something_went_wrong'),
});
}
}, [isError, notifications, piplineData?.error, t]);
}, [isError, notifications, pipelineData?.error, t]);

if (isLoading) {
return <Spinner height="75vh" tip="Loading Pipelines..." />;
frontend/src/providers/EventSource.tsx (new file, 124 lines)
@ -0,0 +1,124 @@
import { apiV3 } from 'api/apiV1';
import { ENVIRONMENT } from 'constants/env';
import { EventListener, EventSourcePolyfill } from 'event-source-polyfill';
import {
createContext,
PropsWithChildren,
useCallback,
useContext,
useMemo,
useRef,
useState,
} from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppReducer from 'types/reducer/app';

interface IEventSourceContext {
eventSourceInstance: EventSourcePolyfill | null;
isConnectionOpen: boolean;
isConnectionLoading: boolean;
isConnectionError: string;
handleStartOpenConnection: (url?: string) => void;
handleCloseConnection: () => void;
}

const EventSourceContext = createContext<IEventSourceContext>({
eventSourceInstance: null,
isConnectionOpen: false,
isConnectionLoading: false,
isConnectionError: '',
handleStartOpenConnection: () => {},
handleCloseConnection: () => {},
});

export function EventSourceProvider({
children,
}: PropsWithChildren): JSX.Element {
const [isConnectionOpen, setIsConnectionOpen] = useState<boolean>(false);
const [isConnectionLoading, setIsConnectionLoading] = useState<boolean>(false);
const [isConnectionError, setIsConnectionError] = useState<string>('');

const { user } = useSelector<AppState, AppReducer>((state) => state.app);

const eventSourceRef = useRef<EventSourcePolyfill | null>(null);

const handleCloseConnection = useCallback(() => {
if (!eventSourceRef.current) return;

eventSourceRef.current.close();
setIsConnectionOpen(false);
setIsConnectionLoading(false);
}, []);

const handleOpenConnection: EventListener = useCallback(() => {
setIsConnectionLoading(false);
setIsConnectionOpen(true);
}, []);

const handleErrorConnection: EventListener = useCallback(() => {
if (!eventSourceRef.current) return;

handleCloseConnection();

eventSourceRef.current.removeEventListener('error', handleErrorConnection);
eventSourceRef.current.removeEventListener('open', handleOpenConnection);
}, [handleCloseConnection, handleOpenConnection]);

const handleStartOpenConnection = useCallback(
(url?: string) => {
const eventSourceUrl = url || `${ENVIRONMENT.baseURL}${apiV3}logs/livetail`;

const TIMEOUT_IN_MS = 10 * 60 * 1000;

eventSourceRef.current = new EventSourcePolyfill(eventSourceUrl, {
headers: {
Authorization: `Bearer ${user?.accessJwt}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
});

setIsConnectionLoading(true);
setIsConnectionError('');

eventSourceRef.current.addEventListener('error', handleErrorConnection);

eventSourceRef.current.addEventListener('open', handleOpenConnection);
},
[handleErrorConnection, handleOpenConnection, user?.accessJwt],
);

const contextValue = useMemo(
() => ({
eventSourceInstance: eventSourceRef.current,
isConnectionError,
isConnectionLoading,
isConnectionOpen,
handleStartOpenConnection,
handleCloseConnection,
}),
[
isConnectionError,
isConnectionLoading,
isConnectionOpen,
handleStartOpenConnection,
handleCloseConnection,
],
);

return (
<EventSourceContext.Provider value={contextValue}>
{children}
</EventSourceContext.Provider>
);
}

export const useEventSource = (): IEventSourceContext => {
const context = useContext(EventSourceContext);

if (!context) {
throw new Error('Should be used inside the context');
}

return context;
};
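
Note: taken together with the useEventSourceEvent hook above, the provider is meant to be mounted once near the application root. A hedged usage sketch — the component names and the mount point below are assumptions for illustration, and the provider additionally expects the Redux store to be available for the auth token:

import { EventSourceProvider, useEventSource } from 'providers/EventSource';

// Hypothetical live-tail toolbar that opens/closes the shared connection.
function LiveTailControls(): JSX.Element {
	const {
		isConnectionOpen,
		handleStartOpenConnection,
		handleCloseConnection,
	} = useEventSource();

	return isConnectionOpen ? (
		<button type="button" onClick={handleCloseConnection}>
			Stop live tail
		</button>
	) : (
		// Without an argument, the provider falls back to
		// `${ENVIRONMENT.baseURL}${apiV3}logs/livetail`.
		<button type="button" onClick={(): void => handleStartOpenConnection()}>
			Start live tail
		</button>
	);
}

// Any component calling useEventSource() must sit under the provider.
function App(): JSX.Element {
	return (
		<EventSourceProvider>
			<LiveTailControls />
		</EventSourceProvider>
	);
}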
@ -6,6 +6,7 @@ import (
"time"

"github.com/ClickHouse/clickhouse-go/v2"
"go.uber.org/zap"
)

type Encoding string
@ -58,6 +59,9 @@ type namespaceConfig struct {
namespace string
Enabled bool
Datasource string
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
TraceDB string
OperationsTable string
IndexTable string
@ -88,8 +92,14 @@ type Connector func(cfg *namespaceConfig) (clickhouse.Conn, error)
func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) {
ctx := context.Background()
dsnURL, err := url.Parse(cfg.Datasource)
if err != nil {
return nil, err
}
options := &clickhouse.Options{
Addr: []string{dsnURL.Host},
Addr: []string{dsnURL.Host},
MaxOpenConns: cfg.MaxOpenConns,
MaxIdleConns: cfg.MaxIdleConns,
DialTimeout: cfg.DialTimeout,
}
if dsnURL.Query().Get("username") != "" {
auth := clickhouse.Auth{
@ -98,6 +108,7 @@ func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) {
}
options.Auth = auth
}
zap.S().Infof("Connecting to Clickhouse at %s, MaxIdleConns: %d, MaxOpenConns: %d, DialTimeout: %s", dsnURL.Host, options.MaxIdleConns, options.MaxOpenConns, options.DialTimeout)
db, err := clickhouse.Open(options)
if err != nil {
return nil, err
@ -118,7 +129,14 @@ type Options struct {
}

// NewOptions creates a new Options struct.
func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...string) *Options {
func NewOptions(
datasource string,
maxIdleConns int,
maxOpenConns int,
dialTimeout time.Duration,
primaryNamespace string,
otherNamespaces ...string,
) *Options {

if datasource == "" {
datasource = defaultDatasource
@ -129,6 +147,9 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
namespace: primaryNamespace,
Enabled: true,
Datasource: datasource,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
TraceDB: defaultTraceDB,
OperationsTable: defaultOperationsTable,
IndexTable: defaultIndexTable,

@ -119,10 +119,17 @@ type ClickHouseReader struct {
}

// NewTraceReader returns a TraceReader for the database
func NewReader(localDB *sqlx.DB, configFile string, featureFlag interfaces.FeatureLookup) *ClickHouseReader {
func NewReader(
localDB *sqlx.DB,
configFile string,
featureFlag interfaces.FeatureLookup,
maxIdleConns int,
maxOpenConns int,
dialTimeout time.Duration,
) *ClickHouseReader {

datasource := os.Getenv("ClickHouseUrl")
options := NewOptions(datasource, primaryNamespace, archiveNamespace)
options := NewOptions(datasource, maxIdleConns, maxOpenConns, dialTimeout, primaryNamespace, archiveNamespace)
db, err := initialize(options)

if err != nil {

@ -77,6 +77,10 @@ type APIHandler struct {
preferDelta bool
preferSpanMetrics bool

maxIdleConns int
maxOpenConns int
dialTimeout time.Duration

LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController

// SetupCompleted indicates if SigNoz is ready for general use.
@ -94,6 +98,11 @@ type APIHandlerOpts struct {

PerferDelta bool
PreferSpanMetrics bool

MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration

// dao layer to perform crud on app objects like dashboard, alerts etc
AppDao dao.ModelDao

@ -121,6 +130,9 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
skipConfig: opts.SkipConfig,
preferDelta: opts.PerferDelta,
preferSpanMetrics: opts.PreferSpanMetrics,
maxIdleConns: opts.MaxIdleConns,
maxOpenConns: opts.MaxOpenConns,
dialTimeout: opts.DialTimeout,
alertManager: alertManager,
ruleManager: opts.RuleManager,
featureFlags: opts.FeatureFlags,

@ -51,6 +51,9 @@ type ServerOptions struct {
RuleRepoURL string
PreferDelta bool
PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
DialTimeout time.Duration
}

// Server runs HTTP, Mux and a grpc server
@ -103,7 +106,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
clickhouseReader := clickhouseReader.NewReader(localDB, serverOptions.PromConfigPath, fm)
clickhouseReader := clickhouseReader.NewReader(
localDB,
serverOptions.PromConfigPath,
fm,
serverOptions.MaxIdleConns,
serverOptions.MaxOpenConns,
serverOptions.DialTimeout,
)
go clickhouseReader.Start(readerReady)
reader = clickhouseReader
} else {
@ -136,6 +146,9 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
SkipConfig: skipConfig,
PerferDelta: serverOptions.PreferDelta,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
MaxIdleConns: serverOptions.MaxIdleConns,
MaxOpenConns: serverOptions.MaxOpenConns,
DialTimeout: serverOptions.DialTimeout,
AppDao: dao.DB(),
RuleManager: rm,
FeatureFlags: fm,

@ -153,6 +153,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet, keys map[string]v3.AttributeKey) (
columnName := getColumnName(item.Key, keys)
var fmtVal string
key := enrichKeyWithMetadata(item.Key, keys)
item.Operator = v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists {
var err error
val, err = utils.ValidateAndCastValue(val, key.DataType)

@ -6,6 +6,7 @@ import (
"os"
"os/signal"
"syscall"
"time"

"go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/auth"
@ -37,11 +38,18 @@ func main() {
var preferDelta bool
var preferSpanMetrics bool

var maxIdleConns int
var maxOpenConns int
var dialTimeout time.Duration

flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
flag.Parse()

@ -61,6 +69,9 @@ func main() {
PrivateHostPort: constants.PrivateHostPort,
DisableRules: disableRules,
RuleRepoURL: ruleRepoURL,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
}

// Read the jwt secret key
@ -1,6 +1,7 @@
version: "2.4"

x-clickhouse-defaults: &clickhouse-defaults
x-clickhouse-defaults:
&clickhouse-defaults
restart: on-failure
image: clickhouse/clickhouse-server:22.8.8-alpine
tty: true
@ -14,7 +15,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@ -24,7 +32,8 @@ x-clickhouse-defaults: &clickhouse-defaults
soft: 262144
hard: 262144

x-clickhouse-depends: &clickhouse-depends
x-clickhouse-depends:
&clickhouse-depends
depends_on:
clickhouse:
condition: service_healthy
@ -36,6 +45,7 @@ x-clickhouse-depends: &clickhouse-depends
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
user: root
ports:
- "2181:2181"
@ -52,6 +62,7 @@ services:

# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
# user: root
# ports:
# - "2182:2181"
@ -67,6 +78,7 @@ services:

# zookeeper-3:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-3
# user: root
# ports:
# - "2183:2181"
@ -82,7 +94,7 @@ services:

clickhouse:
<<: *clickhouse-defaults
container_name: clickhouse
container_name: signoz-clickhouse
hostname: clickhouse
ports:
- "9000:9000"
@ -95,10 +107,9 @@ services:
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/


# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: clickhouse-2
# container_name: signoz-clickhouse-2
# hostname: clickhouse-2
# ports:
# - "9001:9000"
@ -113,7 +124,7 @@ services:

# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: clickhouse-3
# container_name: signoz-clickhouse-3
# hostname: clickhouse-3
# ports:
# - "9002:9000"
@ -128,6 +139,7 @@ services:

alertmanager:
image: signoz/alertmanager:0.23.1
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
depends_on:
@ -138,12 +150,12 @@ services:
- --queryService.url=http://query-service:8085
- --storage.path=/data

# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

query-service:
image: signoz/query-service:latest
container_name: query-service
command: ["-config=/root/config/prometheus.yml"]
container_name: signoz-query-service
command: [ "-config=/root/config/prometheus.yml" ]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@ -162,15 +174,27 @@ services:
- DEPLOYMENT_TYPE=docker-standalone-amd
restart: on-failure
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
<<: *clickhouse-depends

otel-collector:
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.79.5
container_name: signoz-otel-collector
command:
[
"--config=/etc/otel-collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@ -181,8 +205,8 @@ services:
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port
@ -195,8 +219,13 @@ services:
<<: *clickhouse-depends

otel-collector-metrics:
image: signoz/signoz-otel-collector:0.79.4
command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.79.5
container_name: signoz-otel-collector-metrics
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
# ports:
@ -207,16 +236,27 @@ services:
restart: on-failure
<<: *clickhouse-depends

logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
restart: on-failure

hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: ["all"]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces

load-hotrod:
image: "signoz/locust:1.2.3"
@ -1,29 +1,21 @@
receivers:
filelog/dockercontainers:
include: [ "/var/lib/docker/containers/*/*.log" ]
start_at: end
include_file_path: true
include_file_name: false
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: json_parser
id: parser-docker
output: extract_metadata_from_filepath
timestamp:
parse_from: attributes.time
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: regex_parser
id: extract_metadata_from_filepath
regex: '^.*containers/(?P<container_id>[^_]+)/.*log$'
parse_from: attributes["log.file.path"]
output: parse_body
- type: move
id: parse_body
from: attributes.log
to: body
output: time
- type: remove
id: time
field: attributes.time
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
otlp/spanmetrics:
@ -157,6 +149,6 @@ service:
receivers: [otlp/spanmetrics]
exporters: [prometheus]
logs:
receivers: [otlp, filelog/dockercontainers]
receivers: [otlp, tcplog/docker]
processors: [batch]
exporters: [clickhouselogsexporter]