commit fcbf82c2f3

.github/CODEOWNERS (vendored)
@@ -8,8 +8,4 @@
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
 /deploy/ @prashant-shahi
 /sample-apps/ @prashant-shahi
-**/query-service/ @srikanthccv
-Makefile @srikanthccv
-go.* @srikanthccv
-.git* @srikanthccv
 .github @prashant-shahi

@@ -146,7 +146,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.35.0
+    image: signoz/query-service:0.35.1
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:0.35.0
+    image: signoz/frontend:0.35.1
     deploy:
       restart_policy:
         condition: on-failure
@@ -199,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
       - query-service

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:0.88.1
+    image: signoz/signoz-schema-migrator:0.88.3
     deploy:
       restart_policy:
         condition: on-failure
@@ -250,7 +250,7 @@ services:
       # - clickhouse-3

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",

@@ -66,7 +66,7 @@ services:
       - --storage.path=/data

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -118,7 +118,7 @@ services:

   otel-collector-metrics:
     container_name: signoz-otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",

@@ -164,7 +164,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.35.0}
+    image: signoz/query-service:${DOCKER_TAG:-0.35.1}
     container_name: signoz-query-service
     command:
       [
@@ -203,7 +203,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.35.0}
+    image: signoz/frontend:${DOCKER_TAG:-0.35.1}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -215,7 +215,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:


   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.1}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.3}
     container_name: signoz-otel-collector
     command:
       [
@@ -269,7 +269,7 @@ services:
         condition: service_healthy

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.1}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.3}
     container_name: signoz-otel-collector-metrics
     command:
       [

@@ -12,6 +12,7 @@ import (
   "github.com/gorilla/mux"
   "go.signoz.io/signoz/ee/query-service/model"
   "go.signoz.io/signoz/pkg/query-service/auth"
+  basemodel "go.signoz.io/signoz/pkg/query-service/model"
   "go.uber.org/zap"
 )

@@ -47,8 +48,18 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
   req.CreatedAt = time.Now().Unix()
   req.Token = generatePATToken()

+  // default expiry is 30 days
+  if req.ExpiresAt == 0 {
+    req.ExpiresAt = time.Now().AddDate(0, 0, 30).Unix()
+  }
+  // max expiry is 1 year
+  if req.ExpiresAt > time.Now().AddDate(1, 0, 0).Unix() {
+    req.ExpiresAt = time.Now().AddDate(1, 0, 0).Unix()
+  }
+
   zap.S().Debugf("Got PAT request: %+v", req)
-  if apierr := ah.AppDao().CreatePAT(ctx, &req); apierr != nil {
+  var apierr basemodel.BaseApiError
+  if req, apierr = ah.AppDao().CreatePAT(ctx, req); apierr != nil {
     RespondError(w, apierr, nil)
     return
   }

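The clamping above gives every new personal access token a bounded lifetime: a missing `expiresAt` becomes 30 days from now, and anything further out than one year is cut back. A minimal TypeScript sketch of the same rule (illustrative only; 365 days stands in for Go's `AddDate(1, 0, 0)`):

```ts
const DAY_SECONDS = 24 * 60 * 60;

// Sketch of createPAT's expiry clamping: default 30 days, cap at 1 year.
function clampExpiry(requestedExpiresAt: number, nowSec: number): number {
  if (requestedExpiresAt === 0) {
    return nowSec + 30 * DAY_SECONDS; // default expiry is 30 days
  }
  // max expiry is 1 year (approximated here as 365 days)
  return Math.min(requestedExpiresAt, nowSec + 365 * DAY_SECONDS);
}
```
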
@@ -480,7 +480,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
       }
     }

-    if _, ok := telemetry.IgnoredPaths()[path]; !ok {
+    if _, ok := telemetry.EnabledPaths()[path]; ok {
       userEmail, err := auth.GetEmailFromJwt(r.Context())
       if err == nil {
         telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)

@@ -33,7 +33,7 @@ type ModelDao interface {
   DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
   GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)

-  CreatePAT(ctx context.Context, p *model.PAT) basemodel.BaseApiError
+  CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError)
   GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
   GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError)
   GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError)

@@ -3,14 +3,15 @@ package sqlite
 import (
   "context"
   "fmt"
+  "strconv"

   "go.signoz.io/signoz/ee/query-service/model"
   basemodel "go.signoz.io/signoz/pkg/query-service/model"
   "go.uber.org/zap"
 )

-func (m *modelDao) CreatePAT(ctx context.Context, p *model.PAT) basemodel.BaseApiError {
-  _, err := m.DB().ExecContext(ctx,
+func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) {
+  result, err := m.DB().ExecContext(ctx,
     "INSERT INTO personal_access_tokens (user_id, token, name, created_at, expires_at) VALUES ($1, $2, $3, $4, $5)",
     p.UserID,
     p.Token,
@@ -19,9 +20,15 @@ func (m *modelDao) CreatePAT(ctx context.Context, p *model.PAT) basemodel.BaseAp
     p.ExpiresAt)
   if err != nil {
     zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
-    return model.InternalError(fmt.Errorf("PAT insertion failed"))
+    return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
   }
-  return nil
+  id, err := result.LastInsertId()
+  if err != nil {
+    zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
+    return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
+  }
+  p.Id = strconv.Itoa(int(id))
+  return p, nil
 }

 func (m *modelDao) ListPATs(ctx context.Context, userID string) ([]model.PAT, basemodel.BaseApiError) {
@@ -90,7 +97,7 @@ func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.U
     u.org_id,
     u.group_id
   FROM users u, personal_access_tokens p
-  WHERE u.id = p.user_id and p.token=?;`
+  WHERE u.id = p.user_id and p.token=? and p.expires_at >= strftime('%s', 'now');`

   if err := m.DB().Select(&users, query, token); err != nil {
     return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))

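Two behaviours change together here: `CreatePAT` now returns the stored row with the id SQLite generated (`LastInsertId`), and `GetUserByPAT` stops resolving expired tokens by comparing `expires_at` against `strftime('%s', 'now')`, i.e. the current unix epoch. The equivalent application-side check, as a sketch:

```ts
// Equivalent of the SQL predicate p.expires_at >= strftime('%s', 'now'):
// a PAT resolves to a user only while its unix-epoch expiry is in the future.
function isPatActive(expiresAtSec: number, nowMs: number = Date.now()): boolean {
  return expiresAtSec >= Math.floor(nowMs / 1000);
}
```
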
@@ -6,5 +6,5 @@ type PAT struct {
   Token     string `json:"token" db:"token"`
   Name      string `json:"name" db:"name"`
   CreatedAt int64  `json:"createdAt" db:"created_at"`
-  ExpiresAt int64  `json:"expiresAt" db:"expires_at"` // unused as of now
+  ExpiresAt int64  `json:"expiresAt" db:"expires_at"`
 }

@@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{
     Name:       basemodel.QueryBuilderPanels,
     Active:     true,
     Usage:      0,
-    UsageLimit: 5,
+    UsageLimit: 20,
     Route:      "",
   },
   basemodel.Feature{
     Name:       basemodel.QueryBuilderAlerts,
     Active:     true,
     Usage:      0,
-    UsageLimit: 5,
+    UsageLimit: 10,
     Route:      "",
   },
   basemodel.Feature{

@@ -2,3 +2,19 @@
 . "$(dirname "$0")/_/husky.sh"

 cd frontend && yarn run commitlint --edit $1
+
+branch="$(git rev-parse --abbrev-ref HEAD)"
+
+color_red="$(tput setaf 1)"
+bold="$(tput bold)"
+reset="$(tput sgr0)"
+
+if [ "$branch" = "main" ]; then
+  echo "${color_red}${bold}You can't commit directly to the main branch${reset}"
+  exit 1
+fi
+
+if [ "$branch" = "develop" ]; then
+  echo "${color_red}${bold}You can't commit directly to the develop branch${reset}"
+  exit 1
+fi

@@ -22,7 +22,7 @@ const config: Config.InitialOptions = {
     '^.+\\.(js|jsx)$': 'babel-jest',
   },
   transformIgnorePatterns: [
-    'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend)/)',
+    'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios)/)',
   ],
   setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
   testPathIgnorePatterns: ['/node_modules/', '/public/'],

@@ -38,7 +38,7 @@
     "ansi-to-html": "0.7.2",
     "antd": "5.11.0",
     "antd-table-saveas-excel": "2.2.1",
-    "axios": "^0.21.0",
+    "axios": "1.6.2",
     "babel-eslint": "^10.1.0",
     "babel-jest": "^29.6.4",
     "babel-loader": "9.1.3",
@@ -87,7 +87,7 @@
     "react-helmet-async": "1.3.0",
     "react-i18next": "^11.16.1",
     "react-markdown": "8.0.7",
-    "react-query": "^3.34.19",
+    "react-query": "3.39.3",
     "react-redux": "^7.2.2",
     "react-router-dom": "^5.2.0",
     "react-syntax-highlighter": "15.5.0",

@@ -1,4 +1,4 @@
-import { AxiosError } from 'axios';
+import { AxiosError, AxiosResponse } from 'axios';
 import { ErrorResponse } from 'types/api';
 import { ErrorStatusCode } from 'types/common';

@@ -10,7 +10,7 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
   const statusCode = response.status as ErrorStatusCode;

   if (statusCode >= 400 && statusCode < 500) {
-    const { data } = response;
+    const { data } = response as AxiosResponse;

     if (statusCode === 404) {
       return {

@@ -3,9 +3,9 @@ import { ApiResponse } from 'types/api';
 import { Props } from 'types/api/dashboard/get';
 import { Dashboard } from 'types/api/dashboard/getAll';

-const get = (props: Props): Promise<Dashboard> =>
+const getDashboard = (props: Props): Promise<Dashboard> =>
   axios
     .get<ApiResponse<Dashboard>>(`/dashboards/${props.uuid}`)
     .then((res) => res.data.data);

-export default get;
+export default getDashboard;

@@ -4,7 +4,7 @@
 import getLocalStorageApi from 'api/browser/localstorage/get';
 import loginApi from 'api/user/login';
 import afterLogin from 'AppRoutes/utils';
-import axios, { AxiosRequestConfig, AxiosResponse } from 'axios';
+import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios';
 import { ENVIRONMENT } from 'constants/env';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import store from 'store';
@@ -17,14 +17,16 @@ const interceptorsResponse = (
 ): Promise<AxiosResponse<any>> => Promise.resolve(value);

 const interceptorsRequestResponse = (
-  value: AxiosRequestConfig,
-): AxiosRequestConfig => {
+  value: InternalAxiosRequestConfig,
+): InternalAxiosRequestConfig => {
   const token =
     store.getState().app.user?.accessJwt ||
     getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) ||
     '';

-  value.headers.Authorization = token ? `Bearer ${token}` : '';
+  if (value && value.headers) {
+    value.headers.Authorization = token ? `Bearer ${token}` : '';
+  }

   return value;
 };
@@ -92,8 +94,8 @@ const instance = axios.create({
   baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
 });

-instance.interceptors.response.use(interceptorsResponse, interceptorRejected);
 instance.interceptors.request.use(interceptorsRequestResponse);
+instance.interceptors.response.use(interceptorsResponse, interceptorRejected);

 export const AxiosAlertManagerInstance = axios.create({
   baseURL: `${ENVIRONMENT.baseURL}${apiAlertManager}`,

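Two things to note in the axios 0.21 → 1.6 migration above: request interceptors now receive `InternalAxiosRequestConfig` (whose `headers` object is attached by axios itself, hence the defensive guard), and the request interceptor is registered before the response interceptor. A minimal sketch under those assumptions (the base URL is a placeholder):

```ts
import axios, { InternalAxiosRequestConfig } from 'axios';

const instance = axios.create({ baseURL: 'https://example.invalid/api/v1' });

instance.interceptors.request.use(
  (config: InternalAxiosRequestConfig): InternalAxiosRequestConfig => {
    const token = ''; // in the real code: Redux store, then localStorage
    if (config && config.headers) {
      config.headers.Authorization = token ? `Bearer ${token}` : '';
    }
    return config;
  },
);
```
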
@@ -9,9 +9,10 @@ import {

 export const getMetricsQueryRange = async (
   props: QueryRangePayload,
+  signal: AbortSignal,
 ): Promise<SuccessResponse<MetricRangePayloadV3> | ErrorResponse> => {
   try {
-    const response = await axios.post('/query_range', props);
+    const response = await axios.post('/query_range', props, { signal });

     return {
       statusCode: 200,

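Forwarding the `AbortSignal` into `axios.post` means an in-flight `/query_range` request is actually cancelled when its react-query query is unmounted or superseded, instead of completing and being thrown away. A sketch of the wiring (hypothetical hook name):

```ts
import axios from 'axios';
import { useQuery } from 'react-query';

// react-query supplies `signal` to the queryFn; axios aborts the HTTP
// request when that signal fires (query cancelled, component unmounted).
function useQueryRangeSketch(payload: Record<string, unknown>) {
  return useQuery(['query_range_sketch', payload], ({ signal }) =>
    axios.post('/query_range', payload, { signal }).then((res) => res.data),
  );
}
```
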
@@ -32,6 +32,7 @@ export interface ChartPreviewProps {
   alertDef?: AlertDef;
   userQueryKey?: string;
   allowSelectedIntervalForStepGen?: boolean;
+  yAxisUnit: string;
 }

 function ChartPreview({
@@ -44,6 +45,7 @@ function ChartPreview({
   userQueryKey,
   allowSelectedIntervalForStepGen = false,
   alertDef,
+  yAxisUnit,
 }: ChartPreviewProps): JSX.Element | null {
   const { t } = useTranslation('alerts');
   const threshold = alertDef?.condition.target || 0;
@@ -112,7 +114,7 @@ function ChartPreview({
     () =>
       getUPlotChartOptions({
         id: 'alert_legend_widget',
-        yAxisUnit: query?.unit,
+        yAxisUnit,
         apiResponse: queryResponse?.data?.payload,
         dimensions: containerDimensions,
         isDarkMode,
@@ -129,14 +131,14 @@ function ChartPreview({
             optionName,
             threshold,
             alertDef?.condition.targetUnit,
-            query?.unit,
+            yAxisUnit,
           )})`,
           thresholdUnit: alertDef?.condition.targetUnit,
         },
       ],
     }),
     [
-      query?.unit,
+      yAxisUnit,
       queryResponse?.data?.payload,
       containerDimensions,
       isDarkMode,
@@ -168,7 +170,7 @@ function ChartPreview({
       name={name || 'Chart Preview'}
       panelData={queryResponse.data?.payload.data.newResult.data.result || []}
       query={query || initialQueriesMap.metrics}
-      yAxisUnit={query?.unit}
+      yAxisUnit={yAxisUnit}
     />
   </div>
 )}

@@ -61,8 +61,20 @@ export const getThresholdLabel = (
     unit === MiscellaneousFormats.PercentUnit ||
     yAxisUnit === MiscellaneousFormats.PercentUnit
   ) {
+    if (unit === MiscellaneousFormats.Percent) {
+      return `${value}%`;
+    }
     return `${value * 100}%`;
   }
+  if (
+    unit === MiscellaneousFormats.Percent ||
+    yAxisUnit === MiscellaneousFormats.Percent
+  ) {
+    if (unit === MiscellaneousFormats.PercentUnit) {
+      return `${value * 100}%`;
+    }
+    return `${value}%`;
+  }
   return `${value} ${optionName}`;
 };

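The two percent formats differ only by a factor of 100: `percentunit` values live in 0.0–1.0 and are multiplied up for display, while `percent` values are already 0–100. A worked sketch of the conversion:

```ts
// percentunit (0.0–1.0) vs percent (0–100):
//   value 0.25 entered as percentunit -> "25%"
//   value 25   entered as percent     -> "25%"
const toPercentLabel = (value: number, fromPercentUnit: boolean): string =>
  fromPercentUnit ? `${value * 100}%` : `${value}%`;
```
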
@@ -82,6 +82,7 @@ function FormAlertRules({

   // alertDef holds the form values to be posted
   const [alertDef, setAlertDef] = useState<AlertDef>(initialValue);
+  const [yAxisUnit, setYAxisUnit] = useState<string>(currentQuery.unit || '');

   // initQuery contains initial query when component was mounted
   const initQuery = useMemo(() => initialValue.condition.compositeQuery, [
@@ -400,6 +401,7 @@ function FormAlertRules({
       query={stagedQuery}
       selectedInterval={globalSelectedInterval}
       alertDef={alertDef}
+      yAxisUnit={yAxisUnit || ''}
     />
   );

@@ -415,6 +417,7 @@ function FormAlertRules({
       query={stagedQuery}
       alertDef={alertDef}
       selectedInterval={globalSelectedInterval}
+      yAxisUnit={yAxisUnit || ''}
     />
   );

@@ -427,7 +430,8 @@ function FormAlertRules({
     currentQuery.queryType === EQueryType.QUERY_BUILDER &&
     alertType !== AlertTypes.METRICS_BASED_ALERT;

-  const onUnitChangeHandler = (): void => {
+  const onUnitChangeHandler = (value: string): void => {
+    setYAxisUnit(value);
     // reset target unit
     setAlertDef((def) => ({
       ...def,
@@ -457,7 +461,10 @@ function FormAlertRules({
       renderPromAndChQueryChartPreview()}

     <StepContainer>
-      <BuilderUnitsFilter onChange={onUnitChangeHandler} />
+      <BuilderUnitsFilter
+        onChange={onUnitChangeHandler}
+        yAxisUnit={yAxisUnit}
+      />
     </StepContainer>

     <QuerySection

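Across these hunks the chosen unit travels one way: `BuilderUnitsFilter` reports the selection, `FormAlertRules` keeps it in local `yAxisUnit` state, and `ChartPreview` renders axis and threshold labels from it instead of the persisted `query.unit`. A condensed sketch of that flow (a fragment with an illustrative hook name, not the full component):

```ts
import { useState } from 'react';

// `initialUnit` stands in for currentQuery.unit in the real component.
function useYAxisUnit(initialUnit: string | undefined) {
  const [yAxisUnit, setYAxisUnit] = useState<string>(initialUnit || '');
  const onUnitChangeHandler = (value: string): void => {
    setYAxisUnit(value); // keeps the chart preview in sync
    // ...the real handler also resets the alert threshold unit
  };
  return { yAxisUnit, onUnitChangeHandler };
}
// Usage: <BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
//        <ChartPreview yAxisUnit={yAxisUnit || ''} ... />
```
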
@@ -3,6 +3,7 @@ import './GridCardLayout.styles.scss';
 import { PlusOutlined, SaveFilled } from '@ant-design/icons';
 import { SOMETHING_WENT_WRONG } from 'constants/api';
 import { PANEL_TYPES } from 'constants/queryBuilder';
+import { themeColors } from 'constants/theme';
 import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
 import useComponentPermission from 'hooks/useComponentPermission';
 import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -155,6 +156,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
   onLayoutChange={setLayouts}
   draggableHandle=".drag-handle"
   layout={layouts}
+  style={{ backgroundColor: isDarkMode ? '' : themeColors.snowWhite }}
 >
   {layouts.map((layout) => {
     const { i: id } = layout;

__mocks__/getTopOperation.ts (new file)
@@ -0,0 +1,19 @@
+import { TopOperationList } from '../TopOperationsTable';
+
+interface TopOperation {
+  numCalls: number;
+  errorCount: number;
+}
+
+export const getTopOperationList = ({
+  errorCount,
+  numCalls,
+}: TopOperation): TopOperationList =>
+  ({
+    p50: 0,
+    errorCount,
+    name: 'test',
+    numCalls,
+    p95: 0,
+    p99: 0,
+  } as TopOperationList);

frontend/src/container/MetricsApplication/utils.test.ts (new file)
@@ -0,0 +1,70 @@
+import { getTopOperationList } from './__mocks__/getTopOperation';
+import { TopOperationList } from './TopOperationsTable';
+import {
+  convertedTracesToDownloadData,
+  getErrorRate,
+  getNearestHighestBucketValue,
+} from './utils';
+
+describe('Error Rate', () => {
+  test('should return correct error rate', () => {
+    const list: TopOperationList = getTopOperationList({
+      errorCount: 10,
+      numCalls: 100,
+    });
+
+    expect(getErrorRate(list)).toBe(10);
+  });
+
+  test('should handle no errors gracefully', () => {
+    const list = getTopOperationList({ errorCount: 0, numCalls: 100 });
+    expect(getErrorRate(list)).toBe(0);
+  });
+
+  test('should handle zero calls', () => {
+    const list = getTopOperationList({ errorCount: 0, numCalls: 0 });
+    expect(getErrorRate(list)).toBe(0);
+  });
+});
+
+describe('getNearestHighestBucketValue', () => {
+  test('should return nearest higher bucket value', () => {
+    expect(getNearestHighestBucketValue(50, [10, 20, 30, 40, 60, 70])).toBe('60');
+  });
+
+  test('should return +Inf for value higher than any bucket', () => {
+    expect(getNearestHighestBucketValue(80, [10, 20, 30, 40, 60, 70])).toBe(
+      '+Inf',
+    );
+  });
+
+  test('should return the first bucket for value lower than all buckets', () => {
+    expect(getNearestHighestBucketValue(5, [10, 20, 30, 40, 60, 70])).toBe('10');
+  });
+});
+
+describe('convertedTracesToDownloadData', () => {
+  test('should convert trace data correctly', () => {
+    const data = [
+      {
+        name: 'op1',
+        p50: 50000000,
+        p95: 95000000,
+        p99: 99000000,
+        numCalls: 100,
+        errorCount: 10,
+      },
+    ];
+
+    expect(convertedTracesToDownloadData(data)).toEqual([
+      {
+        Name: 'op1',
+        'P50 (in ms)': '50.00',
+        'P95 (in ms)': '95.00',
+        'P99 (in ms)': '99.00',
+        'Number of calls': '100',
+        'Error Rate (%)': '10.00',
+      },
+    ]);
+  });
+});

@@ -5,8 +5,12 @@ import history from 'lib/history';
 import { TopOperationList } from './TopOperationsTable';
 import { NavigateToTraceProps } from './types';

-export const getErrorRate = (list: TopOperationList): number =>
-  (list.errorCount / list.numCalls) * 100;
+export const getErrorRate = (list: TopOperationList): number => {
+  if (list.errorCount === 0 && list.numCalls === 0) {
+    return 0;
+  }
+  return (list.errorCount / list.numCalls) * 100;
+};

 export const navigateToTrace = ({
   servicename,

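The guard makes the empty case explicit: a service with no calls and no errors now reports a 0% error rate instead of `NaN` from `0 / 0`. Standalone sketch:

```ts
// 0 errors out of 0 calls is defined as 0% (0 / 0 would be NaN).
const errorRate = (errorCount: number, numCalls: number): number =>
  errorCount === 0 && numCalls === 0 ? 0 : (errorCount / numCalls) * 100;

// errorRate(10, 100) === 10; errorRate(0, 0) === 0
```
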
@@ -140,11 +140,12 @@ function VariableItem({
   enabled: false,
   queryFn: () =>
     dashboardVariablesQuery({
-      query: variableData.queryValue || '',
+      query: variableQueryValue || '',
       variables: variablePropsToPayloadVariables(existingVariables),
     }),
   refetchOnWindowFocus: false,
   onSuccess: (response) => {
+    setErrorPreview(null);
     handleQueryResult(response);
   },
   onError: (error: {

@@ -78,6 +78,7 @@ export const alertsCategory = [
     name: CategoryNames.Miscellaneous,
     formats: [
       { name: 'Percent (0.0-1.0)', id: MiscellaneousFormats.PercentUnit },
+      { name: 'Percent (0 - 100)', id: MiscellaneousFormats.Percent },
     ],
   },
   {

@@ -53,7 +53,7 @@ const usePipelinePreview = ({
     isLoading: isFetching,
     outputLogs,
     isError,
-    errorMsg: error?.response?.data?.error || '',
+    errorMsg: error?.message || '',
   };
 };

@@ -10,10 +10,11 @@ import { filterOption } from './utils';

 function BuilderUnitsFilter({
   onChange,
+  yAxisUnit,
 }: IBuilderUnitsFilterProps): JSX.Element {
   const { currentQuery, handleOnUnitsChange } = useQueryBuilder();

-  const selectedValue = currentQuery?.unit;
+  const selectedValue = yAxisUnit || currentQuery?.unit;

   const allOptions = categoryToSupport.map((category) => ({
     label: category,

@@ -1,3 +1,4 @@
 export interface IBuilderUnitsFilterProps {
   onChange?: (value: string) => void;
+  yAxisUnit?: string;
 }

@@ -1,11 +1,35 @@
 /* eslint-disable react/no-unstable-nested-components */
 import type { SelectProps } from 'antd';
-import { Tag } from 'antd';
-import { Dispatch, SetStateAction, useCallback, useMemo } from 'react';
+import { Tag, Tooltip } from 'antd';
+import { BaseOptionType } from 'antd/es/select';
+import { Dispatch, SetStateAction, useCallback, useMemo, useRef } from 'react';
 import { Alerts } from 'types/api/alerts/getTriggered';

 import { Container, Select } from './styles';

+function TextOverflowTooltip({
+  option,
+}: {
+  option: BaseOptionType;
+}): JSX.Element {
+  const contentRef = useRef<HTMLDivElement | null>(null);
+  const isOverflow = contentRef.current
+    ? contentRef.current?.offsetWidth < contentRef.current?.scrollWidth
+    : false;
+  return (
+    <Tooltip
+      placement="left"
+      title={option.value}
+      // eslint-disable-next-line react/jsx-props-no-spreading
+      {...(!isOverflow ? { open: false } : {})}
+    >
+      <div className="ant-select-item-option-content" ref={contentRef}>
+        {option.value}
+      </div>
+    </Tooltip>
+  );
+}
+
 function Filter({
   setSelectedFilter,
   setSelectedGroup,
@@ -51,6 +75,7 @@ function Filter({

   const options = uniqueLabels.map((e) => ({
     value: e,
+    title: '',
   }));

   const getTags: SelectProps['tagRender'] = (props): JSX.Element => {
@@ -88,6 +113,9 @@ function Filter({
   placeholder="Group by any tag"
   tagRender={(props): JSX.Element => getTags(props)}
   options={options}
+  optionRender={(option): JSX.Element => (
+    <TextOverflowTooltip option={option} />
+  )}
 />
 </Container>
 );

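`TextOverflowTooltip` opens only when the option text is actually clipped, which it detects by comparing the rendered width of the element against its scrollable width. The detection, extracted as a sketch:

```ts
// An element's scrollWidth exceeds its offsetWidth only when its content
// is clipped, so this is a cheap "is the text truncated?" probe.
const isTextOverflowing = (el: HTMLElement | null): boolean =>
  el ? el.offsetWidth < el.scrollWidth : false;
```
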
@@ -15,14 +15,19 @@ type UseGetQueryRange = (

 export const useGetQueryRange: UseGetQueryRange = (requestData, options) => {
   const queryKey = useMemo(() => {
-    if (options?.queryKey) {
+    if (options?.queryKey && Array.isArray(options.queryKey)) {
       return [...options.queryKey];
     }

+    if (options?.queryKey && typeof options.queryKey === 'string') {
+      return options.queryKey;
+    }
+
     return [REACT_QUERY_KEY.GET_QUERY_RANGE, requestData];
   }, [options?.queryKey, requestData]);

   return useQuery<SuccessResponse<MetricRangePayloadProps>, Error>({
-    queryFn: async () => GetMetricQueryRange(requestData),
+    queryFn: async ({ signal }) => GetMetricQueryRange(requestData, signal),
     ...options,
     queryKey,
   });

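The hook now tolerates both shapes react-query accepts for a key: an array is spread into a fresh array, a plain string is passed through, and anything else falls back to the default `[GET_QUERY_RANGE, requestData]` key. As a standalone sketch:

```ts
type QueryKeyLike = string | unknown[] | undefined;

function resolveQueryKey(
  key: QueryKeyLike,
  fallback: unknown[],
): string | unknown[] {
  if (key && Array.isArray(key)) return [...key];
  if (key && typeof key === 'string') return key;
  return fallback;
}
```
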
@@ -17,10 +17,11 @@ import { prepareQueryRangePayload } from './prepareQueryRangePayload';

 export async function GetMetricQueryRange(
   props: GetQueryResultsProps,
+  signal?: AbortSignal,
 ): Promise<SuccessResponse<MetricRangePayloadProps>> {
   const { legendMap, queryPayload } = prepareQueryRangePayload(props);

-  const response = await getMetricsQueryRange(queryPayload);
+  const response = await getMetricsQueryRange(queryPayload, signal);

   if (response.statusCode >= 400) {
     throw new Error(

@@ -232,6 +232,11 @@ const unitsMapping = [
   {
     label: 'Percent (0.0-1.0)',
     value: 'percentunit',
+    factor: 100,
+  },
+  {
+    label: 'Percent (0 - 100)',
+    value: 'percent',
     factor: 1,
   },
 ],

@@ -55,6 +55,7 @@ export const getUPlotChartOptions = ({
   legend: {
     show: true,
     live: false,
+    isolate: true,
   },
   focus: {
     alpha: 0.3,
@@ -158,16 +159,24 @@ export const getUPlotChartOptions = ({
     (self): void => {
       const legend = self.root.querySelector('.u-legend');
       if (legend) {
-        const seriesEls = legend.querySelectorAll('.u-label');
+        const seriesEls = legend.querySelectorAll('.u-series');
         const seriesArray = Array.from(seriesEls);
         seriesArray.forEach((seriesEl, index) => {
           seriesEl.addEventListener('click', () => {
             if (graphsVisibilityStates) {
               setGraphsVisibilityStates?.((prev) => {
                 const newGraphVisibilityStates = [...prev];
-                newGraphVisibilityStates[index + 1] = !newGraphVisibilityStates[
-                  index + 1
-                ];
+                if (
+                  newGraphVisibilityStates[index + 1] &&
+                  newGraphVisibilityStates.every((value, i) =>
+                    i === index + 1 ? value : !value,
+                  )
+                ) {
+                  newGraphVisibilityStates.fill(true);
+                } else {
+                  newGraphVisibilityStates.fill(false);
+                  newGraphVisibilityStates[index + 1] = true;
+                }
                 return newGraphVisibilityStates;
               });
             }

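With `isolate: true` plus the click handler above, a legend click no longer just flips one series: it isolates the clicked series, and clicking the sole visible series restores everything (index 0 is the x axis, hence the `+ 1` offset). The pure toggle, as a sketch:

```ts
// visible[0] is the x axis; series i maps to visible[i + 1].
function toggleLegendIsolation(
  visible: boolean[],
  clickedSeries: number,
): boolean[] {
  const next = [...visible];
  const target = clickedSeries + 1;
  const isOnlyVisible =
    next[target] && next.every((v, i) => (i === target ? v : !v));
  if (isOnlyVisible) {
    next.fill(true); // clicking the lone visible series restores all
  } else {
    next.fill(false); // otherwise isolate the clicked series
    next[target] = true;
  }
  return next;
}
```
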
@@ -12,7 +12,9 @@ function DashboardPage(): JSX.Element {
   const { isFetching, isError, isLoading } = dashboardResponse;

   const errorMessage = isError
-    ? (dashboardResponse?.error as AxiosError)?.response?.data.errorType
+    ? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
+      (dashboardResponse?.error as AxiosError)?.response?.data?.errorType
     : 'Something went wrong';

   if (isError && !isFetching && errorMessage === ErrorType.NotFound) {

@@ -1,6 +1,7 @@
 import './Support.styles.scss';

 import { Button, Card, Typography } from 'antd';
+import useAnalytics from 'hooks/analytics/useAnalytics';
 import {
   Book,
   Cable,
@@ -82,6 +83,8 @@ const supportChannels = [
 ];

 export default function Support(): JSX.Element {
+  const { trackEvent } = useAnalytics();
+
   const handleChannelWithRedirects = (url: string): void => {
     window.open(url, '_blank');
   };
@@ -111,6 +114,8 @@ export default function Support(): JSX.Element {
   };

   const handleChannelClick = (channel: Channel): void => {
+    trackEvent(`Support : ${channel.name}`);
+
     switch (channel.key) {
       case channelsMap.documentation:
       case channelsMap.github:

@@ -1,5 +1,5 @@
 import Modal from 'antd/es/modal';
-import get from 'api/dashboard/get';
+import getDashboard from 'api/dashboard/get';
 import lockDashboardApi from 'api/dashboard/lockDashboard';
 import unlockDashboardApi from 'api/dashboard/unlockDashboard';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
@@ -107,7 +107,7 @@ export function DashboardProvider({
   {
     enabled: (!!isDashboardPage || !!isDashboardWidgetPage) && isLoggedIn,
     queryFn: () =>
-      get({
+      getDashboard({
         uuid: dashboardId,
       }),
     refetchOnWindowFocus: false,

@@ -4639,7 +4639,16 @@ axe-core@^4.6.2:
   resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz"
   integrity sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==

-axios@^0.21.0, axios@^0.21.1:
+axios@1.6.2:
+  version "1.6.2"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2"
+  integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==
+  dependencies:
+    follow-redirects "^1.15.0"
+    form-data "^4.0.0"
+    proxy-from-env "^1.1.0"
+
+axios@^0.21.1:
   version "0.21.4"
   resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz"
   integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
@@ -7710,6 +7719,11 @@ follow-redirects@^1.0.0, follow-redirects@^1.14.0:
   resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz"
   integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==

+follow-redirects@^1.15.0:
+  version "1.15.3"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a"
+  integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==
+
 fontfaceobserver@2.3.0:
   version "2.3.0"
   resolved "https://registry.npmjs.org/fontfaceobserver/-/fontfaceobserver-2.3.0.tgz"
@@ -7759,6 +7773,15 @@ form-data@^3.0.0:
     combined-stream "^1.0.8"
     mime-types "^2.1.12"

+form-data@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452"
+  integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.8"
+    mime-types "^2.1.12"
+
 format@^0.2.0:
   version "0.2.2"
   resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b"
@@ -12294,6 +12317,11 @@ proxy-addr@~2.0.7:
     forwarded "0.2.0"
     ipaddr.js "1.9.1"

+proxy-from-env@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
+  integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==
+
 prr@~1.0.1:
   version "1.0.1"
   resolved "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz"
@@ -12962,9 +12990,9 @@ react-markdown@8.0.7, react-markdown@~8.0.0:
     unist-util-visit "^4.0.0"
     vfile "^5.0.0"

-react-query@^3.34.19:
+react-query@3.39.3:
   version "3.39.3"
-  resolved "https://registry.npmjs.org/react-query/-/react-query-3.39.3.tgz"
+  resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.39.3.tgz#4cea7127c6c26bdea2de5fb63e51044330b03f35"
   integrity sha512-nLfLz7GiohKTJDuT4us4X3h/8unOh+00MLb2yJoGTPjxKs2bc1iDhkNx2bd5MKklXnOD3NrVZ+J2UXujA5In4g==
   dependencies:
     "@babel/runtime" "^7.5.5"

go.mod
@@ -5,7 +5,7 @@ go 1.21
 require (
   github.com/ClickHouse/clickhouse-go/v2 v2.15.0
   github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb
-  github.com/SigNoz/signoz-otel-collector v0.88.1
+  github.com/SigNoz/signoz-otel-collector v0.88.3
   github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
   github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
   github.com/antonmedv/expr v1.15.3

go.sum
@@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFm
 github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
 github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
 github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
-github.com/SigNoz/signoz-otel-collector v0.88.1 h1:Xeu6Kn8VA0g6it60PMIAclayYSIogBq0rnkodlpxllI=
-github.com/SigNoz/signoz-otel-collector v0.88.1/go.mod h1:KyEc6JSFS6f8Nw3UdSm4aGDGucEpQYZUdYwjvY8uMVc=
+github.com/SigNoz/signoz-otel-collector v0.88.3 h1:30sEJZmCQjfjo8CZGxqXKZkWE7Zij9TeS1uUqNFEZRU=
+github.com/SigNoz/signoz-otel-collector v0.88.3/go.mod h1:KyEc6JSFS6f8Nw3UdSm4aGDGucEpQYZUdYwjvY8uMVc=
 github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
 github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=

@@ -50,8 +50,8 @@ func (r *Repo) GetConfigHistory(
     disabled,
     deploy_status,
     deploy_result,
-    last_hash,
-    last_config
+    coalesce(last_hash, '') as last_hash,
+    coalesce(last_config, '{}') as last_config
   FROM agent_config_versions AS v
   WHERE element_type = $1
   ORDER BY created_at desc, version desc
@@ -89,8 +89,8 @@ func (r *Repo) GetConfigVersion(
     disabled,
     deploy_status,
     deploy_result,
-    last_hash,
-    last_config
+    coalesce(last_hash, '') as last_hash,
+    coalesce(last_config, '{}') as last_config
   FROM agent_config_versions v
   WHERE element_type = $1
   AND version = $2`, typ, v)

@@ -172,21 +172,6 @@ func (m *Manager) ReportConfigDeploymentStatus(
   }
 }

-// Ready indicates if Manager can accept new config update requests
-func (mgr *Manager) Ready() bool {
-  if atomic.LoadUint32(&mgr.lock) != 0 {
-    return false
-  }
-  return opamp.Ready()
-}
-
-// Static methods for working with default manager instance in this module.
-
-// Ready indicates if Manager can accept new config update requests
-func Ready() bool {
-  return m.Ready()
-}
-
 func GetLatestVersion(
   ctx context.Context, elementType ElementTypeDef,
 ) (*ConfigVersion, *model.ApiError) {
@@ -210,11 +195,6 @@ func StartNewVersion(
   ctx context.Context, userId string, eleType ElementTypeDef, elementIds []string,
 ) (*ConfigVersion, *model.ApiError) {

-  if !m.Ready() {
-    // agent is already being updated, ask caller to wait and re-try after sometime
-    return nil, model.UnavailableError(fmt.Errorf("agent updater is busy"))
-  }
-
   // create a new version
   cfg := NewConfigversion(eleType)

@@ -53,6 +53,8 @@ func NewConfigversion(typeDef ElementTypeDef) *ConfigVersion {
     IsValid:      false,
     Disabled:     false,
     DeployStatus: PendingDeploy,
+    LastHash:     "",
+    LastConf:     "{}",
     // todo: get user id from context?
     // CreatedBy
   }

@ -43,6 +43,7 @@ import (
|
|||||||
promModel "github.com/prometheus/common/model"
|
promModel "github.com/prometheus/common/model"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
|
|
||||||
|
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
|
||||||
"go.signoz.io/signoz/pkg/query-service/app/logs"
|
"go.signoz.io/signoz/pkg/query-service/app/logs"
|
||||||
"go.signoz.io/signoz/pkg/query-service/app/services"
|
"go.signoz.io/signoz/pkg/query-service/app/services"
|
||||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||||
@ -51,6 +52,7 @@ import (
|
|||||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||||
"go.signoz.io/signoz/pkg/query-service/model"
|
"go.signoz.io/signoz/pkg/query-service/model"
|
||||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||||
|
"go.signoz.io/signoz/pkg/query-service/rules"
|
||||||
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
||||||
"go.signoz.io/signoz/pkg/query-service/utils"
|
"go.signoz.io/signoz/pkg/query-service/utils"
|
||||||
)
|
)
|
||||||
@@ -3421,6 +3423,100 @@ func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Context
 	return &tagsInfo, nil
 }
 
+// GetDashboardsInfo returns analytics data for dashboards
+func (r *ClickHouseReader) GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) {
+	dashboardsInfo := model.DashboardsInfo{}
+	// fetch dashboards from dashboard db
+	query := "SELECT data FROM dashboards"
+	var dashboardsData []dashboards.Dashboard
+	err := r.localDB.Select(&dashboardsData, query)
+	if err != nil {
+		zap.S().Debug("Error in processing sql query: ", err)
+		return &dashboardsInfo, err
+	}
+	for _, dashboard := range dashboardsData {
+		// accumulate panel counts across dashboards; a plain assignment here
+		// would keep only the last dashboard's counts
+		info := countPanelsInDashboard(dashboard.Data)
+		dashboardsInfo.LogsBasedPanels += info.LogsBasedPanels
+		dashboardsInfo.TracesBasedPanels += info.TracesBasedPanels
+		dashboardsInfo.MetricBasedPanels += info.MetricBasedPanels
+	}
+	dashboardsInfo.TotalDashboards = len(dashboardsData)
+
+	return &dashboardsInfo, nil
+}
+
+func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
+	var logsPanelCount, tracesPanelCount, metricsPanelCount int
+	if data != nil && data["widgets"] != nil {
+		widgets, ok := data["widgets"].([]interface{})
+		if ok {
+			for _, widget := range widgets {
+				sData, ok := widget.(map[string]interface{})
+				if ok && sData["query"] != nil {
+					query, ok := sData["query"].(map[string]interface{})
+					if ok && query["queryType"] == "builder" && query["builder"] != nil {
+						builderData, ok := query["builder"].(map[string]interface{})
+						if ok && builderData["queryData"] != nil {
+							builderQueryData, ok := builderData["queryData"].([]interface{})
+							if ok {
+								for _, queryData := range builderQueryData {
+									data, ok := queryData.(map[string]interface{})
+									if ok {
+										switch data["dataSource"] {
+										case "traces":
+											tracesPanelCount++
+										case "metrics":
+											metricsPanelCount++
+										case "logs":
+											logsPanelCount++
+										}
+									}
+								}
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+	return model.DashboardsInfo{
+		LogsBasedPanels:   logsPanelCount,
+		TracesBasedPanels: tracesPanelCount,
+		MetricBasedPanels: metricsPanelCount,
+	}
+}
+
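To make the traversal above concrete, a minimal in-package test sketch (the dashboard literal is invented; only the fields the counter inspects are present):

```go
func TestCountPanelsInDashboard(t *testing.T) {
	// hypothetical dashboard payload with one builder widget and two queries
	data := map[string]interface{}{
		"widgets": []interface{}{
			map[string]interface{}{
				"query": map[string]interface{}{
					"queryType": "builder",
					"builder": map[string]interface{}{
						"queryData": []interface{}{
							map[string]interface{}{"dataSource": "logs"},
							map[string]interface{}{"dataSource": "metrics"},
						},
					},
				},
			},
		},
	}
	info := countPanelsInDashboard(data)
	if info.LogsBasedPanels != 1 || info.MetricBasedPanels != 1 || info.TracesBasedPanels != 0 {
		t.Errorf("unexpected counts: %+v", info)
	}
}
```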
+func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
+	alertsInfo := model.AlertsInfo{}
+	// fetch alerts from rules db
+	query := "SELECT data FROM rules"
+	var alertsData []string
+	err := r.localDB.Select(&alertsData, query)
+	if err != nil {
+		zap.S().Debug("Error in processing sql query: ", err)
+		return &alertsInfo, err
+	}
+	for _, alert := range alertsData {
+		var rule rules.GettableRule
+		err = json.Unmarshal([]byte(alert), &rule)
+		if err != nil {
+			zap.S().Errorw("invalid rule data", "err", err)
+			continue
+		}
+		if rule.AlertType == "LOGS_BASED_ALERT" {
+			alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
+		} else if rule.AlertType == "METRIC_BASED_ALERT" {
+			alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
+		} else if rule.AlertType == "TRACES_BASED_ALERT" {
+			alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
+		}
+		alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
+	}
+
+	return &alertsInfo, nil
+}
 
 func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) {
 	// response will contain top level fields from the otel log model
 	response := model.GetFieldsResponse{
@@ -314,6 +314,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMiddleware) {
 	subRouter.HandleFunc("/autocomplete/attribute_values", am.ViewAccess(
 		withCacheControl(AutoCompleteCacheControlAge, aH.autoCompleteAttributeValues))).Methods(http.MethodGet)
 	subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeV3)).Methods(http.MethodPost)
+	subRouter.HandleFunc("/query_range/format", am.ViewAccess(aH.QueryRangeV3Format)).Methods(http.MethodPost)
 
 	// live logs
 	subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.liveTailLogs)).Methods(http.MethodGet)
@@ -3001,6 +3002,18 @@ func (aH *APIHandler) getSpanKeysV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3
 	return data, nil
 }
 
+func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request) {
+	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
+
+	if apiErrorObj != nil {
+		zap.S().Error(apiErrorObj.Err.Error())
+		RespondError(w, apiErrorObj, nil)
+		return
+	}
+
+	aH.Respond(w, queryRangeParams)
+}
+
 func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3, w http.ResponseWriter, r *http.Request) {
 
 	var result []*v3.Result
@@ -73,12 +73,6 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
 	}
 
-	if !agentConf.Ready() {
-		return nil, model.UnavailableError(fmt.Errorf(
-			"agent updater unavailable at the moment. Please try in sometime",
-		))
-	}
-
 	// prepare config elements
 	elements := make([]string, len(pipelines))
 	for i, p := range pipelines {
pkg/query-service/app/metrics/v4/cumulative/helper.go (new file, 57 lines)
@@ -0,0 +1,57 @@
package cumulative

import (
	"fmt"
	"strings"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// groupingSets returns a string of comma separated tags for group by clause;
// `ts` is always added to the grouping set
func groupingSets(tags ...string) string {
	withTs := append(tags, "ts")
	return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
}

// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
	groupTags := []string{}
	for _, tag := range tags {
		groupTags = append(groupTags, tag.Key)
	}
	return groupingSets(groupTags...)
}

// groupByAttributeKeyTags returns a string of comma separated tags for group by clause
func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
	groupTags := []string{}
	for _, tag := range tags {
		groupTags = append(groupTags, tag.Key)
	}
	groupTags = append(groupTags, "ts")
	return strings.Join(groupTags, ", ")
}

// orderByAttributeKeyTags returns a string of comma separated tags for order by clause;
// if the order is not specified, it defaults to ASC
func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
	var orderBy []string
	for _, tag := range tags {
		found := false
		for _, item := range items {
			if item.ColumnName == tag.Key {
				found = true
				orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order))
				break
			}
		}
		if !found {
			orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag.Key))
		}
	}

	orderBy = append(orderBy, "ts ASC")

	return strings.Join(orderBy, ", ")
}
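To pin down the exact SQL fragments these helpers emit, a quick in-package check (a sketch, not part of the commit; it sits beside the helpers so the unexported functions are visible):

```go
package cumulative

import "testing"

// TestGroupingSetsFragments is illustrative only: it shows the fragment
// groupingSets produces for two group-by tags, with `ts` appended.
func TestGroupingSetsFragments(t *testing.T) {
	got := groupingSets("service_name", "endpoint")
	want := `GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) )`
	if got != want {
		t.Errorf("groupingSets fragment = %q, want %q", got, want)
	}
}
```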
pkg/query-service/app/metrics/v4/cumulative/timeseries.go (new file, 220 lines)
@@ -0,0 +1,220 @@
package cumulative

import (
	"fmt"

	v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"go.signoz.io/signoz/pkg/query-service/utils"
)

// See https://clickhouse.com/docs/en/sql-reference/window-functions for more details on the `lagInFrame` function
//
// Calculating the rate of change of a metric is a common use case.
// Requests and errors are two examples of metrics that are often expressed as a rate of change.
// The rate of change is the difference between the current value and the previous value divided by
// the time difference between the current and previous values (i.e. the time interval).
//
// The value of a cumulative counter always increases. However, the rate of change can be negative
// if the value decreases between two samples. This can happen if the counter is reset when the
// application restarts or if the counter is reset manually. In this case, the rate of change is
// not meaningful and should be ignored.
//
// The condition `(per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0`
// checks if the rate of change is negative. If it is negative, the value is replaced with `nan`.
//
// The condition `(ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400` checks
// if the time difference between the current and previous values is greater than or equal to 1 day.
// The first sample of a metric is always `nan` because there is no previous value to compare it to.
// When the first sample is encountered, the previous value for the time is set to the default, i.e. `1970-01-01`.
// Since any difference between the first sample timestamp and the previous value timestamp will be
// greater than or equal to 1 day, the rate of change for the first sample will be `nan`.
//
// If neither of the above conditions is true, the rate of change is calculated as
// `(per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)`
// where `rate_window` is a window that partitions the data by fingerprint and orders it by timestamp.
// We want to calculate the rate of change for each time series, so we partition the data by fingerprint.
//
// The `increase` function is similar to the `rate` function, except that it does not divide by the time interval.
const (
	rateWithoutNegative     = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
	increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
)

// prepareTimeAggregationSubQueryTimeSeries prepares the sub-query to be used for temporal aggregation
// of time series data
//
// The following example illustrates how the sub-query is used to calculate the sum of values for each
// time series in a 15-second interval:
//
// ```
// timestamp   01.00  01.05  01.10  01.15  01.20  01.25  01.30  01.35  01.40
//            +------+------+------+------+------+------+------+------+------+
//            |  v1  |  v2  |  v3  |  v4  |  v5  |  v6  |  v7  |  v8  |  v9  |
//            +------+------+------+------+------+------+------+------+------+
//
//            +------+             +------+             +------+
//            | v1+  |             | v4+  |             | v7+  |
//            | v2+  |             | v5+  |             | v8+  |
//            | v3   |             | v6   |             | v9   |
//            +------+             +------+             +------+
//             01.00                01.15                01.30
// ```
//
// Calculating the rate/increase involves an additional step. We first calculate the maximum value for each time series
// in a 15-second interval. Then, we calculate the difference between the current maximum value and the previous
// maximum value.
//
// The following example illustrates how the sub-query is used to calculate the rate of change for each time series
// in a 15-second interval:
//
// ```
// timestamp   01.00  01.05  01.10  01.15  01.20  01.25  01.30  01.35  01.40
//            +------+------+------+------+------+------+------+------+------+
//            |  v1  |  v2  |  v3  |  v4  |  v5  |  v6  |  v7  |  v8  |  v9  |
//            +------+------+------+------+------+------+------+------+------+
//
//            +------+             +------+             +------+
//        max(| v1,  |         max(| v4,  |         max(| v7,  |
//            | v2,  |             | v5,  |             | v8,  |
//            | v3   |)            | v6   |)            | v9   |)
//            +------+             +------+             +------+
//             01.00                01.15                01.30
//
//                     +-------+            +-------+
//                     | V6-V2 |            | V9-V6 |
//                     +-------+            +-------+
//                      01.00                01.15
// ```
//
// The rate of change is calculated as (Vy - Vx) / (Ty - Tx) where Vx and Vy are the values at time Tx and Ty respectively.
// In an ideal scenario, the last value of each interval could be used to calculate the rate of change. Instead, we use
// the maximum value of each interval to calculate the rate of change. This is because any process restart can cause the
// value to be reset to 0, which would produce an inaccurate result. The max is the best approximation we can get.
// We don't expect the process to restart very often, so this should be a good approximation.
func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
	var subQuery string

	timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
	if err != nil {
		return "", err
	}

	samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)

	// Select the aggregate value for interval
	queryTmpl :=
		"SELECT fingerprint, %s" +
			" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
			" %s as per_series_value" +
			" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
			" INNER JOIN" +
			" (%s) as filtered_time_series" +
			" USING fingerprint" +
			" WHERE " + samplesTableFilter +
			" GROUP BY fingerprint, ts" +
			" ORDER BY fingerprint, ts"

	var selectLabelsAny string
	for _, tag := range mq.GroupBy {
		selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
	}

	var selectLabels string
	for _, tag := range mq.GroupBy {
		selectLabels += tag.Key + ","
	}

	switch mq.TimeAggregation {
	case v3.TimeAggregationAvg:
		op := "avg(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationSum:
		op := "sum(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationMin:
		op := "min(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationMax:
		op := "max(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationCount:
		op := "count(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationCountDistinct:
		op := "count(distinct(value))"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationAnyLast:
		op := "anyLast(value)"
		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
	case v3.TimeAggregationRate:
		op := "max(value)"
		innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
		rateQueryTmpl :=
			"SELECT %s ts, " + rateWithoutNegative +
				" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
		subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
	case v3.TimeAggregationIncrease:
		op := "max(value)"
		innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
		rateQueryTmpl :=
			"SELECT %s ts, " + increaseWithoutNegative +
				" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
		subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
	}
	return subQuery, nil
}

// prepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
	var query string

	temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
	if err != nil {
		return "", err
	}

	groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
	orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
	selectLabels := groupByAttributeKeyTags(mq.GroupBy...)

	queryTmpl :=
		"SELECT %s," +
			" %s as value" +
			" FROM (%s)" +
			" WHERE isNaN(per_series_value) = 0" +
			" GROUP BY %s" +
			" ORDER BY %s"

	switch mq.SpaceAggregation {
	case v3.SpaceAggregationAvg:
		op := "avg(per_series_value)"
		query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
	case v3.SpaceAggregationSum:
		op := "sum(per_series_value)"
		query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
	case v3.SpaceAggregationMin:
		op := "min(per_series_value)"
		query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
	case v3.SpaceAggregationMax:
		op := "max(per_series_value)"
		query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
	case v3.SpaceAggregationCount:
		op := "count(per_series_value)"
		query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
	}

	return query, nil
}
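The counter-reset handling above lives entirely in SQL. As a cross-check, a minimal standalone sketch (not part of the commit) that mirrors the same logic in Go for a single fingerprint, using the same defaults the SQL uses for the first row:

```go
package main

import (
	"fmt"
	"math"
)

// sample mirrors one (ts, per_series_value) row for a single fingerprint.
type sample struct {
	ts    int64   // unix seconds, ascending
	value float64 // per-interval aggregate (max(value) when computing rate)
}

// rateSeries reproduces the SQL's rules: a negative delta (counter reset) or
// a gap of a day or more (including the synthetic first-row gap) yields NaN.
func rateSeries(samples []sample) []float64 {
	rates := make([]float64, len(samples))
	prevTs, prevVal := int64(0), 0.0 // SQL defaults: toDate('1970-01-01') and 0
	for i, s := range samples {
		delta := s.value - prevVal
		gap := s.ts - prevTs
		if delta < 0 || gap >= 86400 {
			rates[i] = math.NaN()
		} else {
			rates[i] = delta / float64(gap)
		}
		prevTs, prevVal = s.ts, s.value
	}
	return rates
}

func main() {
	s := []sample{{1701794980, 100}, {1701795040, 160}, {1701795100, 40}} // reset at the end
	fmt.Println(rateSeries(s))                                           // [NaN 1 NaN]
}
```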
pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go (new file, 229 lines)
@@ -0,0 +1,229 @@
package cumulative

import (
	"testing"

	"github.com/stretchr/testify/assert"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestPrepareTimeAggregationSubQuery(t *testing.T) {
	// The time aggregation is performed for each unique series - since the fingerprint represents the
	// unique hash of the label set, we always group by fingerprint regardless of the GroupBy.
	// This sub-result is then aggregated on dimensions using the provided GroupBy clause keys.
	testCases := []struct {
		name                  string
		builderQuery          *v3.BuilderQuery
		start                 int64
		end                   int64
		expectedQueryContains string
	}{
		{
			name: "test time aggregation = avg, temporality = cumulative",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items: []v3.FilterItem{
						{
							Key: v3.AttributeKey{
								Key:      "service_name",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorNotEqual,
							Value:    "payment_service",
						},
						{
							Key: v3.AttributeKey{
								Key:      "endpoint",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorIn,
							Value:    []interface{}{"/paycallback", "/payme", "/paypal"},
						},
					},
				},
				GroupBy: []v3.AttributeKey{{
					Key:      "service_name",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeTag,
				}},
				Expression:      "A",
				Disabled:        false,
				TimeAggregation: v3.TimeAggregationAvg,
			},
			start:                 1701794980000,
			end:                   1701796780000,
			expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
		},
		{
			name: "test time aggregation = rate, temporality = cumulative",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items: []v3.FilterItem{
						{
							Key: v3.AttributeKey{
								Key:      "service_name",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorContains,
							Value:    "payment_service",
						},
					},
				},
				GroupBy: []v3.AttributeKey{{
					Key:      "service_name",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeTag,
				}},
				Expression:      "A",
				Disabled:        false,
				TimeAggregation: v3.TimeAggregationRate,
			},
			start:                 1701794980000,
			end:                   1701796780000,
			expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)",
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			query, err := prepareTimeAggregationSubQueryTimeSeries(
				testCase.start,
				testCase.end,
				testCase.builderQuery.StepInterval,
				testCase.builderQuery,
			)
			assert.Nil(t, err)
			assert.Contains(t, query, testCase.expectedQueryContains)
		})
	}
}

func TestPrepareTimeseriesQuery(t *testing.T) {
	testCases := []struct {
		name                  string
		builderQuery          *v3.BuilderQuery
		start                 int64
		end                   int64
		expectedQueryContains string
	}{
		{
			name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "system_memory_usage",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items: []v3.FilterItem{
						{
							Key: v3.AttributeKey{
								Key:      "state",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorNotEqual,
							Value:    "idle",
						},
					},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "A",
				Disabled:         false,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
			},
			start:                 1701794980000,
			end:                   1701796780000,
			expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts ASC",
		},
		{
			name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items: []v3.FilterItem{
						{
							Key: v3.AttributeKey{
								Key:      "service_name",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorContains,
							Value:    "payment_service",
						},
					},
				},
				GroupBy: []v3.AttributeKey{{
					Key:      "service_name",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeTag,
				}},
				Expression:       "A",
				Disabled:         false,
				TimeAggregation:  v3.TimeAggregationRate,
				SpaceAggregation: v3.SpaceAggregationSum,
			},
			start:                 1701794980000,
			end:                   1701796780000,
			expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			query, err := prepareMetricQueryCumulativeTimeSeries(
				testCase.start,
				testCase.end,
				testCase.builderQuery.StepInterval,
				testCase.builderQuery,
			)
			assert.Nil(t, err)
			assert.Contains(t, query, testCase.expectedQueryContains)
		})
	}
}
pkg/query-service/app/metrics/v4/query_builder.go (new file, 86 lines)
@@ -0,0 +1,86 @@
package v4

import (
	"fmt"
	"strings"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"go.signoz.io/signoz/pkg/query-service/utils"
)

// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria
func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
	var conditions []string
	var fs *v3.FilterSet = mq.Filters
	var groupTags []v3.AttributeKey = mq.GroupBy

	conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key)))
	conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality))

	if fs != nil && len(fs.Items) != 0 {
		for _, item := range fs.Items {
			toFormat := item.Value
			op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
			if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains {
				toFormat = fmt.Sprintf("%%%s%%", toFormat)
			}
			fmtVal := utils.ClickHouseFormattedValue(toFormat)
			switch op {
			case v3.FilterOperatorEqual:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorNotEqual:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorIn:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorNotIn:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorLike:
				conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorNotLike:
				conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorRegex:
				conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorNotRegex:
				conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorGreaterThan:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorGreaterThanOrEq:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorLessThan:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorLessThanOrEq:
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal))
			case v3.FilterOperatorContains:
				conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorNotContains:
				conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
			case v3.FilterOperatorExists:
				conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key))
			case v3.FilterOperatorNotExists:
				conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key))
			default:
				return "", fmt.Errorf("unsupported filter operator")
			}
		}
	}
	whereClause := strings.Join(conditions, " AND ")

	var selectLabels string
	for _, tag := range groupTags {
		selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key)
	}

	// The table JOIN key always exists
	selectLabels += "fingerprint"

	filterSubQuery := fmt.Sprintf(
		"SELECT DISTINCT %s FROM %s.%s WHERE %s",
		selectLabels,
		constants.SIGNOZ_METRIC_DBNAME,
		constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME,
		whereClause,
	)

	return filterSubQuery, nil
}
pkg/query-service/app/metrics/v4/query_builder_test.go (new file, 150 lines)
@@ -0,0 +1,150 @@
package v4

import (
	"testing"

	"github.com/stretchr/testify/assert"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestPrepareTimeseriesFilterQuery(t *testing.T) {
	testCases := []struct {
		name                  string
		builderQuery          *v3.BuilderQuery
		expectedQueryContains string
	}{
		{
			name: "test prepare time series with no filters and no group by",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Delta,
				Expression:  "A",
				Disabled:    false,
				// remaining struct fields are not needed here
			},
			expectedQueryContains: "SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta'",
		},
		{
			name: "test prepare time series with no filters and group by",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				GroupBy: []v3.AttributeKey{{
					Key:      "service_name",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeTag,
				}},
				Expression: "A",
				Disabled:   false,
				// remaining struct fields are not needed here
			},
			expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative'",
		},
		{
			name: "test prepare time series with no filters and multiple group by",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				GroupBy: []v3.AttributeKey{
					{
						Key:      "service_name",
						DataType: v3.AttributeKeyDataTypeString,
						Type:     v3.AttributeKeyTypeTag,
					},
					{
						Key:      "endpoint",
						DataType: v3.AttributeKeyDataTypeString,
						Type:     v3.AttributeKeyTypeTag,
					},
				},
				Expression: "A",
				Disabled:   false,
				// remaining struct fields are not needed here
			},
			expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative'",
		},
		{
			name: "test prepare time series with filters and multiple group by",
			builderQuery: &v3.BuilderQuery{
				QueryName:    "A",
				StepInterval: 60,
				DataSource:   v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      "http_requests",
					DataType: v3.AttributeKeyDataTypeFloat64,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
					IsJSON:   false,
				},
				Temporality: v3.Cumulative,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items: []v3.FilterItem{
						{
							Key: v3.AttributeKey{
								Key:      "service_name",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorNotEqual,
							Value:    "payment_service",
						},
						{
							Key: v3.AttributeKey{
								Key:      "endpoint",
								Type:     v3.AttributeKeyTypeTag,
								DataType: v3.AttributeKeyDataTypeString,
							},
							Operator: v3.FilterOperatorIn,
							Value:    []interface{}{"/paycallback", "/payme", "/paypal"},
						},
					},
				},
				GroupBy: []v3.AttributeKey{{
					Key:      "service_name",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeTag,
				}},
				Expression: "A",
				Disabled:   false,
				// remaining struct fields are not needed here
			},
			expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']",
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			query, err := PrepareTimeseriesFilterQuery(testCase.builderQuery)
			assert.Nil(t, err)
			assert.Contains(t, query, testCase.expectedQueryContains)
		})
	}
}
@@ -259,7 +259,7 @@ func (agent *Agent) processStatusUpdate(
 	// send the new remote config to the Agent.
 	if configChanged ||
 		(agent.Status.RemoteConfigStatus != nil &&
-			bytes.Compare(agent.Status.RemoteConfigStatus.LastRemoteConfigHash, agent.remoteConfig.ConfigHash) != 0) {
+			!bytes.Equal(agent.Status.RemoteConfigStatus.LastRemoteConfigHash, agent.remoteConfig.ConfigHash)) {
 		// The new status resulted in a change in the config of the Agent or the Agent
 		// does not have this config (hash is different). Send the new config to the Agent.
 		response.RemoteConfig = agent.remoteConfig
@@ -352,7 +352,7 @@ func isEqualConfigFile(f1, f2 *protobufs.AgentConfigFile) bool {
 	if f1 == nil || f2 == nil {
 		return false
 	}
-	return bytes.Compare(f1.Body, f2.Body) == 0 && f1.ContentType == f2.ContentType
+	return bytes.Equal(f1.Body, f2.Body) && f1.ContentType == f2.ContentType
 }
 
 func (agent *Agent) SendToAgent(msg *protobufs.ServerToAgent) {
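A short aside on the `bytes.Compare` → `bytes.Equal` swap above: the two forms are interchangeable for equality checks, but `Equal` reads as a predicate and skips the three-way comparison. A runnable illustration:

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	// nil and empty slices compare equal under both forms, so the swap in
	// the hash comparison above is purely a readability change.
	fmt.Println(bytes.Equal(nil, []byte{}))        // true
	fmt.Println(bytes.Compare(nil, []byte{}) == 0) // true, but less direct
}
```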
@@ -417,7 +417,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
 	}
 
 	// if telemetry.GetInstance().IsSampled() {
-	if _, ok := telemetry.IgnoredPaths()[path]; !ok {
+	if _, ok := telemetry.EnabledPaths()[path]; ok {
 		userEmail, err := auth.GetEmailFromJwt(r.Context())
 		if err == nil {
 			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)
@@ -71,6 +71,8 @@ type Reader interface {
 	GetListResultV3(ctx context.Context, query string) ([]*v3.Row, error)
 	LiveTailLogsV3(ctx context.Context, query string, timestampStart uint64, idStart string, client *v3.LogsLiveTailClient)
 
+	GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error)
+	GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
 	GetTotalSpans(ctx context.Context) (uint64, error)
 	GetSpansInLastHeartBeatInterval(ctx context.Context) (uint64, error)
 	GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error)
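Interface additions like these ripple into every implementation and test double. A hedged sketch of what the two new methods ask of a fake (the `fakeReader` type and its fields are hypothetical; real fakes would implement the full interface):

```go
package tests

import (
	"context"

	"go.signoz.io/signoz/pkg/query-service/model"
)

// fakeReader is illustrative only: it shows the contract the two new
// Reader methods impose on implementations.
type fakeReader struct {
	dashboards model.DashboardsInfo
	alerts     model.AlertsInfo
}

func (f *fakeReader) GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) {
	return &f.dashboards, nil
}

func (f *fakeReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
	return &f.alerts, nil
}
```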
@@ -55,14 +55,14 @@ var BasicPlan = FeatureSet{
 		Name:       QueryBuilderPanels,
 		Active:     true,
 		Usage:      0,
-		UsageLimit: 5,
+		UsageLimit: 20,
 		Route:      "",
 	},
 	Feature{
 		Name:       QueryBuilderAlerts,
 		Active:     true,
 		Usage:      0,
-		UsageLimit: 5,
+		UsageLimit: 10,
 		Route:      "",
 	},
 	Feature{
@@ -615,6 +615,20 @@ type TagsInfo struct {
 	Env string `json:"env"`
 }
 
+type AlertsInfo struct {
+	TotalAlerts       int `json:"totalAlerts"`
+	LogsBasedAlerts   int `json:"logsBasedAlerts"`
+	MetricBasedAlerts int `json:"metricBasedAlerts"`
+	TracesBasedAlerts int `json:"tracesBasedAlerts"`
+}
+
+type DashboardsInfo struct {
+	TotalDashboards   int `json:"totalDashboards"`
+	LogsBasedPanels   int `json:"logsBasedPanels"`
+	MetricBasedPanels int `json:"metricBasedPanels"`
+	TracesBasedPanels int `json:"tracesBasedPanels"`
+}
+
 type TagTelemetryData struct {
 	ServiceName string `json:"serviceName" ch:"serviceName"`
 	Env         string `json:"env" ch:"env"`
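For reference, a minimal sketch (not from the commit) of the JSON these struct tags produce; the values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/model"
)

func main() {
	info := model.DashboardsInfo{
		TotalDashboards:   3,
		LogsBasedPanels:   2,
		MetricBasedPanels: 5,
		TracesBasedPanels: 1,
	}
	b, _ := json.Marshal(info)
	fmt.Println(string(b))
	// {"totalDashboards":3,"logsBasedPanels":2,"metricBasedPanels":5,"tracesBasedPanels":1}
}
```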
@@ -447,6 +447,38 @@ const (
 	Cumulative Temporality = "Cumulative"
 )
 
+type TimeAggregation string
+
+const (
+	TimeAggregationUnspecified   TimeAggregation = ""
+	TimeAggregationAnyLast       TimeAggregation = "latest"
+	TimeAggregationSum           TimeAggregation = "sum"
+	TimeAggregationAvg           TimeAggregation = "avg"
+	TimeAggregationMin           TimeAggregation = "min"
+	TimeAggregationMax           TimeAggregation = "max"
+	TimeAggregationCount         TimeAggregation = "count"
+	TimeAggregationCountDistinct TimeAggregation = "count_distinct"
+	TimeAggregationRate          TimeAggregation = "rate"
+	TimeAggregationIncrease      TimeAggregation = "increase"
+)
+
+type SpaceAggregation string
+
+const (
+	SpaceAggregationUnspecified SpaceAggregation = ""
+	SpaceAggregationSum         SpaceAggregation = "sum"
+	SpaceAggregationAvg         SpaceAggregation = "avg"
+	SpaceAggregationMin         SpaceAggregation = "min"
+	SpaceAggregationMax         SpaceAggregation = "max"
+	SpaceAggregationCount       SpaceAggregation = "count"
+)
+
+type Function struct {
+	Category string        `json:"category"`
+	Name     string        `json:"name"`
+	Args     []interface{} `json:"args,omitempty"`
+}
+
 type BuilderQuery struct {
 	QueryName    string `json:"queryName"`
 	StepInterval int64  `json:"stepInterval"`
@@ -466,6 +498,9 @@ type BuilderQuery struct {
 	OrderBy       []OrderBy        `json:"orderBy,omitempty"`
 	ReduceTo      ReduceToOperator `json:"reduceTo,omitempty"`
 	SelectColumns []AttributeKey   `json:"selectColumns,omitempty"`
+	TimeAggregation  TimeAggregation  `json:"timeAggregation,omitempty"`
+	SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"`
+	Functions        []Function       `json:"functions,omitempty"`
 }
 
 func (b *BuilderQuery) Validate() error {
@@ -3,7 +3,6 @@ package rules
 import (
 	"context"
 	"fmt"
-	"strconv"
 	"sync"
 	"time"
 
@@ -367,7 +366,10 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
 			l[lbl.Name] = lbl.Value
 		}
 
-		tmplData := AlertTemplateData(l, valueFormatter.Format(smpl.F, r.Unit()), strconv.FormatFloat(r.targetVal(), 'f', 2, 64)+converter.UnitToName(r.ruleCondition.TargetUnit))
+		thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
+		threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
+
+		tmplData := AlertTemplateData(l, valueFormatter.Format(smpl.F, r.Unit()), threshold)
 		// Inject some convenience variables that are easier to remember for users
 		// who are not used to Go's templating system.
 		defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
|||||||
func TestThresholdRuleCombinations(t *testing.T) {
|
func TestThresholdRuleCombinations(t *testing.T) {
|
||||||
postableRule := PostableRule{
|
postableRule := PostableRule{
|
||||||
Alert: "Tricky Condition Tests",
|
Alert: "Tricky Condition Tests",
|
||||||
AlertType: "METRICS_BASED_ALERT",
|
AlertType: "METRIC_BASED_ALERT",
|
||||||
RuleType: RuleTypeThreshold,
|
RuleType: RuleTypeThreshold,
|
||||||
EvalWindow: Duration(5 * time.Minute),
|
EvalWindow: Duration(5 * time.Minute),
|
||||||
Frequency: Duration(1 * time.Minute),
|
Frequency: Duration(1 * time.Minute),
|
||||||
|
@@ -1,16 +1,11 @@
 package telemetry
 
-func IgnoredPaths() map[string]struct{} {
-	ignoredPaths := map[string]struct{}{
-		"/api/v1/tags":                {},
-		"/api/v1/version":             {},
-		"/api/v1/query_range":         {},
-		"/api/v2/metrics/query_range": {},
-		"/api/v1/health":              {},
+func EnabledPaths() map[string]struct{} {
+	enabledPaths := map[string]struct{}{
+		"/api/v1/channels":     {},
+		"/api/v1/featureFlags": {},
 	}
-
-	return ignoredPaths
+	return enabledPaths
 }
 
 func ignoreEvents(event string, attributes map[string]interface{}) bool {
@@ -38,6 +38,7 @@ const (
 	TELEMETRY_EVENT_LOGS_FILTERS      = "Logs Filters"
 	TELEMETRY_EVENT_DISTRIBUTED       = "Distributed"
 	TELEMETRY_EVENT_QUERY_RANGE_V3    = "Query Range V3 Metadata"
+	TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info"
 	TELEMETRY_EVENT_ACTIVE_USER       = "Active User"
 	TELEMETRY_EVENT_ACTIVE_USER_PH    = "Active User V2"
 	TELEMETRY_EVENT_USER_INVITATION_SENT = "User Invitation Sent"
@@ -53,6 +54,7 @@ var SAAS_EVENTS_LIST = map[string]struct{}{
 	TELEMETRY_EVENT_ENVIRONMENT:              {},
 	TELEMETRY_EVENT_USER_INVITATION_SENT:     {},
 	TELEMETRY_EVENT_USER_INVITATION_ACCEPTED: {},
+	TELEMETRY_EVENT_DASHBOARDS_ALERTS:        {},
 }
 
 const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
@@ -61,9 +63,9 @@ const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"
 const IP_NOT_FOUND_PLACEHOLDER = "NA"
 const DEFAULT_NUMBER_OF_SERVICES = 6
 
-const HEART_BEAT_DURATION = 6 * time.Hour
+const HEART_BEAT_DURATION = 12 * time.Hour
 
-const ACTIVE_USER_DURATION = 30 * time.Minute
+const ACTIVE_USER_DURATION = 6 * time.Hour
 
 // const HEART_BEAT_DURATION = 30 * time.Second
 // const ACTIVE_USER_DURATION = 30 * time.Second
@ -241,9 +243,30 @@ func createTelemetry() {
 				}
 				telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "")
 
+				alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background())
+				if err != nil {
+					telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "")
+				} else {
+					dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
+					if err == nil {
+						dashboardsAlertsData := map[string]interface{}{
+							"totalDashboards":   dashboardsInfo.TotalDashboards,
+							"logsBasedPanels":   dashboardsInfo.LogsBasedPanels,
+							"metricBasedPanels": dashboardsInfo.MetricBasedPanels,
+							"tracesBasedPanels": dashboardsInfo.TracesBasedPanels,
+							"totalAlerts":       alertsInfo.TotalAlerts,
+							"logsBasedAlerts":   alertsInfo.LogsBasedAlerts,
+							"metricBasedAlerts": alertsInfo.MetricBasedAlerts,
+							"tracesBasedAlerts": alertsInfo.TracesBasedAlerts,
+						}
+						telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "")
+					} else {
+						telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "")
+					}
+				}
+
 				getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background())
 				telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval, "")
 
 			}
 		}
 	}()
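Note: both failure paths above emit the same {"error": ...} payload under TELEMETRY_EVENT_DASHBOARDS_ALERTS, so consumers can detect failure by the presence of the error key. An illustrative flattening of the two duplicate branches follows; it is not part of the commit and differs slightly in behavior (it always issues both queries, while the committed code skips GetDashboardsInfo when GetAlertsInfo fails), and errors.Join needs Go 1.20+:

    // Sketch only: fetch both up front and collapse the duplicate
    // error branches.
    alertsInfo, aErr := telemetry.reader.GetAlertsInfo(context.Background())
    dashboardsInfo, dErr := telemetry.reader.GetDashboardsInfo(context.Background())
    if err := errors.Join(aErr, dErr); err != nil {
    	telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS,
    		map[string]interface{}{"error": err.Error()}, "")
    } else {
    	// build dashboardsAlertsData from alertsInfo/dashboardsInfo as above
    	_ = dashboardsInfo
    }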
@ -367,17 +367,70 @@ func TestLogPipelinesValidation(t *testing.T) {
 	}
 }
 
+func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) {
+	require := require.New(t)
+	testbed := NewTestbedWithoutOpamp(t)
+
+	getPipelinesResp := testbed.GetPipelinesFromQS()
+	require.Equal(0, len(getPipelinesResp.Pipelines))
+	require.Equal(0, len(getPipelinesResp.History))
+
+	postablePipelines := logparsingpipeline.PostablePipelines{
+		Pipelines: []logparsingpipeline.PostablePipeline{
+			{
+				OrderId: 1,
+				Name:    "pipeline1",
+				Alias:   "pipeline1",
+				Enabled: true,
+				Filter: &v3.FilterSet{
+					Operator: "AND",
+					Items: []v3.FilterItem{
+						{
+							Key: v3.AttributeKey{
+								Key:      "method",
+								DataType: v3.AttributeKeyDataTypeString,
+								Type:     v3.AttributeKeyTypeTag,
+							},
+							Operator: "=",
+							Value:    "GET",
+						},
+					},
+				},
+				Config: []logparsingpipeline.PipelineOperator{
+					{
+						OrderId: 1,
+						ID:      "add",
+						Type:    "add",
+						Field:   "attributes.test",
+						Value:   "val",
+						Enabled: true,
+						Name:    "test add",
+					},
+				},
+			},
+		},
+	}
+
+	testbed.PostPipelinesToQS(postablePipelines)
+	getPipelinesResp = testbed.GetPipelinesFromQS()
+	require.Equal(1, len(getPipelinesResp.Pipelines))
+	require.Equal(1, len(getPipelinesResp.History))
+
+}
+
 // LogPipelinesTestBed coordinates and mocks components involved in
 // configuring log pipelines and provides test helpers.
 type LogPipelinesTestBed struct {
 	t               *testing.T
+	testDBFilePath  string
 	testUser        *model.User
 	apiHandler      *app.APIHandler
+	agentConfMgr    *agentConf.Manager
 	opampServer     *opamp.Server
 	opampClientConn *opamp.MockOpAmpConnection
 }
 
-func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed {
+func NewTestbedWithoutOpamp(t *testing.T) *LogPipelinesTestBed {
 	// Create a tmp file based sqlite db for testing.
 	testDBFile, err := os.CreateTemp("", "test-signoz-db-*")
 	if err != nil {
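Note: the new test pins down that pipelines can be created and listed before any collector agent connects over OpAMP, which NewTestbedWithoutOpamp enables by skipping the OpAMP server entirely. A hypothetical side-by-side of the two constructors (the test body is illustrative; only the constructor names come from this diff):

    func TestIllustrateTestbedChoice(t *testing.T) {
    	// API layer only: no OpAMP server, no connected agent.
    	apiOnly := NewTestbedWithoutOpamp(t)
    	_ = apiOnly.GetPipelinesFromQS()

    	// Full testbed: OpAMP server plus a mocked agent connection,
    	// so config-deployment round trips can be asserted as well.
    	full := NewLogPipelinesTestBed(t)
    	_ = full.opampClientConn
    }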
@ -408,22 +461,61 @@ func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed {
 		t.Fatalf("could not create a new ApiHandler: %v", err)
 	}
 
-	opampServer, clientConn := mockOpampAgent(t, testDBFilePath, controller)
-
 	user, apiErr := createTestUser()
 	if apiErr != nil {
 		t.Fatalf("could not create a test user: %v", apiErr)
 	}
 
+	// Mock an available opamp agent
+	testDB, err = opampModel.InitDB(testDBFilePath)
+	require.Nil(t, err, "failed to init opamp model")
+
+	agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
+		DB:       testDB,
+		DBEngine: "sqlite",
+		AgentFeatures: []agentConf.AgentFeature{
+			apiHandler.LogsParsingPipelineController,
+		}})
+	require.Nil(t, err, "failed to init agentConf")
+
 	return &LogPipelinesTestBed{
 		t:               t,
-		testUser:        user,
-		apiHandler:      apiHandler,
-		opampServer:     opampServer,
-		opampClientConn: clientConn,
+		testDBFilePath:  testDBFilePath,
+		testUser:        user,
+		apiHandler:      apiHandler,
+		agentConfMgr:    agentConfMgr,
 	}
 }
 
+func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed {
+	testbed := NewTestbedWithoutOpamp(t)
+
+	opampServer := opamp.InitializeServer(nil, testbed.agentConfMgr)
+	err := opampServer.Start(opamp.GetAvailableLocalAddress())
+	require.Nil(t, err, "failed to start opamp server")
+
+	t.Cleanup(func() {
+		opampServer.Stop()
+	})
+
+	opampClientConnection := &opamp.MockOpAmpConnection{}
+	opampServer.OnMessage(
+		opampClientConnection,
+		&protobufs.AgentToServer{
+			InstanceUid: "test",
+			EffectiveConfig: &protobufs.EffectiveConfig{
+				ConfigMap: newInitialAgentConfigMap(),
+			},
+		},
+	)
+
+	testbed.opampServer = opampServer
+	testbed.opampClientConn = opampClientConnection
+
+	return testbed
+
+}
+
 func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode(
 	postablePipelines logparsingpipeline.PostablePipelines,
 	expectedStatusCode int,
@ -668,43 +760,6 @@ func assertPipelinesResponseMatchesPostedPipelines(
 	}
 }
 
-func mockOpampAgent(
-	t *testing.T,
-	testDBFilePath string,
-	pipelinesController *logparsingpipeline.LogParsingPipelineController,
-) (*opamp.Server, *opamp.MockOpAmpConnection) {
-	// Mock an available opamp agent
-	testDB, err := opampModel.InitDB(testDBFilePath)
-	require.Nil(t, err, "failed to init opamp model")
-
-	agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
-		DB:            testDB,
-		DBEngine:      "sqlite",
-		AgentFeatures: []agentConf.AgentFeature{pipelinesController},
-	})
-	require.Nil(t, err, "failed to init agentConf")
-
-	opampServer := opamp.InitializeServer(nil, agentConfMgr)
-	err = opampServer.Start(opamp.GetAvailableLocalAddress())
-	require.Nil(t, err, "failed to start opamp server")
-
-	t.Cleanup(func() {
-		opampServer.Stop()
-	})
-
-	opampClientConnection := &opamp.MockOpAmpConnection{}
-	opampServer.OnMessage(
-		opampClientConnection,
-		&protobufs.AgentToServer{
-			InstanceUid: "test",
-			EffectiveConfig: &protobufs.EffectiveConfig{
-				ConfigMap: newInitialAgentConfigMap(),
-			},
-		},
-	)
-	return opampServer, opampClientConnection
-}
-
 func newInitialAgentConfigMap() *protobufs.AgentConfigMap {
 	return &protobufs.AgentConfigMap{
 		ConfigMap: map[string]*protobufs.AgentConfigFile{
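Note: removing mockOpampAgent drops no coverage; its body is redistributed across the two constructors above so that each testbed layer owns exactly the setup it needs. A comment-style summary of where each piece went (paraphrase, not code from the commit):

    // opampModel.InitDB + agentConf.Initiate      -> NewTestbedWithoutOpamp
    // opamp.InitializeServer / Start / t.Cleanup  -> NewLogPipelinesTestBed
    // MockOpAmpConnection + opampServer.OnMessage -> NewLogPipelinesTestBed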
@ -192,7 +192,7 @@ services:
     <<: *db-depend
 
   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@ -205,7 +205,7 @@ services:
     #   condition: service_healthy
 
   otel-collector:
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     container_name: signoz-otel-collector
     command:
       [
@ -245,7 +245,7 @@ services:
         condition: service_healthy
 
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.88.1
+    image: signoz/signoz-otel-collector:0.88.3
     container_name: signoz-otel-collector-metrics
     command:
       [