mirror of
https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz
synced 2025-07-25 07:24:24 +08:00
Merge branch 'develop' into release/v0.43.x
This commit is contained in:
commit
ec0185da61
@ -152,7 +152,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
|
||||
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metrics/query_range", am.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)
|
||||
|
||||
// PAT APIs
|
||||
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
|
||||
|
@ -1,236 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/parser"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
basemodel "go.signoz.io/signoz/pkg/query-service/model"
|
||||
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
|
||||
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
|
||||
zap.L().Info("CustomMetricsFunction feature is not enabled in this plan")
|
||||
ah.APIHandler.QueryRangeMetricsV2(w, r)
|
||||
return
|
||||
}
|
||||
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
|
||||
|
||||
if apiErrorObj != nil {
|
||||
zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err))
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// prometheus instant query needs same timestamp
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
|
||||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM {
|
||||
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
|
||||
}
|
||||
|
||||
// round up the end to nearest multiple
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER {
|
||||
end := (metricsQueryRangeParams.End) / 1000
|
||||
step := metricsQueryRangeParams.Step
|
||||
metricsQueryRangeParams.End = (end / step * step) * 1000
|
||||
}
|
||||
|
||||
type channelResult struct {
|
||||
Series []*basemodel.Series
|
||||
TableName string
|
||||
Err error
|
||||
Name string
|
||||
Query string
|
||||
}
|
||||
|
||||
execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) {
|
||||
var seriesList []*basemodel.Series
|
||||
var tableName []string
|
||||
ch := make(chan channelResult, len(queries))
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for name, query := range queries {
|
||||
wg.Add(1)
|
||||
go func(name, query string) {
|
||||
defer wg.Done()
|
||||
seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query)
|
||||
for _, series := range seriesList {
|
||||
series.QueryName = name
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
|
||||
return
|
||||
}
|
||||
ch <- channelResult{Series: seriesList, TableName: tableName}
|
||||
}(name, query)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
close(ch)
|
||||
|
||||
var errs []error
|
||||
errQuriesByName := make(map[string]string)
|
||||
// read values from the channel
|
||||
for r := range ch {
|
||||
if r.Err != nil {
|
||||
errs = append(errs, r.Err)
|
||||
errQuriesByName[r.Name] = r.Query
|
||||
continue
|
||||
}
|
||||
seriesList = append(seriesList, r.Series...)
|
||||
tableName = append(tableName, r.TableName)
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
|
||||
}
|
||||
return seriesList, tableName, nil, nil
|
||||
}
|
||||
|
||||
execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) {
|
||||
var seriesList []*basemodel.Series
|
||||
ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
|
||||
if query.Disabled {
|
||||
continue
|
||||
}
|
||||
wg.Add(1)
|
||||
go func(name string, query *basemodel.PromQuery) {
|
||||
var seriesList []*basemodel.Series
|
||||
defer wg.Done()
|
||||
tmpl := template.New("promql-query")
|
||||
tmpl, tmplErr := tmpl.Parse(query.Query)
|
||||
if tmplErr != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
var queryBuf bytes.Buffer
|
||||
tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
|
||||
if tmplErr != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
query.Query = queryBuf.String()
|
||||
queryModel := basemodel.QueryRangeParams{
|
||||
Start: time.UnixMilli(metricsQueryRangeParams.Start),
|
||||
End: time.UnixMilli(metricsQueryRangeParams.End),
|
||||
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
|
||||
Query: query.Query,
|
||||
}
|
||||
promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel)
|
||||
if err != nil {
|
||||
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
|
||||
return
|
||||
}
|
||||
matrix, _ := promResult.Matrix()
|
||||
for _, v := range matrix {
|
||||
var s basemodel.Series
|
||||
s.QueryName = name
|
||||
s.Labels = v.Metric.Copy().Map()
|
||||
for _, p := range v.Floats {
|
||||
s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.F})
|
||||
}
|
||||
seriesList = append(seriesList, &s)
|
||||
}
|
||||
ch <- channelResult{Series: seriesList}
|
||||
}(name, query)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
close(ch)
|
||||
|
||||
var errs []error
|
||||
errQuriesByName := make(map[string]string)
|
||||
// read values from the channel
|
||||
for r := range ch {
|
||||
if r.Err != nil {
|
||||
errs = append(errs, r.Err)
|
||||
errQuriesByName[r.Name] = r.Query
|
||||
continue
|
||||
}
|
||||
seriesList = append(seriesList, r.Series...)
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
|
||||
}
|
||||
return seriesList, nil, nil
|
||||
}
|
||||
|
||||
var seriesList []*basemodel.Series
|
||||
var tableName []string
|
||||
var err error
|
||||
var errQuriesByName map[string]string
|
||||
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
|
||||
case basemodel.QUERY_BUILDER:
|
||||
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
|
||||
if runQueries.Err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil)
|
||||
return
|
||||
}
|
||||
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
|
||||
|
||||
case basemodel.CLICKHOUSE:
|
||||
queries := make(map[string]string)
|
||||
|
||||
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
|
||||
if chQuery.Disabled {
|
||||
continue
|
||||
}
|
||||
tmpl := template.New("clickhouse-query")
|
||||
tmpl, err := tmpl.Parse(chQuery.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
var query bytes.Buffer
|
||||
|
||||
// replace go template variables
|
||||
querytemplate.AssignReservedVars(metricsQueryRangeParams)
|
||||
|
||||
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
|
||||
if err != nil {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
queries[name] = query.String()
|
||||
}
|
||||
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries)
|
||||
case basemodel.PROM:
|
||||
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
|
||||
default:
|
||||
err = fmt.Errorf("invalid query type")
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
|
||||
len(seriesList) > 1 &&
|
||||
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER ||
|
||||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) {
|
||||
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
type ResponseFormat struct {
|
||||
ResultType string `json:"resultType"`
|
||||
Result []*basemodel.Series `json:"result"`
|
||||
TableName []string `json:"tableName"`
|
||||
}
|
||||
resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName}
|
||||
ah.Respond(w, resp)
|
||||
}
|
@ -329,7 +329,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
|
||||
r.Use(loggingMiddleware)
|
||||
|
||||
apiHandler.RegisterRoutes(r, am)
|
||||
apiHandler.RegisterMetricsRoutes(r, am)
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
|
@ -44,6 +44,9 @@
|
||||
"@sentry/webpack-plugin": "2.16.0",
|
||||
"@signozhq/design-tokens": "0.0.8",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
"@visx/shape": "3.5.0",
|
||||
"@visx/tooltip": "3.3.0",
|
||||
"@xstate/react": "^3.0.0",
|
||||
"ansi-to-html": "0.7.2",
|
||||
"antd": "5.11.0",
|
||||
|
@ -15,6 +15,7 @@
|
||||
"button_test_channel": "Test",
|
||||
"button_return": "Back",
|
||||
"field_channel_name": "Name",
|
||||
"field_send_resolved": "Send resolved alerts",
|
||||
"field_channel_type": "Type",
|
||||
"field_webhook_url": "Webhook URL",
|
||||
"field_slack_recipient": "Recipient",
|
||||
|
@ -15,6 +15,7 @@
|
||||
"button_test_channel": "Test",
|
||||
"button_return": "Back",
|
||||
"field_channel_name": "Name",
|
||||
"field_send_resolved": "Send resolved alerts",
|
||||
"field_channel_type": "Type",
|
||||
"field_webhook_url": "Webhook URL",
|
||||
"field_slack_recipient": "Recipient",
|
||||
|
@ -12,7 +12,7 @@ const create = async (
|
||||
name: props.name,
|
||||
email_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
to: props.to,
|
||||
html: props.html,
|
||||
headers: props.headers,
|
||||
|
@ -12,7 +12,7 @@ const create = async (
|
||||
name: props.name,
|
||||
msteams_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
webhook_url: props.webhook_url,
|
||||
title: props.title,
|
||||
text: props.text,
|
||||
|
@ -12,7 +12,7 @@ const create = async (
|
||||
name: props.name,
|
||||
pagerduty_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
routing_key: props.routing_key,
|
||||
client: props.client,
|
||||
client_url: props.client_url,
|
||||
|
@ -12,7 +12,7 @@ const create = async (
|
||||
name: props.name,
|
||||
slack_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
api_url: props.api_url,
|
||||
channel: props.channel,
|
||||
title: props.title,
|
||||
|
@ -30,7 +30,7 @@ const create = async (
|
||||
name: props.name,
|
||||
webhook_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
url: props.api_url,
|
||||
http_config: httpConfig,
|
||||
},
|
||||
|
@ -12,7 +12,7 @@ const editEmail = async (
|
||||
name: props.name,
|
||||
email_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
to: props.to,
|
||||
html: props.html,
|
||||
headers: props.headers,
|
||||
|
@ -12,7 +12,7 @@ const editMsTeams = async (
|
||||
name: props.name,
|
||||
msteams_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
webhook_url: props.webhook_url,
|
||||
title: props.title,
|
||||
text: props.text,
|
||||
|
@ -12,7 +12,7 @@ const editOpsgenie = async (
|
||||
name: props.name,
|
||||
opsgenie_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
api_key: props.api_key,
|
||||
description: props.description,
|
||||
priority: props.priority,
|
||||
|
@ -12,7 +12,7 @@ const editPager = async (
|
||||
name: props.name,
|
||||
pagerduty_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
routing_key: props.routing_key,
|
||||
client: props.client,
|
||||
client_url: props.client_url,
|
||||
|
@ -12,7 +12,7 @@ const editSlack = async (
|
||||
name: props.name,
|
||||
slack_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
api_url: props.api_url,
|
||||
channel: props.channel,
|
||||
title: props.title,
|
||||
|
@ -29,7 +29,7 @@ const editWebhook = async (
|
||||
name: props.name,
|
||||
webhook_configs: [
|
||||
{
|
||||
send_resolved: true,
|
||||
send_resolved: props.send_resolved,
|
||||
url: props.api_url,
|
||||
http_config: httpConfig,
|
||||
},
|
||||
|
@ -1,4 +1,4 @@
|
||||
import axios from 'api';
|
||||
import { ApiV4Instance } from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { MetricMetaProps } from 'types/api/metrics/getApDex';
|
||||
|
||||
@ -6,4 +6,6 @@ export const getMetricMeta = (
|
||||
metricName: string,
|
||||
servicename: string,
|
||||
): Promise<AxiosResponse<MetricMetaProps>> =>
|
||||
axios.get(`/metric_meta?metricName=${metricName}&serviceName=${servicename}`);
|
||||
ApiV4Instance.get(
|
||||
`/metric/metric_metadata?metricName=${metricName}&serviceName=${servicename}`,
|
||||
);
|
||||
|
@ -1,27 +0,0 @@
|
||||
import { ApiV2Instance as axios } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
MetricNameProps,
|
||||
MetricNamesPayloadProps,
|
||||
} from 'types/api/metrics/getMetricName';
|
||||
|
||||
export const getMetricName = async (
|
||||
props: MetricNameProps,
|
||||
): Promise<SuccessResponse<MetricNamesPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`/metrics/autocomplete/list?match=${props || ''}`,
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
@ -1,6 +1,7 @@
|
||||
import { ApiV2Instance as axios } from 'api';
|
||||
import { ApiV3Instance as axios } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import createQueryParams from 'lib/createQueryParams';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
TagKeyProps,
|
||||
@ -8,15 +9,19 @@ import {
|
||||
TagValueProps,
|
||||
TagValuesPayloadProps,
|
||||
} from 'types/api/metrics/getResourceAttributes';
|
||||
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
|
||||
|
||||
export const getResourceAttributesTagKeys = async (
|
||||
props: TagKeyProps,
|
||||
): Promise<SuccessResponse<TagKeysPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`/metrics/autocomplete/tagKey?metricName=${props.metricName}${
|
||||
props.match ? `&match=${props.match}` : ''
|
||||
}`,
|
||||
`/autocomplete/attribute_keys?${createQueryParams({
|
||||
aggregateOperator: MetricAggregateOperator.RATE,
|
||||
searchText: props.match,
|
||||
dataSource: DataSource.METRICS,
|
||||
aggregateAttribute: props.metricName,
|
||||
})}`,
|
||||
);
|
||||
|
||||
return {
|
||||
@ -35,7 +40,13 @@ export const getResourceAttributesTagValues = async (
|
||||
): Promise<SuccessResponse<TagValuesPayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`/metrics/autocomplete/tagValue?metricName=${props.metricName}&tagKey=${props.tagKey}`,
|
||||
`/autocomplete/attribute_values?${createQueryParams({
|
||||
aggregateOperator: MetricAggregateOperator.RATE,
|
||||
dataSource: DataSource.METRICS,
|
||||
aggregateAttribute: props.metricName,
|
||||
attributeKey: props.tagKey,
|
||||
searchText: '',
|
||||
})}`,
|
||||
);
|
||||
|
||||
return {
|
||||
|
@ -29,6 +29,7 @@ export const getComponentForPanelType = (
|
||||
[PANEL_TYPES.LIST]:
|
||||
dataSource === DataSource.LOGS ? LogsPanelComponent : TracesTableComponent,
|
||||
[PANEL_TYPES.BAR]: Uplot,
|
||||
[PANEL_TYPES.PIE]: null,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: null,
|
||||
};
|
||||
|
||||
|
@ -285,6 +285,7 @@ export enum PANEL_TYPES {
|
||||
LIST = 'list',
|
||||
TRACE = 'trace',
|
||||
BAR = 'bar',
|
||||
PIE = 'pie',
|
||||
EMPTY_WIDGET = 'EMPTY_WIDGET',
|
||||
}
|
||||
|
||||
|
@ -315,7 +315,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
className={cx(
|
||||
'app-layout',
|
||||
isDarkMode ? 'darkMode' : 'lightMode',
|
||||
!collapsed ? 'docked' : '',
|
||||
!collapsed && !renderFullScreen ? 'docked' : '',
|
||||
)}
|
||||
>
|
||||
{isToDisplayLayout && !renderFullScreen && (
|
||||
|
@ -53,6 +53,7 @@ function CreateAlertChannels({
|
||||
EmailChannel
|
||||
>
|
||||
>({
|
||||
send_resolved: true,
|
||||
text: `{{ range .Alerts -}}
|
||||
*Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}
|
||||
|
||||
@ -119,7 +120,7 @@ function CreateAlertChannels({
|
||||
api_url: selectedConfig?.api_url || '',
|
||||
channel: selectedConfig?.channel || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
text: selectedConfig?.text || '',
|
||||
title: selectedConfig?.title || '',
|
||||
}),
|
||||
@ -158,7 +159,7 @@ function CreateAlertChannels({
|
||||
let request: WebhookChannel = {
|
||||
api_url: selectedConfig?.api_url || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
};
|
||||
|
||||
if (selectedConfig?.username !== '' || selectedConfig?.password !== '') {
|
||||
@ -226,7 +227,7 @@ function CreateAlertChannels({
|
||||
|
||||
return {
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
routing_key: selectedConfig?.routing_key || '',
|
||||
client: selectedConfig?.client || '',
|
||||
client_url: selectedConfig?.client_url || '',
|
||||
@ -274,7 +275,7 @@ function CreateAlertChannels({
|
||||
() => ({
|
||||
api_key: selectedConfig?.api_key || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
description: selectedConfig?.description || '',
|
||||
message: selectedConfig?.message || '',
|
||||
priority: selectedConfig?.priority || '',
|
||||
@ -312,7 +313,7 @@ function CreateAlertChannels({
|
||||
const prepareEmailRequest = useCallback(
|
||||
() => ({
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
to: selectedConfig?.to || '',
|
||||
html: selectedConfig?.html || '',
|
||||
headers: selectedConfig?.headers || {},
|
||||
@ -350,7 +351,7 @@ function CreateAlertChannels({
|
||||
() => ({
|
||||
webhook_url: selectedConfig?.webhook_url || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
text: selectedConfig?.text || '',
|
||||
title: selectedConfig?.title || '',
|
||||
}),
|
||||
|
@ -72,7 +72,7 @@ function EditAlertChannels({
|
||||
api_url: selectedConfig?.api_url || '',
|
||||
channel: selectedConfig?.channel || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
text: selectedConfig?.text || '',
|
||||
title: selectedConfig?.title || '',
|
||||
id,
|
||||
@ -115,7 +115,7 @@ function EditAlertChannels({
|
||||
return {
|
||||
api_url: selectedConfig?.api_url || '',
|
||||
name: name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
username,
|
||||
password,
|
||||
id,
|
||||
@ -284,7 +284,7 @@ function EditAlertChannels({
|
||||
() => ({
|
||||
webhook_url: selectedConfig?.webhook_url || '',
|
||||
name: selectedConfig?.name || '',
|
||||
send_resolved: true,
|
||||
send_resolved: selectedConfig?.send_resolved || false,
|
||||
text: selectedConfig?.text || '',
|
||||
title: selectedConfig?.title || '',
|
||||
id,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Form, FormInstance, Input, Select, Typography } from 'antd';
|
||||
import { Form, FormInstance, Input, Select, Switch, Typography } from 'antd';
|
||||
import { Store } from 'antd/lib/form/interface';
|
||||
import UpgradePrompt from 'components/Upgrade/UpgradePrompt';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
@ -95,6 +95,22 @@ function FormAlertChannels({
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item
|
||||
label={t('field_send_resolved')}
|
||||
labelAlign="left"
|
||||
name="send_resolved"
|
||||
>
|
||||
<Switch
|
||||
defaultChecked={initialValue?.send_resolved}
|
||||
onChange={(value): void => {
|
||||
setSelectedConfig((state) => ({
|
||||
...state,
|
||||
send_resolved: value,
|
||||
}));
|
||||
}}
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item label={t('field_channel_type')} labelAlign="left" name="type">
|
||||
<Select disabled={editing} onChange={onTypeChangeHandler} value={type}>
|
||||
<Select.Option value="slack" key="slack">
|
||||
|
@ -26,5 +26,6 @@ export const PANEL_TYPES_VS_FULL_VIEW_TABLE: PanelTypeAndGraphManagerVisibilityP
|
||||
LIST: false,
|
||||
TRACE: false,
|
||||
BAR: true,
|
||||
PIE: false,
|
||||
EMPTY_WIDGET: false,
|
||||
};
|
||||
|
@ -35,7 +35,11 @@ function GridCardGraph({
|
||||
}: GridCardGraphProps): JSX.Element {
|
||||
const dispatch = useDispatch();
|
||||
const [errorMessage, setErrorMessage] = useState<string>();
|
||||
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
|
||||
const {
|
||||
toScrollWidgetId,
|
||||
setToScrollWidgetId,
|
||||
variablesToGetUpdated,
|
||||
} = useDashboard();
|
||||
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
@ -90,7 +94,11 @@ function GridCardGraph({
|
||||
const isEmptyWidget =
|
||||
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
|
||||
|
||||
const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
|
||||
const queryEnabledCondition =
|
||||
isVisible &&
|
||||
!isEmptyWidget &&
|
||||
isQueryEnabled &&
|
||||
isEmpty(variablesToGetUpdated);
|
||||
|
||||
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
||||
if (widget.panelTypes !== PANEL_TYPES.LIST) {
|
||||
@ -166,7 +174,8 @@ function GridCardGraph({
|
||||
|
||||
const menuList =
|
||||
widget.panelTypes === PANEL_TYPES.TABLE ||
|
||||
widget.panelTypes === PANEL_TYPES.LIST
|
||||
widget.panelTypes === PANEL_TYPES.LIST ||
|
||||
widget.panelTypes === PANEL_TYPES.PIE
|
||||
? headerMenuList.filter((menu) => menu !== MenuItemKeys.CreateAlerts)
|
||||
: headerMenuList;
|
||||
|
||||
|
@ -42,6 +42,7 @@ const GridPanelSwitch = forwardRef<
|
||||
thresholds,
|
||||
},
|
||||
[PANEL_TYPES.LIST]: null,
|
||||
[PANEL_TYPES.PIE]: null,
|
||||
[PANEL_TYPES.TRACE]: null,
|
||||
[PANEL_TYPES.BAR]: {
|
||||
data,
|
||||
|
@ -38,6 +38,7 @@ export type PropsTypePropsMap = {
|
||||
[PANEL_TYPES.VALUE]: GridValueComponentProps;
|
||||
[PANEL_TYPES.TABLE]: GridTableComponentProps;
|
||||
[PANEL_TYPES.TRACE]: null;
|
||||
[PANEL_TYPES.PIE]: null;
|
||||
[PANEL_TYPES.LIST]: null;
|
||||
[PANEL_TYPES.BAR]: UplotProps & {
|
||||
ref: ForwardedRef<ToggleGraphProps | undefined>;
|
||||
|
@ -608,6 +608,7 @@ function LogsExplorerViews({
|
||||
className="periscope-btn"
|
||||
onClick={handleToggleShowFormatOptions}
|
||||
icon={<Sliders size={14} />}
|
||||
data-testid="periscope-btn"
|
||||
/>
|
||||
|
||||
{showFormatMenuItems && (
|
||||
|
@ -0,0 +1,151 @@
|
||||
import { render, RenderResult } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
|
||||
import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
|
||||
import { I18nextProvider } from 'react-i18next';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import { VirtuosoMockContext } from 'react-virtuoso';
|
||||
import i18n from 'ReactI18';
|
||||
import store from 'store';
|
||||
|
||||
import LogsExplorerViews from '..';
|
||||
import { logsQueryRangeSuccessNewFormatResponse } from './mock';
|
||||
|
||||
const logExplorerRoute = '/logs/logs-explorer';
|
||||
|
||||
const queryRangeURL = 'http://localhost/api/v3/query_range';
|
||||
|
||||
const lodsQueryServerRequest = (): void =>
|
||||
server.use(
|
||||
rest.post(queryRangeURL, (req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(logsQueryRangeSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
// mocking the graph components in this test as this should be handled separately
|
||||
jest.mock(
|
||||
'container/TimeSeriesView/TimeSeriesView',
|
||||
() =>
|
||||
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
|
||||
function () {
|
||||
return <div>Time Series Chart</div>;
|
||||
},
|
||||
);
|
||||
jest.mock(
|
||||
'container/LogsExplorerChart',
|
||||
() =>
|
||||
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
|
||||
function () {
|
||||
return <div>Histogram Chart</div>;
|
||||
},
|
||||
);
|
||||
|
||||
jest.mock('constants/panelTypes', () => ({
|
||||
AVAILABLE_EXPORT_PANEL_TYPES: ['graph', 'table'],
|
||||
}));
|
||||
|
||||
jest.mock('d3-interpolate', () => ({
|
||||
interpolate: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/queryBuilder/useGetExplorerQueryRange', () => ({
|
||||
__esModule: true,
|
||||
useGetExplorerQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Set up the specific behavior for useGetExplorerQueryRange in individual test cases
|
||||
beforeEach(() => {
|
||||
(useGetExplorerQueryRange as jest.Mock).mockReturnValue({
|
||||
data: { payload: logsQueryRangeSuccessNewFormatResponse },
|
||||
});
|
||||
});
|
||||
|
||||
const renderer = (): RenderResult =>
|
||||
render(
|
||||
<MemoryRouter initialEntries={[logExplorerRoute]}>
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<MockQueryClientProvider>
|
||||
<QueryBuilderProvider>
|
||||
<VirtuosoMockContext.Provider
|
||||
value={{ viewportHeight: 300, itemHeight: 100 }}
|
||||
>
|
||||
<LogsExplorerViews selectedView={SELECTED_VIEWS.SEARCH} showHistogram />
|
||||
</VirtuosoMockContext.Provider>
|
||||
</QueryBuilderProvider>
|
||||
</MockQueryClientProvider>
|
||||
</I18nextProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
describe('LogsExplorerViews -', () => {
|
||||
it('render correctly with props - list and table', async () => {
|
||||
lodsQueryServerRequest();
|
||||
const { queryByText, queryByTestId } = renderer();
|
||||
|
||||
expect(queryByTestId('periscope-btn')).toBeInTheDocument();
|
||||
await userEvent.click(queryByTestId('periscope-btn') as HTMLElement);
|
||||
|
||||
expect(document.querySelector('.menu-container')).toBeInTheDocument();
|
||||
|
||||
const menuItems = document.querySelectorAll('.menu-items .item');
|
||||
expect(menuItems.length).toBe(3);
|
||||
|
||||
// switch to table view
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
await userEvent.click(queryByTestId('table-view') as HTMLElement);
|
||||
|
||||
expect(
|
||||
queryByText(
|
||||
'{"container_id":"container_id","container_name":"container_name","driver":"driver","eta":"2m0s","location":"frontend","log_level":"INFO","message":"Dispatch successful","service":"frontend","span_id":"span_id","trace_id":"span_id"}',
|
||||
),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('check isLoading state', async () => {
|
||||
lodsQueryServerRequest();
|
||||
(useGetExplorerQueryRange as jest.Mock).mockReturnValue({
|
||||
data: { payload: logsQueryRangeSuccessNewFormatResponse },
|
||||
isLoading: true,
|
||||
isFetching: false,
|
||||
});
|
||||
const { queryByText, queryByTestId } = renderer();
|
||||
|
||||
// switch to table view
|
||||
await userEvent.click(queryByTestId('table-view') as HTMLElement);
|
||||
expect(
|
||||
queryByText(
|
||||
'Just a bit of patience, just a little bit’s enough ⎯ we’re getting your logs!',
|
||||
),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('check error state', async () => {
|
||||
lodsQueryServerRequest();
|
||||
(useGetExplorerQueryRange as jest.Mock).mockReturnValue({
|
||||
data: { payload: logsQueryRangeSuccessNewFormatResponse },
|
||||
isLoading: false,
|
||||
isFetching: false,
|
||||
isError: true,
|
||||
});
|
||||
const { queryByText, queryByTestId } = renderer();
|
||||
|
||||
expect(
|
||||
queryByText('Something went wrong. Please try again or contact support.'),
|
||||
).toBeInTheDocument();
|
||||
|
||||
// switch to table view
|
||||
await userEvent.click(queryByTestId('table-view') as HTMLElement);
|
||||
|
||||
expect(
|
||||
queryByText('Something went wrong. Please try again or contact support.'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
51
frontend/src/container/LogsExplorerViews/tests/mock.ts
Normal file
51
frontend/src/container/LogsExplorerViews/tests/mock.ts
Normal file
@ -0,0 +1,51 @@
|
||||
export const logsQueryRangeSuccessNewFormatResponse = {
|
||||
data: {
|
||||
result: [],
|
||||
resultType: '',
|
||||
newResult: {
|
||||
status: 'success',
|
||||
data: {
|
||||
resultType: '',
|
||||
result: [
|
||||
{
|
||||
queryName: 'A',
|
||||
series: null,
|
||||
list: [
|
||||
{
|
||||
timestamp: '2024-02-15T21:20:22Z',
|
||||
data: {
|
||||
attributes_bool: {},
|
||||
attributes_float64: {},
|
||||
attributes_int64: {},
|
||||
attributes_string: {
|
||||
container_id: 'container_id',
|
||||
container_name: 'container_name',
|
||||
driver: 'driver',
|
||||
eta: '2m0s',
|
||||
location: 'frontend',
|
||||
log_level: 'INFO',
|
||||
message: 'Dispatch successful',
|
||||
service: 'frontend',
|
||||
span_id: 'span_id',
|
||||
trace_id: 'span_id',
|
||||
},
|
||||
body:
|
||||
'2024-02-15T21:20:22.035Z\tINFO\tfrontend\tDispatch successful\t{"service": "frontend", "trace_id": "span_id", "span_id": "span_id", "driver": "driver", "eta": "2m0s"}',
|
||||
id: 'id',
|
||||
resources_string: {
|
||||
'container.name': 'container_name',
|
||||
},
|
||||
severity_number: 0,
|
||||
severity_text: '',
|
||||
span_id: '',
|
||||
trace_flags: 0,
|
||||
trace_id: '',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
@ -1,3 +1,5 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
|
||||
import { DownloadOptions } from 'container/Download/Download.types';
|
||||
import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
|
||||
|
||||
@ -20,7 +22,7 @@ export enum FORMULA {
|
||||
ERROR_PERCENTAGE = 'A*100/B',
|
||||
DATABASE_CALLS_AVG_DURATION = 'A/B',
|
||||
APDEX_TRACES = '((B + C)/2)/A',
|
||||
APDEX_DELTA_SPAN_METRICS = '(B + C/2)/A',
|
||||
APDEX_DELTA_SPAN_METRICS = '((B + C)/2)/A',
|
||||
APDEX_CUMULATIVE_SPAN_METRICS = '((B + C)/2)/A',
|
||||
}
|
||||
|
||||
|
@ -33,6 +33,8 @@ export const getNearestHighestBucketValue = (
|
||||
value: number,
|
||||
buckets: number[],
|
||||
): string => {
|
||||
// sort the buckets
|
||||
buckets.sort((a, b) => a - b);
|
||||
const nearestBucket = buckets.find((bucket) => bucket >= value);
|
||||
return nearestBucket?.toString() || '+Inf';
|
||||
};
|
||||
|
@ -10,6 +10,7 @@ export const PANEL_TYPES_INITIAL_QUERY = {
|
||||
[PANEL_TYPES.LIST]: initialQueriesMap.logs,
|
||||
[PANEL_TYPES.TRACE]: initialQueriesMap.traces,
|
||||
[PANEL_TYPES.BAR]: initialQueriesMap.metrics,
|
||||
[PANEL_TYPES.PIE]: initialQueriesMap.metrics,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: initialQueriesMap.metrics,
|
||||
};
|
||||
|
||||
|
@ -1,6 +1,13 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { BarChart3, LineChart, List, SigmaSquare, Table } from 'lucide-react';
|
||||
import {
|
||||
BarChart3,
|
||||
LineChart,
|
||||
List,
|
||||
PieChart,
|
||||
SigmaSquare,
|
||||
Table,
|
||||
} from 'lucide-react';
|
||||
|
||||
const Items: ItemsProps[] = [
|
||||
{
|
||||
@ -28,6 +35,11 @@ const Items: ItemsProps[] = [
|
||||
icon: <BarChart3 size={32} color={Color.BG_ROBIN_400} />,
|
||||
display: 'Bar',
|
||||
},
|
||||
{
|
||||
name: PANEL_TYPES.PIE,
|
||||
icon: <PieChart size={32} color={Color.BG_ROBIN_400} />,
|
||||
display: 'Pie',
|
||||
},
|
||||
];
|
||||
|
||||
export interface ItemsProps {
|
||||
|
@ -1,9 +1,9 @@
|
||||
import { Row } from 'antd';
|
||||
import { isNull } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { memo, useEffect, useState } from 'react';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { convertVariablesToDbFormat } from './util';
|
||||
import VariableItem from './VariableItem';
|
||||
|
||||
function DashboardVariableSelection(): JSX.Element | null {
|
||||
@ -11,15 +11,14 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
selectedDashboard,
|
||||
setSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
} = useDashboard();
|
||||
|
||||
const { data } = selectedDashboard || {};
|
||||
|
||||
const { variables } = data || {};
|
||||
|
||||
const [update, setUpdate] = useState<boolean>(false);
|
||||
const [lastUpdatedVar, setLastUpdatedVar] = useState<string>('');
|
||||
|
||||
const [variablesTableData, setVariablesTableData] = useState<any>([]);
|
||||
|
||||
useEffect(() => {
|
||||
@ -45,8 +44,27 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
}, [variables]);
|
||||
|
||||
const onVarChanged = (name: string): void => {
|
||||
setLastUpdatedVar(name);
|
||||
setUpdate(!update);
|
||||
/**
|
||||
* this function takes care of adding the dependent variables to current update queue and removing
|
||||
* the updated variable name from the queue
|
||||
*/
|
||||
const dependentVariables = variablesTableData
|
||||
?.map((variable: any) => {
|
||||
if (variable.type === 'QUERY') {
|
||||
const re = new RegExp(`\\{\\{\\s*?\\.${name}\\s*?\\}\\}`); // regex for `{{.var}}`
|
||||
const queryValue = variable.queryValue || '';
|
||||
const dependVarReMatch = queryValue.match(re);
|
||||
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
|
||||
return variable.name;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((val: string | null) => !isNull(val));
|
||||
setVariablesToGetUpdated((prev) => [
|
||||
...prev.filter((v) => v !== name),
|
||||
...dependentVariables,
|
||||
]);
|
||||
};
|
||||
|
||||
const onValueUpdate = (
|
||||
@ -56,37 +74,31 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
allSelected: boolean,
|
||||
): void => {
|
||||
if (id) {
|
||||
const newVariablesArr = variablesTableData.map(
|
||||
(variable: IDashboardVariable) => {
|
||||
const variableCopy = { ...variable };
|
||||
|
||||
if (variableCopy.id === id) {
|
||||
variableCopy.selectedValue = value;
|
||||
variableCopy.allSelected = allSelected;
|
||||
}
|
||||
|
||||
return variableCopy;
|
||||
},
|
||||
);
|
||||
updateLocalStorageDashboardVariables(name, value, allSelected);
|
||||
|
||||
const variables = convertVariablesToDbFormat(newVariablesArr);
|
||||
|
||||
if (selectedDashboard) {
|
||||
setSelectedDashboard({
|
||||
...selectedDashboard,
|
||||
data: {
|
||||
...selectedDashboard?.data,
|
||||
variables: {
|
||||
...variables,
|
||||
},
|
||||
},
|
||||
setSelectedDashboard((prev) => {
|
||||
if (prev) {
|
||||
return {
|
||||
...prev,
|
||||
data: {
|
||||
...prev?.data,
|
||||
variables: {
|
||||
...prev?.data.variables,
|
||||
[id]: {
|
||||
...prev.data.variables[id],
|
||||
selectedValue: value,
|
||||
allSelected,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
}
|
||||
|
||||
onVarChanged(name);
|
||||
|
||||
setUpdate(!update);
|
||||
}
|
||||
};
|
||||
|
||||
@ -107,13 +119,12 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
<VariableItem
|
||||
key={`${variable.name}${variable.id}}${variable.order}`}
|
||||
existingVariables={variables}
|
||||
lastUpdatedVar={lastUpdatedVar}
|
||||
variableData={{
|
||||
name: variable.name,
|
||||
...variable,
|
||||
change: update,
|
||||
}}
|
||||
onValueUpdate={onValueUpdate}
|
||||
variablesToGetUpdated={variablesToGetUpdated}
|
||||
/>
|
||||
))}
|
||||
</Row>
|
||||
|
@ -53,7 +53,7 @@ describe('VariableItem', () => {
|
||||
variableData={mockVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
@ -68,7 +68,7 @@ describe('VariableItem', () => {
|
||||
variableData={mockVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
@ -82,7 +82,7 @@ describe('VariableItem', () => {
|
||||
variableData={mockVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
@ -110,7 +110,7 @@ describe('VariableItem', () => {
|
||||
variableData={mockCustomVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
@ -131,7 +131,7 @@ describe('VariableItem', () => {
|
||||
variableData={customVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
@ -146,7 +146,7 @@ describe('VariableItem', () => {
|
||||
variableData={mockCustomVariableData}
|
||||
existingVariables={{}}
|
||||
onValueUpdate={mockOnValueUpdate}
|
||||
lastUpdatedVar=""
|
||||
variablesToGetUpdated={[]}
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
|
@ -32,7 +32,7 @@ interface VariableItemProps {
|
||||
arg1: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
lastUpdatedVar: string;
|
||||
variablesToGetUpdated: string[];
|
||||
}
|
||||
|
||||
const getSelectValue = (
|
||||
@ -49,7 +49,7 @@ function VariableItem({
|
||||
variableData,
|
||||
existingVariables,
|
||||
onValueUpdate,
|
||||
lastUpdatedVar,
|
||||
variablesToGetUpdated,
|
||||
}: VariableItemProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
@ -108,16 +108,10 @@ function VariableItem({
|
||||
|
||||
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
|
||||
/* eslint-disable no-useless-escape */
|
||||
const re = new RegExp(`\\{\\{\\s*?\\.${lastUpdatedVar}\\s*?\\}\\}`); // regex for `{{.var}}`
|
||||
// If the variable is dependent on the last updated variable
|
||||
// and contains the last updated variable in its query (of the form `{{.var}}`)
|
||||
// then we need to update the value of the variable
|
||||
const queryValue = variableData.queryValue || '';
|
||||
const dependVarReMatch = queryValue.match(re);
|
||||
if (
|
||||
variableData.type === 'QUERY' &&
|
||||
dependVarReMatch !== null &&
|
||||
dependVarReMatch.length > 0
|
||||
variableData.name &&
|
||||
variablesToGetUpdated.includes(variableData.name)
|
||||
) {
|
||||
let value = variableData.selectedValue;
|
||||
let allSelected = false;
|
||||
|
@ -26,6 +26,7 @@ export const panelTypeVsThreshold: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.VALUE]: true,
|
||||
[PANEL_TYPES.TABLE]: true,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.BAR]: true,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
@ -36,6 +37,7 @@ export const panelTypeVsSoftMinMax: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.VALUE]: false,
|
||||
[PANEL_TYPES.TABLE]: false,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.BAR]: true,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
@ -45,6 +47,7 @@ export const panelTypeVsDragAndDrop: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.TIME_SERIES]: false,
|
||||
[PANEL_TYPES.VALUE]: true,
|
||||
[PANEL_TYPES.TABLE]: true,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.BAR]: false,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
@ -56,6 +59,7 @@ export const panelTypeVsFillSpan: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.VALUE]: false,
|
||||
[PANEL_TYPES.TABLE]: false,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.BAR]: false,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
@ -66,6 +70,7 @@ export const panelTypeVsYAxisUnit: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.VALUE]: true,
|
||||
[PANEL_TYPES.TABLE]: true,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.BAR]: true,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
@ -76,6 +81,7 @@ export const panelTypeVsCreateAlert: { [key in PANEL_TYPES]: boolean } = {
|
||||
[PANEL_TYPES.VALUE]: true,
|
||||
[PANEL_TYPES.TABLE]: false,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: false,
|
||||
[PANEL_TYPES.BAR]: true,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
@ -88,6 +94,7 @@ export const panelTypeVsPanelTimePreferences: {
|
||||
[PANEL_TYPES.VALUE]: true,
|
||||
[PANEL_TYPES.TABLE]: true,
|
||||
[PANEL_TYPES.LIST]: false,
|
||||
[PANEL_TYPES.PIE]: true,
|
||||
[PANEL_TYPES.BAR]: true,
|
||||
[PANEL_TYPES.TRACE]: false,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: false,
|
||||
|
@ -25,6 +25,7 @@ export type PartialPanelTypes = {
|
||||
[PANEL_TYPES.TABLE]: 'table';
|
||||
[PANEL_TYPES.TIME_SERIES]: 'graph';
|
||||
[PANEL_TYPES.VALUE]: 'value';
|
||||
[PANEL_TYPES.PIE]: 'pie';
|
||||
};
|
||||
|
||||
export const panelTypeDataSourceFormValuesMap: Record<
|
||||
@ -163,6 +164,50 @@ export const panelTypeDataSourceFormValuesMap: Record<
|
||||
},
|
||||
},
|
||||
},
|
||||
[PANEL_TYPES.PIE]: {
|
||||
[DataSource.LOGS]: {
|
||||
builder: {
|
||||
queryData: [
|
||||
'filters',
|
||||
'aggregateOperator',
|
||||
'aggregateAttribute',
|
||||
'groupBy',
|
||||
'limit',
|
||||
'having',
|
||||
'orderBy',
|
||||
'functions',
|
||||
],
|
||||
},
|
||||
},
|
||||
[DataSource.METRICS]: {
|
||||
builder: {
|
||||
queryData: [
|
||||
'filters',
|
||||
'aggregateOperator',
|
||||
'aggregateAttribute',
|
||||
'groupBy',
|
||||
'limit',
|
||||
'having',
|
||||
'orderBy',
|
||||
'functions',
|
||||
'spaceAggregation',
|
||||
],
|
||||
},
|
||||
},
|
||||
[DataSource.TRACES]: {
|
||||
builder: {
|
||||
queryData: [
|
||||
'filters',
|
||||
'aggregateOperator',
|
||||
'aggregateAttribute',
|
||||
'groupBy',
|
||||
'limit',
|
||||
'having',
|
||||
'orderBy',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
[PANEL_TYPES.LIST]: {
|
||||
[DataSource.LOGS]: {
|
||||
builder: {
|
||||
|
@ -159,7 +159,7 @@ export default function EnvironmentDetails(): JSX.Element {
|
||||
|
||||
<div className="request-entity-container">
|
||||
<Typography.Text>
|
||||
Cannot find what you’re looking for? Request a data source
|
||||
Cannot find what you’re looking for? Request an environment
|
||||
</Typography.Text>
|
||||
|
||||
<div className="form-section">
|
||||
|
@ -17,6 +17,7 @@ import { hasFrameworks } from 'container/OnboardingContainer/utils/dataSourceUti
|
||||
import useAnalytics from 'hooks/analytics/useAnalytics';
|
||||
import history from 'lib/history';
|
||||
import { isEmpty, isNull } from 'lodash-es';
|
||||
import { HelpCircle } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
|
||||
import { useOnboardingContext } from '../../context/OnboardingContext';
|
||||
@ -379,6 +380,30 @@ export default function ModuleStepsContainer({
|
||||
history.push('/');
|
||||
};
|
||||
|
||||
const handleFacingIssuesClick = (): void => {
|
||||
trackEvent('Onboarding V2: Facing Issues Sending Data to SigNoz', {
|
||||
dataSource: selectedDataSource?.id,
|
||||
framework: selectedFramework,
|
||||
environment: selectedEnvironment,
|
||||
module: activeStep?.module?.id,
|
||||
});
|
||||
|
||||
const message = `Hi Team,
|
||||
|
||||
I am facing issues sending data to SigNoz. Here are my application details
|
||||
|
||||
Data Source: ${selectedDataSource?.name}
|
||||
Framework:
|
||||
Environment:
|
||||
Module: ${activeStep?.module?.id}
|
||||
|
||||
Thanks
|
||||
`;
|
||||
if (window.Intercom) {
|
||||
window.Intercom('showNewMessage', message);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="onboarding-module-steps">
|
||||
<div className="steps-container">
|
||||
@ -455,6 +480,15 @@ export default function ModuleStepsContainer({
|
||||
<Button onClick={handleNext} type="primary" icon={<ArrowRightOutlined />}>
|
||||
{current < lastStepIndex ? 'Continue to next step' : 'Done'}
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
onClick={handleFacingIssuesClick}
|
||||
danger
|
||||
icon={<HelpCircle size={14} />}
|
||||
>
|
||||
Facing issues sending data to SigNoz?
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -0,0 +1,50 @@
|
||||
.piechart-no-data {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.piechart-container {
|
||||
height: 90%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.piechart-tooltip {
|
||||
|
||||
.piechart-indicator {
|
||||
width: 15px;
|
||||
height: 3px;
|
||||
border-radius: 2px;
|
||||
}
|
||||
|
||||
.tooltip-value {
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
}
|
||||
}
|
||||
|
||||
.piechart-legend {
|
||||
width: 100%;
|
||||
height: 40px;
|
||||
overflow-y: scroll;
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
|
||||
.piechart-legend-item {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
|
||||
.piechart-legend-label {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
}
|
||||
}
|
||||
}
|
218
frontend/src/container/PanelWrapper/PiePanelWrapper.tsx
Normal file
218
frontend/src/container/PanelWrapper/PiePanelWrapper.tsx
Normal file
@ -0,0 +1,218 @@
|
||||
import './PiePanelWrapper.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Group } from '@visx/group';
|
||||
import { Pie } from '@visx/shape';
|
||||
import { useTooltip, useTooltipInPortal } from '@visx/tooltip';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
import { useRef, useState } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { PanelWrapperProps, TooltipData } from './panelWrapper.types';
|
||||
import { getLabel, lightenColor, tooltipStyles } from './utils';
|
||||
|
||||
// refernce: https://www.youtube.com/watch?v=bL3P9CqQkKw
|
||||
function PiePanelWrapper({
|
||||
queryResponse,
|
||||
widget,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const [active, setActive] = useState<{
|
||||
label: string;
|
||||
value: string;
|
||||
color: string;
|
||||
} | null>(null);
|
||||
|
||||
const {
|
||||
tooltipOpen,
|
||||
tooltipLeft,
|
||||
tooltipTop,
|
||||
tooltipData,
|
||||
hideTooltip,
|
||||
showTooltip,
|
||||
} = useTooltip<TooltipData>();
|
||||
|
||||
const { containerRef, TooltipInPortal } = useTooltipInPortal({
|
||||
scroll: true,
|
||||
detectBounds: true,
|
||||
});
|
||||
|
||||
const panelData =
|
||||
queryResponse.data?.payload?.data.newResult.data.result || [];
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const pieChartData: {
|
||||
label: string;
|
||||
value: string;
|
||||
color: string;
|
||||
}[] = [].concat(
|
||||
...(panelData
|
||||
.map((d) =>
|
||||
d.series?.map((s) => ({
|
||||
label:
|
||||
d.series?.length === 1
|
||||
? getLabel(Object.values(s.labels)[0], widget.query, d.queryName)
|
||||
: getLabel(Object.values(s.labels)[0], {} as Query, d.queryName, true),
|
||||
value: s.values[0].value,
|
||||
color: generateColor(
|
||||
d.series?.length === 1
|
||||
? getLabel(Object.values(s.labels)[0], widget.query, d.queryName)
|
||||
: getLabel(Object.values(s.labels)[0], {} as Query, d.queryName, true),
|
||||
themeColors.chartcolors,
|
||||
),
|
||||
})),
|
||||
)
|
||||
.filter((d) => d !== undefined) as never[]),
|
||||
);
|
||||
|
||||
let size = 0;
|
||||
let width = 0;
|
||||
let height = 0;
|
||||
|
||||
const chartRef = useRef<HTMLDivElement>(null);
|
||||
if (chartRef.current) {
|
||||
const { offsetWidth, offsetHeight } = chartRef.current;
|
||||
size = Math.min(offsetWidth, offsetHeight);
|
||||
width = offsetWidth;
|
||||
height = offsetHeight;
|
||||
}
|
||||
const half = size / 2;
|
||||
|
||||
const getFillColor = (color: string): string => {
|
||||
if (active === null) {
|
||||
return color;
|
||||
}
|
||||
const lightenedColor = lightenColor(color, 0.4); // Adjust the opacity value (0.7 in this case)
|
||||
return active.color === color ? color : lightenedColor;
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
{!pieChartData.length && <div className="piechart-no-data">No data</div>}
|
||||
{pieChartData.length > 0 && (
|
||||
<>
|
||||
<div className="piechart-container" ref={chartRef}>
|
||||
<svg width={width} height={height} ref={containerRef}>
|
||||
<Group top={height / 2} left={width / 2}>
|
||||
<Pie
|
||||
data={pieChartData}
|
||||
pieValue={(data: {
|
||||
label: string;
|
||||
value: string;
|
||||
color: string;
|
||||
}): number => parseFloat(data.value)}
|
||||
outerRadius={({ data }): number => {
|
||||
if (!active) return half - 3;
|
||||
return data.label === active.label ? half : half - 3;
|
||||
}}
|
||||
padAngle={0.02}
|
||||
cornerRadius={3}
|
||||
width={size}
|
||||
height={size}
|
||||
>
|
||||
{
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
|
||||
(pie) =>
|
||||
pie.arcs.map((arc, index) => {
|
||||
const { label } = arc.data;
|
||||
const [centroidX, centroidY] = pie.path.centroid(arc);
|
||||
const hasSpaceForLabel = arc.endAngle - arc.startAngle >= 0.6;
|
||||
const arcPath = pie.path(arc);
|
||||
const arcFill = arc.data.color;
|
||||
return (
|
||||
<g
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
key={`arc-${label}-${index}`}
|
||||
onMouseEnter={(): void => {
|
||||
showTooltip({
|
||||
tooltipData: {
|
||||
label,
|
||||
value: arc.data.value,
|
||||
color: arc.data.color,
|
||||
key: label,
|
||||
},
|
||||
tooltipTop: centroidY + height / 2,
|
||||
tooltipLeft: centroidX + width / 2,
|
||||
});
|
||||
setActive(arc.data);
|
||||
}}
|
||||
onMouseLeave={(): void => {
|
||||
hideTooltip();
|
||||
setActive(null);
|
||||
}}
|
||||
>
|
||||
<path d={arcPath || ''} fill={getFillColor(arcFill)} />
|
||||
{hasSpaceForLabel && (
|
||||
<text
|
||||
x={centroidX}
|
||||
y={centroidY}
|
||||
dy=".33em"
|
||||
fill="#000"
|
||||
fontSize={10}
|
||||
textAnchor="middle"
|
||||
pointerEvents="none"
|
||||
>
|
||||
{arc.data.label}
|
||||
</text>
|
||||
)}
|
||||
</g>
|
||||
);
|
||||
})
|
||||
}
|
||||
</Pie>
|
||||
</Group>
|
||||
</svg>
|
||||
{tooltipOpen && tooltipData && (
|
||||
<TooltipInPortal
|
||||
top={tooltipTop}
|
||||
left={tooltipLeft}
|
||||
style={{
|
||||
...tooltipStyles,
|
||||
background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100,
|
||||
color: isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_400,
|
||||
}}
|
||||
className="piechart-tooltip"
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
background: tooltipData.color,
|
||||
}}
|
||||
className="piechart-indicator"
|
||||
/>
|
||||
{tooltipData.key}
|
||||
<div className="tooltip-value">{tooltipData.value}</div>
|
||||
</TooltipInPortal>
|
||||
)}
|
||||
</div>
|
||||
<div className="piechart-legend">
|
||||
{pieChartData.length > 0 &&
|
||||
pieChartData.map((data) => (
|
||||
<div
|
||||
key={data.label}
|
||||
className="piechart-legend-item"
|
||||
onMouseEnter={(): void => {
|
||||
setActive(data);
|
||||
}}
|
||||
onMouseLeave={(): void => {
|
||||
setActive(null);
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
backgroundColor: getFillColor(data.color),
|
||||
}}
|
||||
className="piechart-legend-label"
|
||||
/>
|
||||
{data.label}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default PiePanelWrapper;
|
@ -1,6 +1,7 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
|
||||
import ListPanelWrapper from './ListPanelWrapper';
|
||||
import PiePanelWrapper from './PiePanelWrapper';
|
||||
import TablePanelWrapper from './TablePanelWrapper';
|
||||
import UplotPanelWrapper from './UplotPanelWrapper';
|
||||
import ValuePanelWrapper from './ValuePanelWrapper';
|
||||
@ -12,5 +13,6 @@ export const PanelTypeVsPanelWrapper = {
|
||||
[PANEL_TYPES.VALUE]: ValuePanelWrapper,
|
||||
[PANEL_TYPES.TRACE]: null,
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: null,
|
||||
[PANEL_TYPES.PIE]: PiePanelWrapper,
|
||||
[PANEL_TYPES.BAR]: UplotPanelWrapper,
|
||||
};
|
||||
|
@ -22,3 +22,10 @@ export type PanelWrapperProps = {
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
selectedGraph?: PANEL_TYPES;
|
||||
};
|
||||
|
||||
export type TooltipData = {
|
||||
label: string;
|
||||
key: string;
|
||||
value: string;
|
||||
color: string;
|
||||
};
|
||||
|
73
frontend/src/container/PanelWrapper/utils.ts
Normal file
73
frontend/src/container/PanelWrapper/utils.ts
Normal file
@ -0,0 +1,73 @@
|
||||
import { defaultStyles } from '@visx/tooltip';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
export const tooltipStyles = {
|
||||
...defaultStyles,
|
||||
minWidth: 60,
|
||||
backgroundColor: 'rgba(0,0,0,0.9)',
|
||||
color: 'white',
|
||||
zIndex: 9999,
|
||||
display: 'flex',
|
||||
gap: '10px',
|
||||
justifyContent: 'center',
|
||||
alignItems: 'center',
|
||||
padding: '5px 10px',
|
||||
};
|
||||
|
||||
export const getLabel = (
|
||||
label: string,
|
||||
query: Query,
|
||||
queryName: string,
|
||||
isQueryContentMultipleResult = false, // If there are more than one aggregation return by the query, this should be set to true. Default is false.
|
||||
): string => {
|
||||
let finalQuery;
|
||||
if (!isQueryContentMultipleResult) {
|
||||
finalQuery = query.builder.queryData.find((q) => q.queryName === queryName);
|
||||
if (!finalQuery) {
|
||||
// If the query is not found in queryData, then check in queryFormulas
|
||||
finalQuery = query.builder.queryFormulas.find(
|
||||
(q) => q.queryName === queryName,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (finalQuery) {
|
||||
if (finalQuery.legend !== '') {
|
||||
return finalQuery.legend;
|
||||
}
|
||||
if (label !== undefined) {
|
||||
return label;
|
||||
}
|
||||
return queryName;
|
||||
}
|
||||
return label;
|
||||
};
|
||||
|
||||
// Function to convert a hex color to RGB format
|
||||
const hexToRgb = (
|
||||
color: string,
|
||||
): { r: number; g: number; b: number } | null => {
|
||||
const hex = color.replace(
|
||||
/^#?([a-f\d])([a-f\d])([a-f\d])$/i,
|
||||
(m, r, g, b) => r + r + g + g + b + b,
|
||||
);
|
||||
const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
|
||||
return result
|
||||
? {
|
||||
r: parseInt(result[1], 16),
|
||||
g: parseInt(result[2], 16),
|
||||
b: parseInt(result[3], 16),
|
||||
}
|
||||
: null;
|
||||
};
|
||||
|
||||
export const lightenColor = (color: string, opacity: number): string => {
|
||||
// Convert the hex color to RGB format
|
||||
const rgbColor = hexToRgb(color);
|
||||
if (!rgbColor) return color; // Return the original color if unable to parse
|
||||
|
||||
// Extract the RGB components
|
||||
const { r, g, b } = rgbColor;
|
||||
|
||||
// Create a new RGBA color string with the specified opacity
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
@ -0,0 +1,105 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
|
||||
|
||||
import LeftToolbarActions from '../LeftToolbarActions';
|
||||
import RightToolbarActions from '../RightToolbarActions';
|
||||
|
||||
describe('ToolbarActions', () => {
|
||||
it('LeftToolbarActions - renders correctly with default props', async () => {
|
||||
const handleChangeSelectedView = jest.fn();
|
||||
const handleToggleShowHistogram = jest.fn();
|
||||
const { queryByTestId } = render(
|
||||
<LeftToolbarActions
|
||||
items={{
|
||||
search: {
|
||||
name: 'search',
|
||||
label: 'Search',
|
||||
disabled: false,
|
||||
show: true,
|
||||
},
|
||||
queryBuilder: {
|
||||
name: 'query-builder',
|
||||
label: 'Query Builder',
|
||||
disabled: false,
|
||||
show: true,
|
||||
},
|
||||
clickhouse: {
|
||||
name: 'clickhouse',
|
||||
label: 'Clickhouse',
|
||||
disabled: false,
|
||||
},
|
||||
}}
|
||||
selectedView={SELECTED_VIEWS.SEARCH}
|
||||
onChangeSelectedView={handleChangeSelectedView}
|
||||
onToggleHistrogramVisibility={handleToggleShowHistogram}
|
||||
showHistogram
|
||||
/>,
|
||||
);
|
||||
expect(screen.getByTestId('search-view')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('query-builder-view')).toBeInTheDocument();
|
||||
|
||||
// clickhouse should not be present as its show: false
|
||||
expect(queryByTestId('clickhouse-view')).not.toBeInTheDocument();
|
||||
|
||||
await userEvent.click(screen.getByTestId('search-view'));
|
||||
expect(handleChangeSelectedView).toBeCalled();
|
||||
|
||||
await userEvent.click(screen.getByTestId('query-builder-view'));
|
||||
expect(handleChangeSelectedView).toBeCalled();
|
||||
});
|
||||
|
||||
it('renders - clickhouse view and test histogram toggle', async () => {
|
||||
const handleChangeSelectedView = jest.fn();
|
||||
const handleToggleShowHistogram = jest.fn();
|
||||
const { queryByTestId, getByRole } = render(
|
||||
<LeftToolbarActions
|
||||
items={{
|
||||
search: {
|
||||
name: 'search',
|
||||
label: 'Search',
|
||||
disabled: false,
|
||||
show: false,
|
||||
},
|
||||
queryBuilder: {
|
||||
name: 'query-builder',
|
||||
label: 'Query Builder',
|
||||
disabled: false,
|
||||
show: true,
|
||||
},
|
||||
clickhouse: {
|
||||
name: 'clickhouse',
|
||||
label: 'Clickhouse',
|
||||
disabled: false,
|
||||
show: true,
|
||||
},
|
||||
}}
|
||||
selectedView={SELECTED_VIEWS.QUERY_BUILDER}
|
||||
onChangeSelectedView={handleChangeSelectedView}
|
||||
onToggleHistrogramVisibility={handleToggleShowHistogram}
|
||||
showHistogram
|
||||
/>,
|
||||
);
|
||||
|
||||
const clickHouseView = queryByTestId('clickhouse-view');
|
||||
expect(clickHouseView).toBeInTheDocument();
|
||||
|
||||
await userEvent.click(clickHouseView as HTMLElement);
|
||||
expect(handleChangeSelectedView).toBeCalled();
|
||||
|
||||
await userEvent.click(getByRole('switch'));
|
||||
expect(handleToggleShowHistogram).toBeCalled();
|
||||
});
|
||||
|
||||
it('RightToolbarActions - render correctly with props', async () => {
|
||||
const onStageRunQuery = jest.fn();
|
||||
const { queryByText } = render(
|
||||
<RightToolbarActions onStageRunQuery={onStageRunQuery} />,
|
||||
);
|
||||
|
||||
const stageNRunBtn = queryByText('Stage & Run Query');
|
||||
expect(stageNRunBtn).toBeInTheDocument();
|
||||
await userEvent.click(stageNRunBtn as HTMLElement);
|
||||
expect(onStageRunQuery).toBeCalled();
|
||||
});
|
||||
});
|
@ -61,7 +61,6 @@
|
||||
line-height: 18px;
|
||||
|
||||
background: transparent;
|
||||
border-left: 2px solid transparent;
|
||||
|
||||
transition: 0.2s all linear;
|
||||
|
||||
|
@ -5,4 +5,5 @@ export const Container = styled.div`
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
gap: 0.3rem;
|
||||
margin: 8px 0;
|
||||
`;
|
||||
|
@ -9,7 +9,6 @@ export const tableStyles: CSSProperties = {
|
||||
export const Container = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 15px;
|
||||
`;
|
||||
|
||||
export const ErrorText = styled(Typography)`
|
||||
|
@ -3,7 +3,6 @@ import styled from 'styled-components';
|
||||
export const Container = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 15px;
|
||||
`;
|
||||
|
||||
export const ActionsContainer = styled.div`
|
||||
|
@ -67,30 +67,32 @@ export const useDashboardVariablesFromLocalStorage = (
|
||||
setCurrentDashboard(defaultTo(localStoreDashboardVariables[dashboardId], {}));
|
||||
}, [dashboardId]);
|
||||
|
||||
useEffect(() => {
|
||||
try {
|
||||
const serializedData = JSON.stringify(allDashboards);
|
||||
setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serializedData);
|
||||
} catch {
|
||||
console.error('Failed to set dashboards in local storage');
|
||||
}
|
||||
}, [allDashboards]);
|
||||
|
||||
useEffect(() => {
|
||||
setAllDashboards((prev) => ({
|
||||
...prev,
|
||||
[dashboardId]: { ...currentDashboard },
|
||||
}));
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [currentDashboard]);
|
||||
|
||||
const updateLocalStorageDashboardVariables = (
|
||||
id: string,
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
): void => {
|
||||
const newCurrentDashboard = {
|
||||
...currentDashboard,
|
||||
setCurrentDashboard((prev) => ({
|
||||
...prev,
|
||||
[id]: { selectedValue, allSelected },
|
||||
};
|
||||
|
||||
const newAllDashboards = {
|
||||
...allDashboards,
|
||||
[dashboardId]: newCurrentDashboard,
|
||||
};
|
||||
|
||||
try {
|
||||
const serializedData = JSON.stringify(newAllDashboards);
|
||||
setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serializedData);
|
||||
} catch {
|
||||
console.error('Failed to set dashboards in local storage');
|
||||
}
|
||||
|
||||
setAllDashboards(newAllDashboards);
|
||||
setCurrentDashboard(newCurrentDashboard);
|
||||
}));
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -109,7 +109,11 @@ export const GetTagKeys = async (): Promise<IOption[]> => {
|
||||
if (!payload || !payload?.data) {
|
||||
return [];
|
||||
}
|
||||
return payload.data
|
||||
|
||||
const keys =
|
||||
payload.data.attributeKeys?.map((attributeKey) => attributeKey.key) || [];
|
||||
|
||||
return keys
|
||||
.filter((tagKey: string) => tagKey !== 'resource_deployment_environment')
|
||||
.map((tagKey: string) => ({
|
||||
label: convertMetricKeyToTrace(tagKey),
|
||||
@ -125,7 +129,9 @@ export const getEnvironmentTagKeys = async (): Promise<IOption[]> => {
|
||||
if (!payload || !payload?.data) {
|
||||
return [];
|
||||
}
|
||||
return payload.data.map((tagKey: string) => ({
|
||||
const keys =
|
||||
payload.data.attributeKeys?.map((attributeKey) => attributeKey.key) || [];
|
||||
return keys.map((tagKey: string) => ({
|
||||
label: convertMetricKeyToTrace(tagKey),
|
||||
value: tagKey,
|
||||
}));
|
||||
@ -140,7 +146,10 @@ export const getEnvironmentTagValues = async (): Promise<IOption[]> => {
|
||||
if (!payload || !payload?.data) {
|
||||
return [];
|
||||
}
|
||||
return payload.data.map((tagValue: string) => ({
|
||||
|
||||
const values = payload.data.stringAttributeValues || [];
|
||||
|
||||
return values.map((tagValue: string) => ({
|
||||
label: tagValue,
|
||||
value: tagValue,
|
||||
}));
|
||||
@ -155,7 +164,10 @@ export const GetTagValues = async (tagKey: string): Promise<IOption[]> => {
|
||||
if (!payload || !payload?.data) {
|
||||
return [];
|
||||
}
|
||||
return payload.data.map((tagValue: string) => ({
|
||||
|
||||
const values = payload.data.stringAttributeValues || [];
|
||||
|
||||
return values.map((tagValue: string) => ({
|
||||
label: tagValue,
|
||||
value: tagValue,
|
||||
}));
|
||||
|
@ -26,7 +26,7 @@ export const handlers = [
|
||||
),
|
||||
|
||||
rest.get(
|
||||
'http://localhost/api/v2/metrics/autocomplete/tagKey',
|
||||
'http://localhost/api/v3/autocomplete/attribute_keys',
|
||||
(req, res, ctx) => {
|
||||
const metricName = req.url.searchParams.get('metricName');
|
||||
const match = req.url.searchParams.get('match');
|
||||
@ -43,7 +43,7 @@ export const handlers = [
|
||||
),
|
||||
|
||||
rest.get(
|
||||
'http://localhost/api/v2/metrics/autocomplete/tagValue',
|
||||
'http://localhost/api/v3/autocomplete/attribute_values',
|
||||
(req, res, ctx) => {
|
||||
// ?metricName=signoz_calls_total&tagKey=resource_signoz_collector_id
|
||||
const metricName = req.url.searchParams.get('metricName');
|
||||
|
@ -1,9 +1,18 @@
|
||||
import { render, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import {
|
||||
initialQueriesMap,
|
||||
initialQueryBuilderFormValues,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { noop } from 'lodash-es';
|
||||
import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import {
|
||||
QueryBuilderContext,
|
||||
QueryBuilderProvider,
|
||||
} from 'providers/QueryBuilder';
|
||||
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
|
||||
import { I18nextProvider } from 'react-i18next';
|
||||
import { Provider } from 'react-redux';
|
||||
@ -12,8 +21,9 @@ import { MemoryRouter } from 'react-router-dom';
|
||||
import { VirtuosoMockContext } from 'react-virtuoso';
|
||||
import i18n from 'ReactI18';
|
||||
import store from 'store';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import LogsExplorer from '..';
|
||||
import LogsExplorer from '../index';
|
||||
|
||||
const queryRangeURL = 'http://localhost/api/v3/query_range';
|
||||
// mocking the graph components in this test as this should be handled separately
|
||||
@ -42,6 +52,15 @@ jest.mock('d3-interpolate', () => ({
|
||||
interpolate: jest.fn(),
|
||||
}));
|
||||
|
||||
const logExplorerRoute = '/logs/logs-explorer';
|
||||
|
||||
const lodsQueryServerRequest = (): void =>
|
||||
server.use(
|
||||
rest.post(queryRangeURL, (req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(logsQueryRangeSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
describe('Logs Explorer Tests', () => {
|
||||
test('Logs Explorer default view test without data', async () => {
|
||||
const {
|
||||
@ -51,7 +70,7 @@ describe('Logs Explorer Tests', () => {
|
||||
getByTestId,
|
||||
queryByTestId,
|
||||
} = render(
|
||||
<MemoryRouter initialEntries={['/logs/logs-explorer']}>
|
||||
<MemoryRouter initialEntries={[logExplorerRoute]}>
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<MockQueryClientProvider>
|
||||
@ -95,13 +114,9 @@ describe('Logs Explorer Tests', () => {
|
||||
|
||||
test('Logs Explorer Page should render with data', async () => {
|
||||
// mocking the query range API to return the logs
|
||||
server.use(
|
||||
rest.post(queryRangeURL, (req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(logsQueryRangeSuccessResponse)),
|
||||
),
|
||||
);
|
||||
lodsQueryServerRequest();
|
||||
const { queryByText, queryByTestId } = render(
|
||||
<MemoryRouter initialEntries={['/logs/logs-explorer']}>
|
||||
<MemoryRouter initialEntries={[logExplorerRoute]}>
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<MockQueryClientProvider>
|
||||
@ -144,4 +159,74 @@ describe('Logs Explorer Tests', () => {
|
||||
),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('Multiple Current Queries', async () => {
|
||||
// mocking the query range API to return the logs
|
||||
lodsQueryServerRequest();
|
||||
const { queryAllByText } = render(
|
||||
<MemoryRouter initialEntries={[logExplorerRoute]}>
|
||||
<Provider store={store}>
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<MockQueryClientProvider>
|
||||
<QueryBuilderContext.Provider
|
||||
value={{
|
||||
currentQuery: {
|
||||
...initialQueriesMap.metrics,
|
||||
builder: {
|
||||
...initialQueriesMap.metrics.builder,
|
||||
queryData: [
|
||||
initialQueryBuilderFormValues,
|
||||
initialQueryBuilderFormValues,
|
||||
],
|
||||
},
|
||||
},
|
||||
setSupersetQuery: jest.fn(),
|
||||
supersetQuery: initialQueriesMap.metrics,
|
||||
stagedQuery: initialQueriesMap.metrics,
|
||||
initialDataSource: null,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
isEnabledQuery: false,
|
||||
handleSetQueryData: noop,
|
||||
handleSetFormulaData: noop,
|
||||
handleSetQueryItemData: noop,
|
||||
handleSetConfig: noop,
|
||||
removeQueryBuilderEntityByIndex: noop,
|
||||
removeQueryTypeItemByIndex: noop,
|
||||
addNewBuilderQuery: noop,
|
||||
cloneQuery: noop,
|
||||
addNewFormula: noop,
|
||||
addNewQueryItem: noop,
|
||||
redirectWithQueryBuilderData: noop,
|
||||
handleRunQuery: noop,
|
||||
resetQuery: noop,
|
||||
updateAllQueriesOperators: (): Query => initialQueriesMap.metrics,
|
||||
updateQueriesData: (): Query => initialQueriesMap.metrics,
|
||||
initQueryBuilderData: noop,
|
||||
handleOnUnitsChange: noop,
|
||||
isStagedQueryUpdated: (): boolean => false,
|
||||
}}
|
||||
>
|
||||
<VirtuosoMockContext.Provider
|
||||
value={{ viewportHeight: 300, itemHeight: 100 }}
|
||||
>
|
||||
<LogsExplorer />
|
||||
</VirtuosoMockContext.Provider>
|
||||
</QueryBuilderContext.Provider>
|
||||
</MockQueryClientProvider>
|
||||
</I18nextProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
const queries = queryAllByText(
|
||||
'Search Filter : select options from suggested values, for IN/NOT IN operators - press "Enter" after selecting options',
|
||||
);
|
||||
expect(queries.length).toBe(2);
|
||||
|
||||
const legendFormats = queryAllByText('Legend Format');
|
||||
expect(legendFormats.length).toBe(2);
|
||||
|
||||
const aggrInterval = queryAllByText('AGGREGATION INTERVAL');
|
||||
expect(aggrInterval.length).toBe(2);
|
||||
});
|
||||
});
|
||||
|
@ -53,6 +53,8 @@ const DashboardContext = createContext<IDashboardContext>({
|
||||
toScrollWidgetId: '',
|
||||
setToScrollWidgetId: () => {},
|
||||
updateLocalStorageDashboardVariables: () => {},
|
||||
variablesToGetUpdated: [],
|
||||
setVariablesToGetUpdated: () => {},
|
||||
});
|
||||
|
||||
interface Props {
|
||||
@ -86,6 +88,10 @@ export function DashboardProvider({
|
||||
exact: true,
|
||||
});
|
||||
|
||||
const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
|
||||
[],
|
||||
);
|
||||
|
||||
const [layouts, setLayouts] = useState<Layout[]>([]);
|
||||
|
||||
const { isLoggedIn } = useSelector<AppState, AppReducer>((state) => state.app);
|
||||
@ -171,6 +177,7 @@ export function DashboardProvider({
|
||||
return data;
|
||||
};
|
||||
|
||||
console.log(variablesToGetUpdated);
|
||||
const dashboardResponse = useQuery(
|
||||
[REACT_QUERY_KEY.DASHBOARD_BY_ID, isDashboardPage?.params],
|
||||
{
|
||||
@ -323,6 +330,8 @@ export function DashboardProvider({
|
||||
updatedTimeRef,
|
||||
setToScrollWidgetId,
|
||||
updateLocalStorageDashboardVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
}),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
@ -335,6 +344,8 @@ export function DashboardProvider({
|
||||
toScrollWidgetId,
|
||||
updateLocalStorageDashboardVariables,
|
||||
currentDashboard,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
],
|
||||
);
|
||||
|
||||
|
@ -30,4 +30,6 @@ export interface IDashboardContext {
|
||||
| undefined,
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
variablesToGetUpdated: string[];
|
||||
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
|
||||
}
|
||||
|
@ -1,9 +1,12 @@
|
||||
import { IAttributeValuesResponse } from '../queryBuilder/getAttributesValues';
|
||||
import { IQueryAutocompleteResponse } from '../queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
export type TagKeyProps = {
|
||||
match?: string;
|
||||
metricName: string;
|
||||
};
|
||||
export type TagKeysPayloadProps = {
|
||||
data: string[];
|
||||
data: IQueryAutocompleteResponse;
|
||||
};
|
||||
|
||||
export type TagValueProps = {
|
||||
@ -11,5 +14,5 @@ export type TagValueProps = {
|
||||
metricName: string;
|
||||
};
|
||||
export type TagValuesPayloadProps = {
|
||||
data: string[];
|
||||
data: IAttributeValuesResponse;
|
||||
};
|
||||
|
@ -5,5 +5,8 @@ export const getGraphType = (panelType: PANEL_TYPES): PANEL_TYPES => {
|
||||
if (panelType === PANEL_TYPES.BAR) {
|
||||
return PANEL_TYPES.TIME_SERIES;
|
||||
}
|
||||
if (panelType === PANEL_TYPES.PIE) {
|
||||
return PANEL_TYPES.TABLE;
|
||||
}
|
||||
return panelType;
|
||||
};
|
||||
|
@ -4186,18 +4186,74 @@
|
||||
tapable "^2.0.0"
|
||||
webpack "^5.1.0"
|
||||
|
||||
"@types/d3-color@*":
|
||||
"@types/d3-array@3.0.3":
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.3.tgz#87d990bf504d14ad6b16766979d04e943c046dac"
|
||||
integrity sha512-Reoy+pKnvsksN0lQUlcH6dOGjRZ/3WRwXR//m+/8lt1BXeI4xyaUZoqULNjyXXRuh0Mj4LNpkCvhUpQlY3X5xQ==
|
||||
|
||||
"@types/d3-color@*", "@types/d3-color@3.1.0":
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.0.tgz#6594da178ded6c7c3842f3cc0ac84b156f12f2d4"
|
||||
integrity sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA==
|
||||
|
||||
"@types/d3-interpolate@^3.0.0":
|
||||
"@types/d3-delaunay@6.0.1":
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-delaunay/-/d3-delaunay-6.0.1.tgz#006b7bd838baec1511270cb900bf4fc377bbbf41"
|
||||
integrity sha512-tLxQ2sfT0p6sxdG75c6f/ekqxjyYR0+LwPrsO1mbC9YDBzPJhs2HbJJRrn8Ez1DBoHRo2yx7YEATI+8V1nGMnQ==
|
||||
|
||||
"@types/d3-format@3.0.1":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.1.tgz#194f1317a499edd7e58766f96735bdc0216bb89d"
|
||||
integrity sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg==
|
||||
|
||||
"@types/d3-geo@3.1.0":
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-geo/-/d3-geo-3.1.0.tgz#b9e56a079449174f0a2c8684a9a4df3f60522440"
|
||||
integrity sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==
|
||||
dependencies:
|
||||
"@types/geojson" "*"
|
||||
|
||||
"@types/d3-interpolate@3.0.1", "@types/d3-interpolate@^3.0.0":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz#e7d17fa4a5830ad56fe22ce3b4fac8541a9572dc"
|
||||
integrity sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw==
|
||||
dependencies:
|
||||
"@types/d3-color" "*"
|
||||
|
||||
"@types/d3-path@^1", "@types/d3-path@^1.0.8":
|
||||
version "1.0.11"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-1.0.11.tgz#45420fee2d93387083b34eae4fe6d996edf482bc"
|
||||
integrity sha512-4pQMp8ldf7UaB/gR8Fvvy69psNHkTpD/pVw3vmEi8iZAB9EPMBruB1JvHO4BIq9QkUUd2lV1F5YXpMNj7JPBpw==
|
||||
|
||||
"@types/d3-scale@4.0.2":
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.2.tgz#41be241126af4630524ead9cb1008ab2f0f26e69"
|
||||
integrity sha512-Yk4htunhPAwN0XGlIwArRomOjdoBFXC3+kCxK2Ubg7I9shQlVSJy/pG/Ht5ASN+gdMIalpk8TJ5xV74jFsetLA==
|
||||
dependencies:
|
||||
"@types/d3-time" "*"
|
||||
|
||||
"@types/d3-shape@^1.3.1":
|
||||
version "1.3.12"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-1.3.12.tgz#8f2f9f7a12e631ce6700d6d55b84795ce2c8b259"
|
||||
integrity sha512-8oMzcd4+poSLGgV0R1Q1rOlx/xdmozS4Xab7np0eamFFUYq71AU9pOCJEFnkXW2aI/oXdVYJzw6pssbSut7Z9Q==
|
||||
dependencies:
|
||||
"@types/d3-path" "^1"
|
||||
|
||||
"@types/d3-time-format@2.1.0":
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-2.1.0.tgz#011e0fb7937be34a9a8f580ae1e2f2f1336a8a22"
|
||||
integrity sha512-/myT3I7EwlukNOX2xVdMzb8FRgNzRMpsZddwst9Ld/VFe6LyJyRp0s32l/V9XoUzk+Gqu56F/oGk6507+8BxrA==
|
||||
|
||||
"@types/d3-time@*":
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.3.tgz#3c186bbd9d12b9d84253b6be6487ca56b54f88be"
|
||||
integrity sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==
|
||||
|
||||
"@types/d3-time@3.0.0":
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819"
|
||||
integrity sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==
|
||||
|
||||
"@types/debug@^4.0.0":
|
||||
version "4.1.8"
|
||||
resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.8.tgz#cef723a5d0a90990313faec2d1e22aee5eecb317"
|
||||
@ -4282,6 +4338,11 @@
|
||||
resolved "https://registry.npmjs.org/@types/fontfaceobserver/-/fontfaceobserver-2.1.0.tgz"
|
||||
integrity sha512-Vqf183RAiFdIjUi4asKqogf2HIfLDnxn+dQo9GCpnsU5QrrsLMA2bkJU1dHRudQlizLybWD61Csd1zAgUQ3JKQ==
|
||||
|
||||
"@types/geojson@*":
|
||||
version "7946.0.14"
|
||||
resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.14.tgz#319b63ad6df705ee2a65a73ef042c8271e696613"
|
||||
integrity sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg==
|
||||
|
||||
"@types/graceful-fs@^4.1.2", "@types/graceful-fs@^4.1.3":
|
||||
version "4.1.6"
|
||||
resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae"
|
||||
@ -4395,6 +4456,11 @@
|
||||
resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.194.tgz"
|
||||
integrity sha512-r22s9tAS7imvBt2lyHC9B8AGwWnXaYb1tY09oyLkXDs4vArpYJzw09nj8MLx5VfciBPGIb+ZwG0ssYnEPJxn/g==
|
||||
|
||||
"@types/lodash@^4.14.172":
|
||||
version "4.17.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.0.tgz#d774355e41f372d5350a4d0714abb48194a489c3"
|
||||
integrity sha512-t7dhREVv6dbNj0q17X12j7yDG4bD/DHYX7o5/DbDxobP0HnGPgpRz2Ej77aL7TZT3DSw13fqUTj8J4mMnqa7WA==
|
||||
|
||||
"@types/mdast@^3.0.0":
|
||||
version "3.0.12"
|
||||
resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.12.tgz#beeb511b977c875a5b0cc92eab6fcac2f0895514"
|
||||
@ -4526,7 +4592,7 @@
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
|
||||
"@types/react-dom@18.0.10", "@types/react-dom@^18.0.0":
|
||||
"@types/react-dom@*", "@types/react-dom@18.0.10", "@types/react-dom@^18.0.0":
|
||||
version "18.0.10"
|
||||
resolved "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.0.10.tgz"
|
||||
integrity sha512-E42GW/JA4Qv15wQdqJq8DL4JhNpB3prJgjgapN3qJT9K2zO5IIAQh4VXvCEDupoqAwnz0cY4RlXeC/ajX5SFHg==
|
||||
@ -4921,6 +4987,93 @@
|
||||
resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406"
|
||||
integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==
|
||||
|
||||
"@visx/bounds@3.3.0":
|
||||
version "3.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/bounds/-/bounds-3.3.0.tgz#6ccecda636f1f970478f392dff2f6136aaead24f"
|
||||
integrity sha512-gESmN+4N2NkeUzqQEDZaS63umkGfMp9XjQcKBqtOR64mjjQtamh3lNVRWvKjJ2Zb421RbYHWq22Wv9nay6ZUOg==
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
"@types/react-dom" "*"
|
||||
prop-types "^15.5.10"
|
||||
|
||||
"@visx/curve@3.3.0":
|
||||
version "3.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/curve/-/curve-3.3.0.tgz#a5ed8f1511d404ef83c0b956ed5021088ac64b0e"
|
||||
integrity sha512-G1l1rzGWwIs8ka3mBhO/gj8uYK6XdU/3bwRSoiZ+MockMahQFPog0bUkuVgPwwzPSJfsA/E5u53Y/DNesnHQxg==
|
||||
dependencies:
|
||||
"@types/d3-shape" "^1.3.1"
|
||||
d3-shape "^1.0.6"
|
||||
|
||||
"@visx/group@3.3.0":
|
||||
version "3.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/group/-/group-3.3.0.tgz#20c1b75c1ab31798c3c702b6f58c412c688a6373"
|
||||
integrity sha512-yKepDKwJqlzvnvPS0yDuW13XNrYJE4xzT6xM7J++441nu6IybWWwextyap8ey+kU651cYDb+q1Oi6aHvQwyEyw==
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
classnames "^2.3.1"
|
||||
prop-types "^15.6.2"
|
||||
|
||||
"@visx/scale@3.5.0":
|
||||
version "3.5.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/scale/-/scale-3.5.0.tgz#c3db3863bbdd24d44781104ef5ee4cdc8df6f11d"
|
||||
integrity sha512-xo3zrXV2IZxrMq9Y9RUVJUpd93h3NO/r/y3GVi5F9AsbOzOhsLIbsPkunhO9mpUSR8LZ9TiumLEBrY+3frRBSg==
|
||||
dependencies:
|
||||
"@visx/vendor" "3.5.0"
|
||||
|
||||
"@visx/shape@3.5.0":
|
||||
version "3.5.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/shape/-/shape-3.5.0.tgz#fa4bb7a9ed863360be541d75c434503246305e36"
|
||||
integrity sha512-DP3t9jBQ7dSE3e6ptA1xO4QAIGxO55GrY/6P+S6YREuQGjZgq20TLYLAsiaoPEzFSS4tp0m12ZTPivWhU2VBTw==
|
||||
dependencies:
|
||||
"@types/d3-path" "^1.0.8"
|
||||
"@types/d3-shape" "^1.3.1"
|
||||
"@types/lodash" "^4.14.172"
|
||||
"@types/react" "*"
|
||||
"@visx/curve" "3.3.0"
|
||||
"@visx/group" "3.3.0"
|
||||
"@visx/scale" "3.5.0"
|
||||
classnames "^2.3.1"
|
||||
d3-path "^1.0.5"
|
||||
d3-shape "^1.2.0"
|
||||
lodash "^4.17.21"
|
||||
prop-types "^15.5.10"
|
||||
|
||||
"@visx/tooltip@3.3.0":
|
||||
version "3.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/tooltip/-/tooltip-3.3.0.tgz#9d5e6199dacd82052678207204a33f028fe15e02"
|
||||
integrity sha512-0ovbxnvAphEU/RVJprWHdOJT7p3YfBDpwXclXRuhIY2EkH59g8sDHatDcYwiNPeqk61jBh1KACRZxqToMuutlg==
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
"@visx/bounds" "3.3.0"
|
||||
classnames "^2.3.1"
|
||||
prop-types "^15.5.10"
|
||||
react-use-measure "^2.0.4"
|
||||
|
||||
"@visx/vendor@3.5.0":
|
||||
version "3.5.0"
|
||||
resolved "https://registry.yarnpkg.com/@visx/vendor/-/vendor-3.5.0.tgz#a9990382ba759b9c4049be303d65d2cb3ca034a8"
|
||||
integrity sha512-yt3SEZRVmt36+APsCISSO9eSOtzQkBjt+QRxNRzcTWuzwMAaF3PHCCSe31++kkpgY9yFoF+Gfes1TBe5NlETiQ==
|
||||
dependencies:
|
||||
"@types/d3-array" "3.0.3"
|
||||
"@types/d3-color" "3.1.0"
|
||||
"@types/d3-delaunay" "6.0.1"
|
||||
"@types/d3-format" "3.0.1"
|
||||
"@types/d3-geo" "3.1.0"
|
||||
"@types/d3-interpolate" "3.0.1"
|
||||
"@types/d3-scale" "4.0.2"
|
||||
"@types/d3-time" "3.0.0"
|
||||
"@types/d3-time-format" "2.1.0"
|
||||
d3-array "3.2.1"
|
||||
d3-color "3.1.0"
|
||||
d3-delaunay "6.0.2"
|
||||
d3-format "3.1.0"
|
||||
d3-geo "3.1.0"
|
||||
d3-interpolate "3.0.1"
|
||||
d3-scale "4.0.2"
|
||||
d3-time "3.1.0"
|
||||
d3-time-format "4.1.0"
|
||||
internmap "2.0.3"
|
||||
|
||||
"@volar/language-core@1.11.1", "@volar/language-core@~1.11.1":
|
||||
version "1.11.1"
|
||||
resolved "https://registry.yarnpkg.com/@volar/language-core/-/language-core-1.11.1.tgz#ecdf12ea8dc35fb8549e517991abcbf449a5ad4f"
|
||||
@ -7401,16 +7554,37 @@ cwd@^0.10.0:
|
||||
dependencies:
|
||||
internmap "1 - 2"
|
||||
|
||||
"d3-array@2.5.0 - 3":
|
||||
version "3.2.4"
|
||||
resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5"
|
||||
integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==
|
||||
dependencies:
|
||||
internmap "1 - 2"
|
||||
|
||||
d3-array@3.2.1:
|
||||
version "3.2.1"
|
||||
resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.1.tgz#39331ea706f5709417d31bbb6ec152e0328b39b3"
|
||||
integrity sha512-gUY/qeHq/yNqqoCKNq4vtpFLdoCdvyNpWoC/KNjhGbhDuQpAM9sIQQKkXSNpXa9h5KySs/gzm7R88WkUutgwWQ==
|
||||
dependencies:
|
||||
internmap "1 - 2"
|
||||
|
||||
d3-binarytree@1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.npmjs.org/d3-binarytree/-/d3-binarytree-1.0.2.tgz"
|
||||
integrity sha512-cElUNH+sHu95L04m92pG73t2MEJXKu+GeKUN1TJkFsu93E5W8E9Sc3kHEGJKgenGvj19m6upSn2EunvMgMD2Yw==
|
||||
|
||||
"d3-color@1 - 3":
|
||||
"d3-color@1 - 3", d3-color@3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz"
|
||||
integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==
|
||||
|
||||
d3-delaunay@6.0.2:
|
||||
version "6.0.2"
|
||||
resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.2.tgz#7fd3717ad0eade2fc9939f4260acfb503f984e92"
|
||||
integrity sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ==
|
||||
dependencies:
|
||||
delaunator "5"
|
||||
|
||||
"d3-dispatch@1 - 3":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz"
|
||||
@ -7440,11 +7614,18 @@ d3-binarytree@1:
|
||||
d3-quadtree "1 - 3"
|
||||
d3-timer "1 - 3"
|
||||
|
||||
"d3-format@1 - 3":
|
||||
"d3-format@1 - 3", d3-format@3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz"
|
||||
integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==
|
||||
|
||||
d3-geo@3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-3.1.0.tgz#74fd54e1f4cebd5185ac2039217a98d39b0a4c0e"
|
||||
integrity sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA==
|
||||
dependencies:
|
||||
d3-array "2.5.0 - 3"
|
||||
|
||||
"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@3.0.1:
|
||||
version "3.0.1"
|
||||
resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d"
|
||||
@ -7457,6 +7638,11 @@ d3-octree@1:
|
||||
resolved "https://registry.npmjs.org/d3-octree/-/d3-octree-1.0.2.tgz"
|
||||
integrity sha512-Qxg4oirJrNXauiuC94uKMbgxwnhdda9xRLl9ihq45srlJ4Ga3CSgqGcAL8iW7N5CIv4Oz8x3E734ulxyvHPvwA==
|
||||
|
||||
d3-path@1, d3-path@^1.0.5:
|
||||
version "1.0.9"
|
||||
resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf"
|
||||
integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==
|
||||
|
||||
"d3-quadtree@1 - 3":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz"
|
||||
@ -7470,7 +7656,7 @@ d3-octree@1:
|
||||
d3-color "1 - 3"
|
||||
d3-interpolate "1 - 3"
|
||||
|
||||
"d3-scale@1 - 4":
|
||||
"d3-scale@1 - 4", d3-scale@4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz"
|
||||
integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==
|
||||
@ -7486,14 +7672,21 @@ d3-octree@1:
|
||||
resolved "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz"
|
||||
integrity sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==
|
||||
|
||||
"d3-time-format@2 - 4":
|
||||
d3-shape@^1.0.6, d3-shape@^1.2.0:
|
||||
version "1.3.7"
|
||||
resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.3.7.tgz#df63801be07bc986bc54f63789b4fe502992b5d7"
|
||||
integrity sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==
|
||||
dependencies:
|
||||
d3-path "1"
|
||||
|
||||
"d3-time-format@2 - 4", d3-time-format@4.1.0:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz"
|
||||
integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==
|
||||
dependencies:
|
||||
d3-time "1 - 3"
|
||||
|
||||
"d3-time@1 - 3", "d3-time@2.1.1 - 3":
|
||||
"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz"
|
||||
integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==
|
||||
@ -7568,6 +7761,11 @@ de-indent@^1.0.2:
|
||||
resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d"
|
||||
integrity sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==
|
||||
|
||||
debounce@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5"
|
||||
integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==
|
||||
|
||||
debug@2.6.9, debug@4, debug@4.3.4, debug@^3.2.6, debug@^3.2.7, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@ngokevin/debug#noTimestamp:
|
||||
version "4.3.4"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
|
||||
@ -7702,6 +7900,13 @@ define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0:
|
||||
has-property-descriptors "^1.0.0"
|
||||
object-keys "^1.1.1"
|
||||
|
||||
delaunator@5:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-5.0.1.tgz#39032b08053923e924d6094fe2cde1a99cc51278"
|
||||
integrity sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==
|
||||
dependencies:
|
||||
robust-predicates "^3.0.2"
|
||||
|
||||
delayed-stream@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz"
|
||||
@ -10132,7 +10337,7 @@ internal-slot@^1.0.3, internal-slot@^1.0.4, internal-slot@^1.0.5:
|
||||
has "^1.0.3"
|
||||
side-channel "^1.0.4"
|
||||
|
||||
"internmap@1 - 2":
|
||||
"internmap@1 - 2", internmap@2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz"
|
||||
integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==
|
||||
@ -14138,7 +14343,7 @@ prompts@^2.0.1, prompts@^2.4.1:
|
||||
kleur "^3.0.3"
|
||||
sisteransi "^1.0.5"
|
||||
|
||||
prop-types@15, prop-types@15.x, prop-types@^15.0.0, prop-types@^15.5.8, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1:
|
||||
prop-types@15, prop-types@15.x, prop-types@^15.0.0, prop-types@^15.5.10, prop-types@^15.5.8, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1:
|
||||
version "15.8.1"
|
||||
resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
|
||||
integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==
|
||||
@ -14969,6 +15174,13 @@ react-universal-interface@^0.6.2:
|
||||
resolved "https://registry.npmjs.org/react-universal-interface/-/react-universal-interface-0.6.2.tgz"
|
||||
integrity sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw==
|
||||
|
||||
react-use-measure@^2.0.4:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/react-use-measure/-/react-use-measure-2.1.1.tgz#5824537f4ee01c9469c45d5f7a8446177c6cc4ba"
|
||||
integrity sha512-nocZhN26cproIiIduswYpV5y5lQpSQS1y/4KuvUCjSKmw7ZWIS/+g3aFnX3WdBkyuGUtTLif3UTqnLLhbDoQig==
|
||||
dependencies:
|
||||
debounce "^1.2.1"
|
||||
|
||||
react-use@17.4.0, react-use@^17.3.2:
|
||||
version "17.4.0"
|
||||
resolved "https://registry.yarnpkg.com/react-use/-/react-use-17.4.0.tgz#cefef258b0a6c534a5c8021c2528ac6e1a4cdc6d"
|
||||
@ -15553,6 +15765,11 @@ rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2:
|
||||
dependencies:
|
||||
glob "^7.1.3"
|
||||
|
||||
robust-predicates@^3.0.2:
|
||||
version "3.0.2"
|
||||
resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771"
|
||||
integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==
|
||||
|
||||
rtl-css-js@^1.14.0:
|
||||
version "1.16.1"
|
||||
resolved "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.16.1.tgz"
|
||||
|
@ -33,15 +33,3 @@ func (aH *APIHandler) getApdexSettings(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
aH.WriteJSON(w, r, apdexSet)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) getLatencyMetricMetadata(w http.ResponseWriter, r *http.Request) {
|
||||
metricName := r.URL.Query().Get("metricName")
|
||||
serviceName := r.URL.Query().Get("serviceName")
|
||||
metricMetadata, err := aH.reader.GetLatencyMetricMetadata(r.Context(), metricName, serviceName, aH.preferDelta)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
aH.WriteJSON(w, r, metricMetadata)
|
||||
}
|
||||
|
@ -3071,117 +3071,6 @@ func (r *ClickHouseReader) getPrevErrorID(ctx context.Context, queryParams *mode
|
||||
}
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricAutocompleteTagKey(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError) {
|
||||
|
||||
var query string
|
||||
var err error
|
||||
var tagKeyList []string
|
||||
var rows driver.Rows
|
||||
|
||||
tagsWhereClause := ""
|
||||
|
||||
for key, val := range params.MetricTags {
|
||||
tagsWhereClause += fmt.Sprintf(" AND JSONExtractString(labels, '%s') = '%s' ", key, val)
|
||||
}
|
||||
// "select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from signoz_metrics.time_series WHERE JSONExtractString(labels,'__name__')='node_udp_queues')) WHERE distinctTagKeys ILIKE '%host%';"
|
||||
if len(params.Match) != 0 {
|
||||
query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE metric_name=$1 %s)) WHERE distinctTagKeys ILIKE $2;", signozMetricDBName, signozTSTableName, tagsWhereClause)
|
||||
|
||||
rows, err = r.db.Query(ctx, query, params.MetricName, fmt.Sprintf("%%%s%%", params.Match))
|
||||
|
||||
} else {
|
||||
query = fmt.Sprintf("select distinctTagKeys from (SELECT DISTINCT arrayJoin(tagKeys) distinctTagKeys from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE metric_name=$1 %s ));", signozMetricDBName, signozTSTableName, tagsWhereClause)
|
||||
|
||||
rows, err = r.db.Query(ctx, query, params.MetricName)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
defer rows.Close()
|
||||
var tagKey string
|
||||
for rows.Next() {
|
||||
if err := rows.Scan(&tagKey); err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
tagKeyList = append(tagKeyList, tagKey)
|
||||
}
|
||||
return &tagKeyList, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricAutocompleteTagValue(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError) {
|
||||
|
||||
var query string
|
||||
var err error
|
||||
var tagValueList []string
|
||||
var rows driver.Rows
|
||||
tagsWhereClause := ""
|
||||
|
||||
for key, val := range params.MetricTags {
|
||||
tagsWhereClause += fmt.Sprintf(" AND JSONExtractString(labels, '%s') = '%s' ", key, val)
|
||||
}
|
||||
|
||||
if len(params.Match) != 0 {
|
||||
query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, '%s')) from %s.%s WHERE metric_name=$1 %s AND JSONExtractString(labels, '%s') ILIKE $2;", params.TagKey, signozMetricDBName, signozTSTableName, tagsWhereClause, params.TagKey)
|
||||
|
||||
rows, err = r.db.Query(ctx, query, params.TagKey, params.MetricName, fmt.Sprintf("%%%s%%", params.Match))
|
||||
|
||||
} else {
|
||||
query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, '%s')) FROM %s.%s WHERE metric_name=$2 %s;", params.TagKey, signozMetricDBName, signozTSTableName, tagsWhereClause)
|
||||
rows, err = r.db.Query(ctx, query, params.TagKey, params.MetricName)
|
||||
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
defer rows.Close()
|
||||
var tagValue string
|
||||
for rows.Next() {
|
||||
if err := rows.Scan(&tagValue); err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
tagValueList = append(tagValueList, tagValue)
|
||||
}
|
||||
|
||||
return &tagValueList, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricAutocompleteMetricNames(ctx context.Context, matchText string, limit int) (*[]string, *model.ApiError) {
|
||||
|
||||
var query string
|
||||
var err error
|
||||
var metricNameList []string
|
||||
var rows driver.Rows
|
||||
|
||||
query = fmt.Sprintf("SELECT DISTINCT(metric_name) from %s.%s WHERE metric_name ILIKE $1", signozMetricDBName, signozTSTableName)
|
||||
if limit != 0 {
|
||||
query = query + fmt.Sprintf(" LIMIT %d;", limit)
|
||||
}
|
||||
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", matchText))
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
defer rows.Close()
|
||||
var metricName string
|
||||
for rows.Next() {
|
||||
if err := rows.Scan(&metricName); err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
metricNameList = append(metricNameList, metricName)
|
||||
}
|
||||
|
||||
return &metricNameList, nil
|
||||
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*model.Series, string, error) {
|
||||
zap.L().Error("GetMetricResultEE is not implemented for opensource version")
|
||||
return nil, "", fmt.Errorf("GetMetricResultEE is not implemented for opensource version")
|
||||
@ -4165,66 +4054,15 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
|
||||
return &attributeValues, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricName, serviceName string, preferDelta bool) (*v3.LatencyMetricMetadataResponse, error) {
|
||||
query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s'", signozMetricDBName, signozTSTableName, metricName, serviceName)
|
||||
rows, err := r.db.Query(ctx, query, metricName)
|
||||
if err != nil {
|
||||
zap.L().Error("Error while executing query", zap.Error(err))
|
||||
return nil, fmt.Errorf("error while executing query: %s", err.Error())
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var deltaExists bool
|
||||
for rows.Next() {
|
||||
var temporality string
|
||||
if err := rows.Scan(&temporality); err != nil {
|
||||
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
|
||||
}
|
||||
if temporality == string(v3.Delta) {
|
||||
deltaExists = true
|
||||
}
|
||||
}
|
||||
|
||||
query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName, serviceName)
|
||||
rows, err = r.db.Query(ctx, query, metricName)
|
||||
if err != nil {
|
||||
zap.L().Error("Error while executing query", zap.Error(err))
|
||||
return nil, fmt.Errorf("error while executing query: %s", err.Error())
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var leFloat64 []float64
|
||||
for rows.Next() {
|
||||
var leStr string
|
||||
if err := rows.Scan(&leStr); err != nil {
|
||||
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
|
||||
}
|
||||
le, err := strconv.ParseFloat(leStr, 64)
|
||||
// ignore the error and continue if the value is not a float
|
||||
// ideally this should not happen but we have seen ClickHouse
|
||||
// returning empty string for some values
|
||||
if err != nil {
|
||||
zap.L().Error("error while parsing le value", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
if math.IsInf(le, 0) {
|
||||
continue
|
||||
}
|
||||
leFloat64 = append(leFloat64, le)
|
||||
}
|
||||
|
||||
return &v3.LatencyMetricMetadataResponse{
|
||||
Delta: deltaExists && preferDelta,
|
||||
Le: leFloat64,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {
|
||||
|
||||
unixMilli := common.PastDayRoundOff()
|
||||
|
||||
// Note: metric metadata should be accessible regardless of the time range selection
|
||||
// our standard retention period is 30 days, so we are querying the table v4_1_day to reduce the
|
||||
// amount of data scanned
|
||||
query := fmt.Sprintf("SELECT DISTINCT temporality, description, type, unit, is_monotonic from %s.%s WHERE metric_name=$1", signozMetricDBName, signozTSTableNameV41Day)
|
||||
rows, err := r.db.Query(ctx, query, metricName)
|
||||
query := fmt.Sprintf("SELECT temporality, description, type, unit, is_monotonic from %s.%s WHERE metric_name=$1 AND unix_milli >= $2 GROUP BY temporality, description, type, unit, is_monotonic", signozMetricDBName, signozTSTableNameV41Day)
|
||||
rows, err := r.db.Query(ctx, query, metricName, unixMilli)
|
||||
if err != nil {
|
||||
zap.L().Error("Error while fetching metric metadata", zap.Error(err))
|
||||
return nil, fmt.Errorf("error while fetching metric metadata: %s", err.Error())
|
||||
@ -4242,8 +4080,8 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, se
|
||||
}
|
||||
}
|
||||
|
||||
query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name=$1 AND type = 'Histogram' AND JSONExtractString(labels, 'service_name') = $2 ORDER BY le", signozMetricDBName, signozTSTableNameV41Day)
|
||||
rows, err = r.db.Query(ctx, query, metricName, serviceName)
|
||||
query = fmt.Sprintf("SELECT JSONExtractString(labels, 'le') as le from %s.%s WHERE metric_name=$1 AND unix_milli >= $2 AND type = 'Histogram' AND JSONExtractString(labels, 'service_name') = $3 GROUP BY le ORDER BY le", signozMetricDBName, signozTSTableNameV41Day)
|
||||
rows, err = r.db.Query(ctx, query, metricName, unixMilli, serviceName)
|
||||
if err != nil {
|
||||
zap.L().Error("Error while executing query", zap.Error(err))
|
||||
return nil, fmt.Errorf("error while executing query: %s", err.Error())
|
||||
|
@ -30,7 +30,6 @@ import (
|
||||
logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/parser"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/querier"
|
||||
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||
@ -39,7 +38,6 @@ import (
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
|
||||
|
||||
"go.uber.org/multierr"
|
||||
"go.uber.org/zap"
|
||||
@ -326,14 +324,6 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
|
||||
}
|
||||
}
|
||||
|
||||
func (aH *APIHandler) RegisterMetricsRoutes(router *mux.Router, am *AuthMiddleware) {
|
||||
subRouter := router.PathPrefix("/api/v2/metrics").Subrouter()
|
||||
subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeMetricsV2)).Methods(http.MethodPost)
|
||||
subRouter.HandleFunc("/autocomplete/list", am.ViewAccess(aH.metricAutocompleteMetricName)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/autocomplete/tagKey", am.ViewAccess(aH.metricAutocompleteTagKey)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/autocomplete/tagValue", am.ViewAccess(aH.metricAutocompleteTagValue)).Methods(http.MethodGet)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMiddleware) {
|
||||
subRouter := router.PathPrefix("/api/v3").Subrouter()
|
||||
subRouter.HandleFunc("/autocomplete/aggregate_attributes", am.ViewAccess(
|
||||
@ -419,8 +409,6 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) {
|
||||
router.HandleFunc("/api/v1/settings/ingestion_key", am.AdminAccess(aH.insertIngestionKey)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/settings/ingestion_key", am.ViewAccess(aH.getIngestionKeys)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/metric_meta", am.ViewAccess(aH.getLatencyMetricMetadata)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/version", am.OpenAccess(aH.getVersion)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/featureFlags", am.OpenAccess(aH.getFeatureFlags)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/configs", am.OpenAccess(aH.getConfigs)).Methods(http.MethodGet)
|
||||
@ -495,62 +483,6 @@ func (aH *APIHandler) getRule(w http.ResponseWriter, r *http.Request) {
|
||||
aH.Respond(w, ruleResponse)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) metricAutocompleteMetricName(w http.ResponseWriter, r *http.Request) {
|
||||
matchText := r.URL.Query().Get("match")
|
||||
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
|
||||
if err != nil {
|
||||
limit = 0 // no limit
|
||||
}
|
||||
|
||||
metricNameList, apiErrObj := aH.reader.GetMetricAutocompleteMetricNames(r.Context(), matchText, limit)
|
||||
|
||||
if apiErrObj != nil {
|
||||
RespondError(w, apiErrObj, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, metricNameList)
|
||||
|
||||
}
|
||||
|
||||
func (aH *APIHandler) metricAutocompleteTagKey(w http.ResponseWriter, r *http.Request) {
|
||||
metricsAutocompleteTagKeyParams, apiErrorObj := parser.ParseMetricAutocompleteTagParams(r)
|
||||
if apiErrorObj != nil {
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
tagKeyList, apiErrObj := aH.reader.GetMetricAutocompleteTagKey(r.Context(), metricsAutocompleteTagKeyParams)
|
||||
|
||||
if apiErrObj != nil {
|
||||
RespondError(w, apiErrObj, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, tagKeyList)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) metricAutocompleteTagValue(w http.ResponseWriter, r *http.Request) {
|
||||
metricsAutocompleteTagValueParams, apiErrorObj := parser.ParseMetricAutocompleteTagParams(r)
|
||||
|
||||
if len(metricsAutocompleteTagValueParams.TagKey) == 0 {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("tagKey not present in params")}
|
||||
RespondError(w, apiErrObj, nil)
|
||||
return
|
||||
}
|
||||
if apiErrorObj != nil {
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
tagValueList, apiErrObj := aH.reader.GetMetricAutocompleteTagValue(r.Context(), metricsAutocompleteTagValueParams)
|
||||
|
||||
if apiErrObj != nil {
|
||||
RespondError(w, apiErrObj, nil)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, tagValueList)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3) error {
|
||||
|
||||
metricNames := make([]string, 0)
|
||||
@ -593,215 +525,6 @@ func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParam
|
||||
return nil
|
||||
}
|
||||
|
||||
// QueryRangeMetricsV2 handles POST /api/v2/metrics/query_range. It parses a
// composite metric query from the request, runs each enabled sub-query
// concurrently (builder/ClickHouse queries via the reader, PromQL queries
// via the prom engine), merges the resulting series, and responds with a
// Prometheus-style {"resultType": "matrix", "result": [...]} envelope.
func (aH *APIHandler) QueryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
	metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)

	if apiErrorObj != nil {
		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
		RespondError(w, apiErrorObj, nil)
		return
	}

	// prometheus instant query needs same timestamp
	if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE &&
		metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.PROM {
		metricsQueryRangeParams.Start = metricsQueryRangeParams.End
	}

	// Snap End to a step boundary for builder queries. (Despite the original
	// "round up to nearest multiple" wording, integer division floors, so
	// End is rounded DOWN to the previous multiple of Step seconds.)
	if metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER {
		end := (metricsQueryRangeParams.End) / 1000
		step := metricsQueryRangeParams.Step
		metricsQueryRangeParams.End = (end / step * step) * 1000
	}

	// channelResult carries one sub-query's outcome: either its series or
	// its error together with the query text (for error reporting).
	type channelResult struct {
		Series []*model.Series
		Err    error
		Name   string
		Query  string
	}

	// execClickHouseQueries runs each named SQL query in its own goroutine
	// and collects the per-query series. On any failure it returns the
	// combined error plus a name->query map of the failing queries.
	execClickHouseQueries := func(queries map[string]string) ([]*model.Series, error, map[string]string) {
		var seriesList []*model.Series
		ch := make(chan channelResult, len(queries)) // buffered: senders never block
		var wg sync.WaitGroup

		for name, query := range queries {
			wg.Add(1)
			go func(name, query string) {
				defer wg.Done()
				seriesList, err := aH.reader.GetMetricResult(r.Context(), query)
				for _, series := range seriesList {
					series.QueryName = name
				}

				if err != nil {
					ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
					return
				}
				ch <- channelResult{Series: seriesList}
			}(name, query)
		}

		wg.Wait()
		close(ch)

		var errs []error
		errQuriesByName := make(map[string]string)
		// read values from the channel
		for r := range ch {
			if r.Err != nil {
				errs = append(errs, r.Err)
				errQuriesByName[r.Name] = r.Query
				continue
			}
			seriesList = append(seriesList, r.Series...)
		}
		if len(errs) != 0 {
			return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
		}
		return seriesList, nil, nil
	}

	// execPromQueries expands Go-template variables in each enabled PromQL
	// query, runs them concurrently through the prom range-query engine,
	// and converts the resulting matrix into model.Series values.
	execPromQueries := func(metricsQueryRangeParams *model.QueryRangeParamsV2) ([]*model.Series, error, map[string]string) {
		var seriesList []*model.Series
		ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
		var wg sync.WaitGroup

		for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
			if query.Disabled {
				continue
			}
			wg.Add(1)
			go func(name string, query *model.PromQuery) {
				var seriesList []*model.Series // shadows outer list; merged via the channel
				defer wg.Done()
				tmpl := template.New("promql-query")
				tmpl, tmplErr := tmpl.Parse(query.Query)
				if tmplErr != nil {
					ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
					return
				}
				var queryBuf bytes.Buffer
				tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
				if tmplErr != nil {
					ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
					return
				}
				// NOTE(review): this writes the expanded query back into the
				// shared params struct from a goroutine — presumably each
				// PromQuery is touched by exactly one goroutine; confirm.
				query.Query = queryBuf.String()
				queryModel := model.QueryRangeParams{
					Start: time.UnixMilli(metricsQueryRangeParams.Start),
					End:   time.UnixMilli(metricsQueryRangeParams.End),
					Step:  time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
					Query: query.Query,
				}
				promResult, _, err := aH.reader.GetQueryRangeResult(r.Context(), &queryModel)
				if err != nil {
					ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
					return
				}
				matrix, _ := promResult.Matrix()
				for _, v := range matrix {
					var s model.Series
					s.QueryName = name
					s.Labels = v.Metric.Copy().Map()
					for _, p := range v.Floats {
						s.Points = append(s.Points, model.MetricPoint{Timestamp: p.T, Value: p.F})
					}
					seriesList = append(seriesList, &s)
				}
				ch <- channelResult{Series: seriesList}
			}(name, query)
		}

		wg.Wait()
		close(ch)

		var errs []error
		errQuriesByName := make(map[string]string)
		// read values from the channel
		for r := range ch {
			if r.Err != nil {
				errs = append(errs, r.Err)
				errQuriesByName[r.Name] = r.Query
				continue
			}
			seriesList = append(seriesList, r.Series...)
		}
		if len(errs) != 0 {
			return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
		}
		return seriesList, nil, nil
	}

	// Dispatch on query type: builder queries are compiled to SQL first,
	// raw ClickHouse queries get template-variable expansion, PromQL goes
	// straight to the prom executor.
	var seriesList []*model.Series
	var err error
	var errQuriesByName map[string]string
	switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
	case model.QUERY_BUILDER:
		runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
		if runQueries.Err != nil {
			RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: runQueries.Err}, nil)
			return
		}
		seriesList, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)

	case model.CLICKHOUSE:
		queries := make(map[string]string)
		for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
			if chQuery.Disabled {
				continue
			}
			tmpl := template.New("clickhouse-query")
			tmpl, err := tmpl.Parse(chQuery.Query)
			if err != nil {
				RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
				return
			}
			var query bytes.Buffer

			// replace go template variables
			querytemplate.AssignReservedVars(metricsQueryRangeParams)

			err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
			if err != nil {
				RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
				return
			}

			queries[name] = query.String()
		}
		seriesList, err, errQuriesByName = execClickHouseQueries(queries)
	case model.PROM:
		seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
	default:
		err = fmt.Errorf("invalid query type")
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, errQuriesByName)
		return
	}

	if err != nil {
		apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
		RespondError(w, apiErrObj, errQuriesByName)
		return
	}
	// A QUERY_VALUE panel must reduce to a single series for builder and
	// ClickHouse queries; anything else is a bad query.
	if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE &&
		len(seriesList) > 1 &&
		(metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER ||
			metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.CLICKHOUSE) {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
		return
	}

	// Prometheus-compatible response envelope.
	type ResponseFormat struct {
		ResultType string          `json:"resultType"`
		Result     []*model.Series `json:"result"`
	}
	resp := ResponseFormat{ResultType: "matrix", Result: seriesList}
	aH.Respond(w, resp)
}
|
||||
|
||||
// populateTemporality same as addTemporality but for v4 and better
|
||||
func (aH *APIHandler) populateTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3) error {
|
||||
|
||||
|
@ -5,60 +5,9 @@ import (
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/govaluate"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// RunQueries bundles the per-query-name ClickHouse SQL produced by the
// metric query builder with any error encountered while building it.
type RunQueries struct {
	Queries map[string]string
	Err     error
}

// AggregateOperatorToPercentile maps percentile-style aggregate operators
// (plain quantiles and histogram quantiles) to the fraction handed to
// ClickHouse's quantile()/histogramQuantile() functions.
var AggregateOperatorToPercentile = map[model.AggregateOperator]float64{
	model.P05: 0.5, // NOTE(review): 0.5 for P05 looks like a typo for 0.05 — confirm before relying on it
	model.P10: 0.10,
	model.P20: 0.20,
	model.P25: 0.25,
	model.P50: 0.50,
	model.P75: 0.75,
	model.P90: 0.90,
	model.P95: 0.95,
	model.P99: 0.99,
	model.HIST_QUANTILE_50: 0.50,
	model.HIST_QUANTILE_75: 0.75,
	model.HIST_QUANTILE_90: 0.90,
	model.HIST_QUANTILE_95: 0.95,
	model.HIST_QUANTILE_99: 0.99,
}

// AggregateOperatorToSQLFunc maps the simple and rate aggregate operators
// to the ClickHouse aggregate function applied per time bucket.
var AggregateOperatorToSQLFunc = map[model.AggregateOperator]string{
	model.AVG:      "avg",
	model.MAX:      "max",
	model.MIN:      "min",
	model.SUM:      "sum",
	model.RATE_SUM: "sum",
	model.RATE_AVG: "avg",
	model.RATE_MAX: "max",
	model.RATE_MIN: "min",
}

// rateWithoutNegative is a ClickHouse expression computing the per-second
// rate between adjacent rows; it yields nan on a negative delta (counter
// reset) or a non-increasing timestamp so such points can be filtered out.
// See https://github.com/SigNoz/signoz/issues/2151#issuecomment-1467249056
var rateWithoutNegative = `if (runningDifference(value) < 0 OR runningDifference(ts) <= 0, nan, runningDifference(value)/runningDifference(ts))`

// SupportedFunctions lists the scalar function names accepted in metric
// formula expressions (registered as govaluate functions; see GoValuateFuncs).
var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"}
|
||||
|
||||
func GoValuateFuncs() map[string]govaluate.ExpressionFunction {
|
||||
var GoValuateFuncs = map[string]govaluate.ExpressionFunction{}
|
||||
for _, fn := range SupportedFunctions {
|
||||
GoValuateFuncs[fn] = func(args ...interface{}) (interface{}, error) {
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
return GoValuateFuncs
|
||||
}
|
||||
|
||||
// FormattedValue formats the value to be used in clickhouse query
|
||||
func FormattedValue(v interface{}) string {
|
||||
switch x := v.(type) {
|
||||
@ -97,398 +46,6 @@ func FormattedValue(v interface{}) string {
|
||||
}
|
||||
}
|
||||
|
||||
// BuildMetricsTimeSeriesFilterQuery builds the sub-query to be used for filtering
// timeseries based on search criteria.
//
// It returns a "SELECT <labels/group columns> fingerprint FROM <time series table>
// WHERE <conditions>" fragment that the outer query joins on fingerprint.
// fs may be nil (no tag filters); groupTags are only projected when the
// aggregate operator needs per-tag columns. An unknown filter operator
// yields an error. Filter keys/values are interpolated into the SQL text
// via FormattedValue rather than bound as parameters.
func BuildMetricsTimeSeriesFilterQuery(fs *model.FilterSet, groupTags []string, metricName string, aggregateOperator model.AggregateOperator) (string, error) {
	var conditions []string
	conditions = append(conditions, fmt.Sprintf("metric_name = %s", FormattedValue(metricName)))
	if fs != nil && len(fs.Items) != 0 {
		for _, item := range fs.Items {
			toFormat := item.Value
			op := strings.ToLower(strings.TrimSpace(item.Operator))
			// if the received value is an array for like/match op, just take the first value
			if op == "like" || op == "match" || op == "nlike" || op == "nmatch" {
				x, ok := item.Value.([]interface{})
				if ok {
					if len(x) == 0 {
						// empty array: silently drop this filter item
						continue
					}
					toFormat = x[0]
				}
			}
			fmtVal := FormattedValue(toFormat)
			// Each operator maps to a ClickHouse predicate over the JSON-encoded labels column.
			switch op {
			case "eq":
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key, fmtVal))
			case "neq":
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key, fmtVal))
			case "in":
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key, fmtVal))
			case "nin":
				conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key, fmtVal))
			case "like":
				conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key, fmtVal))
			case "nlike":
				conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key, fmtVal))
			case "match":
				conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key, fmtVal))
			case "nmatch":
				conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key, fmtVal))
			default:
				return "", fmt.Errorf("unsupported operation")
			}
		}
	}
	queryString := strings.Join(conditions, " AND ")

	// NOOP/RATE need the raw labels column; other operators project each
	// grouping tag as its own named column. Note the trailing comma in both
	// forms — the outer template appends "fingerprint" after it.
	var selectLabels string
	if aggregateOperator == model.NOOP || aggregateOperator == model.RATE {
		selectLabels = "labels,"
	} else {
		for _, tag := range groupTags {
			selectLabels += fmt.Sprintf(" JSONExtractString(labels, '%s') as %s,", tag, tag)
		}
	}

	filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME, queryString)

	return filterSubQuery, nil
}
|
||||
|
||||
// BuildMetricQuery assembles the full ClickHouse SQL for one metric query:
// a time-bucketed aggregation over the samples table, inner-joined on
// fingerprint with the time-series filter sub-query produced by
// BuildMetricsTimeSeriesFilterQuery. The shape of the outer query depends
// on mq.AggregateOperator (plain aggregates, rates, quantiles, histogram
// quantiles, counts, or NOOP pass-through). Returns an error for a
// QUERY_VALUE panel with grouping tags or an unsupported operator.
// NOTE(review): the tableName parameter is not referenced in this body —
// confirm whether it is intentionally unused.
func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, tableName string) (string, error) {

	// A single-value panel cannot carry grouping tags.
	if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE && len(mq.GroupingTags) != 0 {
		return "", fmt.Errorf("reduce operator cannot be applied for the query")
	}

	filterSubQuery, err := BuildMetricsTimeSeriesFilterQuery(mq.TagFilters, mq.GroupingTags, mq.MetricName, mq.AggregateOperator)
	if err != nil {
		return "", err
	}

	// Time filter on the samples table; Start/End are millisecond epochs.
	samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", FormattedValue(mq.MetricName), qp.Start, qp.End)

	// Select the aggregate value for interval
	// Placeholders, in order: select columns, step seconds, aggregate
	// expression, filter sub-query, GROUP BY list, ORDER BY prefix.
	queryTmpl :=
		"SELECT %s" +
			" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
			" %s as value" +
			" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
			" INNER JOIN" +
			" (%s) as filtered_time_series" +
			" USING fingerprint" +
			" WHERE " + samplesTableTimeFilter +
			" GROUP BY %s" +
			" ORDER BY %s ts"

	// Histogram quantiles aggregate across buckets, so "le" must be dropped
	// from the final grouping while kept in the inner per-bucket grouping.
	tagsWithoutLe := []string{}
	for _, tag := range mq.GroupingTags {
		if tag != "le" {
			tagsWithoutLe = append(tagsWithoutLe, tag)
		}
	}

	groupByWithoutLe := groupBy(tagsWithoutLe...)
	groupTagsWithoutLe := groupSelect(tagsWithoutLe...)

	// NOTE: the local groupBy string deliberately shadows the groupBy()
	// helper from this point on.
	groupBy := groupBy(mq.GroupingTags...)
	groupTags := groupSelect(mq.GroupingTags...)

	switch mq.AggregateOperator {
	case model.RATE:
		// Calculate rate of change of metric for each unique time series
		groupBy = "fingerprint, ts"
		groupTags = "fingerprint,"
		op := "max(value)" // max value should be the closest value for point in time
		subQuery := fmt.Sprintf(
			queryTmpl, "any(labels) as labels, "+groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WHERE isNaN(value) = 0`

		query = fmt.Sprintf(query, "labels as fullLabels,", subQuery)
		return query, nil
	case model.SUM_RATE:
		// Per-series rate first (grouped by fingerprint), then summed over
		// the requested grouping tags.
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmpl, rateGroupTags, qp.Step, op, filterSubQuery, rateGroupBy, rateGroupTags,
		) // labels will be same so any should be fine
		// NOTE(review): no space before "as value" here — the expression ends
		// in ')', producing ")as value"; presumably ClickHouse accepts it, but
		// it differs from the RATE case above. Confirm.
		query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, query, groupBy, groupTags)
		return query, nil
	case model.RATE_SUM, model.RATE_MAX, model.RATE_AVG, model.RATE_MIN:
		// Aggregate per bucket first, then take the rate of the aggregate.
		op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator])
		subQuery := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
		query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		return query, nil
	case model.P05, model.P10, model.P20, model.P25, model.P50, model.P75, model.P90, model.P95, model.P99:
		// Plain quantile over raw sample values per bucket.
		op := fmt.Sprintf("quantile(%v)(value)", AggregateOperatorToPercentile[mq.AggregateOperator])
		query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
		return query, nil
	case model.HIST_QUANTILE_50, model.HIST_QUANTILE_75, model.HIST_QUANTILE_90, model.HIST_QUANTILE_95, model.HIST_QUANTILE_99:
		// Histogram quantile: per-series rate -> sum per (tags, le) -> feed
		// the per-bucket ("le") series into histogramQuantile, grouping
		// without "le" in the final layer.
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmpl, rateGroupTags, qp.Step, op, filterSubQuery, rateGroupBy, rateGroupTags,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		// filter out NaN values from the rate query as histogramQuantile doesn't support NaN values
		query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s HAVING isNaN(value) = 0 ORDER BY %s ts`, groupTags, query, groupBy, groupTags)
		value := AggregateOperatorToPercentile[mq.AggregateOperator]

		query = fmt.Sprintf(`SELECT %s ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTagsWithoutLe, value, query, groupByWithoutLe, groupTagsWithoutLe)
		return query, nil
	case model.AVG, model.SUM, model.MIN, model.MAX:
		op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator])
		query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
		return query, nil
	case model.COUNT:
		op := "toFloat64(count(*))"
		query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
		return query, nil
	case model.COUNT_DISTINCT:
		op := "toFloat64(count(distinct(value)))"
		query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
		return query, nil
	case model.NOOP:
		// Pass-through: one row per (fingerprint, labels, bucket) with the
		// raw value (any(value)), no aggregation across series.
		queryTmpl :=
			"SELECT fingerprint, labels as fullLabels," +
				" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
				" any(value) as value" +
				" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
				" INNER JOIN" +
				" (%s) as filtered_time_series" +
				" USING fingerprint" +
				" WHERE " + samplesTableTimeFilter +
				" GROUP BY fingerprint, labels, ts" +
				" ORDER BY fingerprint, labels, ts"
		query := fmt.Sprintf(queryTmpl, qp.Step, filterSubQuery)
		return query, nil
	default:
		return "", fmt.Errorf("unsupported aggregate operator")
	}
}
|
||||
|
||||
// groupBy renders the GROUP BY column list for a metrics query,
// always terminating with the time-bucket column "ts".
func groupBy(tags ...string) string {
	cols := make([]string, 0, len(tags)+1)
	cols = append(cols, tags...)
	cols = append(cols, "ts")
	return strings.Join(cols, ",")
}
|
||||
|
||||
// groupSelect renders the tag list for a SELECT clause. When tags are
// present a trailing ", " is appended so the caller can concatenate
// further columns; with no tags it returns the empty string.
func groupSelect(tags ...string) string {
	if len(tags) == 0 {
		return ""
	}
	return strings.Join(tags, ",") + ", "
}
|
||||
|
||||
// validateExpressions validates the math expressions using the list of
|
||||
// allowed functions.
|
||||
func validateExpressions(expressions []string, funcs map[string]govaluate.ExpressionFunction) []error {
|
||||
var errs []error
|
||||
for _, exp := range expressions {
|
||||
_, err := govaluate.NewEvaluableExpressionWithFunctions(exp, funcs)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
}
|
||||
return errs
|
||||
}
|
||||
|
||||
// FormatErrs returns formatted error string
|
||||
func FormatErrs(errs []error, separator string) string {
|
||||
var errStrs []string
|
||||
for _, err := range errs {
|
||||
errStrs = append(errStrs, err.Error())
|
||||
}
|
||||
return strings.Join(errStrs, separator)
|
||||
}
|
||||
|
||||
func reduceQuery(query string, reduceTo model.ReduceToOperator, aggregateOperator model.AggregateOperator) (string, error) {
|
||||
var selectLabels string
|
||||
var groupBy string
|
||||
// NOOP and RATE can possibly return multiple time series and reduce should be applied
|
||||
// for each uniques series. When the final result contains more than one series we throw
|
||||
// an error post DB fetching. Otherwise just return the single data. This is not known until queried so the
|
||||
// the query is prepared accordingly.
|
||||
if aggregateOperator == model.NOOP || aggregateOperator == model.RATE {
|
||||
selectLabels = ", any(fullLabels) as fullLabels"
|
||||
groupBy = "GROUP BY fingerprint"
|
||||
}
|
||||
// the timestamp picked is not relevant here since the final value used is show the single
|
||||
// chart with just the query value. For the quer
|
||||
switch reduceTo {
|
||||
case model.RLAST:
|
||||
query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
|
||||
case model.RSUM:
|
||||
query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
|
||||
case model.RAVG:
|
||||
query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
|
||||
case model.RMAX:
|
||||
query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
|
||||
case model.RMIN:
|
||||
query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported reduce operator")
|
||||
}
|
||||
return query, nil
|
||||
}
|
||||
|
||||
// varToQuery constructs the ClickHouse query for each named builder block
// referenced by any expression, returning a map from variable name (e.g.
// "A") to its SQL. Errors from all variables of one expression are
// accumulated and reported together.
func varToQuery(qp *model.QueryRangeParamsV2, tableName string) (map[string]string, error) {
	evalFuncs := GoValuateFuncs()
	varToQuery := make(map[string]string)
	for _, builderQuery := range qp.CompositeMetricQuery.BuilderQueries {
		// err should be nil here since the expression is already validated
		expression, _ := govaluate.NewEvaluableExpressionWithFunctions(builderQuery.Expression, evalFuncs)

		// Use the parsed expression and build the query for each variable
		// if not already exists
		var errs []error
		for _, _var := range expression.Vars() {
			if _, ok := varToQuery[_var]; !ok {
				// Every variable must name one of the builder queries.
				mq, varExists := qp.CompositeMetricQuery.BuilderQueries[_var]
				if !varExists {
					errs = append(errs, fmt.Errorf("variable %s not found in builder queries", _var))
					continue
				}
				query, err := BuildMetricQuery(qp, mq, tableName)
				if err != nil {
					errs = append(errs, err)
				} else {
					// Single-value panels additionally reduce the series
					// to one value per series.
					if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE {
						query, err = reduceQuery(query, mq.ReduceTo, mq.AggregateOperator)
						if err != nil {
							errs = append(errs, err)
						}
					}
				}
				// NOTE(review): on a BuildMetricQuery error this stores the
				// empty query; harmless today because errs being non-empty
				// makes the function return nil below, but worth confirming.
				varToQuery[_var] = query
			}
		}
		if len(errs) != 0 {
			return nil, fmt.Errorf("error while creating query: %s", FormatErrs(errs, "\n"))
		}
	}
	return varToQuery, nil
}
|
||||
|
||||
func unique(slice []string) []string {
|
||||
keys := make(map[string]struct{})
|
||||
list := []string{}
|
||||
for _, entry := range slice {
|
||||
if _, value := keys[entry]; !value {
|
||||
keys[entry] = struct{}{}
|
||||
list = append(list, entry)
|
||||
}
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
// expressionToQuery constructs the SQL for a formula expression (e.g.
// "A/B") by INNER JOINing the per-variable sub-queries on their grouping
// tags plus ts, and evaluating the expression over the joined columns.
// All variables must share the same GroupingTags, otherwise the join
// columns would not line up.
func expressionToQuery(qp *model.QueryRangeParamsV2, varToQuery map[string]string, expression *govaluate.EvaluableExpression) (string, error) {
	var formulaQuery string
	vars := unique(expression.Vars())
	// Verify every adjacent pair of variables agrees on GroupingTags.
	for idx, var_ := range vars[1:] {
		x, y := vars[idx], var_
		if !reflect.DeepEqual(qp.CompositeMetricQuery.BuilderQueries[x].GroupingTags, qp.CompositeMetricQuery.BuilderQueries[y].GroupingTags) {
			return "", fmt.Errorf("group by must be same")
		}
	}
	// Rewrite each variable token "A" to the joined column "A.value".
	var modified []govaluate.ExpressionToken
	tokens := expression.Tokens()
	for idx := range tokens {
		token := tokens[idx]
		if token.Kind == govaluate.VARIABLE {
			token.Value = fmt.Sprintf("%v.value", token.Value)
			token.Meta = fmt.Sprintf("%v.value", token.Meta)
		}
		modified = append(modified, token)
	}
	// err should be nil here since the expression is already validated
	formula, _ := govaluate.NewEvaluableExpressionFromTokens(modified)

	var formulaSubQuery string
	var joinUsing string
	var prevVar string
	for idx, var_ := range vars {
		query := varToQuery[var_]
		groupTags := qp.CompositeMetricQuery.BuilderQueries[var_].GroupingTags
		// NOTE(review): appending to GroupingTags may write into the
		// slice's shared backing array on later iterations — confirm
		// callers don't reuse GroupingTags afterwards.
		groupTags = append(groupTags, "ts")
		// Build the SELECT column list once, from the first variable
		// (labels are the same across variables so any is fine).
		if joinUsing == "" {
			for _, tag := range groupTags {
				joinUsing += fmt.Sprintf("%s.%s as %s, ", var_, tag, tag)
			}
			joinUsing = strings.TrimSuffix(joinUsing, ", ")
		}
		formulaSubQuery += fmt.Sprintf("(%s) as %s ", query, var_)
		// Every variable after the first is joined to its predecessor
		// on each grouping tag (and ts).
		if idx > 0 {
			formulaSubQuery += " ON "
			for _, tag := range groupTags {
				formulaSubQuery += fmt.Sprintf("%s.%s = %s.%s AND ", prevVar, tag, var_, tag)
			}
			formulaSubQuery = strings.TrimSuffix(formulaSubQuery, " AND ")
		}
		if idx < len(vars)-1 {
			formulaSubQuery += " INNER JOIN"
		}
		prevVar = var_
	}
	formulaQuery = fmt.Sprintf("SELECT %s, %s as value FROM ", joinUsing, formula.ExpressionString()) + formulaSubQuery
	return formulaQuery, nil
}
|
||||
|
||||
// PrepareBuilderMetricQueries constructs the queries to be run for query range timeseries
|
||||
func PrepareBuilderMetricQueries(qp *model.QueryRangeParamsV2, tableName string) *RunQueries {
|
||||
evalFuncs := GoValuateFuncs()
|
||||
|
||||
// validate the expressions
|
||||
var expressions []string
|
||||
for _, bq := range qp.CompositeMetricQuery.BuilderQueries {
|
||||
expressions = append(expressions, bq.Expression)
|
||||
}
|
||||
if errs := validateExpressions(expressions, evalFuncs); len(errs) != 0 {
|
||||
return &RunQueries{Err: fmt.Errorf("invalid expressions: %s", FormatErrs(errs, "\n"))}
|
||||
}
|
||||
|
||||
varToQuery, err := varToQuery(qp, tableName)
|
||||
if err != nil {
|
||||
return &RunQueries{Err: err}
|
||||
}
|
||||
|
||||
namedQueries := make(map[string]string)
|
||||
|
||||
var errs []error
|
||||
for _, builderQuery := range qp.CompositeMetricQuery.BuilderQueries {
|
||||
if builderQuery.Disabled {
|
||||
continue
|
||||
}
|
||||
expression, _ := govaluate.NewEvaluableExpressionWithFunctions(builderQuery.Expression, evalFuncs)
|
||||
tokens := expression.Tokens()
|
||||
// expression with one token is used to represent
|
||||
// that there are no functions applied on query
|
||||
if len(tokens) == 1 {
|
||||
_var := tokens[0].Value.(string)
|
||||
namedQueries[builderQuery.QueryName] = varToQuery[_var]
|
||||
} else {
|
||||
query, err := expressionToQuery(qp, varToQuery, expression)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
namedQueries[builderQuery.QueryName] = query
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return &RunQueries{Err: fmt.Errorf("errors with formulas: %s", FormatErrs(errs, "\n"))}
|
||||
}
|
||||
return &RunQueries{Queries: namedQueries}
|
||||
}
|
||||
|
||||
// PromFormattedValue formats the value to be used in promql
|
||||
func PromFormattedValue(v interface{}) string {
|
||||
switch x := v.(type) {
|
||||
|
@ -1,261 +0,0 @@
|
||||
package metrics
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
// TestBuildQuery checks query generation for a single builder query:
// a RATE_* operator produces the metric-name filter plus the
// negative-rate guard, and a histogram quantile additionally filters
// NaN rates with a HAVING clause.
func TestBuildQuery(t *testing.T) {
	Convey("TestSimpleQueryWithName", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:         "A",
						MetricName:        "name",
						AggregateOperator: model.RATE_MAX,
						Expression:        "A",
					},
				},
			},
		}
		queries := PrepareBuilderMetricQueries(q, "table").Queries
		So(len(queries), ShouldEqual, 1)
		So(queries["A"], ShouldContainSubstring, "WHERE metric_name = 'name'")
		So(queries["A"], ShouldContainSubstring, rateWithoutNegative)
	})

	Convey("TestSimpleQueryWithHistQuantile", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:         "A",
						MetricName:        "name",
						AggregateOperator: model.HIST_QUANTILE_99,
						Expression:        "A",
					},
				},
			},
		}
		queries := PrepareBuilderMetricQueries(q, "table").Queries
		So(len(queries), ShouldEqual, 1)
		So(queries["A"], ShouldContainSubstring, "WHERE metric_name = 'name'")
		So(queries["A"], ShouldContainSubstring, rateWithoutNegative)
		So(queries["A"], ShouldContainSubstring, "HAVING isNaN(value) = 0")
	})
}
|
||||
|
||||
// TestBuildQueryWithFilters checks that tag filters are rendered into the
// WHERE clause: "neq" becomes a != comparison and "nmatch" becomes a
// negated regex match on the JSON-extracted label.
func TestBuildQueryWithFilters(t *testing.T) {
	Convey("TestBuildQueryWithFilters", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:  "A",
						MetricName: "name",
						TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
							{Key: "a", Value: "b", Operator: "neq"},
							{Key: "code", Value: "ERROR_*", Operator: "nmatch"},
						}},
						AggregateOperator: model.RATE_MAX,
						Expression:        "A",
					},
				},
			},
		}
		queries := PrepareBuilderMetricQueries(q, "table").Queries
		So(len(queries), ShouldEqual, 1)

		So(queries["A"], ShouldContainSubstring, "WHERE metric_name = 'name' AND JSONExtractString(labels, 'a') != 'b'")
		So(queries["A"], ShouldContainSubstring, rateWithoutNegative)
		So(queries["A"], ShouldContainSubstring, "not match(JSONExtractString(labels, 'code'), 'ERROR_*')")
	})
}
|
||||
|
||||
// TestBuildQueryWithMultipleQueries checks that two independent builder
// queries each yield their own SQL, and that an "in" filter renders as a
// ClickHouse IN list.
func TestBuildQueryWithMultipleQueries(t *testing.T) {
	Convey("TestBuildQueryWithFilters", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:  "A",
						MetricName: "name",
						TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
							{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
						}},
						AggregateOperator: model.RATE_AVG,
						Expression:        "A",
					},
					"B": {
						QueryName:         "B",
						MetricName:        "name2",
						AggregateOperator: model.RATE_MAX,
						Expression:        "B",
					},
				},
			},
		}
		queries := PrepareBuilderMetricQueries(q, "table").Queries
		So(len(queries), ShouldEqual, 2)
		So(queries["A"], ShouldContainSubstring, "WHERE metric_name = 'name' AND JSONExtractString(labels, 'in') IN ['a','b','c']")
		So(queries["A"], ShouldContainSubstring, rateWithoutNegative)
	})
}
|
||||
|
||||
// TestBuildQueryWithMultipleQueriesAndFormula checks that a formula query
// ("A/B") is expanded into a joined SELECT over the two sub-queries, and
// that the sub-queries' filters and rate guard survive the expansion.
func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) {
	Convey("TestBuildQueryWithFilters", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:  "A",
						MetricName: "name",
						TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
							{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
						}},
						AggregateOperator: model.RATE_MAX,
						Expression:        "A",
					},
					"B": {
						MetricName:        "name2",
						AggregateOperator: model.RATE_AVG,
						Expression:        "B",
					},
					"C": {
						QueryName:  "C",
						Expression: "A/B",
					},
				},
			},
		}
		queries := PrepareBuilderMetricQueries(q, "table").Queries
		So(len(queries), ShouldEqual, 3)
		So(queries["C"], ShouldContainSubstring, "SELECT A.ts as ts, A.value / B.value")
		So(queries["C"], ShouldContainSubstring, "WHERE metric_name = 'name' AND JSONExtractString(labels, 'in') IN ['a','b','c']")
		So(queries["C"], ShouldContainSubstring, rateWithoutNegative)
	})
}
|
||||
|
||||
// TestBuildQueryWithIncorrectQueryRef checks that an expression referencing
// an undefined variable ("D") produces an error naming that variable
// rather than silently generating SQL.
func TestBuildQueryWithIncorrectQueryRef(t *testing.T) {
	Convey("TestBuildQueryWithFilters", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:  "A",
						MetricName: "name",
						TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
							{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
						}},
						AggregateOperator: model.RATE_MAX,
						Expression:        "A",
					},
					"C": {
						QueryName:  "C",
						Expression: "D*2",
					},
				},
			},
		}
		res := PrepareBuilderMetricQueries(q, "table")
		So(res.Err, ShouldNotBeNil)
		So(res.Err.Error(), ShouldContainSubstring, "variable D not found in builder queries")
	})
}
|
||||
|
||||
// TestBuildQueryWithThreeOrMoreQueriesRefAndFormula checks formula
// expansion across three base queries: disabled base queries are not
// emitted on their own, a formula over N unique variables produces N-1
// INNER JOIN ... ON clauses, and repeating the same variable ("A*A")
// does not join a query against itself.
func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) {
	Convey("TestBuildQueryWithFilters", t, func() {
		q := &model.QueryRangeParamsV2{
			Start: 1650991982000,
			End:   1651078382000,
			Step:  60,
			CompositeMetricQuery: &model.CompositeMetricQuery{
				BuilderQueries: map[string]*model.MetricQuery{
					"A": {
						QueryName:  "A",
						MetricName: "name",
						TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
							{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
						}},
						AggregateOperator: model.RATE_MAX,
						Expression:        "A",
						Disabled:          true,
					},
					"B": {
						MetricName:        "name2",
						AggregateOperator: model.RATE_AVG,
						Expression:        "B",
						Disabled:          true,
					},
					"C": {
						MetricName:        "name3",
						AggregateOperator: model.SUM_RATE,
						Expression:        "C",
						Disabled:          true,
					},
					"F1": {
						QueryName:  "F1",
						Expression: "A/B",
					},
					"F2": {
						QueryName:  "F2",
						Expression: "A/(B+C)",
					},
					"F3": {
						QueryName:  "F3",
						Expression: "A*A",
					},
					"F4": {
						QueryName:  "F4",
						Expression: "A*B*C",
					},
					"F5": {
						QueryName:  "F5",
						Expression: "((A - B) / B) * 100",
					},
				},
			},
		}
		res := PrepareBuilderMetricQueries(q, "table")
		So(res.Err, ShouldBeNil)
		queries := res.Queries
		So(len(queries), ShouldEqual, 5)
		So(queries["F1"], ShouldContainSubstring, "SELECT A.ts as ts, A.value / B.value")
		So(strings.Count(queries["F1"], " ON "), ShouldEqual, 1)

		So(queries["F2"], ShouldContainSubstring, "SELECT A.ts as ts, A.value / (B.value + C.value)")
		So(strings.Count(queries["F2"], " ON "), ShouldEqual, 2)

		// Working with same query multiple times should not join on itself
		So(queries["F3"], ShouldNotContainSubstring, " ON ")

		So(queries["F4"], ShouldContainSubstring, "SELECT A.ts as ts, A.value * B.value * C.value")
		// Number of times JOIN ON appears is N-1 where N is number of unique queries
		So(strings.Count(queries["F4"], " ON "), ShouldEqual, 2)

		So(queries["F5"], ShouldContainSubstring, "SELECT A.ts as ts, ((A.value - B.value) / B.value) * 100")
		So(strings.Count(queries["F5"], " ON "), ShouldEqual, 1)
	})
}
|
@ -1,117 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
)
|
||||
|
||||
func validateQueryRangeParamsV2(qp *model.QueryRangeParamsV2) error {
|
||||
var errs []error
|
||||
if !(qp.DataSource >= model.METRICS && qp.DataSource <= model.LOGS) {
|
||||
errs = append(errs, fmt.Errorf("unsupported data source"))
|
||||
}
|
||||
if !(qp.CompositeMetricQuery.QueryType >= model.QUERY_BUILDER && qp.CompositeMetricQuery.QueryType <= model.PROM) {
|
||||
errs = append(errs, fmt.Errorf("unsupported query type"))
|
||||
}
|
||||
if !(qp.CompositeMetricQuery.PanelType >= model.TIME_SERIES && qp.CompositeMetricQuery.PanelType <= model.QUERY_VALUE) {
|
||||
errs = append(errs, fmt.Errorf("unsupported panel type"))
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return fmt.Errorf("one or more errors found : %s", metrics.FormatErrs(errs, ","))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ParseMetricQueryRangeParams decodes and validates the v2 query-range
// request body. It also formats dashboard variables for the PROM and
// CLICKHOUSE query types, and for QUERY_BUILDER substitutes "{{.var}}"
// placeholders in tag-filter values with the actual variable values.
func ParseMetricQueryRangeParams(r *http.Request) (*model.QueryRangeParamsV2, *model.ApiError) {

	var postData *model.QueryRangeParamsV2

	if err := json.NewDecoder(r.Body).Decode(&postData); err != nil {
		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err}
	}
	if err := validateQueryRangeParamsV2(postData); err != nil {
		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err}
	}
	// Prepare the variables for the corresponding query type; note that
	// QUERY_BUILDER variables are not formatted here (they are substituted
	// into tag filters below), so formattedVars stays empty for it.
	formattedVars := make(map[string]interface{})
	for name, value := range postData.Variables {
		if postData.CompositeMetricQuery.QueryType == model.PROM {
			formattedVars[name] = metrics.PromFormattedValue(value)
		} else if postData.CompositeMetricQuery.QueryType == model.CLICKHOUSE {
			formattedVars[name] = metrics.FormattedValue(value)
		}
	}
	// Replace the variables in metrics builder filter items with actual values.
	if postData.CompositeMetricQuery.QueryType == model.QUERY_BUILDER {
		for _, query := range postData.CompositeMetricQuery.BuilderQueries {
			if query.TagFilters == nil || len(query.TagFilters.Items) == 0 {
				continue
			}
			for idx := range query.TagFilters.Items {
				// Take a pointer so the substitution mutates the item in place.
				item := &query.TagFilters.Items[idx]
				value := item.Value
				if value != nil {
					switch x := value.(type) {
					case string:
						// NOTE(review): strings.Trim uses "{{ . }}" as a
						// character *set*, so it also strips legitimate
						// leading/trailing '{', '}', '.', and spaces from
						// the variable name — confirm this is intended.
						variableName := strings.Trim(x, "{{ . }}")
						if _, ok := postData.Variables[variableName]; ok {
							item.Value = postData.Variables[variableName]
						}
					case []interface{}:
						// Only the first element is inspected; a list whose
						// first entry is a placeholder replaces the whole value.
						if len(x) > 0 {
							switch x[0].(type) {
							case string:
								variableName := strings.Trim(x[0].(string), "{{ . }}")
								if _, ok := postData.Variables[variableName]; ok {
									item.Value = postData.Variables[variableName]
								}
							}
						}
					}
				}
			}
		}
	}
	postData.Variables = formattedVars

	return postData, nil
}
|
||||
|
||||
func ParseMetricAutocompleteTagParams(r *http.Request) (*model.MetricAutocompleteTagParams, *model.ApiError) {
|
||||
|
||||
metricName := r.URL.Query().Get("metricName")
|
||||
if len(metricName) == 0 {
|
||||
err := fmt.Errorf("metricName not present in params")
|
||||
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
}
|
||||
|
||||
tagsStr := r.URL.Query().Get("tags")
|
||||
|
||||
// parsing tags
|
||||
var tags map[string]string
|
||||
if tagsStr != "" && len(tagsStr) != 0 {
|
||||
|
||||
err := json.Unmarshal([]byte(tagsStr), &tags)
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unable to parse tags in params: %v", err)}
|
||||
}
|
||||
}
|
||||
|
||||
matchText := r.URL.Query().Get("match")
|
||||
|
||||
tagKey := r.URL.Query().Get("tagKey")
|
||||
|
||||
metricAutocompleteTagParams := &model.MetricAutocompleteTagParams{
|
||||
MetricName: metricName,
|
||||
MetricTags: tags,
|
||||
Match: matchText,
|
||||
TagKey: tagKey,
|
||||
}
|
||||
|
||||
return metricAutocompleteTagParams, nil
|
||||
}
|
@ -9,62 +9,11 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/smartystreets/assertions/should"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
)
|
||||
|
||||
// TestParseFilterSingleFilter checks that a single EQ filter item parsed
// from a request body renders as an equality condition in the WHERE clause.
func TestParseFilterSingleFilter(t *testing.T) {
	Convey("TestParseFilterSingleFilter", t, func() {
		postBody := []byte(`{
			"op": "AND",
			"items": [
				{"key": "namespace", "value": "a", "op": "EQ"}
			]
		}`)
		req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
		res, _ := parseFilterSet(req)
		query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
		So(query, ShouldContainSubstring, "WHERE metric_name = 'table' AND JSONExtractString(labels, 'namespace') = 'a'")
	})
}
|
||||
|
||||
// TestParseFilterMultipleFilter checks that EQ and IN filter items parsed
// from the same body both appear in the generated filter query.
func TestParseFilterMultipleFilter(t *testing.T) {
	Convey("TestParseFilterMultipleFilter", t, func() {
		postBody := []byte(`{
			"op": "AND",
			"items": [
				{"key": "namespace", "value": "a", "op": "EQ"},
				{"key": "host", "value": ["host-1", "host-2"], "op": "IN"}
			]
		}`)
		req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
		res, _ := parseFilterSet(req)
		query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
		So(query, should.ContainSubstring, "JSONExtractString(labels, 'host') IN ['host-1','host-2']")
		So(query, should.ContainSubstring, "JSONExtractString(labels, 'namespace') = 'a'")
	})
}
|
||||
|
||||
// TestParseFilterNotSupportedOp checks that an unrecognized filter
// operator ("PO") makes query generation fail with an error instead of
// producing SQL.
func TestParseFilterNotSupportedOp(t *testing.T) {
	Convey("TestParseFilterNotSupportedOp", t, func() {
		postBody := []byte(`{
			"op": "AND",
			"items": [
				{"key": "namespace", "value": "a", "op": "PO"}
			]
		}`)
		req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
		res, _ := parseFilterSet(req)
		_, err := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
		So(err, should.BeError, "unsupported operation")
	})
}
|
||||
|
||||
func TestParseAggregateAttrReques(t *testing.T) {
|
||||
reqCases := []struct {
|
||||
desc string
|
||||
|
@ -276,7 +276,6 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
|
||||
am := NewAuthMiddleware(auth.GetUserFromRequest)
|
||||
|
||||
api.RegisterRoutes(r, am)
|
||||
api.RegisterMetricsRoutes(r, am)
|
||||
api.RegisterLogsRoutes(r, am)
|
||||
api.RegisterIntegrationRoutes(r, am)
|
||||
api.RegisterQueryRangeV3Routes(r, am)
|
||||
|
@ -58,9 +58,6 @@ type Reader interface {
|
||||
SetTTL(ctx context.Context, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
|
||||
|
||||
FetchTemporality(ctx context.Context, metricNames []string) (map[string]map[v3.Temporality]bool, error)
|
||||
GetMetricAutocompleteMetricNames(ctx context.Context, matchText string, limit int) (*[]string, *model.ApiError)
|
||||
GetMetricAutocompleteTagKey(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError)
|
||||
GetMetricAutocompleteTagValue(ctx context.Context, params *model.MetricAutocompleteTagParams) (*[]string, *model.ApiError)
|
||||
GetMetricResult(ctx context.Context, query string) ([]*model.Series, error)
|
||||
GetMetricResultEE(ctx context.Context, query string) ([]*model.Series, string, error)
|
||||
GetMetricAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error)
|
||||
@ -106,7 +103,6 @@ type Reader interface {
|
||||
QueryDashboardVars(ctx context.Context, query string) (*model.DashboardVar, error)
|
||||
CheckClickHouse(ctx context.Context) error
|
||||
|
||||
GetLatencyMetricMetadata(context.Context, string, string, bool) (*v3.LatencyMetricMetadataResponse, error)
|
||||
GetMetricMetadata(context.Context, string, string) (*v3.MetricMetadataResponse, error)
|
||||
}
|
||||
|
||||
|
@ -3,27 +3,9 @@ package querytemplate
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
)
|
||||
|
||||
// AssignReservedVars assigns values for the reserved go-template vars.
// Assumes model.QueryRangeParamsV2.Start and End are Unix *millisecond*
// timestamps (each derived variable converts from ms: /1000 → seconds,
// *1e6 → nanoseconds).
func AssignReservedVars(metricsQueryRangeParams *model.QueryRangeParamsV2) {
	// Seconds.
	metricsQueryRangeParams.Variables["start_timestamp"] = metricsQueryRangeParams.Start / 1000
	metricsQueryRangeParams.Variables["end_timestamp"] = metricsQueryRangeParams.End / 1000

	// Milliseconds (the native unit of Start/End).
	metricsQueryRangeParams.Variables["start_timestamp_ms"] = metricsQueryRangeParams.Start
	metricsQueryRangeParams.Variables["end_timestamp_ms"] = metricsQueryRangeParams.End

	// Nanoseconds.
	metricsQueryRangeParams.Variables["start_timestamp_nano"] = metricsQueryRangeParams.Start * 1e6
	metricsQueryRangeParams.Variables["end_timestamp_nano"] = metricsQueryRangeParams.End * 1e6

	// ClickHouse DateTime expressions built from the second-resolution values.
	metricsQueryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.Start/1000)
	metricsQueryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", metricsQueryRangeParams.End/1000)

}
|
||||
|
||||
// AssignReservedVars assigns values for go template vars. assumes that
|
||||
// model.QueryRangeParamsV3.Start and End are Unix Nano timestamps
|
||||
func AssignReservedVarsV3(metricsQueryRangeParams *v3.QueryRangeParamsV3) {
|
||||
|
Loading…
x
Reference in New Issue
Block a user