chore: remove telemetrytests package and add generic type for aggregation (#8019)

Srikanth Chekuri 2025-05-23 14:59:15 +05:30 committed by GitHub
parent 77d1492aac
commit f4dc2a8fb8
10 changed files with 953 additions and 122 deletions

View File

@@ -1,11 +1,10 @@
package telemetrytests
package telemetrylogs
import (
"fmt"
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
@@ -13,8 +12,8 @@ import (
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := telemetrylogs.NewFieldMapper()
cb := telemetrylogs.NewConditionBuilder(fm)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
@@ -27,7 +26,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: telemetrylogs.GetBodyJSONKey,
JsonKeyToKey: GetBodyJSONKey,
}
testCases := []struct {

View File

@@ -1,11 +1,10 @@
package telemetrytests
package telemetrylogs
import (
"fmt"
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
@@ -13,8 +12,8 @@ import (
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
fm := telemetrylogs.NewFieldMapper()
cb := telemetrylogs.NewConditionBuilder(fm)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
@@ -27,7 +26,7 @@ func TestFilterExprLogs(t *testing.T) {
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: telemetrylogs.GetBodyJSONKey,
JsonKeyToKey: GetBodyJSONKey,
}
testCases := []struct {

View File

@@ -1,4 +1,4 @@
package telemetrytests
package telemetrylogs
import (
"strings"

View File

@@ -1,98 +0,0 @@
package telemetrytests
import (
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/require"
)
// TestAggRewrite tests the rewriting of a set of aggregation expressions
func TestAggRewrite(t *testing.T) {
fm := telemetrytraces.NewFieldMapper()
cb := telemetrytraces.NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
opts := querybuilder.AggExprRewriterOptions{
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: telemetrylogs.GetBodyJSONKey,
RateInterval: 60,
}
testCases := []struct {
expr string
shouldPass bool
expectedExpr string
expectedArgs []any
expectedErrorContains string
}{
{
expr: "count()",
shouldPass: true,
expectedExpr: "count()",
},
{
expr: `countIf(service.name = "redis")`,
shouldPass: true,
expectedExpr: "countIf((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))",
expectedArgs: []any{"redis", true},
},
{
expr: `countIf(service.name = "redis" AND status = 200)`,
shouldPass: true,
expectedExpr: "countIf(((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)))",
expectedArgs: []any{"redis", true, float64(200), true},
},
{
expr: `p05(duration_nano)`,
shouldPass: true,
expectedExpr: "quantile(0.05)(duration_nano)",
},
{
expr: `rate()`,
shouldPass: true,
expectedExpr: "count()/60",
},
{
expr: `avg(duration_nano)`,
shouldPass: true,
expectedExpr: "avg(duration_nano)",
},
{
expr: `sum(total_orders)`,
shouldPass: true,
expectedExpr: "sum(attributes_number['total_orders'])",
},
}
rewriter := querybuilder.NewAggExprRewriter(opts)
for _, tc := range testCases {
t.Run(limitString(tc.expr, 50), func(t *testing.T) {
expr, args, err := rewriter.Rewrite(tc.expr)
if tc.shouldPass {
if err != nil {
t.Errorf("Failed to parse query: %s\nError: %v\n", tc.expr, err)
return
}
// Verify the rewritten expression and its arguments
require.Equal(t, tc.expectedExpr, expr)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err, "Expected error for query: %s", tc.expr)
require.Contains(t, err.Error(), tc.expectedErrorContains)
}
})
}
}

View File

@@ -135,7 +135,14 @@ var (
ReduceToMedian = ReduceTo{valuer.NewString("median")}
)
type Aggregation struct {
type TraceAggregation struct {
// aggregation expression - example: count(), sum(item_price), countIf(day > 10)
Expression string `json:"expression"`
// if any, it will be used as the alias of the aggregation in the result
Alias string `json:"alias,omitempty"`
}
type LogAggregation struct {
// aggregation expression - example: count(), sum(item_price), countIf(day > 10)
Expression string `json:"expression"`
// if any, it will be used as the alias of the aggregation in the result
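
Both new aggregation types keep the expression/alias wire shape of the old Aggregation struct; only the Go type differs, which is what lets QueryBuilderQuery[T] (next file) be instantiated per signal. A minimal sketch with hypothetical values:

package querybuildertypesv5

// Hypothetical literals: both kinds carry the same fields, but the
// distinct types give each signal a statically typed aggregation slice.
var (
	exampleTraceAgg = TraceAggregation{Expression: "count()", Alias: "span_count"}
	exampleLogAgg   = LogAggregation{Expression: "count()", Alias: "log_count"}
)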

View File

@@ -4,7 +4,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type QueryBuilderQuery struct {
type QueryBuilderQuery[T any] struct {
// name of the query, mainly used when query is used in formula
Name string `json:"name"`
@@ -16,7 +16,7 @@ type QueryBuilderQuery struct {
// we want to support multiple aggregations
// currently supported: []Aggregation, []MetricAggregation
Aggregations []any `json:"aggregations,omitempty"`
Aggregations []T `json:"aggregations,omitempty"`
// disabled if true, the query will not be executed
Disabled bool `json:"disabled,omitempty"`
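
With the type parameter, the aggregation slice is checked at compile time instead of being decoded into []any and type-switched later. A sketch of a typed instantiation (Name and Signal fields as exercised in the tests added later in this commit):

package querybuildertypesv5

import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"

// A sketch: the compiler now rejects, say, a MetricAggregation inside
// a traces query, which the old []any field silently accepted.
func exampleTraceQuery() QueryBuilderQuery[TraceAggregation] {
	return QueryBuilderQuery[TraceAggregation]{
		Name:   "A",
		Signal: telemetrytypes.SignalTraces,
		Aggregations: []TraceAggregation{
			{Expression: "count()", Alias: "span_count"},
		},
	}
}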

View File

@@ -13,6 +13,7 @@ var (
ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)
type JsonKeyToFieldFunc func(context.Context, *telemetrytypes.TelemetryFieldKey, FilterOperator, any) (string, any)
@@ -38,9 +39,19 @@ type FilterCompiler interface {
Compile(ctx context.Context, filter string) (*sqlbuilder.WhereClause, []string, error)
}
type RewriteCtx struct {
RateInterval uint64
}
type RewriteOption func(*RewriteCtx)
func WithRateInterval(interval uint64) RewriteOption {
return func(c *RewriteCtx) { c.RateInterval = interval }
}
type AggExprRewriter interface {
// Rewrite rewrites the aggregation expression to be used in the query.
Rewrite(ctx context.Context, expr string) (string, []any, error)
Rewrite(ctx context.Context, expr string, opts ...RewriteOption) (string, []any, error)
}
type Statement struct {
@@ -50,7 +61,7 @@ type Statement struct {
}
// StatementBuilder builds the query.
type StatementBuilder interface {
type StatementBuilder[T any] interface {
// Build builds the query.
Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderQuery) (*Statement, error)
Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderQuery[T]) (*Statement, error)
}
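
The functional option replaces the fixed RateInterval field that AggExprRewriterOptions carried in the deleted test above, so one rewriter instance can now serve queries with different step intervals. A minimal sketch of the new call shape; the expected rewrite comes from the removed TestAggRewrite cases:

package querybuildertypesv5 // package assumed from the surrounding types

import "context"

// A sketch: the rate interval is supplied per call rather than at
// construction time. Per the removed TestAggRewrite case, "rate()"
// with an interval of 60 is expected to rewrite to "count()/60".
func exampleRewrite(ctx context.Context, rw AggExprRewriter) (string, []any, error) {
	return rw.Rewrite(ctx, "rate()", WithRateInterval(60))
}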

View File

@@ -4,6 +4,7 @@ import (
"encoding/json"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type QueryEnvelope struct {
@@ -32,11 +33,35 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
// 2. Decode the spec based on the Type.
switch shadow.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
var spec QueryBuilderQuery
var header struct {
Signal telemetrytypes.Signal `json:"signal"`
}
if err := json.Unmarshal(shadow.Spec, &header); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "cannot detect builder signal")
}
switch header.Signal {
case telemetrytypes.SignalTraces:
var spec QueryBuilderQuery[TraceAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid builder query spec")
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid trace builder query spec")
}
q.Spec = spec
case telemetrytypes.SignalLogs:
var spec QueryBuilderQuery[LogAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid log builder query spec")
}
q.Spec = spec
case telemetrytypes.SignalMetrics:
var spec QueryBuilderQuery[MetricAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid metric builder query spec")
}
q.Spec = spec
default:
return errors.WrapInvalidInputf(nil, errors.CodeInvalidInput, "unknown builder signal %q", header.Signal)
}
case QueryTypeFormula:
var spec QueryBuilderFormula
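
After decoding, QueryEnvelope.Spec holds the concrete generic instantiation selected by the signal discriminator, and callers recover it with a type assertion, as the new test file below does throughout. A hypothetical usage sketch:

package querybuildertypesv5

import "encoding/json"

// A sketch: UnmarshalJSON peeks at spec.signal, decodes into the
// matching QueryBuilderQuery[T], and stores it in Spec as `any`.
func exampleDecode() (QueryBuilderQuery[LogAggregation], error) {
	payload := []byte(`{
		"name": "B",
		"type": "builder_query",
		"spec": {"signal": "logs", "aggregations": [{"expression": "count()"}]}
	}`)
	var env QueryEnvelope
	if err := json.Unmarshal(payload, &env); err != nil {
		return QueryBuilderQuery[LogAggregation]{}, err
	}
	spec, _ := env.Spec.(QueryBuilderQuery[LogAggregation])
	return spec, nil
}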

View File

@@ -0,0 +1,637 @@
package querybuildertypesv5
import (
"encoding/json"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
tests := []struct {
name string
jsonData string
expected QueryRangeRequest
wantErr bool
}{
{
name: "valid trace builder query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "A",
"type": "builder_query",
"spec": {
"signal": "traces",
"aggregations": [{
"expression": "count()",
"alias": "trace_count"
}],
"stepInterval": "60s",
"filter": {
"expression": "service.name = 'frontend'"
},
"groupBy": [{
"name": "service.name",
"fieldContext": "resource"
}],
"order": [{
"key": {
"name": "timestamp",
"fieldContext": "span"
},
"direction": "desc"
}],
"limit": 100
}
}]
},
"variables": {
"service": "frontend"
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "A",
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{{
Expression: "count()",
Alias: "trace_count",
}},
StepInterval: Step{Duration: 60 * time.Second},
Filter: &Filter{
Expression: "service.name = 'frontend'",
},
GroupBy: []GroupByKey{{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
},
}},
Order: []OrderBy{{
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextSpan,
},
},
Direction: OrderDirectionDesc,
}},
Limit: 100,
},
}},
},
Variables: map[string]any{
"service": "frontend",
},
},
wantErr: false,
},
{
name: "valid log builder query",
jsonData: `{
"schemaVersion": "v2",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": {
"queries": [{
"name": "B",
"type": "builder_query",
"spec": {
"signal": "logs",
"stepInterval": "30s",
"filter": {
"expression": "severity_text = 'ERROR'"
},
"selectFields": [{
"key": "body",
"type": "log"
}],
"limit": 50,
"offset": 10
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v2",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeRaw,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "B",
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: Step{Duration: 30 * time.Second},
Filter: &Filter{
Expression: "severity_text = 'ERROR'",
},
SelectFields: []telemetrytypes.TelemetryFieldKey{{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
}},
Limit: 50,
Offset: 10,
},
}},
},
},
wantErr: false,
},
{
name: "valid metric builder query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "C",
"type": "builder_query",
"spec": {
"signal": "metrics",
"aggregations": [{
"metricName": "http_requests_total",
"temporality": "cumulative",
"timeAggregation": "rate",
"spaceAggregation": "sum"
}],
"stepInterval": 120,
"groupBy": [{
"key": "method",
"type": "tag"
}]
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "C",
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{{
MetricName: "http_requests_total",
Temporality: metrictypes.Cumulative,
TimeAggregation: metrictypes.TimeAggregationRate,
SpaceAggregation: metrictypes.SpaceAggregationSum,
}},
StepInterval: Step{Duration: 120 * time.Second},
GroupBy: []GroupByKey{{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "method",
FieldContext: telemetrytypes.FieldContextAttribute,
},
}},
},
}},
},
},
wantErr: false,
},
{
name: "valid formula query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "F1",
"type": "builder_formula",
"spec": {
"name": "error_rate",
"expression": "A / B * 100",
"functions": [{
"name": "absolute",
"args": []
}]
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "F1",
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "error_rate",
Expression: "A / B * 100",
Functions: []Function{{
Name: "absolute",
Args: []struct {
Name string `json:"name,omitempty"`
Value string `json:"value"`
}{},
}},
},
}},
},
},
wantErr: false,
},
{
name: "valid join query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": {
"queries": [{
"name": "J1",
"type": "builder_join",
"spec": {
"name": "join_traces_logs",
"left": {"name": "A"},
"right": {"name": "B"},
"type": "inner",
"on": "trace_id = trace_id",
"aggregations": [],
"limit": 1000
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeScalar,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "J1",
Type: QueryTypeJoin,
Spec: QueryBuilderJoin{
Name: "join_traces_logs",
Left: QueryRef{Name: "A"},
Right: QueryRef{Name: "B"},
Type: JoinTypeInner,
On: "trace_id = trace_id",
Aggregations: []any{},
Limit: 1000,
},
}},
},
},
wantErr: false,
},
{
name: "valid PromQL query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "P1",
"type": "promql",
"spec": {
"name": "cpu_usage",
"query": "rate(cpu_usage_total[5m])",
"disabled": false
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "P1",
Type: QueryTypePromQL,
Spec: PromQuery{
Name: "cpu_usage",
Query: "rate(cpu_usage_total[5m])",
Disabled: false,
},
}},
},
},
wantErr: false,
},
{
name: "valid ClickHouse SQL query",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": {
"queries": [{
"name": "CH1",
"type": "clickhouse_sql",
"spec": {
"name": "custom_query",
"query": "SELECT count(*) FROM logs WHERE timestamp >= ? AND timestamp <= ?",
"disabled": false
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeRaw,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "CH1",
Type: QueryTypeClickHouseSQL,
Spec: ClickHouseQuery{
Name: "custom_query",
Query: "SELECT count(*) FROM logs WHERE timestamp >= ? AND timestamp <= ?",
Disabled: false,
},
}},
},
},
wantErr: false,
},
{
name: "multiple queries",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [
{
"name": "A",
"type": "builder_query",
"spec": {
"signal": "traces",
"aggregations": [{"expression": "count()"}],
"disabled": false
}
},
{
"name": "B",
"type": "builder_formula",
"spec": {
"name": "rate",
"expression": "A * 100"
}
}
]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Name: "A",
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{{Expression: "count()"}},
Disabled: false,
},
},
{
Name: "B",
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "rate",
Expression: "A * 100",
},
},
},
},
},
wantErr: false,
},
{
name: "step interval as string",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "A",
"type": "builder_query",
"spec": {
"signal": "metrics",
"aggregations": [{"metricName": "test"}],
"stepInterval": "5m"
}
}]
}
}`,
expected: QueryRangeRequest{
SchemaVersion: "v1",
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{{
Name: "A",
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{{MetricName: "test"}},
StepInterval: Step{Duration: 5 * time.Minute},
},
}},
},
},
wantErr: false,
},
{
name: "invalid JSON",
jsonData: `{"invalid": json}`,
wantErr: true,
},
{
name: "unknown query type",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "A",
"type": "unknown_type",
"spec": {}
}]
}
}`,
wantErr: true,
},
{
name: "unknown signal type",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "A",
"type": "builder_query",
"spec": {
"signal": "unknown_signal",
"aggregations": []
}
}]
}
}`,
wantErr: true,
},
{
name: "invalid step interval",
jsonData: `{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": {
"queries": [{
"name": "A",
"type": "builder_query",
"spec": {
"signal": "traces",
"aggregations": [],
"stepInterval": "invalid_duration"
}
}]
}
}`,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var req QueryRangeRequest
err := json.Unmarshal([]byte(tt.jsonData), &req)
if tt.wantErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.expected.SchemaVersion, req.SchemaVersion)
assert.Equal(t, tt.expected.Start, req.Start)
assert.Equal(t, tt.expected.End, req.End)
assert.Equal(t, tt.expected.RequestType, req.RequestType)
assert.Equal(t, len(tt.expected.CompositeQuery.Queries), len(req.CompositeQuery.Queries))
for i, expectedQuery := range tt.expected.CompositeQuery.Queries {
actualQuery := req.CompositeQuery.Queries[i]
assert.Equal(t, expectedQuery.Name, actualQuery.Name)
assert.Equal(t, expectedQuery.Type, actualQuery.Type)
switch expectedQuery.Type {
case QueryTypeBuilder:
switch expectedSpec := expectedQuery.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
actualSpec, ok := actualQuery.Spec.(QueryBuilderQuery[TraceAggregation])
require.True(t, ok, "Expected TraceBuilderQuery but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Signal, actualSpec.Signal)
assert.Equal(t, expectedSpec.StepInterval, actualSpec.StepInterval)
assert.Equal(t, expectedSpec.Disabled, actualSpec.Disabled)
assert.Equal(t, len(expectedSpec.Aggregations), len(actualSpec.Aggregations))
case QueryBuilderQuery[LogAggregation]:
actualSpec, ok := actualQuery.Spec.(QueryBuilderQuery[LogAggregation])
require.True(t, ok, "Expected LogBuilderQuery but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Signal, actualSpec.Signal)
assert.Equal(t, expectedSpec.StepInterval, actualSpec.StepInterval)
assert.Equal(t, expectedSpec.Disabled, actualSpec.Disabled)
assert.Equal(t, len(expectedSpec.Aggregations), len(actualSpec.Aggregations))
case QueryBuilderQuery[MetricAggregation]:
actualSpec, ok := actualQuery.Spec.(QueryBuilderQuery[MetricAggregation])
require.True(t, ok, "Expected MetricBuilderQuery but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Signal, actualSpec.Signal)
assert.Equal(t, expectedSpec.StepInterval, actualSpec.StepInterval)
assert.Equal(t, expectedSpec.Disabled, actualSpec.Disabled)
assert.Equal(t, len(expectedSpec.Aggregations), len(actualSpec.Aggregations))
for j, expectedAgg := range expectedSpec.Aggregations {
actualAgg := actualSpec.Aggregations[j]
assert.Equal(t, expectedAgg.MetricName, actualAgg.MetricName)
assert.Equal(t, expectedAgg.Temporality, actualAgg.Temporality)
assert.Equal(t, expectedAgg.TimeAggregation, actualAgg.TimeAggregation)
assert.Equal(t, expectedAgg.SpaceAggregation, actualAgg.SpaceAggregation)
}
}
case QueryTypeFormula:
expectedSpec := expectedQuery.Spec.(QueryBuilderFormula)
actualSpec, ok := actualQuery.Spec.(QueryBuilderFormula)
require.True(t, ok, "Expected QueryBuilderFormula but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Expression, actualSpec.Expression)
assert.Equal(t, expectedSpec.Name, actualSpec.Name)
case QueryTypeJoin:
expectedSpec := expectedQuery.Spec.(QueryBuilderJoin)
actualSpec, ok := actualQuery.Spec.(QueryBuilderJoin)
require.True(t, ok, "Expected QueryBuilderJoin but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Name, actualSpec.Name)
assert.Equal(t, expectedSpec.Type, actualSpec.Type)
assert.Equal(t, expectedSpec.On, actualSpec.On)
case QueryTypePromQL:
expectedSpec := expectedQuery.Spec.(PromQuery)
actualSpec, ok := actualQuery.Spec.(PromQuery)
require.True(t, ok, "Expected PromQuery but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Query, actualSpec.Query)
assert.Equal(t, expectedSpec.Name, actualSpec.Name)
assert.Equal(t, expectedSpec.Disabled, actualSpec.Disabled)
case QueryTypeClickHouseSQL:
expectedSpec := expectedQuery.Spec.(ClickHouseQuery)
actualSpec, ok := actualQuery.Spec.(ClickHouseQuery)
require.True(t, ok, "Expected ClickHouseQuery but got %T", actualQuery.Spec)
assert.Equal(t, expectedSpec.Query, actualSpec.Query)
assert.Equal(t, expectedSpec.Name, actualSpec.Name)
assert.Equal(t, expectedSpec.Disabled, actualSpec.Disabled)
}
}
if tt.expected.Variables != nil {
assert.Equal(t, tt.expected.Variables, req.Variables)
}
})
}
}

View File

@@ -0,0 +1,251 @@
package telemetrytypestest
import (
"context"
"strings"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// MockMetadataStore implements the MetadataStore interface for testing purposes
type MockMetadataStore struct {
// Maps to store test data
KeysMap map[string][]*telemetrytypes.TelemetryFieldKey
RelatedValuesMap map[string][]string
AllValuesMap map[string]*telemetrytypes.TelemetryFieldValues
}
// NewMockMetadataStore creates a new instance of MockMetadataStore with initialized maps
func NewMockMetadataStore() *MockMetadataStore {
return &MockMetadataStore{
KeysMap: make(map[string][]*telemetrytypes.TelemetryFieldKey),
RelatedValuesMap: make(map[string][]string),
AllValuesMap: make(map[string]*telemetrytypes.TelemetryFieldValues),
}
}
// GetKeys returns a map of telemetrytypes.TelemetryFieldKey slices keyed by field name
func (m *MockMetadataStore) GetKeys(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, error) {
result := make(map[string][]*telemetrytypes.TelemetryFieldKey)
// If selector is nil, return all keys
if fieldKeySelector == nil {
return m.KeysMap, nil
}
// Apply selector logic
for name, keys := range m.KeysMap {
// Check if name matches
if matchesName(fieldKeySelector, name) {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, key := range keys {
if matchesKey(fieldKeySelector, key) {
filteredKeys = append(filteredKeys, key)
}
}
if len(filteredKeys) > 0 {
result[name] = filteredKeys
}
}
}
return result, nil
}
// GetKeysMulti applies multiple selectors and returns combined results
func (m *MockMetadataStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, error) {
result := make(map[string][]*telemetrytypes.TelemetryFieldKey)
// Process each selector
for _, selector := range fieldKeySelectors {
selectorCopy := selector // Create a copy to avoid issues with pointer semantics
selectorResults, err := m.GetKeys(ctx, selectorCopy)
if err != nil {
return nil, err
}
// Merge results
for name, keys := range selectorResults {
if existingKeys, exists := result[name]; exists {
// Merge without duplicates
keySet := make(map[string]bool)
for _, key := range existingKeys {
keySet[keyIdentifier(key)] = true
}
for _, key := range keys {
if !keySet[keyIdentifier(key)] {
result[name] = append(result[name], key)
}
}
} else {
result[name] = keys
}
}
}
return result, nil
}
// GetKey returns a list of keys with the given name
func (m *MockMetadataStore) GetKey(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {
if fieldKeySelector == nil {
return nil, nil
}
result := []*telemetrytypes.TelemetryFieldKey{}
// Find keys matching the selector
for name, keys := range m.KeysMap {
if matchesName(fieldKeySelector, name) {
for _, key := range keys {
if matchesKey(fieldKeySelector, key) {
result = append(result, key)
}
}
}
}
return result, nil
}
// GetRelatedValues returns a list of related values for the given key name and selection
func (m *MockMetadataStore) GetRelatedValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) ([]string, error) {
if fieldValueSelector == nil {
return nil, nil
}
// Generate a lookup key from the selector
lookupKey := generateLookupKey(fieldValueSelector)
if values, exists := m.RelatedValuesMap[lookupKey]; exists {
return values, nil
}
// Return empty slice if no values found
return []string{}, nil
}
// GetAllValues returns all values for a given field
func (m *MockMetadataStore) GetAllValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) {
if fieldValueSelector == nil {
return nil, nil
}
// Generate a lookup key from the selector
lookupKey := generateLookupKey(fieldValueSelector)
if values, exists := m.AllValuesMap[lookupKey]; exists {
return values, nil
}
// Return empty values object if not found
return &telemetrytypes.TelemetryFieldValues{}, nil
}
// Helper functions to avoid adding methods to structs
// matchesName checks if a field name matches the selector criteria
func matchesName(selector *telemetrytypes.FieldKeySelector, name string) bool {
if selector == nil || selector.Name == "" {
return true
}
if selector.SelectorMatchType.String == telemetrytypes.FieldSelectorMatchTypeExact.String {
return selector.Name == name
}
// Fuzzy matching for FieldSelectorMatchTypeFuzzy
return strings.Contains(strings.ToLower(name), strings.ToLower(selector.Name))
}
// matchesKey checks if a field key matches the selector criteria
func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.TelemetryFieldKey) bool {
if selector == nil {
return true
}
// Check name (already checked in matchesName, but double-check here)
if selector.Name != "" && !matchesName(selector, key.Name) {
return false
}
// Check signal
if selector.Signal != telemetrytypes.SignalUnspecified && selector.Signal != key.Signal {
return false
}
// Check field context
if selector.FieldContext != telemetrytypes.FieldContextUnspecified &&
selector.FieldContext != key.FieldContext {
return false
}
// Check field data type
if selector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified &&
selector.FieldDataType != key.FieldDataType {
return false
}
return true
}
// keyIdentifier generates a unique identifier for the key
func keyIdentifier(key *telemetrytypes.TelemetryFieldKey) string {
return key.Name + "-" + key.FieldContext.StringValue() + "-" + key.FieldDataType.StringValue()
}
// generateLookupKey creates a lookup key for the selector
func generateLookupKey(selector *telemetrytypes.FieldValueSelector) string {
if selector == nil {
return ""
}
parts := []string{selector.Name}
if selector.FieldKeySelector != nil {
if selector.FieldKeySelector.Signal != telemetrytypes.SignalUnspecified {
parts = append(parts, selector.FieldKeySelector.Signal.StringValue())
}
if selector.FieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
parts = append(parts, selector.FieldKeySelector.FieldContext.StringValue())
}
if selector.FieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
parts = append(parts, selector.FieldKeySelector.FieldDataType.StringValue())
}
}
if selector.ExistingQuery != "" {
parts = append(parts, selector.ExistingQuery)
}
return strings.Join(parts, "-")
}
// SetKey adds a test key to the mock store
func (m *MockMetadataStore) SetKey(key *telemetrytypes.TelemetryFieldKey) {
name := key.Name
if _, exists := m.KeysMap[name]; !exists {
m.KeysMap[name] = []*telemetrytypes.TelemetryFieldKey{}
}
m.KeysMap[name] = append(m.KeysMap[name], key)
}
// SetKeys adds a list of test keys to the mock store
func (m *MockMetadataStore) SetKeys(keys []*telemetrytypes.TelemetryFieldKey) {
for _, key := range keys {
m.SetKey(key)
}
}
// SetRelatedValues sets related values for testing
func (m *MockMetadataStore) SetRelatedValues(lookupKey string, values []string) {
m.RelatedValuesMap[lookupKey] = values
}
// SetAllValues sets all values for testing
func (m *MockMetadataStore) SetAllValues(lookupKey string, values *telemetrytypes.TelemetryFieldValues) {
m.AllValuesMap[lookupKey] = values
}
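
For reference, a hedged sketch of wiring the mock into a test; the import path is assumed from the package name above:

package telemetrytypestest_test

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" // assumed path
)

// Seed the store with a key, then look it up the way production code
// would query the real metadata store.
func exampleMockUsage(ctx context.Context) (map[string][]*telemetrytypes.TelemetryFieldKey, error) {
	store := telemetrytypestest.NewMockMetadataStore()
	store.SetKeys([]*telemetrytypes.TelemetryFieldKey{
		{Name: "service.name", FieldContext: telemetrytypes.FieldContextResource},
	})
	// Exact-name match; fuzzy selectors fall back to substring matching.
	return store.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
		Name:              "service.name",
		SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
	})
}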