chore: add query builder types (#7940)
parent 9fbf111976
commit 03600f4d6f
pkg/types/metrictypes/metrictypes.go (new file, 67 lines)
@ -0,0 +1,67 @@
package metrictypes

import (
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Temporality is the temporality of the metric specified in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#temporality
type Temporality struct {
	valuer.String
}

var (
	Delta       = Temporality{valuer.NewString("delta")}
	Cumulative  = Temporality{valuer.NewString("cumulative")}
	Unspecified = Temporality{valuer.NewString("")}
)

// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
	valuer.String
}

var (
	GaugeType        = Type{valuer.NewString("gauge")}
	SumType          = Type{valuer.NewString("sum")}
	HistogramType    = Type{valuer.NewString("histogram")}
	SummaryType      = Type{valuer.NewString("summary")}
	ExpHistogramType = Type{valuer.NewString("exponential_histogram")}
	UnspecifiedType  = Type{valuer.NewString("")}
)

type TimeAggregation struct {
	valuer.String
}

var (
	TimeAggregationUnspecified   = TimeAggregation{valuer.NewString("")}
	TimeAggregationLatest        = TimeAggregation{valuer.NewString("latest")}
	TimeAggregationSum           = TimeAggregation{valuer.NewString("sum")}
	TimeAggregationAvg           = TimeAggregation{valuer.NewString("avg")}
	TimeAggregationMin           = TimeAggregation{valuer.NewString("min")}
	TimeAggregationMax           = TimeAggregation{valuer.NewString("max")}
	TimeAggregationCount         = TimeAggregation{valuer.NewString("count")}
	TimeAggregationCountDistinct = TimeAggregation{valuer.NewString("count_distinct")}
	TimeAggregationRate          = TimeAggregation{valuer.NewString("rate")}
	TimeAggregationIncrease      = TimeAggregation{valuer.NewString("increase")}
)

type SpaceAggregation struct {
	valuer.String
}

var (
	SpaceAggregationUnspecified  = SpaceAggregation{valuer.NewString("")}
	SpaceAggregationSum          = SpaceAggregation{valuer.NewString("sum")}
	SpaceAggregationAvg          = SpaceAggregation{valuer.NewString("avg")}
	SpaceAggregationMin          = SpaceAggregation{valuer.NewString("min")}
	SpaceAggregationMax          = SpaceAggregation{valuer.NewString("max")}
	SpaceAggregationCount        = SpaceAggregation{valuer.NewString("count")}
	SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
	SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
	SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
	SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
	SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
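These are small value objects wrapping valuer.String. A minimal usage sketch (not part of this commit), assuming valuer.String keeps these values comparable with ==; the helper name and the defaults chosen here are purely illustrative:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/metrictypes"
)

// pickDefaultTimeAggregation is a hypothetical helper showing how the value
// objects above can drive behaviour; the mapping below is illustrative only.
func pickDefaultTimeAggregation(t metrictypes.Type) metrictypes.TimeAggregation {
	switch t {
	case metrictypes.SumType:
		return metrictypes.TimeAggregationRate
	case metrictypes.GaugeType:
		return metrictypes.TimeAggregationAvg
	default:
		return metrictypes.TimeAggregationUnspecified
	}
}

func main() {
	agg := pickDefaultTimeAggregation(metrictypes.SumType)
	fmt.Println(agg == metrictypes.TimeAggregationRate) // true
}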
@ -0,0 +1,190 @@
package querybuildertypesv5

import (
	"encoding/json"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type Step struct{ time.Duration }

func (s *Step) UnmarshalJSON(b []byte) error {
	if len(b) == 0 {
		return nil
	}
	if b[0] == '"' { // duration string, e.g. "15s", "1m"
		var str string
		if err := json.Unmarshal(b, &str); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid step")
		}
		d, err := time.ParseDuration(str)
		if err != nil {
			return errors.WrapInvalidInputf(
				err,
				errors.CodeInvalidInput,
				"invalid step, expected a duration string (example: 15s, 1m, 1h), valid time units are ns, us, ms, s, m, h",
			)
		}
		s.Duration = d
		return nil
	}
	var sec float64 // 30 -> 30s; 0.5 -> 500ms
	if err := json.Unmarshal(b, &sec); err != nil {
		return errors.WrapInvalidInputf(
			err,
			errors.CodeInvalidInput,
			"invalid step, expected duration in seconds (example: 60 - 1 minute, 240 - 4 minutes, 3600 - 1 hour)",
		)
	}
	s.Duration = time.Duration(sec * float64(time.Second))
	return nil
}

func (s Step) MarshalJSON() ([]byte, error) {
	// Emit a human-friendly string, e.g. "30s"
	return json.Marshal(s.Duration.String())
}

// FilterOperator is the operator for the filter.
type FilterOperator int

const (
	FilterOperatorUnknown FilterOperator = iota
	FilterOperatorEqual
	FilterOperatorNotEqual
	FilterOperatorGreaterThan
	FilterOperatorGreaterThanOrEq
	FilterOperatorLessThan
	FilterOperatorLessThanOrEq

	FilterOperatorLike
	FilterOperatorNotLike
	FilterOperatorILike
	FilterOperatorNotILike

	FilterOperatorBetween
	FilterOperatorNotBetween

	FilterOperatorIn
	FilterOperatorNotIn

	FilterOperatorExists
	FilterOperatorNotExists

	FilterOperatorRegexp
	FilterOperatorNotRegexp

	FilterOperatorContains
	FilterOperatorNotContains
)

type OrderDirection struct {
	valuer.String
}

var (
	OrderDirectionAsc  = OrderDirection{valuer.NewString("asc")}
	OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)

type ReduceTo struct {
	valuer.String
}

var (
	ReduceToUnknown = ReduceTo{valuer.NewString("")}
	ReduceToSum     = ReduceTo{valuer.NewString("sum")}
	ReduceToCount   = ReduceTo{valuer.NewString("count")}
	ReduceToAvg     = ReduceTo{valuer.NewString("avg")}
	ReduceToMin     = ReduceTo{valuer.NewString("min")}
	ReduceToMax     = ReduceTo{valuer.NewString("max")}
	ReduceToLast    = ReduceTo{valuer.NewString("last")}
	ReduceToMedian  = ReduceTo{valuer.NewString("median")}
)

type Aggregation struct {
	// aggregation expression, for example: count(), sum(item_price), countIf(day > 10)
	Expression string `json:"expression"`
	// if set, used as the alias of the aggregation in the result
	Alias string `json:"alias,omitempty"`
}

type MetricAggregation struct {
	// metric to query
	MetricName string `json:"metricName"`
	// temporality to apply to the query
	Temporality metrictypes.Temporality `json:"temporality"`
	// time aggregation to apply to the query
	TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
	// space aggregation to apply to the query
	SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
}

type Filter struct {
	// expression to filter by, following the filter syntax
	Expression string `json:"expression"`
}

type GroupByKey struct {
	telemetrytypes.TelemetryFieldKey
}

type Having struct {
	// expression to filter aggregated results by, following the filter syntax
	Expression string `json:"expression"`
}

type OrderByKey struct {
	telemetrytypes.TelemetryFieldKey
}

// OrderBy is a key and direction to order results by.
type OrderBy struct {
	// key to order by
	Key OrderByKey `json:"key"`
	// direction to order by
	Direction OrderDirection `json:"direction"`
}

// SecondaryAggregation is an aggregation applied on top of the primary aggregation.
type SecondaryAggregation struct {
	// stepInterval of the query
	// if not set, the step interval of the primary aggregation is used
	StepInterval Step `json:"stepInterval,omitempty"`
	// expression to aggregate, for example: count(), sum(item_price), countIf(day > 10)
	Expression string `json:"expression"`
	// if set, used as the alias of the aggregation in the result
	Alias string `json:"alias,omitempty"`
	// fields to group by
	GroupBy []GroupByKey `json:"groupBy,omitempty"`
	// order by keys and directions
	Order []OrderBy `json:"order,omitempty"`
	// maximum number of rows to return
	Limit int `json:"limit,omitempty"`
	// fields to limit by
	LimitBy LimitBy `json:"limitBy,omitempty"`
}

type Function struct {
	// name of the function
	Name string `json:"name"`

	// Args are the arguments to the function
	Args []struct {
		// name of the argument
		Name string `json:"name,omitempty"`
		// value of the argument
		Value string `json:"value"`
	} `json:"args,omitempty"`
}

type LimitBy struct {
	// keys to limit by
	Keys []string `json:"keys"`
	// value to limit by
	Value string `json:"value"`
}
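A minimal round-trip sketch (not part of this commit) showing the two wire formats Step accepts, a duration string and a number of seconds, and that it always marshals back to the string form; the payload wrapper struct is hypothetical:

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// payload is a hypothetical wrapper; only the Step type comes from this commit.
	type payload struct {
		StepInterval qbtypes.Step `json:"stepInterval"`
	}

	var a, b payload
	_ = json.Unmarshal([]byte(`{"stepInterval": "1m"}`), &a) // duration string form
	_ = json.Unmarshal([]byte(`{"stepInterval": 30}`), &b)   // seconds form (errors ignored for brevity)

	fmt.Println(a.StepInterval.Duration, b.StepInterval.Duration) // 1m0s 30s

	out, _ := json.Marshal(a) // marshals back to the string form
	fmt.Println(string(out))  // {"stepInterval":"1m0s"}
}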
@ -0,0 +1,58 @@
package querybuildertypesv5

import (
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type QueryBuilderQuery struct {
	// name of the query; mainly used when the query is referenced in a formula
	Name string `json:"name"`

	// stepInterval of the query
	StepInterval Step `json:"stepInterval,omitempty"`

	// signal to query
	Signal telemetrytypes.Signal `json:"signal,omitempty"`

	// we want to support multiple aggregations
	// currently supported: []Aggregation, []MetricAggregation
	Aggregations []any `json:"aggregations,omitempty"`

	// if true, the query will not be executed
	Disabled bool `json:"disabled,omitempty"`

	// filter to apply; the search query is a simple expression string
	Filter *Filter `json:"filter,omitempty"`

	// keys to group by
	GroupBy []GroupByKey `json:"groupBy,omitempty"`

	// order by keys and directions
	Order []OrderBy `json:"order,omitempty"`

	// columns to select
	SelectFields []telemetrytypes.TelemetryFieldKey `json:"selectFields,omitempty"`

	// maximum number of rows to return
	Limit int `json:"limit,omitempty"`

	// fields to limit by
	LimitBy LimitBy `json:"limitBy,omitempty"`

	// number of rows to skip
	// TODO: remove this once we have cursor-based pagination everywhere?
	Offset int `json:"offset,omitempty"`

	// cursor to paginate the query
	Cursor string `json:"cursor,omitempty"`

	// having clause to apply to the query
	Having *Having `json:"having,omitempty"`

	// secondary aggregations to apply on top of the primary aggregation
	SecondaryAggregations []SecondaryAggregation `json:"secondaryAggregations,omitempty"`

	// functions to apply to the query
	Functions []Function `json:"functions,omitempty"`
}
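An illustrative sketch (not part of this commit) of a metrics builder query assembled from these types; the metric name and filter expression are made up, and the exact JSON produced depends on how valuer.String marshals:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	q := qbtypes.QueryBuilderQuery{
		Name:         "A",
		StepInterval: qbtypes.Step{Duration: 60 * time.Second},
		Aggregations: []any{
			qbtypes.MetricAggregation{
				MetricName:       "http_server_duration", // hypothetical metric
				Temporality:      metrictypes.Delta,
				TimeAggregation:  metrictypes.TimeAggregationRate,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
		},
		Filter: &qbtypes.Filter{Expression: `service.name = "checkout"`}, // hypothetical filter syntax
		Limit:  10,
	}

	out, _ := json.MarshalIndent(q, "", "  ")
	fmt.Println(string(out))
}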
pkg/types/querybuildertypes/querybuildertypesv5/cache.go (new file, 9 lines)
@ -0,0 +1,9 @@
package querybuildertypesv5

// BucketCache is the only thing the orchestrator cares about.
type BucketCache interface {
	// GetMissRanges returns the cached portion plus the list of gaps to fetch
	GetMissRanges(q Query) (cached Result, missing []TimeRange)
	// Put stores fresh buckets for future hits
	Put(q Query, fresh Result)
}
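A minimal sketch (not part of this commit) of how an orchestrator might use BucketCache together with the Query and Result types defined later in this change; the package and helper names are hypothetical, and the merge step is deliberately left out:

package orchestrator

import (
	"context"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// runWithCache is a hypothetical helper: serve what the cache already has,
// execute only when there are gaps, and store the fresh result for next time.
// A real implementation would re-scope the query to the missing ranges and
// merge cached and fresh buckets instead of returning the fresh result as-is.
func runWithCache(ctx context.Context, cache qbtypes.BucketCache, q qbtypes.Query) (qbtypes.Result, error) {
	cached, missing := cache.GetMissRanges(q)
	if len(missing) == 0 {
		return cached, nil
	}

	fresh, err := q.Execute(ctx)
	if err != nil {
		return qbtypes.Result{}, err
	}
	cache.Put(q, fresh)
	return fresh, nil
}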
@ -0,0 +1,10 @@
package querybuildertypesv5

type ClickHouseQuery struct {
	// name of the query
	Name string `json:"name"`
	// query to execute
	Query string `json:"query"`
	// if true, the query will not be executed
	Disabled bool `json:"disabled"`
}
pkg/types/querybuildertypes/querybuildertypesv5/formula.go (new file, 11 lines)
@ -0,0 +1,11 @@
package querybuildertypesv5

type QueryBuilderFormula struct {
	// name of the formula
	Name string `json:"name"`
	// expression to evaluate for the formula
	Expression string `json:"expression"`

	// functions to apply to the formula result
	Functions []Function `json:"functions,omitempty"`
}
pkg/types/querybuildertypes/querybuildertypesv5/functions.go (new file, 27 lines)
@ -0,0 +1,27 @@
package querybuildertypesv5

import "github.com/SigNoz/signoz/pkg/valuer"

type FunctionName struct {
	valuer.String
}

var (
	FunctionNameCutOffMin   = FunctionName{valuer.NewString("cutOffMin")}
	FunctionNameCutOffMax   = FunctionName{valuer.NewString("cutOffMax")}
	FunctionNameClampMin    = FunctionName{valuer.NewString("clampMin")}
	FunctionNameClampMax    = FunctionName{valuer.NewString("clampMax")}
	FunctionNameAbsolute    = FunctionName{valuer.NewString("absolute")}
	FunctionNameRunningDiff = FunctionName{valuer.NewString("runningDiff")}
	FunctionNameLog2        = FunctionName{valuer.NewString("log2")}
	FunctionNameLog10       = FunctionName{valuer.NewString("log10")}
	FunctionNameCumSum      = FunctionName{valuer.NewString("cumSum")}
	FunctionNameEWMA3       = FunctionName{valuer.NewString("ewma3")}
	FunctionNameEWMA5       = FunctionName{valuer.NewString("ewma5")}
	FunctionNameEWMA7       = FunctionName{valuer.NewString("ewma7")}
	FunctionNameMedian3     = FunctionName{valuer.NewString("median3")}
	FunctionNameMedian5     = FunctionName{valuer.NewString("median5")}
	FunctionNameMedian7     = FunctionName{valuer.NewString("median7")}
	FunctionNameTimeShift   = FunctionName{valuer.NewString("timeShift")}
	FunctionNameAnomaly     = FunctionName{valuer.NewString("anomaly")}
)
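An illustrative sketch (not part of this commit) of attaching one of these functions to a query via the Function struct defined earlier in this change; the string literal mirrors FunctionNameTimeShift and the shift value of 60 is made up:

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// timeShift with a single positional argument, matching FunctionNameTimeShift above.
	fn := qbtypes.Function{
		Name: "timeShift",
		Args: []struct {
			Name  string `json:"name,omitempty"`
			Value string `json:"value"`
		}{
			{Value: "60"}, // hypothetical: shift the series by 60 seconds
		},
	}

	out, _ := json.Marshal(fn)
	fmt.Println(string(out)) // {"name":"timeShift","args":[{"value":"60"}]}
}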
pkg/types/querybuildertypes/querybuildertypesv5/join.go (new file, 49 lines)
@ -0,0 +1,49 @@
package querybuildertypesv5

import (
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// JoinType is the SQL-style join operator.
type JoinType struct{ valuer.String }

var (
	JoinTypeInner = JoinType{valuer.NewString("inner")}
	JoinTypeLeft  = JoinType{valuer.NewString("left")}
	JoinTypeRight = JoinType{valuer.NewString("right")}
	JoinTypeFull  = JoinType{valuer.NewString("full")}
	JoinTypeCross = JoinType{valuer.NewString("cross")}
)

type QueryRef struct {
	Name string `json:"name"`
}

type QueryBuilderJoin struct {
	Name     string `json:"name"`
	Disabled bool   `json:"disabled,omitempty"`

	// references into the flat registry of queries
	Left  QueryRef `json:"left"`
	Right QueryRef `json:"right"`

	// join type and ON condition
	Type JoinType `json:"type"`
	On   string   `json:"on"`

	// primary aggregations; if empty, raw columns are returned
	// currently supported: []Aggregation, []MetricAggregation
	Aggregations []any `json:"aggregations,omitempty"`
	// columns to select
	SelectFields []telemetrytypes.TelemetryFieldKey `json:"selectFields,omitempty"`

	// post-join clauses (also used for aggregated joins)
	Filter                *Filter                `json:"filter,omitempty"`
	GroupBy               []GroupByKey           `json:"groupBy,omitempty"`
	Having                *Having                `json:"having,omitempty"`
	Order                 []OrderBy              `json:"order,omitempty"`
	Limit                 int                    `json:"limit,omitempty"`
	SecondaryAggregations []SecondaryAggregation `json:"secondaryAggregations,omitempty"`
	Functions             []Function             `json:"functions,omitempty"`
}
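An illustrative sketch (not part of this commit) of a left join between two builder queries referenced by name; the ON expression is made up, and the exact JSON depends on how valuer.String marshals JoinType:

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	j := qbtypes.QueryBuilderJoin{
		Name:  "AB",
		Left:  qbtypes.QueryRef{Name: "A"},
		Right: qbtypes.QueryRef{Name: "B"},
		Type:  qbtypes.JoinTypeLeft,
		On:    "A.service.name = B.service.name", // hypothetical ON condition
	}

	out, _ := json.Marshal(j)
	fmt.Println(string(out))
}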
@ -0,0 +1,10 @@
package querybuildertypesv5

type PromQuery struct {
	// name of the query
	Name string `json:"name"`
	// query to execute
	Query string `json:"query"`
	// if true, the query will not be executed
	Disabled bool `json:"disabled"`
}
@ -1,4 +1,4 @@
-package types
+package querybuildertypesv5
 
 import (
 	"context"
@ -15,39 +15,6 @@ var (
 	ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
 )
 
-// FilterOperator is the operator for the filter.
-type FilterOperator int
-
-const (
-	FilterOperatorUnknown FilterOperator = iota
-	FilterOperatorEqual
-	FilterOperatorNotEqual
-	FilterOperatorGreaterThan
-	FilterOperatorGreaterThanOrEq
-	FilterOperatorLessThan
-	FilterOperatorLessThanOrEq
-
-	FilterOperatorLike
-	FilterOperatorNotLike
-	FilterOperatorILike
-	FilterOperatorNotILike
-
-	FilterOperatorBetween
-	FilterOperatorNotBetween
-
-	FilterOperatorIn
-	FilterOperatorNotIn
-
-	FilterOperatorExists
-	FilterOperatorNotExists
-
-	FilterOperatorRegexp
-	FilterOperatorNotRegexp
-
-	FilterOperatorContains
-	FilterOperatorNotContains
-)
-
 // ConditionBuilder is the interface for building the condition part of the query.
 type ConditionBuilder interface {
 	// GetColumn returns the column for the given key.
@ -0,0 +1,7 @@
package querybuildertypesv5

import "context"

type Querier interface {
	QueryRange(ctx context.Context, req QueryRangeRequest) (QueryRangeResponse, error)
}
pkg/types/querybuildertypes/querybuildertypesv5/query.go (new file, 29 lines)
@ -0,0 +1,29 @@
package querybuildertypesv5

import (
	"context"
)

type Query interface {
	// Fingerprint must return a deterministic key that uniquely identifies
	// the query (query text, params, step, etc.) but *not* the time range.
	Fingerprint() string
	// Window returns [from, to) in epoch milliseconds so the cache can slice/merge.
	Window() (startMS, endMS uint64)
	// Execute runs the query; implementors must be side-effect-free.
	Execute(ctx context.Context) (Result, error)
}

type Result struct {
	Type  RequestType
	Value any // concrete Go value (to be type-asserted based on the RequestType)
	Stats ExecStats
}

type ExecStats struct {
	RowsScanned  int64 `json:"rowsScanned"`
	BytesScanned int64 `json:"bytesScanned"`
	DurationMs   int64 `json:"durationMs"`
}

type TimeRange struct{ From, To uint64 } // ms since epoch
@ -0,0 +1,17 @@
package querybuildertypesv5

import "github.com/SigNoz/signoz/pkg/valuer"

type QueryType struct {
	valuer.String
}

var (
	QueryTypeUnknown       = QueryType{valuer.NewString("unknown")}
	QueryTypeBuilder       = QueryType{valuer.NewString("builder_query")}
	QueryTypeFormula       = QueryType{valuer.NewString("builder_formula")}
	QueryTypeSubQuery      = QueryType{valuer.NewString("builder_sub_query")}
	QueryTypeJoin          = QueryType{valuer.NewString("builder_join")}
	QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
	QueryTypePromQL        = QueryType{valuer.NewString("promql")}
)
@ -0,0 +1,6 @@
package querybuildertypesv5

// Renderer is the interface for rendering the result of a query.
type Renderer interface {
	Render(res Result) (any, error)
}
pkg/types/querybuildertypes/querybuildertypesv5/req.go (new file, 94 lines)
@ -0,0 +1,94 @@
package querybuildertypesv5

import (
	"encoding/json"

	"github.com/SigNoz/signoz/pkg/errors"
)

type QueryEnvelope struct {
	// Name is the unique identifier for the query.
	Name string `json:"name"`
	// Type is the type of the query.
	Type QueryType `json:"type"` // "builder_query" | "builder_formula" | "builder_sub_query" | "builder_join" | "promql" | "clickhouse_sql"
	// Spec holds the query spec; decoding is deferred until the type is known.
	Spec any `json:"spec"`
}

// UnmarshalJSON implements a custom JSON unmarshaler for QueryEnvelope.
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
	var shadow struct {
		Name string          `json:"name"`
		Type QueryType       `json:"type"`
		Spec json.RawMessage `json:"spec"`
	}
	if err := json.Unmarshal(data, &shadow); err != nil {
		return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid query envelope")
	}

	q.Name = shadow.Name
	q.Type = shadow.Type

	// Decode the spec based on the Type.
	switch shadow.Type {
	case QueryTypeBuilder, QueryTypeSubQuery:
		var spec QueryBuilderQuery
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid builder query spec")
		}
		q.Spec = spec

	case QueryTypeFormula:
		var spec QueryBuilderFormula
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid formula spec")
		}
		q.Spec = spec

	case QueryTypeJoin:
		var spec QueryBuilderJoin
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid join spec")
		}
		q.Spec = spec

	case QueryTypePromQL:
		var spec PromQuery
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid PromQL spec")
		}
		q.Spec = spec

	case QueryTypeClickHouseSQL:
		var spec ClickHouseQuery
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
			return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid ClickHouse SQL spec")
		}
		q.Spec = spec

	default:
		return errors.WrapInvalidInputf(nil, errors.CodeInvalidInput, "unknown query type %q", shadow.Type)
	}

	return nil
}

type CompositeQuery struct {
	// Queries is the list of queries to run for the request.
	Queries []QueryEnvelope `json:"queries"`
}

type QueryRangeRequest struct {
	// SchemaVersion is the version of the schema to use for the request payload.
	SchemaVersion string `json:"schemaVersion"`
	// Start is the start time of the query in epoch milliseconds.
	Start uint64 `json:"start"`
	// End is the end time of the query in epoch milliseconds.
	End uint64 `json:"end"`
	// RequestType is the type of the request.
	RequestType RequestType `json:"requestType"`
	// CompositeQuery is the composite query to use for the request.
	CompositeQuery CompositeQuery `json:"compositeQuery"`
	// Variables is the set of variables to use for the request.
	Variables map[string]any `json:"variables,omitempty"`
}
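A minimal decoding sketch (not part of this commit) showing how a request payload fans out through QueryEnvelope.UnmarshalJSON into concrete spec types; the timestamps and query text are made up, and it assumes valuer.String unmarshals QueryType and RequestType from their JSON string forms (which the type switch above already relies on):

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	payload := []byte(`{
		"schemaVersion": "v5",
		"start": 1718000000000,
		"end": 1718003600000,
		"requestType": "time_series",
		"compositeQuery": {
			"queries": [
				{"name": "A", "type": "promql", "spec": {"name": "A", "query": "sum(rate(http_requests_total[5m]))"}},
				{"name": "B", "type": "clickhouse_sql", "spec": {"name": "B", "query": "SELECT 1"}}
			]
		}
	}`)

	var req qbtypes.QueryRangeRequest
	if err := json.Unmarshal(payload, &req); err != nil {
		panic(err)
	}

	for _, q := range req.CompositeQuery.Queries {
		// Spec is already the concrete type chosen by the envelope's type switch.
		switch spec := q.Spec.(type) {
		case qbtypes.PromQuery:
			fmt.Println(q.Name, "promql:", spec.Query)
		case qbtypes.ClickHouseQuery:
			fmt.Println(q.Name, "clickhouse:", spec.Query)
		}
	}
}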
@ -0,0 +1,19 @@
package querybuildertypesv5

import "github.com/SigNoz/signoz/pkg/valuer"

type RequestType struct {
	valuer.String
}

var (
	RequestTypeUnknown = RequestType{valuer.NewString("")}
	// scalar result(s), example: number panel and table panel
	RequestTypeScalar = RequestType{valuer.NewString("scalar")}
	// []Point (struct{TS int64; Val float64}), example: line/area/bar chart
	RequestTypeTimeSeries = RequestType{valuer.NewString("time_series")}
	// [][]any, SQL result set, but paginated, example: list view
	RequestTypeRaw = RequestType{valuer.NewString("raw")}
	// []Bucket (struct{Lower,Upper,Count float64}), example: histogram
	RequestTypeDistribution = RequestType{valuer.NewString("distribution")}
)
pkg/types/querybuildertypes/querybuildertypesv5/resp.go (new file, 80 lines)
@ -0,0 +1,80 @@
package querybuildertypesv5

import (
	"time"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type QueryRangeResponse struct {
	Type RequestType `json:"type"`
	Data any         `json:"data"`
	Meta ExecStats   `json:"meta"`
}

type TimeSeriesData struct {
	QueryName    string              `json:"queryName"`
	Aggregations []AggregationBucket `json:"aggregations"`
}

type AggregationBucket struct {
	Index  int          `json:"index"` // or string Alias
	Alias  string       `json:"alias"`
	Series []TimeSeries `json:"series"` // no extra nesting
}

type TimeSeries struct {
	Labels []Label           `json:"labels,omitempty"`
	Values []TimeSeriesValue `json:"values"`
}

type Label struct {
	Key   telemetrytypes.TelemetryFieldKey `json:"key"`
	Value any                              `json:"value"`
}

type TimeSeriesValue struct {
	Timestamp int64   `json:"timestamp"`
	Value     float64 `json:"value,omitempty"`
	// for the heatmap type chart
	Values []float64 `json:"values,omitempty"`
	Bucket Bucket    `json:"bucket,omitempty"`
}

type Bucket struct {
	Step float64 `json:"step"`
}

type ColumnType struct {
	valuer.String
}

var (
	// for the group by part of the query
	ColumnTypeGroup = ColumnType{valuer.NewString("group")}
	// for the aggregation part of the query
	ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)

type ColumnDescriptor struct {
	telemetrytypes.TelemetryFieldKey
	QueryName        string     `json:"queryName"`
	AggregationIndex int64      `json:"aggregationIndex"`
	Type             ColumnType `json:"columnType"`
}

type ScalarData struct {
	Columns []ColumnDescriptor `json:"columns"`
	Data    [][]any            `json:"data"`
}

type RawData struct {
	QueryName string   `json:"queryName"`
	Rows      []RawRow `json:"rows"`
}

type RawRow struct {
	Timestamp time.Time      `json:"timestamp"`
	Data      map[string]any `json:"data"`
}
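An illustrative sketch (not part of this commit) of how a time-series result nests: query name, then aggregation bucket, then series, then values. Wrapping Data as []TimeSeriesData is an assumption here since Data is declared as any; the numbers and alias are made up:

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	ts := qbtypes.TimeSeriesData{
		QueryName: "A",
		Aggregations: []qbtypes.AggregationBucket{
			{
				Index: 0,
				Alias: "rate_sum", // hypothetical alias
				Series: []qbtypes.TimeSeries{
					{
						Values: []qbtypes.TimeSeriesValue{
							{Timestamp: 1718000000000, Value: 12.5},
							{Timestamp: 1718000060000, Value: 13.1},
						},
					},
				},
			},
		},
	}

	resp := qbtypes.QueryRangeResponse{
		Type: qbtypes.RequestTypeTimeSeries,
		Data: []qbtypes.TimeSeriesData{ts}, // assumed shape for time-series responses
	}

	out, _ := json.Marshal(resp)
	fmt.Println(string(out))
}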