mirror of
https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz
synced 2025-08-12 02:29:03 +08:00
chore: add querier base implementation (#8028)
This commit is contained in:
parent
93de4681a9
commit
0ec1be1ddf
204
pkg/querier/builder_query.go
Normal file
204
pkg/querier/builder_query.go
Normal file
@ -0,0 +1,204 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/base64"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
// builderQuery executes a single query-builder query (traces, logs, or
// metrics) by compiling its spec into a ClickHouse statement and running it
// against the telemetry store.
type builderQuery[T any] struct {
	telemetryStore telemetrystore.TelemetryStore
	stmtBuilder    qbtypes.StatementBuilder[T]
	spec           qbtypes.QueryBuilderQuery[T]

	// query window; epoch milliseconds per the MS suffix — TODO confirm
	fromMS uint64
	toMS   uint64
	kind   qbtypes.RequestType
}

// compile-time check: builderQuery satisfies qbtypes.Query.
var _ qbtypes.Query = (*builderQuery[any])(nil)
|
||||||
|
|
||||||
|
func newBuilderQuery[T any](
|
||||||
|
telemetryStore telemetrystore.TelemetryStore,
|
||||||
|
stmtBuilder qbtypes.StatementBuilder[T],
|
||||||
|
spec qbtypes.QueryBuilderQuery[T],
|
||||||
|
tr qbtypes.TimeRange,
|
||||||
|
kind qbtypes.RequestType,
|
||||||
|
) *builderQuery[T] {
|
||||||
|
return &builderQuery[T]{
|
||||||
|
telemetryStore: telemetryStore,
|
||||||
|
stmtBuilder: stmtBuilder,
|
||||||
|
spec: spec,
|
||||||
|
fromMS: tr.From,
|
||||||
|
toMS: tr.To,
|
||||||
|
kind: kind,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fingerprint returns a stable identity for this query.
// NOTE(review): stubbed — always returns "" until implemented; presumably
// consumers of qbtypes.Query use this for caching/dedup — confirm.
func (q *builderQuery[T]) Fingerprint() string {
	// TODO: implement this
	return ""
}
|
||||||
|
|
||||||
|
// Window returns the query's time range as (from, to).
func (q *builderQuery[T]) Window() (uint64, uint64) {
	return q.fromMS, q.toMS
}
|
||||||
|
|
||||||
|
// must be a single query, ordered by timestamp (logs need an id tie-break).
|
||||||
|
func (q *builderQuery[T]) isWindowList() bool {
|
||||||
|
if len(q.spec.Order) == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// first ORDER BY must be `timestamp`
|
||||||
|
if q.spec.Order[0].Key.Name != "timestamp" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if q.spec.Signal == telemetrytypes.SignalLogs {
|
||||||
|
// logs require timestamp,id with identical direction
|
||||||
|
if len(q.spec.Order) != 2 || q.spec.Order[1].Key.Name != "id" ||
|
||||||
|
q.spec.Order[1].Direction != q.spec.Order[0].Direction {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *builderQuery[T]) Execute(ctx context.Context) (*qbtypes.Result, error) {
|
||||||
|
|
||||||
|
// can we do window based pagination?
|
||||||
|
if q.kind == qbtypes.RequestTypeRaw && q.isWindowList() {
|
||||||
|
return q.executeWindowList(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
chQuery := qbtypes.ClickHouseQuery{
|
||||||
|
Name: q.spec.Name,
|
||||||
|
Query: stmt.Query,
|
||||||
|
}
|
||||||
|
|
||||||
|
chExec := newchSQLQuery(q.telemetryStore, chQuery, stmt.Args, qbtypes.TimeRange{From: q.fromMS, To: q.toMS}, q.kind)
|
||||||
|
result, err := chExec.Execute(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result.Warnings = stmt.Warnings
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// executeWindowList serves a timestamp-ordered raw list query with
// cursor-based pagination. The overall time range is split into
// exponentially growing buckets (makeBuckets) and queried bucket by bucket
// until the requested page is filled, so narrow recent windows are tried
// before scanning the whole range.
//
// NOTE(review): makeBuckets emits buckets newest-first; for isAsc queries
// that means later buckets are visited first — confirm the statement builder
// compensates, otherwise ASC pages could be assembled out of order.
func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Result, error) {
	isAsc := len(q.spec.Order) > 0 &&
		strings.ToLower(string(q.spec.Order[0].Direction.StringValue())) == "asc"

	// Adjust [fromMS,toMS] window if a cursor was supplied.
	// A malformed cursor is silently ignored (err == nil guard).
	if cur := strings.TrimSpace(q.spec.Cursor); cur != "" {
		if ts, err := decodeCursor(cur); err == nil {
			if isAsc {
				// resume strictly after the last seen timestamp
				if uint64(ts) >= q.fromMS {
					q.fromMS = uint64(ts + 1)
				}
			} else { // DESC
				// resume strictly before the last seen timestamp
				// NOTE(review): ts == 0 would underflow uint64(ts - 1) — confirm
				// cursors can never encode 0
				if uint64(ts) <= q.toMS {
					q.toMS = uint64(ts - 1)
				}
			}
		}
	}

	reqLimit := q.spec.Limit
	if reqLimit == 0 {
		reqLimit = 10_000 // sane upper-bound default
	}
	offsetLeft := q.spec.Offset
	need := reqLimit + offsetLeft // rows to fetch from ClickHouse

	var rows []*qbtypes.RawRow

	totalRows := uint64(0)
	totalBytes := uint64(0)
	start := time.Now()

	for _, r := range makeBuckets(q.fromMS, q.toMS) {
		// the offset is applied client-side below, so each bucket query
		// starts at offset 0 and over-fetches by the remaining need
		q.spec.Offset = 0
		q.spec.Limit = need

		stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec)
		if err != nil {
			return nil, err
		}

		chExec := newchSQLQuery(
			q.telemetryStore,
			qbtypes.ClickHouseQuery{Name: q.spec.Name, Query: stmt.Query},
			stmt.Args,
			qbtypes.TimeRange{From: q.fromMS, To: q.toMS},
			q.kind,
		)
		res, err := chExec.Execute(ctx)
		if err != nil {
			return nil, err
		}
		totalRows += res.Stats.RowsScanned
		totalBytes += res.Stats.BytesScanned

		rawRows := res.Value.(*qbtypes.RawData).Rows
		// NOTE(review): need can reach zero or below here before the page is
		// full (offset rows count against it) — confirm Build treats a
		// non-positive Limit sanely on the next iteration.
		need -= len(rawRows)

		for _, rr := range rawRows {
			if offsetLeft > 0 { // client-requested initial offset
				offsetLeft--
				continue
			}
			rows = append(rows, rr)
			if len(rows) >= reqLimit { // page filled
				break
			}
		}
		if len(rows) >= reqLimit {
			break
		}
	}

	// a full page implies there may be more data: hand back a cursor at the
	// last row's timestamp
	nextCursor := ""
	if len(rows) == reqLimit {
		lastTS := rows[len(rows)-1].Timestamp.UnixMilli()
		nextCursor = encodeCursor(lastTS)
	}

	return &qbtypes.Result{
		Type: qbtypes.RequestTypeRaw,
		Value: &qbtypes.RawData{
			QueryName:  q.spec.Name,
			Rows:       rows,
			NextCursor: nextCursor,
		},
		Stats: qbtypes.ExecStats{
			RowsScanned:  totalRows,
			BytesScanned: totalBytes,
			// wall-clock duration of the whole multi-bucket scan
			DurationMS: uint64(time.Since(start).Milliseconds()),
		},
	}, nil
}
|
||||||
|
|
||||||
|
// encodeCursor encodes a millisecond timestamp as an opaque base64 cursor.
func encodeCursor(tsMilli int64) string {
	raw := strconv.AppendInt(nil, tsMilli, 10)
	return base64.StdEncoding.EncodeToString(raw)
}
|
||||||
|
|
||||||
|
// decodeCursor reverses encodeCursor: base64-decode cur, then parse the
// payload as a base-10 millisecond timestamp.
func decodeCursor(cur string) (int64, error) {
	raw, err := base64.StdEncoding.DecodeString(cur)
	if err != nil {
		return 0, err
	}
	ts, err := strconv.ParseInt(string(raw), 10, 64)
	if err != nil {
		return 0, err
	}
	return ts, nil
}
|
77
pkg/querier/clickhouse_query.go
Normal file
77
pkg/querier/clickhouse_query.go
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/ClickHouse/clickhouse-go/v2"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// chSQLQuery executes a raw ClickHouse SQL query against the telemetry
// store and shapes the result according to the request type.
type chSQLQuery struct {
	telemetryStore telemetrystore.TelemetryStore

	query qbtypes.ClickHouseQuery
	args  []any
	// query window; epoch milliseconds per the MS suffix — TODO confirm
	fromMS uint64
	toMS   uint64
	kind   qbtypes.RequestType
}

// compile-time check: chSQLQuery satisfies qbtypes.Query.
var _ qbtypes.Query = (*chSQLQuery)(nil)
|
||||||
|
|
||||||
|
func newchSQLQuery(
|
||||||
|
telemetryStore telemetrystore.TelemetryStore,
|
||||||
|
query qbtypes.ClickHouseQuery,
|
||||||
|
args []any,
|
||||||
|
tr qbtypes.TimeRange,
|
||||||
|
kind qbtypes.RequestType,
|
||||||
|
) *chSQLQuery {
|
||||||
|
return &chSQLQuery{
|
||||||
|
telemetryStore: telemetryStore,
|
||||||
|
query: query,
|
||||||
|
args: args,
|
||||||
|
fromMS: tr.From,
|
||||||
|
toMS: tr.To,
|
||||||
|
kind: kind,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fingerprint identifies the query; currently the raw SQL text itself.
// TODO: use the same query hash scheme as ClickHouse
func (q *chSQLQuery) Fingerprint() string { return q.query.Query }

// Window returns the query's time range as (from, to).
func (q *chSQLQuery) Window() (uint64, uint64) { return q.fromMS, q.toMS }
|
||||||
|
|
||||||
|
func (q *chSQLQuery) Execute(ctx context.Context) (*qbtypes.Result, error) {
|
||||||
|
|
||||||
|
totalRows := uint64(0)
|
||||||
|
totalBytes := uint64(0)
|
||||||
|
elapsed := time.Duration(0)
|
||||||
|
|
||||||
|
ctx = clickhouse.Context(ctx, clickhouse.WithProgress(func(p *clickhouse.Progress) {
|
||||||
|
totalRows += p.Rows
|
||||||
|
totalBytes += p.Bytes
|
||||||
|
elapsed += p.Elapsed
|
||||||
|
}))
|
||||||
|
|
||||||
|
rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, q.query.Query, q.args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
// TODO: map the errors from ClickHouse to our error types
|
||||||
|
payload, err := consume(rows, q.kind)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &qbtypes.Result{
|
||||||
|
Type: q.kind,
|
||||||
|
Value: payload,
|
||||||
|
Stats: qbtypes.ExecStats{
|
||||||
|
RowsScanned: totalRows,
|
||||||
|
BytesScanned: totalBytes,
|
||||||
|
DurationMS: uint64(elapsed.Milliseconds()),
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
373
pkg/querier/consume.go
Normal file
373
pkg/querier/consume.go
Normal file
@ -0,0 +1,373 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"reflect"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// aggRe matches aggregation result column aliases of the form
	// "__result_<index>", capturing the numeric index.
	aggRe = regexp.MustCompile(`^__result_(\d+)$`)
)
|
||||||
|
|
||||||
|
// consume reads every row and shapes it into the payload expected for the
|
||||||
|
// given request type.
|
||||||
|
//
|
||||||
|
// * Time-series - []*qbtypes.TimeSeriesData
|
||||||
|
// * Scalar - []*qbtypes.ScalarData
|
||||||
|
// * Raw - []*qbtypes.RawData
|
||||||
|
// * Distribution- []*qbtypes.DistributionData
|
||||||
|
func consume(rows driver.Rows, kind qbtypes.RequestType) (any, error) {
|
||||||
|
var (
|
||||||
|
payload any
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
switch kind {
|
||||||
|
case qbtypes.RequestTypeTimeSeries:
|
||||||
|
payload, err = readAsTimeSeries(rows)
|
||||||
|
case qbtypes.RequestTypeScalar:
|
||||||
|
payload, err = readAsScalar(rows)
|
||||||
|
case qbtypes.RequestTypeRaw:
|
||||||
|
payload, err = readAsRaw(rows)
|
||||||
|
// TODO: add support for other request types
|
||||||
|
}
|
||||||
|
|
||||||
|
return payload, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// readAsTimeSeries shapes the result set into time-series form.
//
// Per-row column interpretation:
//   - a time.Time column supplies the point's timestamp
//   - numeric columns named "__result_N" carry aggregation values (N = index)
//   - when exactly one numeric column exists and none match "__result_N",
//     that column is the single aggregation ("classic" single-value query)
//   - every other column (string, or extra numeric) becomes a series label
//
// Rows are grouped into series keyed by (aggregation index, sorted label
// values); NaN/Inf points and rows missing a timestamp or value are dropped.
func readAsTimeSeries(rows driver.Rows) ([]*qbtypes.TimeSeriesData, error) {

	colTypes := rows.ColumnTypes()
	colNames := rows.Columns()

	// one typed scan slot per column; count numeric columns to detect the
	// single-value fallback case below
	slots := make([]any, len(colTypes))
	numericColsCount := 0
	for i, ct := range colTypes {
		slots[i] = reflect.New(ct.ScanType()).Interface()
		if numericKind(ct.ScanType().Kind()) {
			numericColsCount++
		}
	}

	type sKey struct {
		agg int
		key string // deterministic join of label values
	}
	seriesMap := map[sKey]*qbtypes.TimeSeries{}

	for rows.Next() {
		if err := rows.Scan(slots...); err != nil {
			return nil, err
		}

		var (
			ts            int64
			lblVals       []string
			lblObjs       []*qbtypes.Label
			aggValues     = map[int]float64{} // all __result_N in this row
			fallbackValue float64             // value when NO __result_N columns exist
			fallbackSeen  bool
		)

		for idx, ptr := range slots {
			name := colNames[idx]

			switch v := ptr.(type) {
			case *time.Time:
				ts = v.UnixMilli()

			case *float64, *float32, *int64, *int32, *uint64, *uint32:
				val := numericAsFloat(reflect.ValueOf(ptr).Elem().Interface())
				if m := aggRe.FindStringSubmatch(name); m != nil {
					id, _ := strconv.Atoi(m[1])
					aggValues[id] = val
				} else if numericColsCount == 1 { // classic single-value query
					fallbackValue = val
					fallbackSeen = true
				} else {
					// numeric label
					lblVals = append(lblVals, fmt.Sprint(val))
					lblObjs = append(lblObjs, &qbtypes.Label{
						Key:   telemetrytypes.TelemetryFieldKey{Name: name},
						Value: val,
					})
				}

			case **float64, **float32, **int64, **int32, **uint64, **uint32:
				// nullable numeric column: only used when non-nil
				tempVal := reflect.ValueOf(ptr)
				if tempVal.IsValid() && !tempVal.IsNil() && !tempVal.Elem().IsNil() {
					val := numericAsFloat(tempVal.Elem().Elem().Interface())
					if m := aggRe.FindStringSubmatch(name); m != nil {
						id, _ := strconv.Atoi(m[1])
						aggValues[id] = val
					} else if numericColsCount == 1 { // classic single-value query
						fallbackValue = val
						fallbackSeen = true
					} else {
						// numeric label
						lblVals = append(lblVals, fmt.Sprint(val))
						lblObjs = append(lblObjs, &qbtypes.Label{
							Key:   telemetrytypes.TelemetryFieldKey{Name: name},
							Value: val,
						})
					}
				}

			case *string:
				lblVals = append(lblVals, *v)
				lblObjs = append(lblObjs, &qbtypes.Label{
					Key:   telemetrytypes.TelemetryFieldKey{Name: name},
					Value: *v,
				})

			case **string:
				// nullable string label: nil becomes the empty string
				val := *v
				if val == nil {
					var empty string
					val = &empty
				}
				lblVals = append(lblVals, *val)
				// NOTE(review): stores the *string pointer as the label Value,
				// unlike the *string case above which stores the string value —
				// confirm this asymmetry is intended.
				lblObjs = append(lblObjs, &qbtypes.Label{
					Key:   telemetrytypes.TelemetryFieldKey{Name: name},
					Value: val,
				})

			default:
				// unsupported column type — skipped
				continue
			}
		}

		// Edge-case: no __result_N columns, but a single numeric column present
		if len(aggValues) == 0 && fallbackSeen {
			aggValues[0] = fallbackValue
		}

		if ts == 0 || len(aggValues) == 0 {
			continue // nothing useful
		}

		// sorted label values give a column-order-independent series key
		sort.Strings(lblVals)
		labelsKey := strings.Join(lblVals, ",")

		// one point per aggregation in this row
		for aggIdx, val := range aggValues {
			if math.IsNaN(val) || math.IsInf(val, 0) {
				continue
			}

			key := sKey{agg: aggIdx, key: labelsKey}

			series, ok := seriesMap[key]
			if !ok {
				series = &qbtypes.TimeSeries{Labels: lblObjs}
				seriesMap[key] = series
			}
			series.Values = append(series.Values, &qbtypes.TimeSeriesValue{
				Timestamp: ts,
				Value:     val,
			})
		}
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	// bucket the collected series by aggregation index
	maxAgg := -1
	for k := range seriesMap {
		if k.agg > maxAgg {
			maxAgg = k.agg
		}
	}
	if maxAgg < 0 {
		return nil, nil // empty result-set
	}

	buckets := make([]*qbtypes.AggregationBucket, maxAgg+1)
	for i := range buckets {
		buckets[i] = &qbtypes.AggregationBucket{
			Index: i,
			Alias: "__result_" + strconv.Itoa(i),
		}
	}
	for k, s := range seriesMap {
		buckets[k.agg].Series = append(buckets[k.agg].Series, s)
	}

	// drop aggregation indices that collected no series
	var nonEmpty []*qbtypes.AggregationBucket
	for _, b := range buckets {
		if len(b.Series) > 0 {
			nonEmpty = append(nonEmpty, b)
		}
	}

	return []*qbtypes.TimeSeriesData{{
		Aggregations: nonEmpty,
	}}, nil
}
|
||||||
|
|
||||||
|
// numericKind reports whether k is a built-in numeric kind: floats, signed
// ints, or unsigned ints. Complex kinds and Uintptr are excluded.
func numericKind(k reflect.Kind) bool {
	switch k {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return true
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return true
	case reflect.Float32, reflect.Float64:
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
func readAsScalar(rows driver.Rows) (*qbtypes.ScalarData, error) {
|
||||||
|
colNames := rows.Columns()
|
||||||
|
colTypes := rows.ColumnTypes()
|
||||||
|
|
||||||
|
cd := make([]*qbtypes.ColumnDescriptor, len(colNames))
|
||||||
|
|
||||||
|
for i, name := range colNames {
|
||||||
|
colType := qbtypes.ColumnTypeGroup
|
||||||
|
if aggRe.MatchString(name) {
|
||||||
|
colType = qbtypes.ColumnTypeAggregation
|
||||||
|
}
|
||||||
|
cd[i] = &qbtypes.ColumnDescriptor{
|
||||||
|
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: name},
|
||||||
|
AggregationIndex: int64(i),
|
||||||
|
Type: colType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var data [][]any
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
scan := make([]any, len(colTypes))
|
||||||
|
for i := range scan {
|
||||||
|
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
|
||||||
|
}
|
||||||
|
if err := rows.Scan(scan...); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. deref each slot into the output row
|
||||||
|
row := make([]any, len(scan))
|
||||||
|
for i, cell := range scan {
|
||||||
|
valPtr := reflect.ValueOf(cell)
|
||||||
|
if valPtr.Kind() == reflect.Pointer && !valPtr.IsNil() {
|
||||||
|
row[i] = valPtr.Elem().Interface()
|
||||||
|
} else {
|
||||||
|
row[i] = nil // Nullable columns come back as nil pointers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
data = append(data, row)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &qbtypes.ScalarData{
|
||||||
|
Columns: cd,
|
||||||
|
Data: data,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// readAsRaw reads every row into a RawData payload: one RawRow per result
// row, with each column stored in the row's Data map keyed by column name.
// A column named "timestamp" or "timestamp_datetime" additionally populates
// RawRow.Timestamp — time.Time values directly, and integer values
// interpreted as epoch nanoseconds.
func readAsRaw(rows driver.Rows) (*qbtypes.RawData, error) {

	colNames := rows.Columns()
	colTypes := rows.ColumnTypes()
	colCnt := len(colNames)

	// Build a template slice of correctly-typed pointers once
	// NOTE(review): scanTpl is only ever used for its length below; each row
	// allocates fresh pointers anyway — the template itself looks redundant.
	scanTpl := make([]any, colCnt)
	for i, ct := range colTypes {
		scanTpl[i] = reflect.New(ct.ScanType()).Interface()
	}

	var outRows []*qbtypes.RawRow

	for rows.Next() {
		// fresh copy of the scan slice (otherwise the driver reuses pointers)
		scan := make([]any, colCnt)
		for i := range scanTpl {
			scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
		}

		if err := rows.Scan(scan...); err != nil {
			return nil, err
		}

		rr := qbtypes.RawRow{
			Data: make(map[string]*any, colCnt),
		}

		for i, cellPtr := range scan {
			name := colNames[i]

			// de-reference the typed pointer to any
			val := reflect.ValueOf(cellPtr).Elem().Interface()

			// special-case: timestamp column
			if name == "timestamp" || name == "timestamp_datetime" {
				switch t := val.(type) {
				case time.Time:
					rr.Timestamp = t
				case uint64: // epoch-ns stored as integer
					rr.Timestamp = time.Unix(0, int64(t))
				case int64:
					rr.Timestamp = time.Unix(0, t)
				default:
					// leave zero time if unrecognised
				}
			}

			// store value in map as *any, to match the schema
			v := any(val)
			rr.Data[name] = &v
		}
		outRows = append(outRows, &rr)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return &qbtypes.RawData{
		Rows: outRows,
	}, nil
}
|
||||||
|
|
||||||
|
// numericAsFloat converts any built-in numeric value to float64.
// Values of any other type (including nil and named numeric types) yield
// NaN, mirroring the exact-type matching of the switch below.
func numericAsFloat(v any) float64 {
	switch x := v.(type) {
	case float64:
		return x
	case float32:
		return float64(x)
	case int:
		return float64(x)
	case int8:
		return float64(x)
	case int16:
		return float64(x)
	case int32:
		return float64(x)
	case int64:
		return float64(x)
	case uint:
		return float64(x)
	case uint8:
		return float64(x)
	case uint16:
		return float64(x)
	case uint32:
		return float64(x)
	case uint64:
		return float64(x)
	}
	return math.NaN()
}
|
36
pkg/querier/list_range.go
Normal file
36
pkg/querier/list_range.go
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import "github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
|
||||||
|
const hourNanos = int64(3_600_000_000_000) // 1 h in ns
|
||||||
|
|
||||||
|
type tsRange struct{ fromNS, toNS uint64 }
|
||||||
|
|
||||||
|
// slice the timerange into exponentially growing buckets
|
||||||
|
func makeBuckets(start, end uint64) []tsRange {
|
||||||
|
startNS := querybuilder.ToNanoSecs(start)
|
||||||
|
endNS := querybuilder.ToNanoSecs(end)
|
||||||
|
|
||||||
|
if endNS-startNS <= uint64(hourNanos) {
|
||||||
|
return []tsRange{{fromNS: startNS, toNS: endNS}}
|
||||||
|
}
|
||||||
|
|
||||||
|
var out []tsRange
|
||||||
|
bucket := uint64(hourNanos)
|
||||||
|
curEnd := endNS
|
||||||
|
|
||||||
|
for {
|
||||||
|
curStart := curEnd - bucket
|
||||||
|
if curStart < startNS {
|
||||||
|
curStart = startNS
|
||||||
|
}
|
||||||
|
out = append(out, tsRange{fromNS: curStart, toNS: curEnd})
|
||||||
|
|
||||||
|
if curStart == startNS {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
curEnd = curStart
|
||||||
|
bucket *= 2
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
41
pkg/querier/promql_query.go
Normal file
41
pkg/querier/promql_query.go
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||||
|
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// promqlQuery evaluates a PromQL expression through the Prometheus engine
// over a fixed time range.
type promqlQuery struct {
	promEngine  prometheus.Prometheus
	query       qbv5.PromQuery
	tr          qbv5.TimeRange
	requestType qbv5.RequestType
}

// compile-time check: promqlQuery satisfies qbv5.Query.
var _ qbv5.Query = (*promqlQuery)(nil)
|
||||||
|
|
||||||
|
func newPromqlQuery(
|
||||||
|
promEngine prometheus.Prometheus,
|
||||||
|
query qbv5.PromQuery,
|
||||||
|
tr qbv5.TimeRange,
|
||||||
|
requestType qbv5.RequestType,
|
||||||
|
) *promqlQuery {
|
||||||
|
return &promqlQuery{promEngine, query, tr, requestType}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fingerprint returns a stable identity for this query.
// NOTE(review): stubbed — always returns "" until implemented.
func (q *promqlQuery) Fingerprint() string {
	// TODO: Implement this
	return ""
}

// Window returns the query's time range as (from, to).
func (q *promqlQuery) Window() (uint64, uint64) {
	return q.tr.From, q.tr.To
}
|
||||||
|
|
||||||
|
// Execute evaluates the PromQL query.
// NOTE(review): stub — currently always returns (nil, nil); callers must
// tolerate a nil *Result without an error until this is implemented.
func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
	// TODO: Implement this
	//nolint:nilnil
	return nil, nil
}
|
96
pkg/querier/querier.go
Normal file
96
pkg/querier/querier.go
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
package querier
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// querier resolves a v5 composite query-range request into per-query
// executables (query builder, PromQL, or raw ClickHouse SQL) and runs them.
type querier struct {
	telemetryStore telemetrystore.TelemetryStore
	metadataStore  telemetrytypes.MetadataStore
	promEngine     prometheus.Prometheus
	// one statement builder per signal for query-builder queries
	traceStmtBuilder  qbtypes.StatementBuilder[qbtypes.TraceAggregation]
	logStmtBuilder    qbtypes.StatementBuilder[qbtypes.LogAggregation]
	metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
}
|
||||||
|
|
||||||
|
func NewQuerier(
|
||||||
|
telemetryStore telemetrystore.TelemetryStore,
|
||||||
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
|
promEngine prometheus.Prometheus,
|
||||||
|
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||||
|
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||||
|
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
||||||
|
) *querier {
|
||||||
|
return &querier{
|
||||||
|
telemetryStore: telemetryStore,
|
||||||
|
metadataStore: metadataStore,
|
||||||
|
promEngine: promEngine,
|
||||||
|
traceStmtBuilder: traceStmtBuilder,
|
||||||
|
logStmtBuilder: logStmtBuilder,
|
||||||
|
metricStmtBuilder: metricStmtBuilder,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *querier) QueryRange(ctx context.Context, orgID string, req *qbtypes.QueryRangeRequest) (*qbtypes.QueryRangeResponse, error) {
|
||||||
|
|
||||||
|
queries := make(map[string]qbtypes.Query)
|
||||||
|
|
||||||
|
for _, query := range req.CompositeQuery.Queries {
|
||||||
|
switch query.Type {
|
||||||
|
case qbtypes.QueryTypePromQL:
|
||||||
|
promQuery, ok := query.Spec.(qbtypes.PromQuery)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", query.Spec)
|
||||||
|
}
|
||||||
|
promqlQuery := newPromqlQuery(q.promEngine, promQuery, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
|
queries[query.Name] = promqlQuery
|
||||||
|
case qbtypes.QueryTypeClickHouseSQL:
|
||||||
|
chQuery, ok := query.Spec.(qbtypes.ClickHouseQuery)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid clickhouse query spec %T", query.Spec)
|
||||||
|
}
|
||||||
|
chSQLQuery := newchSQLQuery(q.telemetryStore, chQuery, nil, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
|
queries[query.Name] = chSQLQuery
|
||||||
|
case qbtypes.QueryTypeBuilder:
|
||||||
|
switch spec := query.Spec.(type) {
|
||||||
|
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||||
|
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
|
queries[query.Name] = bq
|
||||||
|
|
||||||
|
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||||
|
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
|
queries[query.Name] = bq
|
||||||
|
|
||||||
|
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||||
|
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
|
queries[query.Name] = bq
|
||||||
|
default:
|
||||||
|
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported builder spec type %T", query.Spec)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return q.run(ctx, orgID, queries, req.RequestType)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *querier) run(ctx context.Context, _ string, qs map[string]qbtypes.Query, kind qbtypes.RequestType) (*qbtypes.QueryRangeResponse, error) {
|
||||||
|
results := make([]*qbtypes.Result, 0, len(qs))
|
||||||
|
for _, query := range qs {
|
||||||
|
// TODO: run in controlled batches
|
||||||
|
result, err := query.Execute(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
results = append(results, result)
|
||||||
|
}
|
||||||
|
return &qbtypes.QueryRangeResponse{
|
||||||
|
Type: kind,
|
||||||
|
Data: results,
|
||||||
|
}, nil
|
||||||
|
}
|
@ -13,6 +13,7 @@ type AggrFunc struct {
|
|||||||
FuncName string
|
FuncName string
|
||||||
Aliases []valuer.String
|
Aliases []valuer.String
|
||||||
RequireArgs bool
|
RequireArgs bool
|
||||||
|
Numeric bool
|
||||||
FuncCombinator bool
|
FuncCombinator bool
|
||||||
Rate bool
|
Rate bool
|
||||||
MinArgs int
|
MinArgs int
|
||||||
@ -46,156 +47,156 @@ var (
|
|||||||
AggrFuncSum = AggrFunc{
|
AggrFuncSum = AggrFunc{
|
||||||
Name: valuer.NewString("sum"),
|
Name: valuer.NewString("sum"),
|
||||||
FuncName: "sum",
|
FuncName: "sum",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncSumIf = AggrFunc{
|
AggrFuncSumIf = AggrFunc{
|
||||||
Name: valuer.NewString("sumif"),
|
Name: valuer.NewString("sumif"),
|
||||||
FuncName: "sumIf",
|
FuncName: "sumIf",
|
||||||
Aliases: []valuer.String{valuer.NewString("sum_if")},
|
Aliases: []valuer.String{valuer.NewString("sum_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncAvg = AggrFunc{
|
AggrFuncAvg = AggrFunc{
|
||||||
Name: valuer.NewString("avg"),
|
Name: valuer.NewString("avg"),
|
||||||
FuncName: "avg",
|
FuncName: "avg",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncAvgIf = AggrFunc{
|
AggrFuncAvgIf = AggrFunc{
|
||||||
Name: valuer.NewString("avgif"),
|
Name: valuer.NewString("avgif"),
|
||||||
FuncName: "avgIf",
|
FuncName: "avgIf",
|
||||||
Aliases: []valuer.String{valuer.NewString("avg_if")},
|
Aliases: []valuer.String{valuer.NewString("avg_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncMin = AggrFunc{
|
AggrFuncMin = AggrFunc{
|
||||||
Name: valuer.NewString("min"),
|
Name: valuer.NewString("min"),
|
||||||
FuncName: "min",
|
FuncName: "min",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncMinIf = AggrFunc{
|
AggrFuncMinIf = AggrFunc{
|
||||||
Name: valuer.NewString("minif"),
|
Name: valuer.NewString("minif"),
|
||||||
FuncName: "minIf",
|
FuncName: "minIf",
|
||||||
Aliases: []valuer.String{valuer.NewString("min_if")},
|
Aliases: []valuer.String{valuer.NewString("min_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncMax = AggrFunc{
|
AggrFuncMax = AggrFunc{
|
||||||
Name: valuer.NewString("max"),
|
Name: valuer.NewString("max"),
|
||||||
FuncName: "max",
|
FuncName: "max",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncMaxIf = AggrFunc{
|
AggrFuncMaxIf = AggrFunc{
|
||||||
Name: valuer.NewString("maxif"),
|
Name: valuer.NewString("maxif"),
|
||||||
FuncName: "maxIf",
|
FuncName: "maxIf",
|
||||||
Aliases: []valuer.String{valuer.NewString("max_if")},
|
Aliases: []valuer.String{valuer.NewString("max_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP05 = AggrFunc{
|
AggrFuncP05 = AggrFunc{
|
||||||
Name: valuer.NewString("p05"),
|
Name: valuer.NewString("p05"),
|
||||||
FuncName: "quantile(0.05)",
|
FuncName: "quantile(0.05)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP05IF = AggrFunc{
|
AggrFuncP05IF = AggrFunc{
|
||||||
Name: valuer.NewString("p05if"),
|
Name: valuer.NewString("p05if"),
|
||||||
FuncName: "quantileIf(0.05)",
|
FuncName: "quantileIf(0.05)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p05_if")},
|
Aliases: []valuer.String{valuer.NewString("p05_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP10 = AggrFunc{
|
AggrFuncP10 = AggrFunc{
|
||||||
Name: valuer.NewString("p10"),
|
Name: valuer.NewString("p10"),
|
||||||
FuncName: "quantile(0.10)",
|
FuncName: "quantile(0.10)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP10IF = AggrFunc{
|
AggrFuncP10IF = AggrFunc{
|
||||||
Name: valuer.NewString("p10if"),
|
Name: valuer.NewString("p10if"),
|
||||||
FuncName: "quantileIf(0.10)",
|
FuncName: "quantileIf(0.10)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p10_if")},
|
Aliases: []valuer.String{valuer.NewString("p10_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP20 = AggrFunc{
|
AggrFuncP20 = AggrFunc{
|
||||||
Name: valuer.NewString("p20"),
|
Name: valuer.NewString("p20"),
|
||||||
FuncName: "quantile(0.20)",
|
FuncName: "quantile(0.20)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP20IF = AggrFunc{
|
AggrFuncP20IF = AggrFunc{
|
||||||
Name: valuer.NewString("p20if"),
|
Name: valuer.NewString("p20if"),
|
||||||
FuncName: "quantileIf(0.20)",
|
FuncName: "quantileIf(0.20)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p20_if")},
|
Aliases: []valuer.String{valuer.NewString("p20_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP25 = AggrFunc{
|
AggrFuncP25 = AggrFunc{
|
||||||
Name: valuer.NewString("p25"),
|
Name: valuer.NewString("p25"),
|
||||||
FuncName: "quantile(0.25)",
|
FuncName: "quantile(0.25)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP25IF = AggrFunc{
|
AggrFuncP25IF = AggrFunc{
|
||||||
Name: valuer.NewString("p25if"),
|
Name: valuer.NewString("p25if"),
|
||||||
FuncName: "quantileIf(0.25)",
|
FuncName: "quantileIf(0.25)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p25_if")},
|
Aliases: []valuer.String{valuer.NewString("p25_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP50 = AggrFunc{
|
AggrFuncP50 = AggrFunc{
|
||||||
Name: valuer.NewString("p50"),
|
Name: valuer.NewString("p50"),
|
||||||
FuncName: "quantile(0.50)",
|
FuncName: "quantile(0.50)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP50IF = AggrFunc{
|
AggrFuncP50IF = AggrFunc{
|
||||||
Name: valuer.NewString("p50if"),
|
Name: valuer.NewString("p50if"),
|
||||||
FuncName: "quantileIf(0.50)",
|
FuncName: "quantileIf(0.50)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p50_if")},
|
Aliases: []valuer.String{valuer.NewString("p50_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP75 = AggrFunc{
|
AggrFuncP75 = AggrFunc{
|
||||||
Name: valuer.NewString("p75"),
|
Name: valuer.NewString("p75"),
|
||||||
FuncName: "quantile(0.75)",
|
FuncName: "quantile(0.75)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP75IF = AggrFunc{
|
AggrFuncP75IF = AggrFunc{
|
||||||
Name: valuer.NewString("p75if"),
|
Name: valuer.NewString("p75if"),
|
||||||
FuncName: "quantileIf(0.75)",
|
FuncName: "quantileIf(0.75)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p75_if")},
|
Aliases: []valuer.String{valuer.NewString("p75_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP90 = AggrFunc{
|
AggrFuncP90 = AggrFunc{
|
||||||
Name: valuer.NewString("p90"),
|
Name: valuer.NewString("p90"),
|
||||||
FuncName: "quantile(0.90)",
|
FuncName: "quantile(0.90)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP90IF = AggrFunc{
|
AggrFuncP90IF = AggrFunc{
|
||||||
Name: valuer.NewString("p90if"),
|
Name: valuer.NewString("p90if"),
|
||||||
FuncName: "quantileIf(0.90)",
|
FuncName: "quantileIf(0.90)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p90_if")},
|
Aliases: []valuer.String{valuer.NewString("p90_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP95 = AggrFunc{
|
AggrFuncP95 = AggrFunc{
|
||||||
Name: valuer.NewString("p95"),
|
Name: valuer.NewString("p95"),
|
||||||
FuncName: "quantile(0.95)",
|
FuncName: "quantile(0.95)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP95IF = AggrFunc{
|
AggrFuncP95IF = AggrFunc{
|
||||||
Name: valuer.NewString("p95if"),
|
Name: valuer.NewString("p95if"),
|
||||||
FuncName: "quantileIf(0.95)",
|
FuncName: "quantileIf(0.95)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p95_if")},
|
Aliases: []valuer.String{valuer.NewString("p95_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP99 = AggrFunc{
|
AggrFuncP99 = AggrFunc{
|
||||||
Name: valuer.NewString("p99"),
|
Name: valuer.NewString("p99"),
|
||||||
FuncName: "quantile(0.99)",
|
FuncName: "quantile(0.99)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP99IF = AggrFunc{
|
AggrFuncP99IF = AggrFunc{
|
||||||
Name: valuer.NewString("p99if"),
|
Name: valuer.NewString("p99if"),
|
||||||
FuncName: "quantileIf(0.99)",
|
FuncName: "quantileIf(0.99)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p99_if")},
|
Aliases: []valuer.String{valuer.NewString("p99_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncP999 = AggrFunc{
|
AggrFuncP999 = AggrFunc{
|
||||||
Name: valuer.NewString("p999"),
|
Name: valuer.NewString("p999"),
|
||||||
FuncName: "quantile(0.999)",
|
FuncName: "quantile(0.999)",
|
||||||
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncP999IF = AggrFunc{
|
AggrFuncP999IF = AggrFunc{
|
||||||
Name: valuer.NewString("p999if"),
|
Name: valuer.NewString("p999if"),
|
||||||
FuncName: "quantileIf(0.999)",
|
FuncName: "quantileIf(0.999)",
|
||||||
Aliases: []valuer.String{valuer.NewString("p999_if")},
|
Aliases: []valuer.String{valuer.NewString("p999_if")},
|
||||||
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
|
||||||
}
|
}
|
||||||
AggrFuncRate = AggrFunc{
|
AggrFuncRate = AggrFunc{
|
||||||
Name: valuer.NewString("rate"),
|
Name: valuer.NewString("rate"),
|
||||||
@ -211,22 +212,22 @@ var (
|
|||||||
AggrFuncRateSum = AggrFunc{
|
AggrFuncRateSum = AggrFunc{
|
||||||
Name: valuer.NewString("rate_sum"),
|
Name: valuer.NewString("rate_sum"),
|
||||||
FuncName: "sum",
|
FuncName: "sum",
|
||||||
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncRateAvg = AggrFunc{
|
AggrFuncRateAvg = AggrFunc{
|
||||||
Name: valuer.NewString("rate_avg"),
|
Name: valuer.NewString("rate_avg"),
|
||||||
FuncName: "avg",
|
FuncName: "avg",
|
||||||
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncRateMin = AggrFunc{
|
AggrFuncRateMin = AggrFunc{
|
||||||
Name: valuer.NewString("rate_min"),
|
Name: valuer.NewString("rate_min"),
|
||||||
FuncName: "min",
|
FuncName: "min",
|
||||||
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
AggrFuncRateMax = AggrFunc{
|
AggrFuncRateMax = AggrFunc{
|
||||||
Name: valuer.NewString("rate_max"),
|
Name: valuer.NewString("rate_max"),
|
||||||
FuncName: "max",
|
FuncName: "max",
|
||||||
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -13,33 +13,41 @@ import (
|
|||||||
"github.com/huandu/go-sqlbuilder"
|
"github.com/huandu/go-sqlbuilder"
|
||||||
)
|
)
|
||||||
|
|
||||||
type AggExprRewriterOptions struct {
|
|
||||||
MetadataStore telemetrytypes.MetadataStore
|
|
||||||
FullTextColumn *telemetrytypes.TelemetryFieldKey
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
FilterCompiler qbtypes.FilterCompiler
|
|
||||||
JsonBodyPrefix string
|
|
||||||
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
|
||||||
}
|
|
||||||
|
|
||||||
type aggExprRewriter struct {
|
type aggExprRewriter struct {
|
||||||
opts AggExprRewriterOptions
|
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||||
|
fieldMapper qbtypes.FieldMapper
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder
|
||||||
|
jsonBodyPrefix string
|
||||||
|
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewAggExprRewriter(opts AggExprRewriterOptions) *aggExprRewriter {
|
var _ qbtypes.AggExprRewriter = (*aggExprRewriter)(nil)
|
||||||
return &aggExprRewriter{opts: opts}
|
|
||||||
|
func NewAggExprRewriter(
|
||||||
|
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||||
|
fieldMapper qbtypes.FieldMapper,
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder,
|
||||||
|
jsonBodyPrefix string,
|
||||||
|
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||||
|
) *aggExprRewriter {
|
||||||
|
return &aggExprRewriter{
|
||||||
|
fullTextColumn: fullTextColumn,
|
||||||
|
fieldMapper: fieldMapper,
|
||||||
|
conditionBuilder: conditionBuilder,
|
||||||
|
jsonBodyPrefix: jsonBodyPrefix,
|
||||||
|
jsonKeyToKey: jsonKeyToKey,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rewrite parses the given aggregation expression, maps the column, and condition to
|
// Rewrite parses the given aggregation expression, maps the column, and condition to
|
||||||
// valid data source column and condition expression, and returns the rewritten expression
|
// valid data source column and condition expression, and returns the rewritten expression
|
||||||
// and the args if the parametric aggregation function is used.
|
// and the args if the parametric aggregation function is used.
|
||||||
func (r *aggExprRewriter) Rewrite(ctx context.Context, expr string, opts ...qbtypes.RewriteOption) (string, []any, error) {
|
func (r *aggExprRewriter) Rewrite(
|
||||||
|
ctx context.Context,
|
||||||
rctx := &qbtypes.RewriteCtx{}
|
expr string,
|
||||||
for _, opt := range opts {
|
rateInterval uint64,
|
||||||
opt(rctx)
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
}
|
) (string, []any, error) {
|
||||||
|
|
||||||
wrapped := fmt.Sprintf("SELECT %s", expr)
|
wrapped := fmt.Sprintf("SELECT %s", expr)
|
||||||
p := chparser.NewParser(wrapped)
|
p := chparser.NewParser(wrapped)
|
||||||
@ -62,19 +70,12 @@ func (r *aggExprRewriter) Rewrite(ctx context.Context, expr string, opts ...qbty
|
|||||||
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
|
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
|
||||||
}
|
}
|
||||||
|
|
||||||
selectors := QueryStringToKeysSelectors(expr)
|
|
||||||
|
|
||||||
keys, err := r.opts.MetadataStore.GetKeysMulti(ctx, selectors)
|
|
||||||
if err != nil {
|
|
||||||
return "", nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
visitor := newExprVisitor(keys,
|
visitor := newExprVisitor(keys,
|
||||||
r.opts.FullTextColumn,
|
r.fullTextColumn,
|
||||||
r.opts.FieldMapper,
|
r.fieldMapper,
|
||||||
r.opts.ConditionBuilder,
|
r.conditionBuilder,
|
||||||
r.opts.JsonBodyPrefix,
|
r.jsonBodyPrefix,
|
||||||
r.opts.JsonKeyToKey,
|
r.jsonKeyToKey,
|
||||||
)
|
)
|
||||||
// Rewrite the first select item (our expression)
|
// Rewrite the first select item (our expression)
|
||||||
if err := sel.SelectItems[0].Accept(visitor); err != nil {
|
if err := sel.SelectItems[0].Accept(visitor); err != nil {
|
||||||
@ -82,26 +83,23 @@ func (r *aggExprRewriter) Rewrite(ctx context.Context, expr string, opts ...qbty
|
|||||||
}
|
}
|
||||||
|
|
||||||
if visitor.isRate {
|
if visitor.isRate {
|
||||||
return fmt.Sprintf("%s/%d", sel.SelectItems[0].String(), rctx.RateInterval), visitor.chArgs, nil
|
return fmt.Sprintf("%s/%d", sel.SelectItems[0].String(), rateInterval), visitor.chArgs, nil
|
||||||
}
|
}
|
||||||
return sel.SelectItems[0].String(), visitor.chArgs, nil
|
return sel.SelectItems[0].String(), visitor.chArgs, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// RewriteMultiple rewrites a slice of expressions.
|
// RewriteMulti rewrites a slice of expressions.
|
||||||
func (r *aggExprRewriter) RewriteMultiple(
|
func (r *aggExprRewriter) RewriteMulti(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
exprs []string,
|
exprs []string,
|
||||||
opts ...qbtypes.RewriteOption,
|
rateInterval uint64,
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
) ([]string, [][]any, error) {
|
) ([]string, [][]any, error) {
|
||||||
rctx := &qbtypes.RewriteCtx{}
|
|
||||||
for _, opt := range opts {
|
|
||||||
opt(rctx)
|
|
||||||
}
|
|
||||||
out := make([]string, len(exprs))
|
out := make([]string, len(exprs))
|
||||||
var errs []error
|
var errs []error
|
||||||
var chArgsList [][]any
|
var chArgsList [][]any
|
||||||
for i, e := range exprs {
|
for i, e := range exprs {
|
||||||
w, chArgs, err := r.Rewrite(ctx, e, opts...)
|
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
errs = append(errs, err)
|
errs = append(errs, err)
|
||||||
out[i] = e
|
out[i] = e
|
||||||
@ -173,6 +171,11 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
|||||||
v.isRate = true
|
v.isRate = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dataType := telemetrytypes.FieldDataTypeString
|
||||||
|
if aggFunc.Numeric {
|
||||||
|
dataType = telemetrytypes.FieldDataTypeFloat64
|
||||||
|
}
|
||||||
|
|
||||||
// Handle *If functions with predicate + values
|
// Handle *If functions with predicate + values
|
||||||
if aggFunc.FuncCombinator {
|
if aggFunc.FuncCombinator {
|
||||||
// Map the predicate (last argument)
|
// Map the predicate (last argument)
|
||||||
@ -205,11 +208,13 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
|||||||
// Map each value column argument
|
// Map each value column argument
|
||||||
for i := 0; i < len(args)-1; i++ {
|
for i := 0; i < len(args)-1; i++ {
|
||||||
origVal := args[i].String()
|
origVal := args[i].String()
|
||||||
colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: origVal}, v.fieldKeys)
|
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
|
||||||
|
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
|
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
|
||||||
}
|
}
|
||||||
newVal := colName
|
v.chArgs = append(v.chArgs, exprArgs...)
|
||||||
|
newVal := expr
|
||||||
parsedVal, err := parseFragment(newVal)
|
parsedVal, err := parseFragment(newVal)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -221,11 +226,13 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
|||||||
// Non-If functions: map every argument as a column/value
|
// Non-If functions: map every argument as a column/value
|
||||||
for i, arg := range args {
|
for i, arg := range args {
|
||||||
orig := arg.String()
|
orig := arg.String()
|
||||||
colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: orig}, v.fieldKeys)
|
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
|
||||||
|
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", orig)
|
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", orig)
|
||||||
}
|
}
|
||||||
newCol := colName
|
v.chArgs = append(v.chArgs, exprArgs...)
|
||||||
|
newCol := expr
|
||||||
parsed, err := parseFragment(newCol)
|
parsed, err := parseFragment(newCol)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
27
pkg/querybuilder/cte.go
Normal file
27
pkg/querybuilder/cte.go
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
package querybuilder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// combineCTEs takes any number of individual CTE fragments like
|
||||||
|
//
|
||||||
|
// "__resource_filter AS (...)", "__limit_cte AS (...)"
|
||||||
|
//
|
||||||
|
// and renders the final `WITH …` clause.
|
||||||
|
func CombineCTEs(ctes []string) string {
|
||||||
|
if len(ctes) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return "WITH " + strings.Join(ctes, ", ") + " "
|
||||||
|
}
|
||||||
|
|
||||||
|
// prependArgs ensures CTE arguments appear before main-query arguments
|
||||||
|
// in the final slice so their ordinal positions match the SQL string.
|
||||||
|
func PrependArgs(cteArgs [][]any, mainArgs []any) []any {
|
||||||
|
out := make([]any, 0, len(mainArgs)+len(cteArgs))
|
||||||
|
for _, a := range cteArgs { // CTEs first, in declaration order
|
||||||
|
out = append(out, a...)
|
||||||
|
}
|
||||||
|
return append(out, mainArgs...)
|
||||||
|
}
|
96
pkg/querybuilder/fallback_expr.go
Normal file
96
pkg/querybuilder/fallback_expr.go
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
package querybuilder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
func CollisionHandledFinalExpr(
|
||||||
|
ctx context.Context,
|
||||||
|
field *telemetrytypes.TelemetryFieldKey,
|
||||||
|
fm qbtypes.FieldMapper,
|
||||||
|
cb qbtypes.ConditionBuilder,
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
|
requiredDataType telemetrytypes.FieldDataType,
|
||||||
|
) (string, []any, error) {
|
||||||
|
|
||||||
|
if requiredDataType != telemetrytypes.FieldDataTypeString &&
|
||||||
|
requiredDataType != telemetrytypes.FieldDataTypeFloat64 {
|
||||||
|
return "", nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported data type %s", requiredDataType)
|
||||||
|
}
|
||||||
|
|
||||||
|
var dummyValue any
|
||||||
|
if requiredDataType == telemetrytypes.FieldDataTypeFloat64 {
|
||||||
|
dummyValue = 0.0
|
||||||
|
} else {
|
||||||
|
dummyValue = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var stmts []string
|
||||||
|
var allArgs []any
|
||||||
|
|
||||||
|
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
|
||||||
|
sb := sqlbuilder.NewSelectBuilder()
|
||||||
|
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
sb.Where(condition)
|
||||||
|
|
||||||
|
expr, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
expr = strings.TrimPrefix(expr, "WHERE ")
|
||||||
|
stmts = append(stmts, expr)
|
||||||
|
allArgs = append(allArgs, args...)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
colName, err := fm.FieldFor(ctx, field)
|
||||||
|
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||||
|
// the key didn't have the right context to be added to the query
|
||||||
|
// we try to use the context we know of
|
||||||
|
keysForField := keys[field.Name]
|
||||||
|
if len(keysForField) == 0 {
|
||||||
|
// - the context is not provided
|
||||||
|
// - there are not keys for the field
|
||||||
|
// - it is not a static field
|
||||||
|
// - the next best thing to do is see if there is a typo
|
||||||
|
// and suggest a correction
|
||||||
|
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
|
||||||
|
if found {
|
||||||
|
// we found a close match, in the error message send the suggestion
|
||||||
|
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
|
||||||
|
} else {
|
||||||
|
// not even a close match, return an error
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for _, key := range keysForField {
|
||||||
|
err := addCondition(key)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
colName, _ = fm.FieldFor(ctx, key)
|
||||||
|
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(key, dummyValue, colName)
|
||||||
|
stmts = append(stmts, colName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
err := addCondition(field)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName)
|
||||||
|
stmts = append(stmts, colName)
|
||||||
|
}
|
||||||
|
|
||||||
|
multiIfStmt := fmt.Sprintf("multiIf(%s, NULL)", strings.Join(stmts, ", "))
|
||||||
|
|
||||||
|
return multiIfStmt, allArgs, nil
|
||||||
|
}
|
@ -1,47 +0,0 @@
|
|||||||
package resourcefilter
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
|
||||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
|
||||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
|
||||||
"github.com/huandu/go-sqlbuilder"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FilterCompilerOpts struct {
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
MetadataStore telemetrytypes.MetadataStore
|
|
||||||
}
|
|
||||||
|
|
||||||
type filterCompiler struct {
|
|
||||||
opts FilterCompilerOpts
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewFilterCompiler(opts FilterCompilerOpts) *filterCompiler {
|
|
||||||
return &filterCompiler{
|
|
||||||
opts: opts,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *filterCompiler) Compile(ctx context.Context, expr string) (*sqlbuilder.WhereClause, []string, error) {
|
|
||||||
selectors := querybuilder.QueryStringToKeysSelectors(expr)
|
|
||||||
|
|
||||||
keys, err := c.opts.MetadataStore.GetKeysMulti(ctx, selectors)
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(expr, querybuilder.FilterExprVisitorOpts{
|
|
||||||
FieldMapper: c.opts.FieldMapper,
|
|
||||||
ConditionBuilder: c.opts.ConditionBuilder,
|
|
||||||
FieldKeys: keys,
|
|
||||||
})
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return filterWhereClause, warnings, nil
|
|
||||||
}
|
|
@ -5,17 +5,12 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
"github.com/huandu/go-sqlbuilder"
|
"github.com/huandu/go-sqlbuilder"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ResourceFilterStatementBuilderOpts struct {
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
Compiler qbtypes.FilterCompiler
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
ErrUnsupportedSignal = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported signal type")
|
ErrUnsupportedSignal = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported signal type")
|
||||||
)
|
)
|
||||||
@ -39,8 +34,10 @@ var signalConfigs = map[telemetrytypes.Signal]signalConfig{
|
|||||||
|
|
||||||
// Generic resource filter statement builder
|
// Generic resource filter statement builder
|
||||||
type resourceFilterStatementBuilder[T any] struct {
|
type resourceFilterStatementBuilder[T any] struct {
|
||||||
opts ResourceFilterStatementBuilderOpts
|
fieldMapper qbtypes.FieldMapper
|
||||||
signal telemetrytypes.Signal
|
conditionBuilder qbtypes.ConditionBuilder
|
||||||
|
metadataStore telemetrytypes.MetadataStore
|
||||||
|
signal telemetrytypes.Signal
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure interface compliance at compile time
|
// Ensure interface compliance at compile time
|
||||||
@ -50,20 +47,47 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// Constructor functions
|
// Constructor functions
|
||||||
func NewTraceResourceFilterStatementBuilder(opts ResourceFilterStatementBuilderOpts) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
|
func NewTraceResourceFilterStatementBuilder(
|
||||||
|
fieldMapper qbtypes.FieldMapper,
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder,
|
||||||
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
|
) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
|
||||||
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
|
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
|
||||||
opts: opts,
|
fieldMapper: fieldMapper,
|
||||||
signal: telemetrytypes.SignalTraces,
|
conditionBuilder: conditionBuilder,
|
||||||
|
metadataStore: metadataStore,
|
||||||
|
signal: telemetrytypes.SignalTraces,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewLogResourceFilterStatementBuilder(opts ResourceFilterStatementBuilderOpts) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
|
func NewLogResourceFilterStatementBuilder(
|
||||||
|
fieldMapper qbtypes.FieldMapper,
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder,
|
||||||
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
|
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
|
||||||
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
|
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
|
||||||
opts: opts,
|
fieldMapper: fieldMapper,
|
||||||
signal: telemetrytypes.SignalLogs,
|
conditionBuilder: conditionBuilder,
|
||||||
|
metadataStore: metadataStore,
|
||||||
|
signal: telemetrytypes.SignalLogs,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryBuilderQuery[T]) []*telemetrytypes.FieldKeySelector {
|
||||||
|
var keySelectors []*telemetrytypes.FieldKeySelector
|
||||||
|
|
||||||
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
|
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
||||||
|
keySelectors = append(keySelectors, whereClauseSelectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range keySelectors {
|
||||||
|
keySelectors[idx].Signal = b.signal
|
||||||
|
}
|
||||||
|
|
||||||
|
return keySelectors
|
||||||
|
}
|
||||||
|
|
||||||
// Build builds a SQL query based on the given parameters
|
// Build builds a SQL query based on the given parameters
|
||||||
func (b *resourceFilterStatementBuilder[T]) Build(
|
func (b *resourceFilterStatementBuilder[T]) Build(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
@ -77,15 +101,21 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
|||||||
return nil, fmt.Errorf("%w: %s", ErrUnsupportedSignal, b.signal)
|
return nil, fmt.Errorf("%w: %s", ErrUnsupportedSignal, b.signal)
|
||||||
}
|
}
|
||||||
|
|
||||||
q := sqlbuilder.ClickHouse.NewSelectBuilder()
|
q := sqlbuilder.NewSelectBuilder()
|
||||||
q.Select("fingerprint")
|
q.Select("fingerprint")
|
||||||
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
|
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
|
||||||
|
|
||||||
if err := b.addConditions(ctx, q, start, end, query); err != nil {
|
keySelectors := b.getKeySelectors(query)
|
||||||
|
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
|
||||||
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
stmt, args := q.Build()
|
if err := b.addConditions(ctx, q, start, end, query, keys); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
stmt, args := q.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
Query: stmt,
|
Query: stmt,
|
||||||
Args: args,
|
Args: args,
|
||||||
@ -94,14 +124,22 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
|||||||
|
|
||||||
// addConditions adds both filter and time conditions to the query
|
// addConditions adds both filter and time conditions to the query
|
||||||
func (b *resourceFilterStatementBuilder[T]) addConditions(
|
func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||||
ctx context.Context,
|
_ context.Context,
|
||||||
sb *sqlbuilder.SelectBuilder,
|
sb *sqlbuilder.SelectBuilder,
|
||||||
start, end uint64,
|
start, end uint64,
|
||||||
query qbtypes.QueryBuilderQuery[T],
|
query qbtypes.QueryBuilderQuery[T],
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
) error {
|
) error {
|
||||||
// Add filter condition if present
|
// Add filter condition if present
|
||||||
if query.Filter != nil && query.Filter.Expression != "" {
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
filterWhereClause, _, err := b.opts.Compiler.Compile(ctx, query.Filter.Expression)
|
|
||||||
|
// warnings would be encountered as part of the main condition already
|
||||||
|
filterWhereClause, _, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
|
||||||
|
FieldMapper: b.fieldMapper,
|
||||||
|
ConditionBuilder: b.conditionBuilder,
|
||||||
|
FieldKeys: keys,
|
||||||
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -118,13 +156,9 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
|
|||||||
// addTimeFilter adds time-based filtering conditions
|
// addTimeFilter adds time-based filtering conditions
|
||||||
func (b *resourceFilterStatementBuilder[T]) addTimeFilter(sb *sqlbuilder.SelectBuilder, start, end uint64) {
|
func (b *resourceFilterStatementBuilder[T]) addTimeFilter(sb *sqlbuilder.SelectBuilder, start, end uint64) {
|
||||||
// Convert nanoseconds to seconds and adjust start bucket
|
// Convert nanoseconds to seconds and adjust start bucket
|
||||||
const (
|
|
||||||
nsToSeconds = 1000000000
|
|
||||||
bucketAdjustment = 1800 // 30 minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
startBucket := start/nsToSeconds - bucketAdjustment
|
startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||||
endBucket := end / nsToSeconds
|
endBucket := end / querybuilder.NsToSeconds
|
||||||
|
|
||||||
sb.Where(
|
sb.Where(
|
||||||
sb.GE("seen_at_ts_bucket_start", startBucket),
|
sb.GE("seen_at_ts_bucket_start", startBucket),
|
||||||
|
@ -2,6 +2,11 @@ package querybuilder
|
|||||||
|
|
||||||
import "math"
|
import "math"
|
||||||
|
|
||||||
|
const (
|
||||||
|
NsToSeconds = 1000000000
|
||||||
|
BucketAdjustment = 1800 // 30 minutes
|
||||||
|
)
|
||||||
|
|
||||||
// ToNanoSecs takes epoch and returns it in ns
|
// ToNanoSecs takes epoch and returns it in ns
|
||||||
func ToNanoSecs(epoch uint64) uint64 {
|
func ToNanoSecs(epoch uint64) uint64 {
|
||||||
temp := epoch
|
temp := epoch
|
||||||
|
@ -147,6 +147,11 @@ func (c *conditionBuilder) conditionFor(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// if the field is intrinsic, it always exists
|
||||||
|
if slices.Contains(IntrinsicFields, key.Name) {
|
||||||
|
return "true", nil
|
||||||
|
}
|
||||||
|
|
||||||
var value any
|
var value any
|
||||||
switch column.Type {
|
switch column.Type {
|
||||||
case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}:
|
case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}:
|
||||||
|
@ -249,8 +249,7 @@ func TestConditionFor(t *testing.T) {
|
|||||||
},
|
},
|
||||||
operator: qbtypes.FilterOperatorExists,
|
operator: qbtypes.FilterOperatorExists,
|
||||||
value: nil,
|
value: nil,
|
||||||
expectedSQL: "body <> ?",
|
expectedSQL: "true",
|
||||||
expectedArgs: []any{""},
|
|
||||||
expectedError: nil,
|
expectedError: nil,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -261,8 +260,7 @@ func TestConditionFor(t *testing.T) {
|
|||||||
},
|
},
|
||||||
operator: qbtypes.FilterOperatorNotExists,
|
operator: qbtypes.FilterOperatorNotExists,
|
||||||
value: nil,
|
value: nil,
|
||||||
expectedSQL: "body = ?",
|
expectedSQL: "true",
|
||||||
expectedArgs: []any{""},
|
|
||||||
expectedError: nil,
|
expectedError: nil,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -273,8 +271,7 @@ func TestConditionFor(t *testing.T) {
|
|||||||
},
|
},
|
||||||
operator: qbtypes.FilterOperatorExists,
|
operator: qbtypes.FilterOperatorExists,
|
||||||
value: nil,
|
value: nil,
|
||||||
expectedSQL: "timestamp <> ?",
|
expectedSQL: "true",
|
||||||
expectedArgs: []any{0},
|
|
||||||
expectedError: nil,
|
expectedError: nil,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -3,6 +3,7 @@ package telemetrylogs
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
@ -16,32 +17,32 @@ var (
|
|||||||
ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation")
|
ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation")
|
||||||
)
|
)
|
||||||
|
|
||||||
type LogQueryStatementBuilderOpts struct {
|
|
||||||
MetadataStore telemetrytypes.MetadataStore
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
ResourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
|
||||||
Compiler qbtypes.FilterCompiler
|
|
||||||
AggExprRewriter qbtypes.AggExprRewriter
|
|
||||||
}
|
|
||||||
|
|
||||||
type logQueryStatementBuilder struct {
|
type logQueryStatementBuilder struct {
|
||||||
opts LogQueryStatementBuilderOpts
|
logger *slog.Logger
|
||||||
fm qbtypes.FieldMapper
|
metadataStore telemetrytypes.MetadataStore
|
||||||
cb qbtypes.ConditionBuilder
|
fm qbtypes.FieldMapper
|
||||||
compiler qbtypes.FilterCompiler
|
cb qbtypes.ConditionBuilder
|
||||||
aggExprRewriter qbtypes.AggExprRewriter
|
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
||||||
|
aggExprRewriter qbtypes.AggExprRewriter
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*logQueryStatementBuilder)(nil)
|
var _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*logQueryStatementBuilder)(nil)
|
||||||
|
|
||||||
func NewLogQueryStatementBuilder(opts LogQueryStatementBuilderOpts) *logQueryStatementBuilder {
|
func NewLogQueryStatementBuilder(
|
||||||
|
logger *slog.Logger,
|
||||||
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
|
fieldMapper qbtypes.FieldMapper,
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder,
|
||||||
|
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||||
|
aggExprRewriter qbtypes.AggExprRewriter,
|
||||||
|
) *logQueryStatementBuilder {
|
||||||
return &logQueryStatementBuilder{
|
return &logQueryStatementBuilder{
|
||||||
opts: opts,
|
logger: logger,
|
||||||
fm: opts.FieldMapper,
|
metadataStore: metadataStore,
|
||||||
cb: opts.ConditionBuilder,
|
fm: fieldMapper,
|
||||||
compiler: opts.Compiler,
|
cb: conditionBuilder,
|
||||||
aggExprRewriter: opts.AggExprRewriter,
|
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
||||||
|
aggExprRewriter: aggExprRewriter,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -58,13 +59,13 @@ func (b *logQueryStatementBuilder) Build(
|
|||||||
end = querybuilder.ToNanoSecs(end)
|
end = querybuilder.ToNanoSecs(end)
|
||||||
|
|
||||||
keySelectors := getKeySelectors(query)
|
keySelectors := getKeySelectors(query)
|
||||||
keys, err := b.opts.MetadataStore.GetKeysMulti(ctx, keySelectors)
|
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create SQL builder
|
// Create SQL builder
|
||||||
q := sqlbuilder.ClickHouse.NewSelectBuilder()
|
q := sqlbuilder.NewSelectBuilder()
|
||||||
|
|
||||||
switch requestType {
|
switch requestType {
|
||||||
case qbtypes.RequestTypeRaw:
|
case qbtypes.RequestTypeRaw:
|
||||||
@ -87,8 +88,29 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []
|
|||||||
keySelectors = append(keySelectors, selectors...)
|
keySelectors = append(keySelectors, selectors...)
|
||||||
}
|
}
|
||||||
|
|
||||||
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
keySelectors = append(keySelectors, whereClauseSelectors...)
|
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
||||||
|
keySelectors = append(keySelectors, whereClauseSelectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.GroupBy {
|
||||||
|
groupBy := query.GroupBy[idx]
|
||||||
|
selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name)
|
||||||
|
keySelectors = append(keySelectors, selectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.Order {
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: query.Order[idx].Key.Name,
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
FieldContext: query.Order[idx].Key.FieldContext,
|
||||||
|
FieldDataType: query.Order[idx].Key.FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range keySelectors {
|
||||||
|
keySelectors[idx].Signal = telemetrytypes.SignalLogs
|
||||||
|
}
|
||||||
|
|
||||||
return keySelectors
|
return keySelectors
|
||||||
}
|
}
|
||||||
@ -99,7 +121,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
|||||||
sb *sqlbuilder.SelectBuilder,
|
sb *sqlbuilder.SelectBuilder,
|
||||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||||
start, end uint64,
|
start, end uint64,
|
||||||
_ map[string][]*telemetrytypes.TelemetryFieldKey,
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
) (*qbtypes.Statement, error) {
|
) (*qbtypes.Statement, error) {
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@ -123,7 +145,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
|||||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||||
|
|
||||||
// Add filter conditions
|
// Add filter conditions
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -144,8 +166,8 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
|||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
finalSQL := combineCTEs(cteFragments) + mainSQL
|
finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs := prependArgs(cteArgs, mainArgs)
|
finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
Query: finalSQL,
|
Query: finalSQL,
|
||||||
@ -179,21 +201,29 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
int64(query.StepInterval.Seconds()),
|
int64(query.StepInterval.Seconds()),
|
||||||
))
|
))
|
||||||
|
|
||||||
|
var allGroupByArgs []any
|
||||||
|
|
||||||
// Keep original column expressions so we can build the tuple
|
// Keep original column expressions so we can build the tuple
|
||||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||||
for _, gb := range query.GroupBy {
|
for _, gb := range query.GroupBy {
|
||||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &gb.TelemetryFieldKey, keys)
|
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
sb.SelectMore(colExpr)
|
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||||
|
allGroupByArgs = append(allGroupByArgs, args...)
|
||||||
|
sb.SelectMore(sqlbuilder.Escape(colExpr))
|
||||||
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name))
|
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Aggregations
|
// Aggregations
|
||||||
allAggChArgs := make([]any, 0)
|
allAggChArgs := make([]any, 0)
|
||||||
for i, agg := range query.Aggregations {
|
for i, agg := range query.Aggregations {
|
||||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, agg.Expression)
|
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||||
|
ctx, agg.Expression,
|
||||||
|
uint64(query.StepInterval.Seconds()),
|
||||||
|
keys,
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -202,7 +232,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -212,7 +242,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
|
|
||||||
if query.Limit > 0 {
|
if query.Limit > 0 {
|
||||||
// build the scalar “top/bottom-N” query in its own builder.
|
// build the scalar “top/bottom-N” query in its own builder.
|
||||||
cteSB := sqlbuilder.ClickHouse.NewSelectBuilder()
|
cteSB := sqlbuilder.NewSelectBuilder()
|
||||||
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
|
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -231,11 +261,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
sb.Having(query.Having.Expression)
|
sb.Having(query.Having.Expression)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
// Stitch it all together: WITH … SELECT …
|
// Stitch it all together: WITH … SELECT …
|
||||||
finalSQL = combineCTEs(cteFragments) + mainSQL
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs = prependArgs(cteArgs, mainArgs)
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
sb.GroupBy("ALL")
|
sb.GroupBy("ALL")
|
||||||
@ -243,11 +275,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
sb.Having(query.Having.Expression)
|
sb.Having(query.Having.Expression)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
// Stitch it all together: WITH … SELECT …
|
// Stitch it all together: WITH … SELECT …
|
||||||
finalSQL = combineCTEs(cteFragments) + mainSQL
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs = prependArgs(cteArgs, mainArgs)
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
}
|
}
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
@ -281,20 +315,30 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
|||||||
|
|
||||||
allAggChArgs := []any{}
|
allAggChArgs := []any{}
|
||||||
|
|
||||||
// Add group by columns
|
var allGroupByArgs []any
|
||||||
for _, groupBy := range query.GroupBy {
|
|
||||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &groupBy.TelemetryFieldKey, keys)
|
for _, gb := range query.GroupBy {
|
||||||
|
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
sb.SelectMore(colExpr)
|
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||||
|
allGroupByArgs = append(allGroupByArgs, args...)
|
||||||
|
sb.SelectMore(sqlbuilder.Escape(colExpr))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// for scalar queries, the rate would be end-start
|
||||||
|
rateInterval := (end - start) / querybuilder.NsToSeconds
|
||||||
|
|
||||||
// Add aggregation
|
// Add aggregation
|
||||||
if len(query.Aggregations) > 0 {
|
if len(query.Aggregations) > 0 {
|
||||||
for idx := range query.Aggregations {
|
for idx := range query.Aggregations {
|
||||||
aggExpr := query.Aggregations[idx]
|
aggExpr := query.Aggregations[idx]
|
||||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, aggExpr.Expression)
|
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||||
|
ctx, aggExpr.Expression,
|
||||||
|
rateInterval,
|
||||||
|
keys,
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -307,7 +351,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
|||||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||||
|
|
||||||
// Add filter conditions
|
// Add filter conditions
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -340,10 +384,12 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
|||||||
sb.Limit(query.Limit)
|
sb.Limit(query.Limit)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
|
||||||
finalSQL := combineCTEs(cteFragments) + mainSQL
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
finalArgs := prependArgs(cteArgs, mainArgs)
|
|
||||||
|
finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
|
finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
Query: finalSQL,
|
Query: finalSQL,
|
||||||
@ -353,11 +399,21 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// buildFilterCondition builds SQL condition from filter expression
|
// buildFilterCondition builds SQL condition from filter expression
|
||||||
func (b *logQueryStatementBuilder) addFilterCondition(ctx context.Context, sb *sqlbuilder.SelectBuilder, start, end uint64, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) ([]string, error) {
|
func (b *logQueryStatementBuilder) addFilterCondition(
|
||||||
|
_ context.Context,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
start, end uint64,
|
||||||
|
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
|
) ([]string, error) {
|
||||||
|
|
||||||
// add filter expression
|
// add filter expression
|
||||||
|
filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
|
||||||
filterWhereClause, warnings, err := b.compiler.Compile(ctx, query.Filter.Expression)
|
FieldMapper: b.fm,
|
||||||
|
ConditionBuilder: b.cb,
|
||||||
|
FieldKeys: keys,
|
||||||
|
SkipResourceFilter: true,
|
||||||
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -368,36 +424,14 @@ func (b *logQueryStatementBuilder) addFilterCondition(ctx context.Context, sb *s
|
|||||||
}
|
}
|
||||||
|
|
||||||
// add time filter
|
// add time filter
|
||||||
startBucket := start/1000000000 - 1800
|
startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||||
endBucket := end / 1000000000
|
endBucket := end / querybuilder.NsToSeconds
|
||||||
|
|
||||||
sb.Where(sb.GE("timestamp", start), sb.LE("timestamp", end), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket))
|
sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.LE("timestamp", fmt.Sprintf("%d", end)), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket))
|
||||||
|
|
||||||
return warnings, nil
|
return warnings, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// combineCTEs takes any number of individual CTE fragments like
|
|
||||||
//
|
|
||||||
// "__resource_filter AS (...)", "__limit_cte AS (...)"
|
|
||||||
//
|
|
||||||
// and renders the final `WITH …` clause.
|
|
||||||
func combineCTEs(ctes []string) string {
|
|
||||||
if len(ctes) == 0 {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return "WITH " + strings.Join(ctes, ", ") + " "
|
|
||||||
}
|
|
||||||
|
|
||||||
// prependArgs ensures CTE arguments appear before main-query arguments
|
|
||||||
// in the final slice so their ordinal positions match the SQL string.
|
|
||||||
func prependArgs(cteArgs [][]any, mainArgs []any) []any {
|
|
||||||
out := make([]any, 0, len(mainArgs)+len(cteArgs))
|
|
||||||
for _, a := range cteArgs { // CTEs first, in declaration order
|
|
||||||
out = append(out, a...)
|
|
||||||
}
|
|
||||||
return append(out, mainArgs...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) (int, bool) {
|
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) (int, bool) {
|
||||||
for i, agg := range q.Aggregations {
|
for i, agg := range q.Aggregations {
|
||||||
if k.Key.Name == agg.Alias ||
|
if k.Key.Name == agg.Alias ||
|
||||||
@ -432,7 +466,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
|
|||||||
start, end uint64,
|
start, end uint64,
|
||||||
) (*qbtypes.Statement, error) {
|
) (*qbtypes.Statement, error) {
|
||||||
|
|
||||||
return b.opts.ResourceFilterStmtBuilder.Build(
|
return b.resourceFilterStmtBuilder.Build(
|
||||||
ctx,
|
ctx,
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
|
@ -2,6 +2,7 @@ package telemetrylogs
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"log/slog"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@ -17,18 +18,19 @@ func resourceFilterStmtBuilder() (qbtypes.StatementBuilder[qbtypes.LogAggregatio
|
|||||||
fm := resourcefilter.NewFieldMapper()
|
fm := resourcefilter.NewFieldMapper()
|
||||||
cb := resourcefilter.NewConditionBuilder(fm)
|
cb := resourcefilter.NewConditionBuilder(fm)
|
||||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
keysMap := buildCompleteFieldKeyMap()
|
||||||
compiler := resourcefilter.NewFilterCompiler(resourcefilter.FilterCompilerOpts{
|
for _, keys := range keysMap {
|
||||||
FieldMapper: fm,
|
for _, key := range keys {
|
||||||
ConditionBuilder: cb,
|
key.Signal = telemetrytypes.SignalLogs
|
||||||
MetadataStore: mockMetadataStore,
|
}
|
||||||
})
|
}
|
||||||
|
mockMetadataStore.KeysMap = keysMap
|
||||||
|
|
||||||
return resourcefilter.NewLogResourceFilterStatementBuilder(resourcefilter.ResourceFilterStatementBuilderOpts{
|
return resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||||
FieldMapper: fm,
|
fm,
|
||||||
ConditionBuilder: cb,
|
cb,
|
||||||
Compiler: compiler,
|
mockMetadataStore,
|
||||||
}), nil
|
), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestStatementBuilder(t *testing.T) {
|
func TestStatementBuilder(t *testing.T) {
|
||||||
@ -63,8 +65,8 @@ func TestStatementBuilder(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: qbtypes.Statement{
|
expected: qbtypes.Statement{
|
||||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT resources_string['service.name'] AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, resources_string['service.name'] AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
|
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
|
||||||
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), uint64(1747947419000000000), uint64(1747983448000000000), uint64(1747945619), uint64(1747983448), 10, uint64(1747947419000000000), uint64(1747983448000000000), uint64(1747945619), uint64(1747983448)},
|
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
|
||||||
},
|
},
|
||||||
expectedErr: nil,
|
expectedErr: nil,
|
||||||
},
|
},
|
||||||
@ -74,29 +76,20 @@ func TestStatementBuilder(t *testing.T) {
|
|||||||
cb := NewConditionBuilder(fm)
|
cb := NewConditionBuilder(fm)
|
||||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||||
compiler := NewFilterCompiler(FilterCompilerOpts{
|
|
||||||
FieldMapper: fm,
|
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
|
||||||
ConditionBuilder: cb,
|
|
||||||
MetadataStore: mockMetadataStore,
|
|
||||||
SkipResourceFilter: true,
|
|
||||||
})
|
|
||||||
aggExprRewriter := querybuilder.NewAggExprRewriter(querybuilder.AggExprRewriterOptions{
|
|
||||||
FieldMapper: fm,
|
|
||||||
ConditionBuilder: cb,
|
|
||||||
MetadataStore: mockMetadataStore,
|
|
||||||
})
|
|
||||||
|
|
||||||
resourceFilterStmtBuilder, err := resourceFilterStmtBuilder()
|
resourceFilterStmtBuilder, err := resourceFilterStmtBuilder()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
statementBuilder := NewLogQueryStatementBuilder(LogQueryStatementBuilderOpts{
|
statementBuilder := NewLogQueryStatementBuilder(
|
||||||
FieldMapper: fm,
|
slog.Default(),
|
||||||
ConditionBuilder: cb,
|
mockMetadataStore,
|
||||||
Compiler: compiler,
|
fm,
|
||||||
MetadataStore: mockMetadataStore,
|
cb,
|
||||||
AggExprRewriter: aggExprRewriter,
|
resourceFilterStmtBuilder,
|
||||||
ResourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
aggExprRewriter,
|
||||||
})
|
)
|
||||||
|
|
||||||
for _, c := range cases {
|
for _, c := range cases {
|
||||||
t.Run(c.name, func(t *testing.T) {
|
t.Run(c.name, func(t *testing.T) {
|
||||||
|
@ -19,7 +19,7 @@ func limitString(s string, maxLen int) string {
|
|||||||
|
|
||||||
// Function to build a complete field key map for testing all scenarios
|
// Function to build a complete field key map for testing all scenarios
|
||||||
func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
||||||
return map[string][]*telemetrytypes.TelemetryFieldKey{
|
keysMap := map[string][]*telemetrytypes.TelemetryFieldKey{
|
||||||
"service.name": {
|
"service.name": {
|
||||||
{
|
{
|
||||||
Name: "service.name",
|
Name: "service.name",
|
||||||
@ -856,4 +856,11 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for _, keys := range keysMap {
|
||||||
|
for _, key := range keys {
|
||||||
|
key.Signal = telemetrytypes.SignalLogs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return keysMap
|
||||||
}
|
}
|
||||||
|
@ -6,6 +6,7 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
@ -36,7 +37,6 @@ type telemetryMetaStore struct {
|
|||||||
|
|
||||||
fm qbtypes.FieldMapper
|
fm qbtypes.FieldMapper
|
||||||
conditionBuilder qbtypes.ConditionBuilder
|
conditionBuilder qbtypes.ConditionBuilder
|
||||||
compiler qbtypes.FilterCompiler
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewTelemetryMetaStore(
|
func NewTelemetryMetaStore(
|
||||||
@ -563,7 +563,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
|
|||||||
sb := sqlbuilder.Select("DISTINCT " + selectColumn).From(t.relatedMetadataDBName + "." + t.relatedMetadataTblName)
|
sb := sqlbuilder.Select("DISTINCT " + selectColumn).From(t.relatedMetadataDBName + "." + t.relatedMetadataTblName)
|
||||||
|
|
||||||
if len(fieldValueSelector.ExistingQuery) != 0 {
|
if len(fieldValueSelector.ExistingQuery) != 0 {
|
||||||
whereClause, _, err := t.compiler.Compile(ctx, fieldValueSelector.ExistingQuery)
|
keySelectors := querybuilder.QueryStringToKeysSelectors(fieldValueSelector.ExistingQuery)
|
||||||
|
for _, keySelector := range keySelectors {
|
||||||
|
keySelector.Signal = fieldValueSelector.Signal
|
||||||
|
}
|
||||||
|
keys, err := t.GetKeysMulti(ctx, keySelectors)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClause, _, err := querybuilder.PrepareWhereClause(fieldValueSelector.ExistingQuery, querybuilder.FilterExprVisitorOpts{
|
||||||
|
FieldMapper: t.fm,
|
||||||
|
ConditionBuilder: t.conditionBuilder,
|
||||||
|
FieldKeys: keys,
|
||||||
|
})
|
||||||
if err == nil {
|
if err == nil {
|
||||||
sb.AddWhereClause(whereClause)
|
sb.AddWhereClause(whereClause)
|
||||||
} else {
|
} else {
|
||||||
|
@ -126,6 +126,11 @@ func (c *conditionBuilder) conditionFor(
|
|||||||
// in the query builder, `exists` and `not exists` are used for
|
// in the query builder, `exists` and `not exists` are used for
|
||||||
// key membership checks, so depending on the column type, the condition changes
|
// key membership checks, so depending on the column type, the condition changes
|
||||||
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
|
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
|
||||||
|
// if the field is intrinsic, it always exists
|
||||||
|
if slices.Contains(IntrinsicFields, tblFieldName) || slices.Contains(CalculatedFields, tblFieldName) {
|
||||||
|
return "true", nil
|
||||||
|
}
|
||||||
|
|
||||||
var value any
|
var value any
|
||||||
switch column.Type {
|
switch column.Type {
|
||||||
case schema.ColumnTypeString,
|
case schema.ColumnTypeString,
|
||||||
|
@ -149,7 +149,7 @@ func (m *defaultFieldMapper) getColumn(
|
|||||||
case telemetrytypes.FieldDataTypeBool:
|
case telemetrytypes.FieldDataTypeBool:
|
||||||
return indexV3Columns["attributes_bool"], nil
|
return indexV3Columns["attributes_bool"], nil
|
||||||
}
|
}
|
||||||
case telemetrytypes.FieldContextSpan:
|
case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified:
|
||||||
if col, ok := indexV3Columns[key.Name]; ok {
|
if col, ok := indexV3Columns[key.Name]; ok {
|
||||||
return col, nil
|
return col, nil
|
||||||
}
|
}
|
||||||
|
@ -1,55 +0,0 @@
|
|||||||
package telemetrytraces
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
|
||||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
|
||||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
|
||||||
"github.com/huandu/go-sqlbuilder"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FilterCompilerOpts struct {
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
MetadataStore telemetrytypes.MetadataStore
|
|
||||||
FullTextColumn *telemetrytypes.TelemetryFieldKey
|
|
||||||
JsonBodyPrefix string
|
|
||||||
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
|
||||||
SkipResourceFilter bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type filterCompiler struct {
|
|
||||||
opts FilterCompilerOpts
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewFilterCompiler(opts FilterCompilerOpts) *filterCompiler {
|
|
||||||
return &filterCompiler{
|
|
||||||
opts: opts,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *filterCompiler) Compile(ctx context.Context, expr string) (*sqlbuilder.WhereClause, []string, error) {
|
|
||||||
selectors := querybuilder.QueryStringToKeysSelectors(expr)
|
|
||||||
|
|
||||||
keys, err := c.opts.MetadataStore.GetKeysMulti(ctx, selectors)
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(expr, querybuilder.FilterExprVisitorOpts{
|
|
||||||
FieldMapper: c.opts.FieldMapper,
|
|
||||||
ConditionBuilder: c.opts.ConditionBuilder,
|
|
||||||
FieldKeys: keys,
|
|
||||||
FullTextColumn: c.opts.FullTextColumn,
|
|
||||||
JsonBodyPrefix: c.opts.JsonBodyPrefix,
|
|
||||||
JsonKeyToKey: c.opts.JsonKeyToKey,
|
|
||||||
SkipResourceFilter: c.opts.SkipResourceFilter,
|
|
||||||
})
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return filterWhereClause, warnings, nil
|
|
||||||
}
|
|
@ -3,6 +3,7 @@ package telemetrytraces
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
@ -16,32 +17,32 @@ var (
|
|||||||
ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation")
|
ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation")
|
||||||
)
|
)
|
||||||
|
|
||||||
type TraceQueryStatementBuilderOpts struct {
|
|
||||||
MetadataStore telemetrytypes.MetadataStore
|
|
||||||
FieldMapper qbtypes.FieldMapper
|
|
||||||
ConditionBuilder qbtypes.ConditionBuilder
|
|
||||||
ResourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
|
||||||
Compiler qbtypes.FilterCompiler
|
|
||||||
AggExprRewriter qbtypes.AggExprRewriter
|
|
||||||
}
|
|
||||||
|
|
||||||
type traceQueryStatementBuilder struct {
|
type traceQueryStatementBuilder struct {
|
||||||
opts TraceQueryStatementBuilderOpts
|
logger *slog.Logger
|
||||||
fm qbtypes.FieldMapper
|
metadataStore telemetrytypes.MetadataStore
|
||||||
cb qbtypes.ConditionBuilder
|
fm qbtypes.FieldMapper
|
||||||
compiler qbtypes.FilterCompiler
|
cb qbtypes.ConditionBuilder
|
||||||
aggExprRewriter qbtypes.AggExprRewriter
|
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||||
|
aggExprRewriter qbtypes.AggExprRewriter
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil)
|
var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil)
|
||||||
|
|
||||||
func NewTraceQueryStatementBuilder(opts TraceQueryStatementBuilderOpts) *traceQueryStatementBuilder {
|
func NewTraceQueryStatementBuilder(
|
||||||
|
logger *slog.Logger,
|
||||||
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
|
fieldMapper qbtypes.FieldMapper,
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder,
|
||||||
|
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||||
|
aggExprRewriter qbtypes.AggExprRewriter,
|
||||||
|
) *traceQueryStatementBuilder {
|
||||||
return &traceQueryStatementBuilder{
|
return &traceQueryStatementBuilder{
|
||||||
opts: opts,
|
logger: logger,
|
||||||
fm: opts.FieldMapper,
|
metadataStore: metadataStore,
|
||||||
cb: opts.ConditionBuilder,
|
fm: fieldMapper,
|
||||||
compiler: opts.Compiler,
|
cb: conditionBuilder,
|
||||||
aggExprRewriter: opts.AggExprRewriter,
|
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
||||||
|
aggExprRewriter: aggExprRewriter,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -58,13 +59,14 @@ func (b *traceQueryStatementBuilder) Build(
|
|||||||
end = querybuilder.ToNanoSecs(end)
|
end = querybuilder.ToNanoSecs(end)
|
||||||
|
|
||||||
keySelectors := getKeySelectors(query)
|
keySelectors := getKeySelectors(query)
|
||||||
keys, err := b.opts.MetadataStore.GetKeysMulti(ctx, keySelectors)
|
|
||||||
|
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create SQL builder
|
// Create SQL builder
|
||||||
q := sqlbuilder.ClickHouse.NewSelectBuilder()
|
q := sqlbuilder.NewSelectBuilder()
|
||||||
|
|
||||||
switch requestType {
|
switch requestType {
|
||||||
case qbtypes.RequestTypeRaw:
|
case qbtypes.RequestTypeRaw:
|
||||||
@ -87,8 +89,38 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation])
|
|||||||
keySelectors = append(keySelectors, selectors...)
|
keySelectors = append(keySelectors, selectors...)
|
||||||
}
|
}
|
||||||
|
|
||||||
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
keySelectors = append(keySelectors, whereClauseSelectors...)
|
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
||||||
|
keySelectors = append(keySelectors, whereClauseSelectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.GroupBy {
|
||||||
|
groupBy := query.GroupBy[idx]
|
||||||
|
selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name)
|
||||||
|
keySelectors = append(keySelectors, selectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.SelectFields {
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: query.SelectFields[idx].Name,
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
FieldContext: query.SelectFields[idx].FieldContext,
|
||||||
|
FieldDataType: query.SelectFields[idx].FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.Order {
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: query.Order[idx].Key.Name,
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
FieldContext: query.Order[idx].Key.FieldContext,
|
||||||
|
FieldDataType: query.Order[idx].Key.FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range keySelectors {
|
||||||
|
keySelectors[idx].Signal = telemetrytypes.SignalTraces
|
||||||
|
}
|
||||||
|
|
||||||
return keySelectors
|
return keySelectors
|
||||||
}
|
}
|
||||||
@ -120,31 +152,32 @@ func (b *traceQueryStatementBuilder) buildListQuery(
|
|||||||
"trace_id",
|
"trace_id",
|
||||||
"span_id",
|
"span_id",
|
||||||
"name",
|
"name",
|
||||||
"resource_string_service$$name",
|
sqlbuilder.Escape("resource_string_service$$name"),
|
||||||
"duration_nano",
|
"duration_nano",
|
||||||
"response_status_code",
|
"response_status_code",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
|
||||||
for _, field := range query.SelectFields {
|
for _, field := range query.SelectFields {
|
||||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
|
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
sb.SelectMore(colExpr)
|
sb.SelectMore(sqlbuilder.Escape(colExpr))
|
||||||
}
|
}
|
||||||
|
|
||||||
// From table
|
// From table
|
||||||
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
||||||
|
|
||||||
// Add filter conditions
|
// Add filter conditions
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add order by
|
// Add order by
|
||||||
for _, orderBy := range query.Order {
|
for _, orderBy := range query.Order {
|
||||||
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction))
|
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add limit and offset
|
// Add limit and offset
|
||||||
@ -158,8 +191,8 @@ func (b *traceQueryStatementBuilder) buildListQuery(
|
|||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
finalSQL := combineCTEs(cteFragments) + mainSQL
|
finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs := prependArgs(cteArgs, mainArgs)
|
finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
Query: finalSQL,
|
Query: finalSQL,
|
||||||
@ -193,21 +226,29 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
int64(query.StepInterval.Seconds()),
|
int64(query.StepInterval.Seconds()),
|
||||||
))
|
))
|
||||||
|
|
||||||
|
var allGroupByArgs []any
|
||||||
|
|
||||||
// Keep original column expressions so we can build the tuple
|
// Keep original column expressions so we can build the tuple
|
||||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||||
for _, gb := range query.GroupBy {
|
for _, gb := range query.GroupBy {
|
||||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &gb.TelemetryFieldKey, keys)
|
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
sb.SelectMore(colExpr)
|
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||||
|
allGroupByArgs = append(allGroupByArgs, args...)
|
||||||
|
sb.SelectMore(sqlbuilder.Escape(colExpr))
|
||||||
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name))
|
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Aggregations
|
// Aggregations
|
||||||
allAggChArgs := make([]any, 0)
|
allAggChArgs := make([]any, 0)
|
||||||
for i, agg := range query.Aggregations {
|
for i, agg := range query.Aggregations {
|
||||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, agg.Expression)
|
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||||
|
ctx, agg.Expression,
|
||||||
|
uint64(query.StepInterval.Seconds()),
|
||||||
|
keys,
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -216,7 +257,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -226,7 +267,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
|
|
||||||
if query.Limit > 0 {
|
if query.Limit > 0 {
|
||||||
// build the scalar “top/bottom-N” query in its own builder.
|
// build the scalar “top/bottom-N” query in its own builder.
|
||||||
cteSB := sqlbuilder.ClickHouse.NewSelectBuilder()
|
cteSB := sqlbuilder.NewSelectBuilder()
|
||||||
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
|
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -245,11 +286,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
sb.Having(query.Having.Expression)
|
sb.Having(query.Having.Expression)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
// Stitch it all together: WITH … SELECT …
|
// Stitch it all together: WITH … SELECT …
|
||||||
finalSQL = combineCTEs(cteFragments) + mainSQL
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs = prependArgs(cteArgs, mainArgs)
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
sb.GroupBy("ALL")
|
sb.GroupBy("ALL")
|
||||||
@ -257,11 +299,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
|||||||
sb.Having(query.Having.Expression)
|
sb.Having(query.Having.Expression)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
// Stitch it all together: WITH … SELECT …
|
// Stitch it all together: WITH … SELECT …
|
||||||
finalSQL = combineCTEs(cteFragments) + mainSQL
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
finalArgs = prependArgs(cteArgs, mainArgs)
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
}
|
}
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
@ -295,20 +338,29 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
|||||||
|
|
||||||
allAggChArgs := []any{}
|
allAggChArgs := []any{}
|
||||||
|
|
||||||
// Add group by columns
|
var allGroupByArgs []any
|
||||||
for _, groupBy := range query.GroupBy {
|
for _, gb := range query.GroupBy {
|
||||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &groupBy.TelemetryFieldKey, keys)
|
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
sb.SelectMore(colExpr)
|
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||||
|
allGroupByArgs = append(allGroupByArgs, args...)
|
||||||
|
sb.SelectMore(sqlbuilder.Escape(colExpr))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// for scalar queries, the rate would be end-start
|
||||||
|
rateInterval := (end - start) / querybuilder.NsToSeconds
|
||||||
|
|
||||||
// Add aggregation
|
// Add aggregation
|
||||||
if len(query.Aggregations) > 0 {
|
if len(query.Aggregations) > 0 {
|
||||||
for idx := range query.Aggregations {
|
for idx := range query.Aggregations {
|
||||||
aggExpr := query.Aggregations[idx]
|
aggExpr := query.Aggregations[idx]
|
||||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, aggExpr.Expression)
|
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||||
|
ctx, aggExpr.Expression,
|
||||||
|
rateInterval,
|
||||||
|
keys,
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -321,7 +373,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
|||||||
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
||||||
|
|
||||||
// Add filter conditions
|
// Add filter conditions
|
||||||
warnings, err := b.addFilterCondition(ctx, sb, start, end, query)
|
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -338,9 +390,9 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
|||||||
for _, orderBy := range query.Order {
|
for _, orderBy := range query.Order {
|
||||||
idx, ok := aggOrderBy(orderBy, query)
|
idx, ok := aggOrderBy(orderBy, query)
|
||||||
if ok {
|
if ok {
|
||||||
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction))
|
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
|
||||||
} else {
|
} else {
|
||||||
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction))
|
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -354,10 +406,12 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
|||||||
sb.Limit(query.Limit)
|
sb.Limit(query.Limit)
|
||||||
}
|
}
|
||||||
|
|
||||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, allAggChArgs...)
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
|
||||||
finalSQL := combineCTEs(cteFragments) + mainSQL
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
finalArgs := prependArgs(cteArgs, mainArgs)
|
|
||||||
|
finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
|
finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
|
||||||
return &qbtypes.Statement{
|
return &qbtypes.Statement{
|
||||||
Query: finalSQL,
|
Query: finalSQL,
|
||||||
@ -367,14 +421,30 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// buildFilterCondition builds SQL condition from filter expression
|
// buildFilterCondition builds SQL condition from filter expression
|
||||||
func (b *traceQueryStatementBuilder) addFilterCondition(ctx context.Context, sb *sqlbuilder.SelectBuilder, start, end uint64, query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) ([]string, error) {
|
func (b *traceQueryStatementBuilder) addFilterCondition(
|
||||||
|
_ context.Context,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
start, end uint64,
|
||||||
|
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
|
) ([]string, error) {
|
||||||
|
|
||||||
// add filter expression
|
var filterWhereClause *sqlbuilder.WhereClause
|
||||||
|
var warnings []string
|
||||||
|
var err error
|
||||||
|
|
||||||
filterWhereClause, warnings, err := b.compiler.Compile(ctx, query.Filter.Expression)
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
|
// add filter expression
|
||||||
|
filterWhereClause, warnings, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
|
||||||
|
FieldMapper: b.fm,
|
||||||
|
ConditionBuilder: b.cb,
|
||||||
|
FieldKeys: keys,
|
||||||
|
SkipResourceFilter: true,
|
||||||
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if filterWhereClause != nil {
|
if filterWhereClause != nil {
|
||||||
@ -382,36 +452,14 @@ func (b *traceQueryStatementBuilder) addFilterCondition(ctx context.Context, sb
|
|||||||
}
|
}
|
||||||
|
|
||||||
// add time filter
|
// add time filter
|
||||||
startBucket := start/1000000000 - 1800
|
startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||||
endBucket := end / 1000000000
|
endBucket := end / querybuilder.NsToSeconds
|
||||||
|
|
||||||
sb.Where(sb.GE("timestamp", start), sb.LE("timestamp", end), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket))
|
sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.LE("timestamp", fmt.Sprintf("%d", end)), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket))
|
||||||
|
|
||||||
return warnings, nil
|
return warnings, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// combineCTEs takes any number of individual CTE fragments like
|
|
||||||
//
|
|
||||||
// "__resource_filter AS (...)", "__limit_cte AS (...)"
|
|
||||||
//
|
|
||||||
// and renders the final `WITH …` clause.
|
|
||||||
func combineCTEs(ctes []string) string {
|
|
||||||
if len(ctes) == 0 {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return "WITH " + strings.Join(ctes, ", ") + " "
|
|
||||||
}
|
|
||||||
|
|
||||||
// prependArgs ensures CTE arguments appear before main-query arguments
|
|
||||||
// in the final slice so their ordinal positions match the SQL string.
|
|
||||||
func prependArgs(cteArgs [][]any, mainArgs []any) []any {
|
|
||||||
out := make([]any, 0, len(mainArgs)+len(cteArgs))
|
|
||||||
for _, a := range cteArgs { // CTEs first, in declaration order
|
|
||||||
out = append(out, a...)
|
|
||||||
}
|
|
||||||
return append(out, mainArgs...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) (int, bool) {
|
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) (int, bool) {
|
||||||
for i, agg := range q.Aggregations {
|
for i, agg := range q.Aggregations {
|
||||||
if k.Key.Name == agg.Alias ||
|
if k.Key.Name == agg.Alias ||
|
||||||
@ -446,7 +494,7 @@ func (b *traceQueryStatementBuilder) buildResourceFilterCTE(
|
|||||||
start, end uint64,
|
start, end uint64,
|
||||||
) (*qbtypes.Statement, error) {
|
) (*qbtypes.Statement, error) {
|
||||||
|
|
||||||
return b.opts.ResourceFilterStmtBuilder.Build(
|
return b.resourceFilterStmtBuilder.Build(
|
||||||
ctx,
|
ctx,
|
||||||
start,
|
start,
|
||||||
end,
|
end,
|
||||||
|
@ -2,6 +2,7 @@ package telemetrytraces
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"log/slog"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@ -18,17 +19,12 @@ func resourceFilterStmtBuilder() (qbtypes.StatementBuilder[qbtypes.TraceAggregat
|
|||||||
cb := resourcefilter.NewConditionBuilder(fm)
|
cb := resourcefilter.NewConditionBuilder(fm)
|
||||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||||
compiler := resourcefilter.NewFilterCompiler(resourcefilter.FilterCompilerOpts{
|
|
||||||
FieldMapper: fm,
|
|
||||||
ConditionBuilder: cb,
|
|
||||||
MetadataStore: mockMetadataStore,
|
|
||||||
})
|
|
||||||
|
|
||||||
return resourcefilter.NewTraceResourceFilterStatementBuilder(resourcefilter.ResourceFilterStatementBuilderOpts{
|
return resourcefilter.NewTraceResourceFilterStatementBuilder(
|
||||||
FieldMapper: fm,
|
fm,
|
||||||
ConditionBuilder: cb,
|
cb,
|
||||||
Compiler: compiler,
|
mockMetadataStore,
|
||||||
}), nil
|
), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestStatementBuilder(t *testing.T) {
|
func TestStatementBuilder(t *testing.T) {
|
||||||
@ -63,8 +59,8 @@ func TestStatementBuilder(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: qbtypes.Statement{
|
expected: qbtypes.Statement{
|
||||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT resources_string['service.name'] AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, resources_string['service.name'] AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
|
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
|
||||||
Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), uint64(1747947419000000000), uint64(1747983448000000000), uint64(1747945619), uint64(1747983448), 10, uint64(1747947419000000000), uint64(1747983448000000000), uint64(1747945619), uint64(1747983448)},
|
Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
|
||||||
},
|
},
|
||||||
expectedErr: nil,
|
expectedErr: nil,
|
||||||
},
|
},
|
||||||
@ -74,29 +70,19 @@ func TestStatementBuilder(t *testing.T) {
|
|||||||
cb := NewConditionBuilder(fm)
|
cb := NewConditionBuilder(fm)
|
||||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||||
compiler := NewFilterCompiler(FilterCompilerOpts{
|
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
|
||||||
FieldMapper: fm,
|
|
||||||
ConditionBuilder: cb,
|
|
||||||
MetadataStore: mockMetadataStore,
|
|
||||||
SkipResourceFilter: true,
|
|
||||||
})
|
|
||||||
aggExprRewriter := querybuilder.NewAggExprRewriter(querybuilder.AggExprRewriterOptions{
|
|
||||||
FieldMapper: fm,
|
|
||||||
ConditionBuilder: cb,
|
|
||||||
MetadataStore: mockMetadataStore,
|
|
||||||
})
|
|
||||||
|
|
||||||
resourceFilterStmtBuilder, err := resourceFilterStmtBuilder()
|
resourceFilterStmtBuilder, err := resourceFilterStmtBuilder()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
statementBuilder := NewTraceQueryStatementBuilder(TraceQueryStatementBuilderOpts{
|
statementBuilder := NewTraceQueryStatementBuilder(
|
||||||
FieldMapper: fm,
|
slog.Default(),
|
||||||
ConditionBuilder: cb,
|
mockMetadataStore,
|
||||||
Compiler: compiler,
|
fm,
|
||||||
MetadataStore: mockMetadataStore,
|
cb,
|
||||||
AggExprRewriter: aggExprRewriter,
|
resourceFilterStmtBuilder,
|
||||||
ResourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
aggExprRewriter,
|
||||||
})
|
)
|
||||||
|
|
||||||
for _, c := range cases {
|
for _, c := range cases {
|
||||||
t.Run(c.name, func(t *testing.T) {
|
t.Run(c.name, func(t *testing.T) {
|
||||||
|
@ -5,7 +5,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
||||||
return map[string][]*telemetrytypes.TelemetryFieldKey{
|
keysMap := map[string][]*telemetrytypes.TelemetryFieldKey{
|
||||||
"service.name": {
|
"service.name": {
|
||||||
{
|
{
|
||||||
Name: "service.name",
|
Name: "service.name",
|
||||||
@ -35,4 +35,10 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
for _, keys := range keysMap {
|
||||||
|
for _, key := range keys {
|
||||||
|
key.Signal = telemetrytypes.SignalTraces
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return keysMap
|
||||||
}
|
}
|
||||||
|
@ -34,24 +34,10 @@ type ConditionBuilder interface {
|
|||||||
ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error)
|
ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type FilterCompiler interface {
|
|
||||||
// Compile compiles the filter into a sqlbuilder.WhereClause.
|
|
||||||
Compile(ctx context.Context, filter string) (*sqlbuilder.WhereClause, []string, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
type RewriteCtx struct {
|
|
||||||
RateInterval uint64
|
|
||||||
}
|
|
||||||
|
|
||||||
type RewriteOption func(*RewriteCtx)
|
|
||||||
|
|
||||||
func WithRateInterval(interval uint64) RewriteOption {
|
|
||||||
return func(c *RewriteCtx) { c.RateInterval = interval }
|
|
||||||
}
|
|
||||||
|
|
||||||
type AggExprRewriter interface {
|
type AggExprRewriter interface {
|
||||||
// Rewrite rewrites the aggregation expression to be used in the query.
|
// Rewrite rewrites the aggregation expression to be used in the query.
|
||||||
Rewrite(ctx context.Context, expr string, opts ...RewriteOption) (string, []any, error)
|
Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
|
||||||
|
RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type Statement struct {
|
type Statement struct {
|
||||||
|
@ -11,19 +11,20 @@ type Query interface {
|
|||||||
// Window returns [from, to) in epoch‑ms so cache can slice/merge.
|
// Window returns [from, to) in epoch‑ms so cache can slice/merge.
|
||||||
Window() (startMS, endMS uint64)
|
Window() (startMS, endMS uint64)
|
||||||
// Execute runs the query; implementors must be side‑effect‑free.
|
// Execute runs the query; implementors must be side‑effect‑free.
|
||||||
Execute(ctx context.Context) (Result, error)
|
Execute(ctx context.Context) (*Result, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type Result struct {
|
type Result struct {
|
||||||
Type RequestType
|
Type RequestType
|
||||||
Value any // concrete Go value (to be type asserted based on the RequestType)
|
Value any // concrete Go value (to be type asserted based on the RequestType)
|
||||||
Stats ExecStats
|
Stats ExecStats
|
||||||
|
Warnings []string
|
||||||
}
|
}
|
||||||
|
|
||||||
type ExecStats struct {
|
type ExecStats struct {
|
||||||
RowsScanned int64 `json:"rowsScanned"`
|
RowsScanned uint64 `json:"rowsScanned"`
|
||||||
BytesScanned int64 `json:"bytesScanned"`
|
BytesScanned uint64 `json:"bytesScanned"`
|
||||||
DurationMS int64 `json:"durationMs"`
|
DurationMS uint64 `json:"durationMs"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type TimeRange struct{ From, To uint64 } // ms since epoch
|
type TimeRange struct{ From, To uint64 } // ms since epoch
|
||||||
|
@ -14,19 +14,19 @@ type QueryRangeResponse struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type TimeSeriesData struct {
|
type TimeSeriesData struct {
|
||||||
QueryName string `json:"queryName"`
|
QueryName string `json:"queryName"`
|
||||||
Aggregations []AggregationBucket `json:"aggregations"`
|
Aggregations []*AggregationBucket `json:"aggregations"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type AggregationBucket struct {
|
type AggregationBucket struct {
|
||||||
Index int `json:"index"` // or string Alias
|
Index int `json:"index"` // or string Alias
|
||||||
Alias string `json:"alias"`
|
Alias string `json:"alias"`
|
||||||
Series []TimeSeries `json:"series"` // no extra nesting
|
Series []*TimeSeries `json:"series"` // no extra nesting
|
||||||
}
|
}
|
||||||
|
|
||||||
type TimeSeries struct {
|
type TimeSeries struct {
|
||||||
Labels []Label `json:"labels,omitempty"`
|
Labels []*Label `json:"labels,omitempty"`
|
||||||
Values []TimeSeriesValue `json:"values"`
|
Values []*TimeSeriesValue `json:"values"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Label struct {
|
type Label struct {
|
||||||
@ -36,10 +36,10 @@ type Label struct {
|
|||||||
|
|
||||||
type TimeSeriesValue struct {
|
type TimeSeriesValue struct {
|
||||||
Timestamp int64 `json:"timestamp"`
|
Timestamp int64 `json:"timestamp"`
|
||||||
Value float64 `json:"value,omitempty"`
|
Value float64 `json:"value"`
|
||||||
// for the heatmap type chart
|
// for the heatmap type chart
|
||||||
Values []float64 `json:"values,omitempty"`
|
Values []float64 `json:"values,omitempty"`
|
||||||
Bucket Bucket `json:"bucket,omitempty"`
|
Bucket *Bucket `json:"bucket,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Bucket struct {
|
type Bucket struct {
|
||||||
@ -65,16 +65,17 @@ type ColumnDescriptor struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ScalarData struct {
|
type ScalarData struct {
|
||||||
Columns []ColumnDescriptor `json:"columns"`
|
Columns []*ColumnDescriptor `json:"columns"`
|
||||||
Data [][]any `json:"data"`
|
Data [][]any `json:"data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type RawData struct {
|
type RawData struct {
|
||||||
QueryName string `json:"queryName"`
|
QueryName string `json:"queryName"`
|
||||||
Rows []RawRow `json:"rows"`
|
NextCursor string `json:"nextCursor"`
|
||||||
|
Rows []*RawRow `json:"rows"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type RawRow struct {
|
type RawRow struct {
|
||||||
Timestamp time.Time `json:"timestamp"`
|
Timestamp time.Time `json:"timestamp"`
|
||||||
Data map[string]any `json:"data"`
|
Data map[string]*any `json:"data"`
|
||||||
}
|
}
|
||||||
|
Loading…
x
Reference in New Issue
Block a user