API for fields added

parent 80c80b2180
commit ef141d2cee
@@ -18,16 +18,20 @@ const (
 )
 
 const (
     defaultDatasource            string        = "tcp://localhost:9000"
     defaultTraceDB               string        = "signoz_traces"
     defaultOperationsTable       string        = "signoz_operations"
     defaultIndexTable            string        = "signoz_index_v2"
     defaultErrorTable            string        = "signoz_error_index"
     defaulDurationTable          string        = "durationSortMV"
     defaultSpansTable            string        = "signoz_spans"
+    defaultLogsDB                string        = "signoz_logs"
+    defaultLogsTable             string        = "logs"
+    defaultLogAttributeKeysTable string        = "logs_atrribute_keys"
+    defaultLogResourceKeysTable  string        = "logs_resource_keys"
     defaultWriteBatchDelay       time.Duration = 5 * time.Second
     defaultWriteBatchSize        int           = 10000
     defaultEncoding              Encoding      = EncodingJSON
 )
 
 const (
@@ -43,19 +47,23 @@ const (
 
 // NamespaceConfig is Clickhouse's internal configuration data
 type namespaceConfig struct {
     namespace       string
     Enabled         bool
     Datasource      string
     TraceDB         string
     OperationsTable string
     IndexTable      string
     DurationTable   string
     SpansTable      string
     ErrorTable      string
+    LogsDB                 string
+    LogsTable              string
+    LogsAttributeKeysTable string
+    LogsResourceKeysTable  string
     WriteBatchDelay time.Duration
     WriteBatchSize  int
     Encoding        Encoding
     Connector       Connector
 }
 
 // Connecto defines how to connect to the database
@@ -102,19 +110,23 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
 
     options := &Options{
         primary: &namespaceConfig{
            namespace:       primaryNamespace,
            Enabled:         true,
            Datasource:      datasource,
            TraceDB:         defaultTraceDB,
            OperationsTable: defaultOperationsTable,
            IndexTable:      defaultIndexTable,
            ErrorTable:      defaultErrorTable,
            DurationTable:   defaulDurationTable,
            SpansTable:      defaultSpansTable,
+           LogsDB:                 defaultLogsDB,
+           LogsTable:              defaultLogsTable,
+           LogsAttributeKeysTable: defaultLogAttributeKeysTable,
+           LogsResourceKeysTable:  defaultLogResourceKeysTable,
            WriteBatchDelay: defaultWriteBatchDelay,
            WriteBatchSize:  defaultWriteBatchSize,
            Encoding:        defaultEncoding,
            Connector:       defaultConnector,
         },
         others: make(map[string]*namespaceConfig, len(otherNamespaces)),
     }
@@ -122,16 +134,20 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
     for _, namespace := range otherNamespaces {
         if namespace == archiveNamespace {
            options.others[namespace] = &namespaceConfig{
                namespace:       namespace,
                Datasource:      datasource,
                TraceDB:         "",
                OperationsTable: "",
                IndexTable:      "",
                ErrorTable:      "",
+               LogsDB:                 "",
+               LogsTable:              "",
+               LogsAttributeKeysTable: "",
+               LogsResourceKeysTable:  "",
                WriteBatchDelay: defaultWriteBatchDelay,
                WriteBatchSize:  defaultWriteBatchSize,
                Encoding:        defaultEncoding,
                Connector:       defaultConnector,
            }
         } else {
            options.others[namespace] = &namespaceConfig{namespace: namespace}
@@ -83,19 +83,23 @@ var (
 
 // SpanWriter for reading spans from ClickHouse
 type ClickHouseReader struct {
     db              clickhouse.Conn
     localDB         *sqlx.DB
     traceDB         string
     operationsTable string
     durationTable   string
     indexTable      string
     errorTable      string
     spansTable      string
+    logsDB            string
+    logsTable         string
+    logsAttributeKeys string
+    logsResourceKeys  string
     queryEngine     *promql.Engine
     remoteStorage   *remote.Storage
     ruleManager     *rules.Manager
     promConfig      *config.Config
     alertManager    am.Manager
 }
 
 // NewTraceReader returns a TraceReader for the database
@@ -113,15 +117,19 @@ func NewReader(localDB *sqlx.DB) *ClickHouseReader {
     alertManager := am.New("")
 
     return &ClickHouseReader{
        db:              db,
        localDB:         localDB,
        traceDB:         options.primary.TraceDB,
        alertManager:    alertManager,
        operationsTable: options.primary.OperationsTable,
        indexTable:      options.primary.IndexTable,
        errorTable:      options.primary.ErrorTable,
        durationTable:   options.primary.DurationTable,
        spansTable:      options.primary.SpansTable,
+       logsDB:            options.primary.LogsDB,
+       logsTable:         options.primary.LogsTable,
+       logsAttributeKeys: options.primary.LogsAttributeKeysTable,
+       logsResourceKeys:  options.primary.LogsResourceKeysTable,
     }
 }
 
@@ -2985,3 +2993,89 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con
 
     return totalSamples, nil
 }
+
+func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) {
+    // response will contain top level fields from the otel log model
+    response := model.GetFieldsResponse{
+        Selected:    constants.StaticSelectedLogFields,
+        Interesting: []model.LogField{},
+    }
+
+    // get attribute keys
+    attributes := &[]model.LogField{}
+    query := fmt.Sprintf("SELECT DISTINCT name, datatype from %s.%s group by name, datatype", r.logsDB, r.logsAttributeKeys)
+    err := r.db.Select(ctx, attributes, query)
+    if err != nil {
+        return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
+    }
+
+    // get resource keys
+    resources := &[]model.LogField{}
+    query = fmt.Sprintf("SELECT DISTINCT name, datatype from %s.%s group by name, datatype", r.logsDB, r.logsResourceKeys)
+    err = r.db.Select(ctx, resources, query)
+    if err != nil {
+        return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
+    }
+
+    statements := []model.CreateTableStatement{}
+    query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.logsDB, r.logsTable)
+    err = r.db.Select(ctx, &statements, query)
+    if err != nil {
+        return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
+    }
+
+    extractSelectedAndInterestingFields(statements[0].Statement, constants.Attributes, attributes, &response)
+    extractSelectedAndInterestingFields(statements[0].Statement, constants.Resources, resources, &response)
+    extractSelectedAndInterestingFields(statements[0].Statement, constants.Static, &constants.StaticInterestingLogFields, &response)
+
+    return &response, nil
+}
+
+func extractSelectedAndInterestingFields(tableStatement string, fieldType string, fields *[]model.LogField, response *model.GetFieldsResponse) {
+    for _, field := range *fields {
+        field.Type = fieldType
+        if strings.Contains(tableStatement, fmt.Sprintf("INDEX %s_idx", field.Name)) {
+            response.Selected = append(response.Selected, field)
+        } else {
+            response.Interesting = append(response.Interesting, field)
+        }
+    }
+}
+
+func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError {
+    // if a field is selected it means that the field is indexed
+    if field.Selected {
+        // if the type is attribute or resource, create the materialized column first
+        if field.Type == constants.Attributes || field.Type == constants.Resources {
+            // create materialized
+            query := fmt.Sprintf("ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS %s %s MATERIALIZED %s_%s_value[indexOf(%s_%s_key, '%s')]", r.logsDB, r.logsTable, field.Name, field.DataType, field.Type, strings.ToLower(field.DataType), field.Type, strings.ToLower(field.DataType), field.Name)
+            err := r.db.Exec(ctx, query)
+            if err != nil {
+                return &model.ApiError{Err: err, Typ: model.ErrorInternal}
+            }
+        }
+
+        // create the index
+        if field.IndexType == nil {
+            iType := constants.DefaultLogSkipIndexType
+            field.IndexType = &iType
+        }
+        if field.IndexGranularity == nil {
+            granularity := constants.DefaultLogSkipIndexGranularity
+            field.IndexGranularity = &granularity
+        }
+        query := fmt.Sprintf("ALTER TABLE %s.%s ADD INDEX IF NOT EXISTS %s_idx (%s) TYPE %s GRANULARITY %d", r.logsDB, r.logsTable, field.Name, field.Name, *field.IndexType, *field.IndexGranularity)
+        err := r.db.Exec(ctx, query)
+        if err != nil {
+            return &model.ApiError{Err: err, Typ: model.ErrorInternal}
+        }
+    } else {
+        // remove index
+        query := fmt.Sprintf("ALTER TABLE %s.%s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsTable, field.Name)
+        err := r.db.Exec(ctx, query)
+        if err != nil {
+            return &model.ApiError{Err: err, Typ: model.ErrorInternal}
+        }
+    }
+    return nil
+}
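As a quick illustration of the DDL that `UpdateLogField` ends up running, here is a minimal standalone sketch (not part of this commit) that renders the same format strings for a hypothetical attribute field named `method` with `DataType` `String`, using the default database, table, and skip-index settings introduced above:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// assumed inputs: default DB/table names from the options change above,
	// plus a hypothetical attribute field chosen only for illustration
	logsDB, logsTable := "signoz_logs", "logs"
	name, dataType, fieldType := "method", "String", "attributes"
	indexType, granularity := "bloom_filter(0.01)", 64

	// materialized column pulled out of the attributes map (same format string as UpdateLogField)
	fmt.Printf("ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS %s %s MATERIALIZED %s_%s_value[indexOf(%s_%s_key, '%s')]\n",
		logsDB, logsTable, name, dataType, fieldType, strings.ToLower(dataType), fieldType, strings.ToLower(dataType), name)

	// skip index created on the new column
	fmt.Printf("ALTER TABLE %s.%s ADD INDEX IF NOT EXISTS %s_idx (%s) TYPE %s GRANULARITY %d\n",
		logsDB, logsTable, name, name, indexType, granularity)
}
```

For those inputs it prints `ALTER TABLE signoz_logs.logs ADD COLUMN IF NOT EXISTS method String MATERIALIZED attributes_string_value[indexOf(attributes_string_key, 'method')]` followed by `ALTER TABLE signoz_logs.logs ADD INDEX IF NOT EXISTS method_idx (method) TYPE bloom_filter(0.01) GRANULARITY 64`.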
@@ -16,6 +16,7 @@ import (
     _ "github.com/mattn/go-sqlite3"
     "github.com/prometheus/prometheus/promql"
     "go.signoz.io/query-service/app/dashboards"
+    "go.signoz.io/query-service/app/logs"
     "go.signoz.io/query-service/app/metrics"
     "go.signoz.io/query-service/app/parser"
     "go.signoz.io/query-service/auth"
@@ -1816,3 +1817,43 @@ func (aH *APIHandler) writeJSON(w http.ResponseWriter, r *http.Request, response
     w.Header().Set("Content-Type", "application/json")
     w.Write(resp)
 }
+
+// logs
+func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router) {
+    subRouter := router.PathPrefix("/api/v1/logs").Subrouter()
+    subRouter.HandleFunc("/fields", ViewAccess(aH.logFields)).Methods(http.MethodGet)
+    subRouter.HandleFunc("/fields", ViewAccess(aH.logFieldUpdate)).Methods(http.MethodPost)
+}
+
+func (aH *APIHandler) logFields(w http.ResponseWriter, r *http.Request) {
+
+    fields, apiErr := (*aH.reader).GetLogFields(r.Context())
+    if apiErr != nil {
+        respondError(w, apiErr, "Failed to fetch org from the DB")
+        return
+    }
+    aH.writeJSON(w, r, fields)
+}
+
+func (aH *APIHandler) logFieldUpdate(w http.ResponseWriter, r *http.Request) {
+    field := model.UpdateField{}
+    if err := json.NewDecoder(r.Body).Decode(&field); err != nil {
+        apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
+        respondError(w, apiErr, "Failed to decode payload")
+        return
+    }
+
+    err := logs.ValidateUpdateFieldPayload(&field)
+    if err != nil {
+        apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
+        respondError(w, apiErr, "Incorrect payload")
+        return
+    }
+
+    apiErr := (*aH.reader).UpdateLogField(r.Context(), &field)
+    if apiErr != nil {
+        respondError(w, apiErr, "Failed to fetch org from the DB")
+        return
+    }
+    aH.writeJSON(w, r, field)
+}
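A usage sketch for the two routes registered above (not part of this commit). It assumes a query-service reachable at `http://localhost:8080` and uses a hypothetical attribute key `method`; in a real deployment these handlers sit behind `ViewAccess`, so an access token may also be required:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// assumption: query-service reachable on localhost:8080; adjust to your deployment
	base := "http://localhost:8080/api/v1/logs/fields"

	// GET /api/v1/logs/fields -> {"selected":[...],"interesting":[...]}
	resp, err := http.Get(base)
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(string(body))

	// POST /api/v1/logs/fields with an UpdateField payload; "method" is a hypothetical attribute key
	payload := []byte(`{"name":"method","dataType":"String","type":"attributes","selected":true}`)
	resp, err = http.Post(base, "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	body, _ = io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(resp.Status, string(body))
}
```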
pkg/query-service/app/logs/validator.go (new file, +40 lines)
@@ -0,0 +1,40 @@
+package logs
+
+import (
+    "fmt"
+    "regexp"
+
+    "go.signoz.io/query-service/constants"
+    "go.signoz.io/query-service/model"
+)
+
+func ValidateUpdateFieldPayload(field *model.UpdateField) error {
+    if field.Name == "" {
+        return fmt.Errorf("name cannot be empty")
+    }
+    if field.Type == "" {
+        return fmt.Errorf("type cannot be empty")
+    }
+    if field.DataType == "" {
+        return fmt.Errorf("dataType cannot be empty")
+    }
+
+    matched, err := regexp.MatchString(fmt.Sprintf("^(%s|%s|%s)$", constants.Static, constants.Attributes, constants.Resources), field.Type)
+    if err != nil {
+        return err
+    }
+    if !matched {
+        return fmt.Errorf("type %s not supported", field.Type)
+    }
+
+    if field.IndexType != nil {
+        matched, err := regexp.MatchString(`^(minmax|set\([0-9]\)|bloom_filter\((0?.?[0-9]+|1)\)|tokenbf_v1\([0-9]+,[0-9]+,[0-9]+\)|ngrambf_v1\([0-9]+,[0-9]+,[0-9]+,[0-9]+\))$`, *field.IndexType)
+        if err != nil {
+            return err
+        }
+        if !matched {
+            return fmt.Errorf("index type %s not supported", *field.IndexType)
+        }
+    }
+    return nil
+}
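To make the accepted index types concrete, a small sketch (not part of this commit) that runs the same regular expression used above against a few illustrative values:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// same pattern ValidateUpdateFieldPayload applies to field.IndexType
	pattern := regexp.MustCompile(`^(minmax|set\([0-9]\)|bloom_filter\((0?.?[0-9]+|1)\)|tokenbf_v1\([0-9]+,[0-9]+,[0-9]+\)|ngrambf_v1\([0-9]+,[0-9]+,[0-9]+,[0-9]+\))$`)

	// illustrative inputs: the first four should match, the last two should not
	samples := []string{
		"minmax",
		"set(5)",
		"bloom_filter(0.01)",
		"tokenbf_v1(10240,3,0)",
		"set(100)",     // more than one digit is rejected by set\([0-9]\)
		"bloom_filter", // missing the false-positive-rate argument
	}
	for _, s := range samples {
		fmt.Printf("%-22s matches=%v\n", s, pattern.MatchString(s))
	}
}
```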
@@ -145,6 +145,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
 
     api.RegisterRoutes(r)
     api.RegisterMetricsRoutes(r)
+    api.RegisterLogsRoutes(r)
 
     c := cors.New(cors.Options{
        AllowedOrigins: []string{"*"},
@@ -3,6 +3,8 @@ package constants
 import (
     "os"
     "strconv"
+
+    "go.signoz.io/query-service/model"
 )
 
 const (
@@ -38,29 +40,34 @@ var AmChannelApiPath = GetOrDefaultEnv("ALERTMANAGER_API_CHANNEL_PATH", "v1/rout
 var RELATIONAL_DATASOURCE_PATH = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db")
 
 const (
     ServiceName      = "serviceName"
     HttpRoute        = "httpRoute"
     HttpCode         = "httpCode"
     HttpHost         = "httpHost"
     HttpUrl          = "httpUrl"
     HttpMethod       = "httpMethod"
     Component        = "component"
     OperationDB      = "name"
     OperationRequest = "operation"
     Status           = "status"
     Duration         = "duration"
     DBName           = "dbName"
     DBOperation      = "dbOperation"
     DBSystem         = "dbSystem"
     MsgSystem        = "msgSystem"
     MsgOperation     = "msgOperation"
     Timestamp        = "timestamp"
     Descending       = "descending"
     Ascending        = "ascending"
     ContextTimeout   = 60 // seconds
     StatusPending    = "pending"
     StatusFailed     = "failed"
     StatusSuccess    = "success"
+    Attributes       = "attributes"
+    Resources        = "resources"
+    Static           = "static"
+    DefaultLogSkipIndexType        = "bloom_filter(0.01)"
+    DefaultLogSkipIndexGranularity = 64
 )
 const (
     SIGNOZ_METRIC_DBNAME = "signoz_metrics"
@@ -75,3 +82,44 @@ func GetOrDefaultEnv(key string, fallback string) string {
     }
     return v
 }
+
+var StaticInterestingLogFields = []model.LogField{
+    {
+        Name:     "trace_id",
+        DataType: "String",
+        Type:     Static,
+    },
+    {
+        Name:     "span_id",
+        DataType: "String",
+        Type:     Static,
+    },
+    {
+        Name:     "trace_flags",
+        DataType: "UInt32",
+        Type:     Static,
+    },
+    {
+        Name:     "severity_text",
+        DataType: "LowCardinality(String)",
+        Type:     Static,
+    },
+    {
+        Name:     "severity_number",
+        DataType: "Int32",
+        Type:     Static,
+    },
+}
+
+var StaticSelectedLogFields = []model.LogField{
+    {
+        Name:     "timestamp",
+        DataType: "UInt64",
+        Type:     Static,
+    },
+    {
+        Name:     "id",
+        DataType: "String",
+        Type:     Static,
+    },
+}
@@ -59,4 +59,8 @@ type Reader interface {
     GetSpansInLastHeartBeatInterval(ctx context.Context) (uint64, error)
     GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error)
     GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
+
+    // Logs
+    GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
+    UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError
 }
@@ -303,3 +303,12 @@ type FilterSet struct {
     Operator string       `json:"op,omitempty"`
     Items    []FilterItem `json:"items"`
 }
+
+type UpdateField struct {
+    Name             string  `json:"name"`
+    DataType         string  `json:"dataType"`
+    Type             string  `json:"type"`
+    Selected         bool    `json:"selected"`
+    IndexType        *string `json:"index"`
+    IndexGranularity *int    `json:"indexGranularity"`
+}
@@ -373,3 +373,18 @@ func (p *MetricPoint) MarshalJSON() ([]byte, error) {
     v := strconv.FormatFloat(p.Value, 'f', -1, 64)
     return json.Marshal([...]interface{}{float64(p.Timestamp) / 1000, v})
 }
+
+type CreateTableStatement struct {
+    Statement string `json:"statement" ch:"statement"`
+}
+
+type LogField struct {
+    Name     string `json:"name" ch:"name"`
+    DataType string `json:"dataType" ch:"datatype"`
+    Type     string `json:"type"`
+}
+
+type GetFieldsResponse struct {
+    Selected    []LogField `json:"selected"`
+    Interesting []LogField `json:"interesting"`
+}
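For reference, a minimal sketch (not part of this commit) of the JSON shape that `GET /api/v1/logs/fields` returns, built from local copies of the structs above with the same JSON tags; the `method` entry is a hypothetical attribute key:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// local copies of the response structs above, keeping only the JSON tags
type LogField struct {
	Name     string `json:"name"`
	DataType string `json:"dataType"`
	Type     string `json:"type"`
}

type GetFieldsResponse struct {
	Selected    []LogField `json:"selected"`
	Interesting []LogField `json:"interesting"`
}

func main() {
	// example entries: the static selected columns from constants, plus a not-yet-indexed attribute key
	resp := GetFieldsResponse{
		Selected: []LogField{
			{Name: "timestamp", DataType: "UInt64", Type: "static"},
			{Name: "id", DataType: "String", Type: "static"},
		},
		Interesting: []LogField{
			{Name: "method", DataType: "String", Type: "attributes"}, // hypothetical attribute key
		},
	}
	out, _ := json.MarshalIndent(resp, "", "  ")
	fmt.Println(string(out))
}
```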
Loading…
x
Reference in New Issue
Block a user