ci: add golangci to workflow (#1369)

* style: reformat the code to follow go guidelines
* chore: add golangci lint
* chore: remove context check
* chore: go fmt
This commit is contained in:
Srikanth Chekuri 2022-07-13 23:44:42 +05:30 committed by GitHub
parent 64e638fd58
commit 7aeaecaf1f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 500 additions and 394 deletions

View File

@ -8,6 +8,21 @@ on:
- release/v*
jobs:
lint-and-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2.4.0
- uses: actions/setup-go@v2
with:
go-version: 1.17
- name: Install tools
run: make install-ci
- name: Run unit tests
run: make test-ci
build-frontend:
runs-on: ubuntu-latest
steps:

37
.golangci.yml Normal file
View File

@ -0,0 +1,37 @@
# golangci-lint configuration for the query-service module.
run:
  # Generous timeout: the module is large and CI runners can be slow.
  timeout: 10m

linters-settings:
  depguard:
    # Blacklist mode: every import is allowed unless explicitly denied.
    list-type: blacklist
    include-go-root: true
  gofmt:
    # Also apply `gofmt -s` simplifications.
    simplify: true
  gosimple:
    go: '1.17'

linters:
  # Start with a small, low-noise set; the disabled linters below are
  # candidates to be turned on incrementally as the codebase is cleaned up.
  enable:
    - gofmt
    - goimports
    - misspell
  disable:
    - staticcheck
    - typecheck
    - gosec
    - govet
    - errcheck
    - gocritic
    - revive
    - deadcode
    - gosimple
    - ineffassign
    - depguard
    - errorlint
    - structcheck
    - varcheck
    - unused

issues:
  exclude-rules:
    # Tests commonly use patterns gosec flags (hard-coded creds, weak
    # randomness, etc.); suppress it for *_test.go files only.
    - path: _test\.go
      linters:
        - gosec

View File

@ -14,6 +14,14 @@ QUERY_SERVICE_DIRECTORY ?= pkg/query-service
STANDALONE_DIRECTORY ?= deploy/docker/clickhouse-setup
SWARM_DIRECTORY ?= deploy/docker-swarm/clickhouse-setup
GOOS ?= $(shell go env GOOS)
GOARCH ?= $(shell go env GOARCH)
GOPATH ?= $(shell go env GOPATH)
GOTEST=go test -v $(RACE)
GOFMT=gofmt
FMT_LOG=.fmt.log
IMPORT_LOG=.import.log
REPONAME ?= signoz
DOCKER_TAG ?= latest
@ -30,6 +38,12 @@ gitBranch=${PACKAGE}/version.gitBranch
LD_FLAGS="-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH}"
all: build-push-frontend build-push-query-service
.DEFAULT_GOAL := test-and-lint
.PHONY: test-and-lint
test-and-lint: fmt lint
# Steps to build and push docker image of frontend
.PHONY: build-frontend-amd64 build-push-frontend
# Step to build docker image of frontend in amd64 (used in build pipeline)
@ -92,3 +106,23 @@ clear-standalone-data:
clear-swarm-data:
@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
sh -c "cd /pwd && rm -rf alertmanager/* clickhouse/* signoz/*"
# ---- Lint / format tooling ----------------------------------------------
# NOTE: Make recipe lines must begin with a TAB; the indentation below is
# restored (it was lost in the rendered diff).

.PHONY: install-tools
install-tools: ## install golangci-lint used by the lint target
	go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.42.0

.PHONY: lint
lint: ## run golangci-lint on the query-service module
	@cd $(QUERY_SERVICE_DIRECTORY) && \
	$(GOPATH)/bin/golangci-lint -v run

.PHONY: fmt
fmt: ## gofmt (simplify, list, write in place) the query-service sources
	@echo Running go fmt on query service ...
	@$(GOFMT) -e -s -l -w $(QUERY_SERVICE_DIRECTORY)

# CI entry points: install-ci pulls tools, test-ci lints before testing.
.PHONY: install-ci
install-ci: install-tools

.PHONY: test-ci
test-ci: lint

View File

@ -23,7 +23,7 @@ const (
defaultOperationsTable string = "signoz_operations"
defaultIndexTable string = "signoz_index_v2"
defaultErrorTable string = "signoz_error_index_v2"
defaulDurationTable string = "durationSortMV"
defaultDurationTable string = "durationSortMV"
defaultSpansTable string = "signoz_spans"
defaultWriteBatchDelay time.Duration = 5 * time.Second
defaultWriteBatchSize int = 10000
@ -58,12 +58,15 @@ type namespaceConfig struct {
Connector Connector
}
// Connecto defines how to connect to the database
// Connector defines how to connect to the database
type Connector func(cfg *namespaceConfig) (clickhouse.Conn, error)
func defaultConnector(cfg *namespaceConfig) (clickhouse.Conn, error) {
ctx := context.Background()
dsnURL, err := url.Parse(cfg.Datasource)
if err != nil {
return nil, err
}
options := &clickhouse.Options{
Addr: []string{dsnURL.Host},
}
@ -109,7 +112,7 @@ func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...s
OperationsTable: defaultOperationsTable,
IndexTable: defaultIndexTable,
ErrorTable: defaultErrorTable,
DurationTable: defaulDurationTable,
DurationTable: defaultDurationTable,
SpansTable: defaultSpansTable,
WriteBatchDelay: defaultWriteBatchDelay,
WriteBatchSize: defaultWriteBatchSize,

File diff suppressed because it is too large Load Diff

View File

@ -17,7 +17,7 @@ import (
// This time the global variable is unexported.
var db *sqlx.DB
// InitDB sets up setting up the connection pool global variable.
// InitDB sets up the connection pool global variable.
func InitDB(dataSourceName string) (*sqlx.DB, error) {
var err error
@ -26,7 +26,7 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
return nil, err
}
table_schema := `CREATE TABLE IF NOT EXISTS dashboards (
tableSchema := `CREATE TABLE IF NOT EXISTS dashboards (
id INTEGER PRIMARY KEY AUTOINCREMENT,
uuid TEXT NOT NULL UNIQUE,
created_at datetime NOT NULL,
@ -34,24 +34,24 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
data TEXT NOT NULL
);`
_, err = db.Exec(table_schema)
_, err = db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating dashboard table: %s", err.Error())
return nil, fmt.Errorf("error in creating dashboard table: %s", err.Error())
}
table_schema = `CREATE TABLE IF NOT EXISTS rules (
tableSchema = `CREATE TABLE IF NOT EXISTS rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
updated_at datetime NOT NULL,
deleted INTEGER DEFAULT 0,
data TEXT NOT NULL
);`
_, err = db.Exec(table_schema)
_, err = db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating rules table: %s", err.Error())
return nil, fmt.Errorf("error in creating rules table: %s", err.Error())
}
table_schema = `CREATE TABLE IF NOT EXISTS notification_channels (
tableSchema = `CREATE TABLE IF NOT EXISTS notification_channels (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at datetime NOT NULL,
updated_at datetime NOT NULL,
@ -61,12 +61,12 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
data TEXT NOT NULL
);`
_, err = db.Exec(table_schema)
_, err = db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating notification_channles table: %s", err.Error())
return nil, fmt.Errorf("error in creating notification_channles table: %s", err.Error())
}
table_schema = `CREATE TABLE IF NOT EXISTS ttl_status (
tableSchema = `CREATE TABLE IF NOT EXISTS ttl_status (
id INTEGER PRIMARY KEY AUTOINCREMENT,
transaction_id TEXT NOT NULL,
created_at datetime NOT NULL,
@ -77,9 +77,9 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
status TEXT NOT NULL
);`
_, err = db.Exec(table_schema)
_, err = db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating ttl_status table: %s", err.Error())
return nil, fmt.Errorf("error in creating ttl_status table: %s", err.Error())
}
return db, nil
@ -128,17 +128,17 @@ func CreateDashboard(data map[string]interface{}) (*Dashboard, *model.ApiError)
dash.UpdateSlug()
dash.Uuid = uuid.New().String()
map_data, err := json.Marshal(dash.Data)
mapData, err := json.Marshal(dash.Data)
if err != nil {
zap.S().Errorf("Error in marshalling data field in dashboard: ", dash, err)
zap.S().Error("Error in marshalling data field in dashboard: ", dash, err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
// db.Prepare("Insert into dashboards where")
result, err := db.Exec("INSERT INTO dashboards (uuid, created_at, updated_at, data) VALUES ($1, $2, $3, $4)", dash.Uuid, dash.CreatedAt, dash.UpdatedAt, map_data)
result, err := db.Exec("INSERT INTO dashboards (uuid, created_at, updated_at, data) VALUES ($1, $2, $3, $4)", dash.Uuid, dash.CreatedAt, dash.UpdatedAt, mapData)
if err != nil {
zap.S().Errorf("Error in inserting dashboard data: ", dash, err)
zap.S().Error("Error in inserting dashboard data: ", dash, err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
lastInsertId, err := result.LastInsertId()
@ -153,7 +153,7 @@ func CreateDashboard(data map[string]interface{}) (*Dashboard, *model.ApiError)
func GetDashboards() ([]Dashboard, *model.ApiError) {
dashboards := []Dashboard{}
var dashboards []Dashboard
query := fmt.Sprintf("SELECT * FROM dashboards;")
err := db.Select(&dashboards, query)
@ -200,9 +200,9 @@ func GetDashboard(uuid string) (*Dashboard, *model.ApiError) {
func UpdateDashboard(uuid string, data map[string]interface{}) (*Dashboard, *model.ApiError) {
map_data, err := json.Marshal(data)
mapData, err := json.Marshal(data)
if err != nil {
zap.S().Errorf("Error in marshalling data field in dashboard: ", data, err)
zap.S().Error("Error in marshalling data field in dashboard: ", data, err)
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err}
}
@ -215,10 +215,10 @@ func UpdateDashboard(uuid string, data map[string]interface{}) (*Dashboard, *mod
dashboard.Data = data
// db.Prepare("Insert into dashboards where")
_, err = db.Exec("UPDATE dashboards SET updated_at=$1, data=$2 WHERE uuid=$3 ", dashboard.UpdatedAt, map_data, dashboard.Uuid)
_, err = db.Exec("UPDATE dashboards SET updated_at=$1, data=$2 WHERE uuid=$3 ", dashboard.UpdatedAt, mapData, dashboard.Uuid)
if err != nil {
zap.S().Errorf("Error in inserting dashboard data: ", data, err)
zap.S().Error("Error in inserting dashboard data: ", data, err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
@ -249,7 +249,7 @@ func IsPostDataSane(data *map[string]interface{}) error {
func SlugifyTitle(title string) string {
s := slug.Make(strings.ToLower(title))
if s == "" {
// If the dashboard name is only characters outside of the
// If the dashboard name is only characters outside the
// sluggable characters, the slug creation will return an
// empty string which will mess up URLs. This failsafe picks
// that up and creates the slug as a base64 identifier instead.

View File

@ -210,7 +210,7 @@ func ViewAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
if !(auth.IsViewer(user) || auth.IsEditor(user) || auth.IsAdmin(user)) {
respondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to viewers/editors/admins."),
Err: errors.New("API is accessible to viewers/editors/admins"),
}, nil)
return
}
@ -231,7 +231,7 @@ func EditAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
if !(auth.IsEditor(user) || auth.IsAdmin(user)) {
respondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to editors/admins."),
Err: errors.New("API is accessible to editors/admins"),
}, nil)
return
}
@ -253,7 +253,7 @@ func SelfAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
if !(auth.IsSelfAccessRequest(user, id) || auth.IsAdmin(user)) {
respondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible for self access or to the admins."),
Err: errors.New("API is accessible for self access or to the admins"),
}, nil)
return
}
@ -455,13 +455,13 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
}
// prometheus instant query needs same timestamp
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE &&
metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.PROM {
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QueryValue &&
metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.Prom {
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
}
// round up the end to neaerest multiple
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER {
// round down the end to the nearest multiple
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QueryBuilder {
end := (metricsQueryRangeParams.End) / 1000
step := metricsQueryRangeParams.Step
metricsQueryRangeParams.End = (end / step * step) * 1000
@ -571,15 +571,15 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
var seriesList []*model.Series
var err error
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
case model.QUERY_BUILDER:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
case model.QueryBuilder:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SignozTimeSeriesTableName)
if runQueries.Err != nil {
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: runQueries.Err}, nil)
return
}
seriesList, err = execClickHouseQueries(runQueries.Queries)
case model.CLICKHOUSE:
case model.ClickHouse:
queries := make(map[string]string)
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
if chQuery.Disabled {
@ -588,7 +588,7 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
queries[name] = chQuery.Query
}
seriesList, err = execClickHouseQueries(queries)
case model.PROM:
case model.Prom:
seriesList, err = execPromQueries(metricsQueryRangeParams)
default:
err = fmt.Errorf("invalid query type")
@ -601,10 +601,10 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
respondError(w, apiErrObj, nil)
return
}
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE &&
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QueryValue &&
len(seriesList) > 1 &&
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.CLICKHOUSE) {
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QueryBuilder ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.ClickHouse) {
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
return
}
@ -667,7 +667,7 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
inter = Intersection(inter, tags2Dash[tag])
}
filteredDashboards := []dashboards.Dashboard{}
var filteredDashboards []dashboards.Dashboard
for _, val := range inter {
dash := (allDashboards)[val]
filteredDashboards = append(filteredDashboards, dash)
@ -827,14 +827,14 @@ func (aH *APIHandler) testChannel(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of testChannel API\n", err)
zap.S().Error("Error in getting req body of testChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of testChannel API\n", err)
zap.S().Error("Error in parsing req body of testChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
@ -855,14 +855,14 @@ func (aH *APIHandler) editChannel(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of editChannel API\n", err)
zap.S().Error("Error in getting req body of editChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of editChannel API\n", err)
zap.S().Error("Error in parsing req body of editChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
@ -883,14 +883,14 @@ func (aH *APIHandler) createChannel(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of createChannel API\n", err)
zap.S().Error("Error in getting req body of createChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of createChannel API\n", err)
zap.S().Error("Error in parsing req body of createChannel API\n", err)
respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
@ -968,20 +968,20 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
if res.Err != nil {
switch res.Err.(type) {
case promql.ErrQueryCanceled:
respondError(w, &model.ApiError{model.ErrorCanceled, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
case promql.ErrQueryTimeout:
respondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
}
respondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
responseData := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.respond(w, response_data)
aH.respond(w, responseData)
}
@ -1022,20 +1022,20 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
if res.Err != nil {
switch res.Err.(type) {
case promql.ErrQueryCanceled:
respondError(w, &model.ApiError{model.ErrorCanceled, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
case promql.ErrQueryTimeout:
respondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
}
respondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil)
respondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
responseData := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.respond(w, response_data)
aH.respond(w, responseData)
}
@ -1065,7 +1065,7 @@ func (aH *APIHandler) submitFeedback(w http.ResponseWriter, r *http.Request) {
"email": email,
"message": message,
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_INPRODUCT_FEEDBACK, data)
telemetry.GetInstance().SendEvent(telemetry.EventInproductFeedback, data)
}
@ -1134,7 +1134,7 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
"number": len(*result),
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data)
telemetry.GetInstance().SendEvent(telemetry.EventNumberOfServices, data)
aH.writeJSON(w, r, result)
}
@ -1378,8 +1378,8 @@ func (aH *APIHandler) getDisks(w http.ResponseWriter, r *http.Request) {
}
func (aH *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
version := version.GetVersion()
aH.writeJSON(w, r, map[string]string{"version": version})
v := version.GetVersion()
aH.writeJSON(w, r, map[string]string{"version": v})
}
// inviteUser is used to invite a user. It is used by an admin api.
@ -1411,7 +1411,7 @@ func (aH *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
aH.writeJSON(w, r, resp)
}
// revokeInvite is used to revoke an invite.
// revokeInvite is used to revoke an invitation.
func (aH *APIHandler) revokeInvite(w http.ResponseWriter, r *http.Request) {
email := mux.Vars(r)["email"]
@ -1529,7 +1529,7 @@ func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) {
if user == nil {
respondError(w, &model.ApiError{
Typ: model.ErrorInternal,
Err: errors.New("User not found"),
Err: errors.New("user not found"),
}, nil)
return
}
@ -1540,7 +1540,7 @@ func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) {
}
// editUser only changes the user's Name and ProfilePictureURL. It is intentionally designed
// to not support update of orgId, Password, createdAt for the sucurity reasons.
// to not support update of orgId, Password, createdAt for the security reasons.
func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
@ -1596,7 +1596,7 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
if user == nil {
respondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
Err: errors.New("User not found"),
Err: errors.New("user not found"),
}, nil)
return
}
@ -1638,7 +1638,7 @@ func (aH *APIHandler) getRole(w http.ResponseWriter, r *http.Request) {
if user == nil {
respondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
Err: errors.New("No user found"),
Err: errors.New("no user found"),
}, nil)
return
}
@ -1678,8 +1678,8 @@ func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
}
// Make sure that the request is not demoting the last admin user.
if user.GroupId == auth.AuthCacheObj.AdminGroupId {
adminUsers, apiErr := dao.DB().GetUsersByGroup(ctx, auth.AuthCacheObj.AdminGroupId)
if user.GroupId == auth.CacheObj.AdminGroupId {
adminUsers, apiErr := dao.DB().GetUsersByGroup(ctx, auth.CacheObj.AdminGroupId)
if apiErr != nil {
respondError(w, apiErr, "Failed to fetch adminUsers")
return
@ -1687,7 +1687,7 @@ func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
if len(adminUsers) == 1 {
respondError(w, &model.ApiError{
Err: errors.New("Cannot demote the last admin"),
Err: errors.New("cannot demote the last admin"),
Typ: model.ErrorInternal}, nil)
return
}
@ -1739,7 +1739,7 @@ func (aH *APIHandler) editOrg(w http.ResponseWriter, r *http.Request) {
"organizationName": req.Name,
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_ORG_SETTINGS, data)
telemetry.GetInstance().SendEvent(telemetry.EventOrgSettings, data)
aH.writeJSON(w, r, map[string]string{"data": "org updated successfully"})
}

View File

@ -28,14 +28,14 @@ var AggregateOperatorToPercentile = map[model.AggregateOperator]float64{
}
var AggregateOperatorToSQLFunc = map[model.AggregateOperator]string{
model.AVG: "avg",
model.MAX: "max",
model.MIN: "min",
model.SUM: "sum",
model.RATE_SUM: "sum",
model.RATE_AVG: "avg",
model.RATE_MAX: "max",
model.RATE_MIN: "min",
model.Avg: "avg",
model.Max: "max",
model.Min: "min",
model.Sum: "sum",
model.RateSum: "sum",
model.RateAvg: "avg",
model.RateMax: "max",
model.RateMin: "min",
}
var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"}
@ -128,7 +128,7 @@ func BuildMetricsTimeSeriesFilterQuery(fs *model.FilterSet, groupTags []string,
queryString := strings.Join(conditions, " AND ")
var selectLabels string
if aggregateOperator == model.NOOP || aggregateOperator == model.RATE {
if aggregateOperator == model.NoOp || aggregateOperator == model.Rate {
selectLabels = "labels,"
} else {
for _, tag := range groupTags {
@ -136,14 +136,14 @@ func BuildMetricsTimeSeriesFilterQuery(fs *model.FilterSet, groupTags []string,
}
}
filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_TABLENAME, queryString)
filterSubQuery := fmt.Sprintf("SELECT %s fingerprint FROM %s.%s WHERE %s", selectLabels, constants.SignozMetricDbname, constants.SignozTimeSeriesTableName, queryString)
return filterSubQuery, nil
}
func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, tableName string) (string, error) {
if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE && len(mq.GroupingTags) != 0 {
if qp.CompositeMetricQuery.PanelType == model.QueryValue && len(mq.GroupingTags) != 0 {
return "", fmt.Errorf("reduce operator cannot be applied for the query")
}
@ -159,7 +159,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
"SELECT %s" +
" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
" %s as value" +
" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
" FROM " + constants.SignozMetricDbname + "." + constants.SignozSamplesTableName +
" INNER JOIN" +
" (%s) as filtered_time_series" +
" USING fingerprint" +
@ -171,7 +171,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
groupTags := groupSelect(mq.GroupingTags...)
switch mq.AggregateOperator {
case model.RATE:
case model.Rate:
// Calculate rate of change of metric for each unique time series
groupBy = "fingerprint, ts"
groupTags = "fingerprint,"
@ -183,7 +183,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
query = fmt.Sprintf(query, "labels as fullLabels,", subQuery)
return query, nil
case model.SUM_RATE:
case model.SumRate:
rateGroupBy := "fingerprint, " + groupBy
rateGroupTags := "fingerprint, " + groupTags
op := "max(value)"
@ -194,7 +194,7 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
query = fmt.Sprintf(query, groupTags, subQuery)
query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, query, groupBy, groupTags)
return query, nil
case model.RATE_SUM, model.RATE_MAX, model.RATE_AVG, model.RATE_MIN:
case model.RateSum, model.RateMax, model.RateAvg, model.RateMin:
op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator])
subQuery := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
query := `SELECT %s ts, runningDifference(value)/runningDifference(ts) as value FROM(%s) OFFSET 1`
@ -204,24 +204,24 @@ func BuildMetricQuery(qp *model.QueryRangeParamsV2, mq *model.MetricQuery, table
op := fmt.Sprintf("quantile(%v)(value)", AggregateOperatorToPercentile[mq.AggregateOperator])
query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
return query, nil
case model.AVG, model.SUM, model.MIN, model.MAX:
case model.Avg, model.Sum, model.Min, model.Max:
op := fmt.Sprintf("%s(value)", AggregateOperatorToSQLFunc[mq.AggregateOperator])
query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
return query, nil
case model.COUNT:
case model.Count:
op := "toFloat64(count(*))"
query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
return query, nil
case model.COUNT_DISTINCT:
case model.CountDistinct:
op := "toFloat64(count(distinct(value)))"
query := fmt.Sprintf(queryTmpl, groupTags, qp.Step, op, filterSubQuery, groupBy, groupTags)
return query, nil
case model.NOOP:
case model.NoOp:
queryTmpl :=
"SELECT fingerprint, labels as fullLabels," +
" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
" any(value) as value" +
" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
" FROM " + constants.SignozMetricDbname + "." + constants.SignozSamplesTableName +
" INNER JOIN" +
" (%s) as filtered_time_series" +
" USING fingerprint" +
@ -275,24 +275,24 @@ func reduceQuery(query string, reduceTo model.ReduceToOperator, aggregateOperato
var groupBy string
// NOOP and RATE can possibly return multiple time series and reduce should be applied
// for each uniques series. When the final result contains more than one series we throw
// an error post DB fetching. Otherwise just return the single data. This is not known until queried so the
// the query is prepared accordingly.
if aggregateOperator == model.NOOP || aggregateOperator == model.RATE {
// an error post DB fetching. Otherwise, just return the single data. This is not known until queried so the
// query is prepared accordingly.
if aggregateOperator == model.NoOp || aggregateOperator == model.Rate {
selectLabels = ", any(fullLabels) as fullLabels"
groupBy = "GROUP BY fingerprint"
}
// the timestamp picked is not relevant here since the final value used is show the single
// chart with just the query value. For the quer
// chart with just the query value.
switch reduceTo {
case model.RLAST:
case model.RLast:
query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
case model.RSUM:
case model.RSum:
query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
case model.RAVG:
case model.RAvg:
query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
case model.RMAX:
case model.RMax:
query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
case model.RMIN:
case model.RMin:
query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts %s FROM (%s) %s", selectLabels, query, groupBy)
default:
return "", fmt.Errorf("unsupported reduce operator")
@ -317,7 +317,7 @@ func varToQuery(qp *model.QueryRangeParamsV2, tableName string) (map[string]stri
if err != nil {
errs = append(errs, err)
} else {
if qp.CompositeMetricQuery.PanelType == model.QUERY_VALUE {
if qp.CompositeMetricQuery.PanelType == model.QueryValue {
query, err = reduceQuery(query, mq.ReduceTo, mq.AggregateOperator)
if err != nil {
errs = append(errs, err)

View File

@ -18,7 +18,7 @@ func TestBuildQuery(t *testing.T) {
"a": {
QueryName: "a",
MetricName: "name",
AggregateOperator: model.RATE_MAX,
AggregateOperator: model.RateMax,
Expression: "a",
},
},
@ -46,7 +46,7 @@ func TestBuildQueryWithFilters(t *testing.T) {
{Key: "a", Value: "b", Operator: "neq"},
{Key: "code", Value: "ERROR_*", Operator: "nmatch"},
}},
AggregateOperator: model.RATE_MAX,
AggregateOperator: model.RateMax,
Expression: "a",
},
},
@ -75,13 +75,13 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) {
TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
}},
AggregateOperator: model.RATE_AVG,
AggregateOperator: model.RateAvg,
Expression: "a",
},
"b": {
QueryName: "b",
MetricName: "name2",
AggregateOperator: model.RATE_MAX,
AggregateOperator: model.RateMax,
Expression: "b",
},
},
@ -108,12 +108,12 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) {
TagFilters: &model.FilterSet{Operator: "AND", Items: []model.FilterItem{
{Key: "in", Value: []interface{}{"a", "b", "c"}, Operator: "in"},
}},
AggregateOperator: model.RATE_MAX,
AggregateOperator: model.RateMax,
Expression: "a",
},
"b": {
MetricName: "name2",
AggregateOperator: model.RATE_AVG,
AggregateOperator: model.RateAvg,
Expression: "b",
},
"c": {

View File

@ -467,8 +467,8 @@ func parseCountErrorsRequest(r *http.Request) (*model.CountErrorsParams, error)
}
params := &model.CountErrorsParams{
Start: startTime,
End: endTime,
Start: startTime,
End: endTime,
}
return params, nil
@ -597,7 +597,7 @@ func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
// Validate the TTL duration.
durationParsed, err := time.ParseDuration(delDuration)
if err != nil || durationParsed.Seconds() <= 0 {
return nil, fmt.Errorf("Not a valid TTL duration %v", delDuration)
return nil, fmt.Errorf("not a valid TTL duration %v", delDuration)
}
var toColdParsed time.Duration
@ -606,10 +606,10 @@ func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
if len(coldStorage) > 0 {
toColdParsed, err = time.ParseDuration(toColdDuration)
if err != nil || toColdParsed.Seconds() <= 0 {
return nil, fmt.Errorf("Not a valid toCold TTL duration %v", toColdDuration)
return nil, fmt.Errorf("not a valid toCold TTL duration %v", toColdDuration)
}
if toColdParsed.Seconds() != 0 && toColdParsed.Seconds() >= durationParsed.Seconds() {
return nil, fmt.Errorf("Delete TTL should be greater than cold storage move TTL.")
return nil, fmt.Errorf("delete TTL should be greater than cold storage move TTL")
}
}

View File

@ -11,13 +11,13 @@ import (
func validateQueryRangeParamsV2(qp *model.QueryRangeParamsV2) error {
var errs []error
if !(qp.DataSource >= model.METRICS && qp.DataSource <= model.LOGS) {
if !(qp.DataSource >= model.Metrics && qp.DataSource <= model.Logs) {
errs = append(errs, fmt.Errorf("unsupported data source"))
}
if !(qp.CompositeMetricQuery.QueryType >= model.QUERY_BUILDER && qp.CompositeMetricQuery.QueryType <= model.PROM) {
if !(qp.CompositeMetricQuery.QueryType >= model.QueryBuilder && qp.CompositeMetricQuery.QueryType <= model.Prom) {
errs = append(errs, fmt.Errorf("unsupported query type"))
}
if !(qp.CompositeMetricQuery.PanelType >= model.TIME_SERIES && qp.CompositeMetricQuery.PanelType <= model.QUERY_VALUE) {
if !(qp.CompositeMetricQuery.PanelType >= model.TimeSeries && qp.CompositeMetricQuery.PanelType <= model.QueryValue) {
errs = append(errs, fmt.Errorf("unsupported panel type"))
}
if len(errs) != 0 {

View File

@ -22,7 +22,7 @@ func TestParseFilterSingleFilter(t *testing.T) {
}`)
req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
res, _ := parseFilterSet(req)
query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NoOp)
So(query, ShouldContainSubstring, "signoz_metrics.time_series_v2 WHERE metric_name = 'table' AND labels_object.namespace = 'a'")
})
}
@ -38,7 +38,7 @@ func TestParseFilterMultipleFilter(t *testing.T) {
}`)
req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
res, _ := parseFilterSet(req)
query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
query, _ := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NoOp)
So(query, should.ContainSubstring, "labels_object.host IN ['host-1','host-2']")
So(query, should.ContainSubstring, "labels_object.namespace = 'a'")
})
@ -54,7 +54,7 @@ func TestParseFilterNotSupportedOp(t *testing.T) {
}`)
req, _ := http.NewRequest("POST", "", bytes.NewReader(postBody))
res, _ := parseFilterSet(req)
_, err := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NOOP)
_, err := metrics.BuildMetricsTimeSeriesFilterQuery(res, []string{}, "table", model.NoOp)
So(err, should.BeError, "unsupported operation")
})
}

View File

@ -55,10 +55,10 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions) (*Server, error) {
if err := dao.InitDao("sqlite", constants.RELATIONAL_DATASOURCE_PATH); err != nil {
if err := dao.InitDao("sqlite", constants.RelationalDatasourcePath); err != nil {
return nil, err
}
localDB, err := dashboards.InitDB(constants.RELATIONAL_DATASOURCE_PATH)
localDB, err := dashboards.InitDB(constants.RelationalDatasourcePath)
if err != nil {
return nil, err
@ -70,11 +70,11 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
clickhouseReader := clickhouseReader.NewReader(localDB)
go clickhouseReader.Start()
reader = clickhouseReader
chReader := clickhouseReader.NewReader(localDB)
go chReader.Start()
reader = chReader
} else {
return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage)
return nil, fmt.Errorf("storage type: %s is not supported in query service", storage)
}
telemetry.GetInstance().SetReader(reader)
@ -211,7 +211,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)
telemetry.GetInstance().SendEvent(telemetry.EventPath, data)
}
})

View File

@ -21,9 +21,10 @@ const (
)
var (
ErrorInvalidCreds = fmt.Errorf("Invalid credentials")
ErrorInvalidCreds = fmt.Errorf("invalid credentials")
)
// Invite sends the invitation for users
// The root user should be able to invite people to create account on SigNoz cluster.
func Invite(ctx context.Context, req *model.InviteRequest) (*model.InviteResponse, error) {
zap.S().Debugf("Got an invite request for email: %s\n", req.Email)

View File

@ -17,13 +17,13 @@ type Group struct {
GroupName string
}
type AuthCache struct {
type Cache struct {
AdminGroupId string
EditorGroupId string
ViewerGroupId string
}
var AuthCacheObj AuthCache
var CacheObj Cache
// InitAuthCache reads the DB and initialize the auth cache.
func InitAuthCache(ctx context.Context) error {
@ -37,13 +37,13 @@ func InitAuthCache(ctx context.Context) error {
return nil
}
if err := setGroupId(constants.AdminGroup, &AuthCacheObj.AdminGroupId); err != nil {
if err := setGroupId(constants.AdminGroup, &CacheObj.AdminGroupId); err != nil {
return err
}
if err := setGroupId(constants.EditorGroup, &AuthCacheObj.EditorGroupId); err != nil {
if err := setGroupId(constants.EditorGroup, &CacheObj.EditorGroupId); err != nil {
return err
}
if err := setGroupId(constants.ViewerGroup, &AuthCacheObj.ViewerGroupId); err != nil {
if err := setGroupId(constants.ViewerGroup, &CacheObj.ViewerGroupId); err != nil {
return err
}
@ -65,9 +65,9 @@ func GetUserFromRequest(r *http.Request) (*model.UserPayload, error) {
func IsSelfAccessRequest(user *model.UserPayload, id string) bool { return user.Id == id }
func IsViewer(user *model.UserPayload) bool { return user.GroupId == AuthCacheObj.ViewerGroupId }
func IsEditor(user *model.UserPayload) bool { return user.GroupId == AuthCacheObj.EditorGroupId }
func IsAdmin(user *model.UserPayload) bool { return user.GroupId == AuthCacheObj.AdminGroupId }
func IsViewer(user *model.UserPayload) bool { return user.GroupId == CacheObj.ViewerGroupId }
func IsEditor(user *model.UserPayload) bool { return user.GroupId == CacheObj.EditorGroupId }
func IsAdmin(user *model.UserPayload) bool { return user.GroupId == CacheObj.AdminGroupId }
func ValidatePassword(password string) error {
if len(password) < minimumPasswordLength {

View File

@ -33,7 +33,6 @@ func isValidRole(role string) bool {
default:
return false
}
return false
}
func validateInviteRequest(req *model.InviteRequest) error {

View File

@ -11,7 +11,7 @@ const (
DebugHttpPort = "0.0.0.0:6060" // Address to serve http (pprof)
)
var DEFAULT_TELEMETRY_ANONYMOUS = false
var DefaultTelemetryAnonymous = false
func IsTelemetryEnabled() bool {
isTelemetryEnabledStr := os.Getenv("TELEMETRY_ENABLED")
@ -32,10 +32,10 @@ func GetAlertManagerApiPrefix() string {
return "http://alertmanager:9093/api/"
}
// Alert manager channel subpath
// AmChannelApiPath is a channel subpath for Alert manager
var AmChannelApiPath = GetOrDefaultEnv("ALERTMANAGER_API_CHANNEL_PATH", "v1/routes")
var RELATIONAL_DATASOURCE_PATH = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db")
var RelationalDatasourcePath = GetOrDefaultEnv("SIGNOZ_LOCAL_DB_PATH", "/var/lib/signoz/signoz.db")
const (
ServiceName = "serviceName"
@ -67,9 +67,9 @@ const (
FirstSeen = "firstSeen"
)
const (
SIGNOZ_METRIC_DBNAME = "signoz_metrics"
SIGNOZ_SAMPLES_TABLENAME = "samples_v2"
SIGNOZ_TIMESERIES_TABLENAME = "time_series_v2"
SignozMetricDbname = "signoz_metrics"
SignozSamplesTableName = "samples_v2"
SignozTimeSeriesTableName = "time_series_v2"
)
func GetOrDefaultEnv(key string, fallback string) string {

View File

@ -26,7 +26,7 @@ func InitDB(dataSourceName string) (*ModelDaoSqlite, error) {
}
db.SetMaxOpenConns(10)
table_schema := `
tableSchema := `
PRAGMA foreign_keys = ON;
CREATE TABLE IF NOT EXISTS invites (
@ -70,9 +70,9 @@ func InitDB(dataSourceName string) (*ModelDaoSqlite, error) {
);
`
_, err = db.Exec(table_schema)
_, err = db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating tables: %v", err.Error())
return nil, fmt.Errorf("error in creating tables: %v", err.Error())
}
mds := &ModelDaoSqlite{db: db}
@ -96,7 +96,7 @@ func InitDB(dataSourceName string) (*ModelDaoSqlite, error) {
func (mds *ModelDaoSqlite) initializeOrgPreferences(ctx context.Context) error {
// set anonymous setting as default in case of any failures to fetch UserPreference in below section
telemetry.GetInstance().SetTelemetryAnonymous(constants.DEFAULT_TELEMETRY_ANONYMOUS)
telemetry.GetInstance().SetTelemetryAnonymous(constants.DefaultTelemetryAnonymous)
orgs, apiError := mds.GetOrgs(ctx)
if apiError != nil {

View File

@ -35,7 +35,7 @@ func (mds *ModelDaoSqlite) DeleteInvitation(ctx context.Context, email string) *
func (mds *ModelDaoSqlite) GetInviteFromEmail(ctx context.Context, email string,
) (*model.InvitationObject, *model.ApiError) {
invites := []model.InvitationObject{}
var invites []model.InvitationObject
err := mds.db.Select(&invites,
`SELECT * FROM invites WHERE email=?;`, email)
@ -57,7 +57,7 @@ func (mds *ModelDaoSqlite) GetInviteFromEmail(ctx context.Context, email string,
func (mds *ModelDaoSqlite) GetInviteFromToken(ctx context.Context, token string,
) (*model.InvitationObject, *model.ApiError) {
invites := []model.InvitationObject{}
var invites []model.InvitationObject
err := mds.db.Select(&invites,
`SELECT * FROM invites WHERE token=?;`, token)
@ -77,7 +77,7 @@ func (mds *ModelDaoSqlite) GetInviteFromToken(ctx context.Context, token string,
func (mds *ModelDaoSqlite) GetInvites(ctx context.Context,
) ([]model.InvitationObject, *model.ApiError) {
invites := []model.InvitationObject{}
var invites []model.InvitationObject
err := mds.db.Select(&invites, "SELECT * FROM invites")
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
@ -103,7 +103,7 @@ func (mds *ModelDaoSqlite) CreateOrg(ctx context.Context,
func (mds *ModelDaoSqlite) GetOrg(ctx context.Context,
id string) (*model.Organization, *model.ApiError) {
orgs := []model.Organization{}
var orgs []model.Organization
err := mds.db.Select(&orgs, `SELECT * FROM organizations WHERE id=?;`, id)
if err != nil {
@ -125,7 +125,7 @@ func (mds *ModelDaoSqlite) GetOrg(ctx context.Context,
func (mds *ModelDaoSqlite) GetOrgByName(ctx context.Context,
name string) (*model.Organization, *model.ApiError) {
orgs := []model.Organization{}
var orgs []model.Organization
if err := mds.db.Select(&orgs, `SELECT * FROM organizations WHERE name=?;`, name); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
@ -144,7 +144,7 @@ func (mds *ModelDaoSqlite) GetOrgByName(ctx context.Context,
}
func (mds *ModelDaoSqlite) GetOrgs(ctx context.Context) ([]model.Organization, *model.ApiError) {
orgs := []model.Organization{}
var orgs []model.Organization
err := mds.db.Select(&orgs, `SELECT * FROM organizations`)
if err != nil {
@ -194,7 +194,7 @@ func (mds *ModelDaoSqlite) CreateUser(ctx context.Context,
"email": user.Email,
}
telemetry.GetInstance().IdentifyUser(user)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_USER, data)
telemetry.GetInstance().SendEvent(telemetry.EventUser, data)
return user, nil
}
@ -254,7 +254,7 @@ func (mds *ModelDaoSqlite) DeleteUser(ctx context.Context, id string) *model.Api
func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
id string) (*model.UserPayload, *model.ApiError) {
users := []model.UserPayload{}
var users []model.UserPayload
query := `select
u.id,
u.name,
@ -291,7 +291,7 @@ func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
func (mds *ModelDaoSqlite) GetUserByEmail(ctx context.Context,
email string) (*model.UserPayload, *model.ApiError) {
users := []model.UserPayload{}
var users []model.UserPayload
query := `select
u.id,
u.name,
@ -326,7 +326,7 @@ func (mds *ModelDaoSqlite) GetUserByEmail(ctx context.Context,
}
func (mds *ModelDaoSqlite) GetUsers(ctx context.Context) ([]model.UserPayload, *model.ApiError) {
users := []model.UserPayload{}
var users []model.UserPayload
query := `select
u.id,
@ -355,7 +355,7 @@ func (mds *ModelDaoSqlite) GetUsers(ctx context.Context) ([]model.UserPayload, *
func (mds *ModelDaoSqlite) GetUsersByOrg(ctx context.Context,
orgId string) ([]model.UserPayload, *model.ApiError) {
users := []model.UserPayload{}
var users []model.UserPayload
query := `select
u.id,
u.name,
@ -382,7 +382,7 @@ func (mds *ModelDaoSqlite) GetUsersByOrg(ctx context.Context,
func (mds *ModelDaoSqlite) GetUsersByGroup(ctx context.Context,
groupId string) ([]model.UserPayload, *model.ApiError) {
users := []model.UserPayload{}
var users []model.UserPayload
query := `select
u.id,
u.name,
@ -430,7 +430,7 @@ func (mds *ModelDaoSqlite) DeleteGroup(ctx context.Context, id string) *model.Ap
func (mds *ModelDaoSqlite) GetGroup(ctx context.Context,
id string) (*model.Group, *model.ApiError) {
groups := []model.Group{}
var groups []model.Group
if err := mds.db.Select(&groups, `SELECT id, name FROM groups WHERE id=?`, id); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
@ -451,7 +451,7 @@ func (mds *ModelDaoSqlite) GetGroup(ctx context.Context,
func (mds *ModelDaoSqlite) GetGroupByName(ctx context.Context,
name string) (*model.Group, *model.ApiError) {
groups := []model.Group{}
var groups []model.Group
if err := mds.db.Select(&groups, `SELECT id, name FROM groups WHERE name=?`, name); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
@ -472,7 +472,7 @@ func (mds *ModelDaoSqlite) GetGroupByName(ctx context.Context,
func (mds *ModelDaoSqlite) GetGroups(ctx context.Context) ([]model.Group, *model.ApiError) {
groups := []model.Group{}
var groups []model.Group
if err := mds.db.Select(&groups, "SELECT * FROM groups"); err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
@ -502,7 +502,7 @@ func (mds *ModelDaoSqlite) DeleteResetPasswordEntry(ctx context.Context,
func (mds *ModelDaoSqlite) GetResetPasswordEntry(ctx context.Context,
token string) (*model.ResetPasswordEntry, *model.ApiError) {
entries := []model.ResetPasswordEntry{}
var entries []model.ResetPasswordEntry
q := `SELECT user_id,token FROM reset_password_request WHERE token=?;`
if err := mds.db.Select(&entries, q, token); err != nil {

View File

@ -40,7 +40,7 @@ func prepareAmChannelApiURL() string {
basePath := constants.GetAlertManagerApiPrefix()
AmChannelApiPath := constants.AmChannelApiPath
if len(AmChannelApiPath) > 0 && rune(AmChannelApiPath[0]) == rune('/') {
if len(AmChannelApiPath) > 0 && rune(AmChannelApiPath[0]) == '/' {
AmChannelApiPath = AmChannelApiPath[1:]
}
@ -109,7 +109,7 @@ func (m *manager) DeleteRoute(name string) *model.ApiError {
req, err := http.NewRequest(http.MethodDelete, amURL, bytes.NewBuffer(requestData))
if err != nil {
zap.S().Errorf("Error in creating new delete request to alertmanager/v1/receivers\n", err)
zap.S().Error("Error in creating new delete request to alertmanager/v1/receivers\n", err)
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
}
@ -119,7 +119,7 @@ func (m *manager) DeleteRoute(name string) *model.ApiError {
response, err := client.Do(req)
if err != nil {
zap.S().Errorf(fmt.Sprintf("Error in getting response of API call to alertmanager(DELETE %s)\n", amURL), err)
zap.S().Error("Error in getting response of API call to alertmanager(DELETE %s)\n", amURL, err)
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
}

View File

@ -19,4 +19,4 @@ type Receiver struct {
type ReceiverResponse struct {
Status string `json:"status"`
Data Receiver `json:"data"`
}
}

View File

@ -47,10 +47,8 @@ type Reader interface {
GetErrorFromGroupID(ctx context.Context, params *model.GetErrorParams) (*model.ErrorWithSpan, *model.ApiError)
GetNextPrevErrorIDs(ctx context.Context, params *model.GetErrorParams) (*model.NextPrevErrorIDs, *model.ApiError)
// Search Interfaces
SearchTraces(ctx context.Context, traceID string) (*[]model.SearchSpansResult, error)
// Setter Interfaces
SetTTL(ctx context.Context, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
GetMetricAutocompleteMetricNames(ctx context.Context, matchText string, limit int) (*[]string, *model.ApiError)

View File

@ -33,20 +33,20 @@ type ReduceToOperator int
const (
_ ReduceToOperator = iota
RLAST
RSUM
RAVG
RMAX
RMIN
RLast
RSum
RAvg
RMax
RMin
)
type QueryType int
const (
_ QueryType = iota
QUERY_BUILDER
CLICKHOUSE
PROM
QueryBuilder
ClickHouse
Prom
)
type PromQuery struct {
@ -64,8 +64,8 @@ type PanelType int
const (
_ PanelType = iota
TIME_SERIES
QUERY_VALUE
TimeSeries
QueryValue
)
type CompositeMetricQuery struct {
@ -80,13 +80,13 @@ type AggregateOperator int
const (
_ AggregateOperator = iota
NOOP
COUNT
COUNT_DISTINCT
SUM
AVG
MAX
MIN
NoOp
Count
CountDistinct
Sum
Avg
Max
Min
P05
P10
P20
@ -96,25 +96,25 @@ const (
P90
P95
P99
RATE
SUM_RATE
// leave blank space for possily {AVG, X}_RATE
Rate
SumRate
// leave blank space for possibly {AVG, X}_RATE
_
_
_
RATE_SUM
RATE_AVG
RATE_MAX
RATE_MIN
RateSum
RateAvg
RateMax
RateMin
)
type DataSource int
const (
_ DataSource = iota
METRICS
TRACES
LOGS
Metrics
Traces
Logs
)
type QueryRangeParamsV2 struct {

View File

@ -73,7 +73,7 @@ type AlertDiscovery struct {
Alerts []*AlertingRuleResponse `json:"rules"`
}
// Alert has info for an alert.
// AlertingRuleResponse has info for an alert.
type AlertingRuleResponse struct {
Labels labels.Labels `json:"labels"`
Annotations labels.Labels `json:"annotations"`
@ -137,7 +137,7 @@ type GetFilterSpansResponse struct {
TotalSpans uint64 `json:"totalSpans"`
}
type SearchSpanDBReponseItem struct {
type SearchSpanDBResponseItem struct {
Timestamp time.Time `ch:"timestamp"`
TraceID string `ch:"traceID"`
Model string `ch:"model"`
@ -150,7 +150,7 @@ type Event struct {
IsError bool `json:"isError,omitempty"`
}
type SearchSpanReponseItem struct {
type SearchSpanResponseItem struct {
TimeUnixNano uint64 `json:"timestamp"`
SpanID string `json:"spanID"`
TraceID string `json:"traceID"`
@ -177,13 +177,13 @@ func (ref *OtelSpanRef) toString() string {
return retString
}
func (item *SearchSpanReponseItem) GetValues() []interface{} {
func (item *SearchSpanResponseItem) GetValues() []interface{} {
references := []OtelSpanRef{}
jsonbody, _ := json.Marshal(item.References)
json.Unmarshal(jsonbody, &references)
var references []OtelSpanRef
jsonBody, _ := json.Marshal(item.References)
json.Unmarshal(jsonBody, &references)
referencesStringArray := []string{}
var referencesStringArray []string
for _, item := range references {
referencesStringArray = append(referencesStringArray, item.toString())
}

View File

@ -16,18 +16,18 @@ import (
)
const (
TELEMETRY_EVENT_PATH = "API Call"
TELEMETRY_EVENT_USER = "User"
TELEMETRY_EVENT_INPRODUCT_FEEDBACK = "InProduct Feeback Submitted"
TELEMETRY_EVENT_NUMBER_OF_SERVICES = "Number of Services"
TELEMETRY_EVENT_HEART_BEAT = "Heart Beat"
TELEMETRY_EVENT_ORG_SETTINGS = "Org Settings"
EventPath = "API Call"
EventUser = "User"
EventInproductFeedback = "InProduct Feedback Submitted"
EventNumberOfServices = "Number of Services"
EventHeartBeat = "Heart Beat"
EventOrgSettings = "Org Settings"
)
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
const IP_NOT_FOUND_PLACEHOLDER = "NA"
const writeKey = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
const IpNotFoundPlaceholder = "NA"
const HEART_BEAT_DURATION = 6 * time.Hour
const HeartBeatDuration = 6 * time.Hour
// const HEART_BEAT_DURATION = 10 * time.Second
@ -45,15 +45,15 @@ type Telemetry struct {
func createTelemetry() {
telemetry = &Telemetry{
operator: analytics.New(api_key),
operator: analytics.New(writeKey),
ipAddress: getOutboundIP(),
}
data := map[string]interface{}{}
telemetry.SetTelemetryEnabled(constants.IsTelemetryEnabled())
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data)
ticker := time.NewTicker(HEART_BEAT_DURATION)
telemetry.SendEvent(EventHeartBeat, data)
ticker := time.NewTicker(HeartBeatDuration)
go func() {
for {
select {
@ -71,7 +71,7 @@ func createTelemetry() {
for key, value := range tsInfo {
data[key] = value
}
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data)
telemetry.SendEvent(EventHeartBeat, data)
}
}
}()
@ -81,7 +81,7 @@ func createTelemetry() {
// Get preferred outbound ip of this machine
func getOutboundIP() string {
ip := []byte(IP_NOT_FOUND_PLACEHOLDER)
ip := []byte(IpNotFoundPlaceholder)
resp, err := http.Get("https://api.ipify.org?format=text")
if err != nil {
@ -112,7 +112,7 @@ func (a *Telemetry) IdentifyUser(user *model.User) {
}
func (a *Telemetry) checkEvents(event string) bool {
sendEvent := true
if event == TELEMETRY_EVENT_USER && a.isTelemetryAnonymous() {
if event == EventUser && a.isTelemetryAnonymous() {
sendEvent = false
}
return sendEvent
@ -139,7 +139,7 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}) {
}
userId := a.ipAddress
if a.isTelemetryAnonymous() || userId == IP_NOT_FOUND_PLACEHOLDER {
if a.isTelemetryAnonymous() || userId == IpNotFoundPlaceholder {
userId = a.GetDistinctId()
}

View File

@ -5,6 +5,7 @@ import (
"fmt"
"io/ioutil"
"net/http"
"os"
"testing"
"time"
@ -27,6 +28,9 @@ func setTTL(table, coldStorage, toColdTTL, deleteTTL string, jwtToken string) ([
}
var bearer = "Bearer " + jwtToken
req, err := http.NewRequest("POST", endpoint+"/api/v1/settings/ttl?"+params, nil)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", bearer)
resp, err := client.Do(req)
@ -128,6 +132,9 @@ func getTTL(t *testing.T, table string, jwtToken string) *model.GetTTLResponseIt
var bearer = "Bearer " + jwtToken
req, err := http.NewRequest("GET", url, nil)
if err != nil {
t.Fatal(err)
}
req.Header.Add("Authorization", bearer)
resp, err := client.Do(req)
@ -214,5 +221,5 @@ func TestMain(m *testing.M) {
}
defer stopCluster()
m.Run()
os.Exit(m.Run())
}