- Email
+ {t('label_email')}
- First Name
+ {t('label_firstname')}
{
setState(e.target.value, setFirstName);
@@ -230,9 +299,9 @@ function SignUp({ version }: SignUpProps): JSX.Element {
)}
- Organization Name
+ {t('label_orgname')}
{
setState(e.target.value, setOrganizationName);
@@ -242,53 +311,57 @@ function SignUp({ version }: SignUpProps): JSX.Element {
disabled={isDetailsDisable}
/>
- Password
- {
- setState(e.target.value, setPassword);
- }}
- required
- id="currentPassword"
- />
-
-
-
- Confirm Password
{
- const updateValue = e.target.value;
- setState(updateValue, setConfirmPassword);
- }}
- required
- id="confirmPassword"
- />
+ {!precheck.sso && (
+
+ {t('label_password')}
+ {
+ setState(e.target.value, setPassword);
+ }}
+ required
+ id="currentPassword"
+ />
+
+ )}
+ {!precheck.sso && (
+
+ {t('label_confirm_password')}
+ {
+ const updateValue = e.target.value;
+ setState(updateValue, setConfirmPassword);
+ }}
+ required
+ id="confirmPassword"
+ />
- {confirmPasswordError && (
-
- Passwords don’t match. Please try again
-
- )}
- {isPasswordPolicyError && (
-
- {isPasswordNotValidMessage}
-
- )}
-
+ {confirmPasswordError && (
+
+ {t('failed_confirm_password')}
+
+ )}
+ {isPasswordPolicyError && (
+
+ {isPasswordNotValidMessage}
+
+ )}
+
+ )}
{isPreferenceVisible && (
<>
@@ -298,7 +371,7 @@ function SignUp({ version }: SignUpProps): JSX.Element {
onChange={(value): void => onSwitchHandler(value, setHasOptedUpdates)}
checked={hasOptedUpdates}
/>
- Keep me updated on new SigNoz features
+ {t('prompt_keepme_posted')}
@@ -308,9 +381,7 @@ function SignUp({ version }: SignUpProps): JSX.Element {
onChange={(value): void => onSwitchHandler(value, setIsAnonymous)}
checked={isAnonymous}
/>
- Anonymise my usage date. We collect data to measure product usage
+ {t('prompt_anonymise')}
>
@@ -339,14 +410,13 @@ function SignUp({ version }: SignUpProps): JSX.Element {
loading ||
!email ||
!organizationName ||
- !password ||
- !confirmPassword ||
+ (!precheck.sso && (!password || !confirmPassword)) ||
!firstName ||
confirmPasswordError ||
isPasswordPolicyError
}
>
- Get Started
+ {t('button_get_started')}
diff --git a/frontend/src/store/reducers/app.ts b/frontend/src/store/reducers/app.ts
index cc2c15cd6e..3e18a4c957 100644
--- a/frontend/src/store/reducers/app.ts
+++ b/frontend/src/store/reducers/app.ts
@@ -48,6 +48,7 @@ const InitialValue: InitialValueTypes = {
isSideBarCollapsed: getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true',
currentVersion: '',
latestVersion: '',
+ featureFlags: {},
isCurrentVersionError: false,
isLatestVersionError: false,
user: getInitialUser(),
@@ -55,7 +56,6 @@ const InitialValue: InitialValueTypes = {
isUserFetchingError: false,
org: null,
role: null,
- featureFlags: null,
};
const appReducer = (
@@ -84,6 +84,13 @@ const appReducer = (
};
}
+ case UPDATE_FEATURE_FLAGS: {
+ return {
+ ...state,
+ featureFlags: { ...action.payload },
+ };
+ }
+
case UPDATE_CURRENT_VERSION: {
return {
...state,
@@ -196,13 +203,6 @@ const appReducer = (
};
}
- case UPDATE_FEATURE_FLAGS: {
- return {
- ...state,
- featureFlags: action.payload,
- };
- }
-
case UPDATE_ORG: {
return {
...state,
diff --git a/frontend/src/types/actions/app.ts b/frontend/src/types/actions/app.ts
index 7a6cde83eb..65264f5ca3 100644
--- a/frontend/src/types/actions/app.ts
+++ b/frontend/src/types/actions/app.ts
@@ -40,6 +40,10 @@ export interface SideBarCollapse {
payload: boolean;
}
+export interface UpdateFeatureFlags {
+ type: typeof UPDATE_FEATURE_FLAGS;
+ payload: null | FeatureFlagPayload;
+}
export interface UpdateAppVersion {
type: typeof UPDATE_CURRENT_VERSION;
payload: {
@@ -112,11 +116,6 @@ export interface UpdateOrg {
};
}
-export interface UpdateFeatureFlags {
- type: typeof UPDATE_FEATURE_FLAGS;
- payload: FeatureFlagPayload;
-}
-
export type AppAction =
| SwitchDarkMode
| LoggedInUser
diff --git a/frontend/src/types/api/features/getFeatures.ts b/frontend/src/types/api/features/getFeatures.ts
new file mode 100644
index 0000000000..f1af4d6abe
--- /dev/null
+++ b/frontend/src/types/api/features/getFeatures.ts
@@ -0,0 +1,3 @@
+export interface PayloadProps {
+ [key: string]: boolean;
+}
diff --git a/frontend/src/types/api/licenses/apply.ts b/frontend/src/types/api/licenses/apply.ts
new file mode 100644
index 0000000000..5a08fa6f03
--- /dev/null
+++ b/frontend/src/types/api/licenses/apply.ts
@@ -0,0 +1,10 @@
+import { License } from './def';
+
+export interface Props {
+ key: string;
+}
+
+export interface PayloadProps {
+ status: string;
+ data: License;
+}
diff --git a/frontend/src/types/api/licenses/def.ts b/frontend/src/types/api/licenses/def.ts
new file mode 100644
index 0000000000..3242077788
--- /dev/null
+++ b/frontend/src/types/api/licenses/def.ts
@@ -0,0 +1,8 @@
+export interface License {
+ key: string;
+ ValidFrom: Date;
+ ValidUntil: Date;
+ planKey: string;
+ status: string;
+ isCurrent: boolean;
+}
diff --git a/frontend/src/types/api/licenses/getAll.ts b/frontend/src/types/api/licenses/getAll.ts
new file mode 100644
index 0000000000..48a4394f43
--- /dev/null
+++ b/frontend/src/types/api/licenses/getAll.ts
@@ -0,0 +1,3 @@
+import { License } from './def';
+
+export type PayloadProps = License[];
diff --git a/frontend/src/types/api/user/getInviteDetails.ts b/frontend/src/types/api/user/getInviteDetails.ts
index 224c73ca84..06c690ce2e 100644
--- a/frontend/src/types/api/user/getInviteDetails.ts
+++ b/frontend/src/types/api/user/getInviteDetails.ts
@@ -2,6 +2,7 @@ import { User } from 'types/reducer/app';
import { ROLES } from 'types/roles';
import { Organization } from './getOrganization';
+import * as loginPrecheck from './loginPrecheck';
export interface Props {
inviteId: string;
@@ -14,4 +15,5 @@ export interface PayloadProps {
role: ROLES;
token: string;
organization: Organization['name'];
+ precheck?: loginPrecheck.PayloadProps;
}
diff --git a/frontend/src/types/api/user/getVersion.ts b/frontend/src/types/api/user/getVersion.ts
index 78f07363ef..a729875bb6 100644
--- a/frontend/src/types/api/user/getVersion.ts
+++ b/frontend/src/types/api/user/getVersion.ts
@@ -1,3 +1,4 @@
export interface PayloadProps {
version: string;
+ ee: string;
}
diff --git a/frontend/src/types/api/user/loginPrecheck.ts b/frontend/src/types/api/user/loginPrecheck.ts
new file mode 100644
index 0000000000..d2bd8772db
--- /dev/null
+++ b/frontend/src/types/api/user/loginPrecheck.ts
@@ -0,0 +1,11 @@
+export interface PayloadProps {
+ sso: boolean;
+ ssoUrl?: string;
+ canSelfRegister?: boolean;
+ isUser: boolean;
+}
+
+export interface Props {
+ email?: string;
+ path?: string;
+}
diff --git a/frontend/src/types/api/user/signup.ts b/frontend/src/types/api/user/signup.ts
index 809792042a..84ad4a97a8 100644
--- a/frontend/src/types/api/user/signup.ts
+++ b/frontend/src/types/api/user/signup.ts
@@ -4,4 +4,5 @@ export interface Props {
email: string;
password: string;
token?: string;
+ sourceUrl?: string;
}
diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts
index 56b9436a42..3eb9032ba9 100644
--- a/frontend/src/utils/permission/index.ts
+++ b/frontend/src/utils/permission/index.ts
@@ -69,4 +69,5 @@ export const routePermission: Record
= {
USAGE_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
VERSION: ['ADMIN', 'EDITOR', 'VIEWER'],
LOGS: ['ADMIN', 'EDITOR', 'VIEWER'],
+ LIST_LICENSES: ['ADMIN'],
};
From 9c4521b34a82d5fcd01cde39ea3de6a796c54ea6 Mon Sep 17 00:00:00 2001
From: Amol Umbark
Date: Thu, 6 Oct 2022 20:13:30 +0530
Subject: [PATCH 16/28] feat: enterprise edition (#1575)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* feat: added license manager and feature flags
* feat: completed org domain api
* chore: checking in saml auth handler code
* feat: added signup with sso
* feat: added login support for admins
* feat: added pem support for certificate
* ci(build-workflow): 👷 include EE query-service
* fix: 🐛 update package name
* chore(ee): 🔧 LD_FLAGS related changes
Signed-off-by: Prashant Shahi
Co-authored-by: Prashant Shahi
Co-authored-by: nityanandagohain
---
.dockerignore | 6 +
.github/workflows/build.yaml | 12 +-
.gitignore | 7 +-
Makefile | 54 +-
ee/query-service/.dockerignore | 4 +
ee/query-service/Dockerfile | 48 ++
ee/query-service/app/api/api.go | 124 ++++
ee/query-service/app/api/auth.go | 297 +++++++++
ee/query-service/app/api/domains.go | 90 +++
ee/query-service/app/api/featureFlags.go | 10 +
ee/query-service/app/api/license.go | 40 ++
ee/query-service/app/api/response.go | 12 +
ee/query-service/app/db/reader.go | 28 +
ee/query-service/app/server.go | 442 ++++++++++++
ee/query-service/constants/constants.go | 28 +
ee/query-service/dao/factory.go | 18 +
ee/query-service/dao/interface.go | 33 +
ee/query-service/dao/sqlite/auth.go | 112 ++++
ee/query-service/dao/sqlite/domain.go | 183 +++++
ee/query-service/dao/sqlite/modelDao.go | 63 ++
.../integrations/signozio/response.go | 20 +
.../integrations/signozio/signozio.go | 159 +++++
ee/query-service/interfaces/connector.go | 12 +
ee/query-service/license/db.go | 127 ++++
ee/query-service/license/manager.go | 295 ++++++++
ee/query-service/license/sqlite/init.go | 37 +
ee/query-service/main.go | 90 +++
ee/query-service/model/auth.go | 21 +
ee/query-service/model/domain.go | 142 ++++
ee/query-service/model/errors.go | 91 +++
ee/query-service/model/license.go | 91 +++
ee/query-service/model/plans.go | 27 +
ee/query-service/model/usage.go | 35 +
ee/query-service/saml/request.go | 107 +++
ee/query-service/usage/manager.go | 321 +++++++++
.../usage/repository/repository.go | 126 ++++
ee/query-service/usage/sqlite/init.go | 32 +
pkg/query-service/go.mod => go.mod | 26 +-
pkg/query-service/go.sum => go.sum | 22 +-
pkg/query-service/Dockerfile | 11 +-
.../app/clickhouseReader/reader.go | 10 +-
pkg/query-service/app/dashboards/model.go | 2 +-
pkg/query-service/app/dashboards/provision.go | 2 +-
pkg/query-service/app/http_handler.go | 631 +++++++++---------
pkg/query-service/app/logs/parser.go | 4 +-
pkg/query-service/app/logs/parser_test.go | 2 +-
pkg/query-service/app/logs/validator.go | 4 +-
.../app/metrics/query_builder.go | 4 +-
.../app/metrics/query_builder_test.go | 2 +-
pkg/query-service/app/parser.go | 6 +-
pkg/query-service/app/parser/metrics.go | 4 +-
pkg/query-service/app/parser_test.go | 4 +-
pkg/query-service/app/server.go | 28 +-
pkg/query-service/auth/auth.go | 249 ++++---
pkg/query-service/auth/jwt.go | 2 +-
pkg/query-service/auth/rbac.go | 6 +-
pkg/query-service/auth/utils.go | 4 +-
pkg/query-service/constants/constants.go | 2 +-
pkg/query-service/dao/factory.go | 7 +-
pkg/query-service/dao/interface.go | 2 +-
pkg/query-service/dao/sqlite/connection.go | 11 +-
pkg/query-service/dao/sqlite/rbac.go | 11 +-
.../integrations/alertManager/manager.go | 7 +-
.../integrations/alertManager/model.go | 3 +-
pkg/query-service/interfaces/featureLookup.go | 10 +
pkg/query-service/interfaces/interface.go | 4 +-
pkg/query-service/main.go | 8 +-
pkg/query-service/model/auth.go | 8 +-
pkg/query-service/model/db.go | 1 +
pkg/query-service/model/errors.go | 36 +
pkg/query-service/model/featureSet.go | 9 +
pkg/query-service/model/response.go | 46 ++
pkg/query-service/pqlEngine/engine.go | 2 +-
pkg/query-service/rules/alerting.go | 7 +-
pkg/query-service/rules/apiParams.go | 11 +-
pkg/query-service/rules/manager.go | 11 +-
pkg/query-service/rules/manager_test.go | 155 -----
pkg/query-service/rules/promRule.go | 15 +-
pkg/query-service/rules/queriers.go | 2 +-
pkg/query-service/rules/resultTypes.go | 2 +-
pkg/query-service/rules/rule.go | 3 +-
pkg/query-service/rules/ruleTask.go | 7 +-
pkg/query-service/rules/templates.go | 2 +-
pkg/query-service/rules/thresholdRule.go | 17 +-
pkg/query-service/telemetry/telemetry.go | 11 +-
pkg/query-service/tests/auth_test.go | 4 +-
pkg/query-service/tests/cold_storage_test.go | 2 +-
.../utils/encryption/encryption.go | 45 ++
pkg/query-service/utils/pass.go | 10 +
89 files changed, 4163 insertions(+), 675 deletions(-)
create mode 100644 .dockerignore
create mode 100644 ee/query-service/.dockerignore
create mode 100644 ee/query-service/Dockerfile
create mode 100644 ee/query-service/app/api/api.go
create mode 100644 ee/query-service/app/api/auth.go
create mode 100644 ee/query-service/app/api/domains.go
create mode 100644 ee/query-service/app/api/featureFlags.go
create mode 100644 ee/query-service/app/api/license.go
create mode 100644 ee/query-service/app/api/response.go
create mode 100644 ee/query-service/app/db/reader.go
create mode 100644 ee/query-service/app/server.go
create mode 100644 ee/query-service/constants/constants.go
create mode 100644 ee/query-service/dao/factory.go
create mode 100644 ee/query-service/dao/interface.go
create mode 100644 ee/query-service/dao/sqlite/auth.go
create mode 100644 ee/query-service/dao/sqlite/domain.go
create mode 100644 ee/query-service/dao/sqlite/modelDao.go
create mode 100644 ee/query-service/integrations/signozio/response.go
create mode 100644 ee/query-service/integrations/signozio/signozio.go
create mode 100644 ee/query-service/interfaces/connector.go
create mode 100644 ee/query-service/license/db.go
create mode 100644 ee/query-service/license/manager.go
create mode 100644 ee/query-service/license/sqlite/init.go
create mode 100644 ee/query-service/main.go
create mode 100644 ee/query-service/model/auth.go
create mode 100644 ee/query-service/model/domain.go
create mode 100644 ee/query-service/model/errors.go
create mode 100644 ee/query-service/model/license.go
create mode 100644 ee/query-service/model/plans.go
create mode 100644 ee/query-service/model/usage.go
create mode 100644 ee/query-service/saml/request.go
create mode 100644 ee/query-service/usage/manager.go
create mode 100644 ee/query-service/usage/repository/repository.go
create mode 100644 ee/query-service/usage/sqlite/init.go
rename pkg/query-service/go.mod => go.mod (91%)
rename pkg/query-service/go.sum => go.sum (98%)
create mode 100644 pkg/query-service/interfaces/featureLookup.go
create mode 100644 pkg/query-service/model/errors.go
create mode 100644 pkg/query-service/model/featureSet.go
delete mode 100644 pkg/query-service/rules/manager_test.go
create mode 100644 pkg/query-service/utils/encryption/encryption.go
create mode 100644 pkg/query-service/utils/pass.go
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000..028b1e410b
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,6 @@
+.git
+.github
+.vscode
+README.md
+deploy
+sample-apps
\ No newline at end of file
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 8f346bf882..a7bcbd1ad2 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -32,7 +32,17 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v2
- - name: Build query-service image
+ - name: Build query-service image
shell: bash
run: |
make build-query-service-amd64
+
+ build-ee-query-service:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ - name: Build EE query-service image
+ shell: bash
+ run: |
+ make build-ee-query-service-amd64
diff --git a/.gitignore b/.gitignore
index f584e2c656..9e422ac336 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+
node_modules
yarn.lock
package.json
@@ -43,8 +44,12 @@ pkg/query-service/signoz.db
pkg/query-service/tests/test-deploy/data/
+ee/query-service/signoz.db
+
+ee/query-service/tests/test-deploy/data/
# local data
-
+*.db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
+bin/
\ No newline at end of file
diff --git a/Makefile b/Makefile
index bdf10b584a..17a4b32fb6 100644
--- a/Makefile
+++ b/Makefile
@@ -7,10 +7,12 @@ BUILD_VERSION ?= $(shell git describe --always --tags)
BUILD_HASH ?= $(shell git rev-parse --short HEAD)
BUILD_TIME ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
+DEV_LICENSE_SIGNOZ_IO ?= https://staging-license.signoz.io/api/v1
# Internal variables or constants.
FRONTEND_DIRECTORY ?= frontend
QUERY_SERVICE_DIRECTORY ?= pkg/query-service
+EE_QUERY_SERVICE_DIRECTORY ?= ee/query-service
STANDALONE_DIRECTORY ?= deploy/docker/clickhouse-setup
SWARM_DIRECTORY ?= deploy/docker-swarm/clickhouse-setup
LOCAL_GOOS ?= $(shell go env GOOS)
@@ -21,15 +23,18 @@ DOCKER_TAG ?= latest
FRONTEND_DOCKER_IMAGE ?= frontend
QUERY_SERVICE_DOCKER_IMAGE ?= query-service
+DEV_BUILD ?= ""
# Build-time Go variables
-PACKAGE?=go.signoz.io/query-service
-buildVersion=${PACKAGE}/version.buildVersion
-buildHash=${PACKAGE}/version.buildHash
-buildTime=${PACKAGE}/version.buildTime
-gitBranch=${PACKAGE}/version.gitBranch
+PACKAGE?=go.signoz.io/signoz
+buildVersion=${PACKAGE}/pkg/query-service/version.buildVersion
+buildHash=${PACKAGE}/pkg/query-service/version.buildHash
+buildTime=${PACKAGE}/pkg/query-service/version.buildTime
+gitBranch=${PACKAGE}/pkg/query-service/version.gitBranch
+licenseSignozIo=${PACKAGE}/ee/query-service/constants.LicenseSignozIo
-LD_FLAGS="-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH}"
+LD_FLAGS=-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH}
+DEV_LD_FLAGS=-X ${licenseSignozIo}=${DEV_LICENSE_SIGNOZ_IO}
all: build-push-frontend build-push-query-service
# Steps to build and push docker image of frontend
@@ -40,7 +45,7 @@ build-frontend-amd64:
@echo "--> Building frontend docker image for amd64"
@echo "------------------"
@cd $(FRONTEND_DIRECTORY) && \
- docker build -f Dockerfile --no-cache -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
+ docker build --file Dockerfile --no-cache -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \
--build-arg TARGETPLATFORM="linux/amd64" .
# Step to build and push docker image of frontend(used in push pipeline)
@@ -59,20 +64,43 @@ build-query-service-amd64:
@echo "------------------"
@echo "--> Building query-service docker image for amd64"
@echo "------------------"
- @cd $(QUERY_SERVICE_DIRECTORY) && \
- docker build -f Dockerfile --no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
- --build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS=$(LD_FLAGS) .
+ @docker build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile \
+ --no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+ --build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .
# Step to build and push docker image of query in amd64 and arm64 (used in push pipeline)
build-push-query-service:
@echo "------------------"
@echo "--> Building and pushing query-service docker image"
@echo "------------------"
- @cd $(QUERY_SERVICE_DIRECTORY) && \
- docker buildx build --file Dockerfile --progress plane --no-cache \
- --push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS=$(LD_FLAGS) \
+ @docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plane --no-cache \
+ --push --platform linux/arm64,linux/amd64 --build-arg LD_FLAGS="$(LD_FLAGS)" \
--tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
+# Step to build EE docker image of query service in amd64 (used in build pipeline)
+build-ee-query-service-amd64:
+ @echo "------------------"
+ @echo "--> Building query-service docker image for amd64"
+ @echo "------------------"
+ @if [ $(DEV_BUILD) != "" ]; then \
+ docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
+ --no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+ --build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="${LD_FLAGS} ${DEV_LD_FLAGS}" .; \
+ else \
+ docker build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
+ --no-cache -t $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \
+ --build-arg TARGETPLATFORM="linux/amd64" --build-arg LD_FLAGS="$(LD_FLAGS)" .; \
+ fi
+
+# Step to build and push EE docker image of query in amd64 and arm64 (used in push pipeline)
+build-push-ee-query-service:
+ @echo "------------------"
+ @echo "--> Building and pushing query-service docker image"
+ @echo "------------------"
+ @docker buildx build --file $(EE_QUERY_SERVICE_DIRECTORY)/Dockerfile \
+ --progress plane --no-cache --push --platform linux/arm64,linux/amd64 \
+ --build-arg LD_FLAGS="$(LD_FLAGS)" --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) .
+
dev-setup:
mkdir -p /var/lib/signoz
sqlite3 /var/lib/signoz/signoz.db "VACUUM";
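The licenseSignozIo variable in the Makefile above relies on Go's `-ldflags "-X importpath.name=value"` mechanism, which overwrites a package-level string variable at link time. A minimal, self-contained sketch of that mechanism; the package and variable names below are hypothetical stand-ins, not the actual ee/query-service/constants package:

package main

import "fmt"

// licenseEndpoint is a plain (non-const) string so the linker can replace it:
//   go build -ldflags "-X main.licenseEndpoint=https://staging-license.signoz.io/api/v1"
var licenseEndpoint = "https://license.signoz.io/api/v1"

func main() {
	// prints the staging URL when built with the -X flag above,
	// otherwise the default endpoint baked into the source
	fmt.Println("license endpoint:", licenseEndpoint)
}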
diff --git a/ee/query-service/.dockerignore b/ee/query-service/.dockerignore
new file mode 100644
index 0000000000..9521c5060b
--- /dev/null
+++ b/ee/query-service/.dockerignore
@@ -0,0 +1,4 @@
+.vscode
+README.md
+signoz.db
+bin
\ No newline at end of file
diff --git a/ee/query-service/Dockerfile b/ee/query-service/Dockerfile
new file mode 100644
index 0000000000..7def5c0982
--- /dev/null
+++ b/ee/query-service/Dockerfile
@@ -0,0 +1,48 @@
+FROM golang:1.17-buster AS builder
+
+# LD_FLAGS is passed as argument from Makefile. It will be empty, if no argument passed
+ARG LD_FLAGS
+ARG TARGETPLATFORM
+
+ENV CGO_ENABLED=1
+ENV GOPATH=/go
+
+RUN export GOOS=$(echo ${TARGETPLATFORM} | cut -d / -f1) && \
+ export GOARCH=$(echo ${TARGETPLATFORM} | cut -d / -f2)
+
+# Prepare and enter src directory
+WORKDIR /go/src/github.com/signoz/signoz
+
+# Add the sources and proceed with build
+ADD . .
+RUN cd ee/query-service \
+ && go build -tags timetzdata -a -o ./bin/query-service \
+ -ldflags "-linkmode external -extldflags '-static' -s -w $LD_FLAGS" \
+ && chmod +x ./bin/query-service
+
+
+# use a minimal alpine image
+FROM alpine:3.7
+
+# Add Maintainer Info
+LABEL maintainer="signoz"
+
+# add ca-certificates in case you need them
+RUN apk update && apk add ca-certificates && rm -rf /var/cache/apk/*
+
+# set working directory
+WORKDIR /root
+
+# copy the binary from builder
+COPY --from=builder /go/src/github.com/signoz/signoz/ee/query-service/bin/query-service .
+
+# copy prometheus YAML config
+COPY pkg/query-service/config/prometheus.yml /root/config/prometheus.yml
+
+# run the binary
+ENTRYPOINT ["./query-service"]
+
+CMD ["-config", "../config/prometheus.yml"]
+# CMD ["./query-service -config /root/config/prometheus.yml"]
+
+EXPOSE 8080
diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go
new file mode 100644
index 0000000000..a6497b615e
--- /dev/null
+++ b/ee/query-service/app/api/api.go
@@ -0,0 +1,124 @@
+package api
+
+import (
+ "net/http"
+
+ "github.com/gorilla/mux"
+ "go.signoz.io/signoz/ee/query-service/dao"
+ "go.signoz.io/signoz/ee/query-service/interfaces"
+ "go.signoz.io/signoz/ee/query-service/license"
+ baseapp "go.signoz.io/signoz/pkg/query-service/app"
+ baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+ rules "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/version"
+)
+
+type APIHandlerOptions struct {
+ DataConnector interfaces.DataConnector
+ AppDao dao.ModelDao
+ RulesManager *rules.Manager
+ FeatureFlags baseint.FeatureLookup
+ LicenseManager *license.Manager
+}
+
+type APIHandler struct {
+ opts APIHandlerOptions
+ baseapp.APIHandler
+}
+
+// NewAPIHandler returns an APIHandler
+func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
+
+ baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
+ Reader: opts.DataConnector,
+ AppDao: opts.AppDao,
+ RuleManager: opts.RulesManager,
+ FeatureFlags: opts.FeatureFlags})
+
+ if err != nil {
+ return nil, err
+ }
+
+ ah := &APIHandler{
+ opts: opts,
+ APIHandler: *baseHandler,
+ }
+ return ah, nil
+}
+
+func (ah *APIHandler) FF() baseint.FeatureLookup {
+ return ah.opts.FeatureFlags
+}
+
+func (ah *APIHandler) RM() *rules.Manager {
+ return ah.opts.RulesManager
+}
+
+func (ah *APIHandler) LM() *license.Manager {
+ return ah.opts.LicenseManager
+}
+
+func (ah *APIHandler) AppDao() dao.ModelDao {
+ return ah.opts.AppDao
+}
+
+func (ah *APIHandler) CheckFeature(f string) bool {
+ err := ah.FF().CheckFeature(f)
+ return err == nil
+}
+
+// RegisterRoutes registers routes for this handler on the given router
+func (ah *APIHandler) RegisterRoutes(router *mux.Router) {
+ // note: add ee override methods first
+
+ // routes available only in ee version
+ router.HandleFunc("/api/v1/licenses",
+ baseapp.AdminAccess(ah.listLicenses)).
+ Methods(http.MethodGet)
+
+ router.HandleFunc("/api/v1/licenses",
+ baseapp.AdminAccess(ah.applyLicense)).
+ Methods(http.MethodPost)
+
+ router.HandleFunc("/api/v1/featureFlags",
+ baseapp.OpenAccess(ah.getFeatureFlags)).
+ Methods(http.MethodGet)
+
+ router.HandleFunc("/api/v1/loginPrecheck",
+ baseapp.OpenAccess(ah.precheckLogin)).
+ Methods(http.MethodGet)
+
+ // paid plans specific routes
+ router.HandleFunc("/api/v1/complete/saml",
+ baseapp.OpenAccess(ah.receiveSAML)).
+ Methods(http.MethodPost)
+
+ router.HandleFunc("/api/v1/orgs/{orgId}/domains",
+ baseapp.AdminAccess(ah.listDomainsByOrg)).
+ Methods(http.MethodGet)
+
+ router.HandleFunc("/api/v1/domains",
+ baseapp.AdminAccess(ah.postDomain)).
+ Methods(http.MethodPost)
+
+ router.HandleFunc("/api/v1/domains/{id}",
+ baseapp.AdminAccess(ah.putDomain)).
+ Methods(http.MethodPut)
+
+ router.HandleFunc("/api/v1/domains/{id}",
+ baseapp.AdminAccess(ah.deleteDomain)).
+ Methods(http.MethodDelete)
+
+ // base overrides
+ router.HandleFunc("/api/v1/version", baseapp.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
+ router.HandleFunc("/api/v1/invite/{token}", baseapp.OpenAccess(ah.getInvite)).Methods(http.MethodGet)
+ router.HandleFunc("/api/v1/register", baseapp.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
+ router.HandleFunc("/api/v1/login", baseapp.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
+ ah.APIHandler.RegisterRoutes(router)
+
+}
+
+func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
+ version := version.GetVersion()
+ ah.WriteJSON(w, r, map[string]string{"version": version, "ee": "Y"})
+}
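RegisterRoutes above adds the EE overrides before delegating to the base handler's RegisterRoutes; gorilla/mux tries routes in registration order, so the first route registered for a path and method wins. A small, self-contained sketch of that override pattern with illustrative handler types (not the real baseapp/dao packages):

package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

type BaseHandler struct{}

func (b *BaseHandler) RegisterRoutes(r *mux.Router) {
	r.HandleFunc("/api/v1/version", func(w http.ResponseWriter, _ *http.Request) {
		fmt.Fprint(w, `{"version":"v0.0.0"}`)
	}).Methods(http.MethodGet)
}

// EEHandler embeds BaseHandler and registers its overrides first, so
// /api/v1/version resolves here while unlisted routes fall through to base.
type EEHandler struct{ BaseHandler }

func (h *EEHandler) RegisterRoutes(r *mux.Router) {
	r.HandleFunc("/api/v1/version", func(w http.ResponseWriter, _ *http.Request) {
		fmt.Fprint(w, `{"version":"v0.0.0","ee":"Y"}`)
	}).Methods(http.MethodGet)
	h.BaseHandler.RegisterRoutes(r) // base routes registered after the overrides
}

func main() {
	r := mux.NewRouter()
	(&EEHandler{}).RegisterRoutes(r)
	log.Fatal(http.ListenAndServe(":8080", r))
}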
diff --git a/ee/query-service/app/api/auth.go b/ee/query-service/app/api/auth.go
new file mode 100644
index 0000000000..0c99edfc36
--- /dev/null
+++ b/ee/query-service/app/api/auth.go
@@ -0,0 +1,297 @@
+package api
+
+import (
+ "context"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/google/uuid"
+ "github.com/gorilla/mux"
+ "go.signoz.io/signoz/ee/query-service/constants"
+ "go.signoz.io/signoz/ee/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ baseauth "go.signoz.io/signoz/pkg/query-service/auth"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+ "go.uber.org/zap"
+)
+
+func parseRequest(r *http.Request, req interface{}) error {
+ defer r.Body.Close()
+ requestBody, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return err
+ }
+
+ err = json.Unmarshal(requestBody, &req)
+ return err
+}
+
+// loginUser overrides base handler and considers SSO case.
+func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
+
+ req := basemodel.LoginRequest{}
+ err := parseRequest(r, &req)
+ if err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ ctx := context.Background()
+
+ if req.Email != "" && ah.CheckFeature(model.SSO) {
+ var apierr basemodel.BaseApiError
+ _, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
+ if apierr != nil && !apierr.IsNil() {
+ RespondError(w, apierr, nil)
+ }
+ }
+
+ // if all looks good, call auth
+ resp, err := auth.Login(ctx, &req)
+ if ah.HandleError(w, err, http.StatusUnauthorized) {
+ return
+ }
+
+ ah.WriteJSON(w, r, resp)
+}
+
+// registerUser registers a user and responds with a precheck
+// so the front-end can decide the login method
+func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
+
+ if !ah.CheckFeature(model.SSO) {
+ ah.APIHandler.Register(w, r)
+ return
+ }
+
+ ctx := context.Background()
+ var req *baseauth.RegisterRequest
+
+ defer r.Body.Close()
+ requestBody, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ zap.S().Errorf("received no input in api\n", err)
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ err = json.Unmarshal(requestBody, &req)
+
+ if err != nil {
+ zap.S().Errorf("received invalid user registration request", zap.Error(err))
+ RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
+ return
+ }
+
+ // get invite object
+ invite, err := baseauth.ValidateInvite(ctx, req)
+ if err != nil || invite == nil {
+ zap.S().Errorf("failed to validate invite token", err)
+ RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
+ }
+
+ // get auth domain from email domain
+ domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
+ if apierr != nil {
+ zap.S().Errorf("failed to get domain from email", apierr)
+ RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
+ }
+
+ precheckResp := &model.PrecheckResponse{
+ SSO: false,
+ IsUser: false,
+ }
+
+ if domain != nil && domain.SsoEnabled {
+ // so is enabled, create user and respond precheck data
+ user, apierr := baseauth.RegisterInvitedUser(ctx, req, true)
+ if apierr != nil {
+ RespondError(w, apierr, nil)
+ return
+ }
+
+ var precheckError basemodel.BaseApiError
+
+ precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
+ if precheckError != nil {
+ RespondError(w, precheckError, precheckResp)
+ }
+
+ } else {
+ // no-sso, validate password
+ if err := auth.ValidatePassword(req.Password); err != nil {
+ RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
+ return
+ }
+
+ _, registerError := baseauth.Register(ctx, req)
+ if !registerError.IsNil() {
+ RespondError(w, apierr, nil)
+ return
+ }
+
+ precheckResp.IsUser = true
+ }
+
+ ah.Respond(w, precheckResp)
+}
+
+// getInvite returns the invite object details for the given invite token. We do not need to
+// protect this API because invite token itself is meant to be private.
+func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
+ token := mux.Vars(r)["token"]
+ sourceUrl := r.URL.Query().Get("ref")
+ ctx := context.Background()
+
+ inviteObject, err := baseauth.GetInvite(context.Background(), token)
+ if err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ resp := model.GettableInvitation{
+ InvitationResponseObject: inviteObject,
+ }
+
+ precheck, apierr := ah.AppDao().PrecheckLogin(ctx, inviteObject.Email, sourceUrl)
+ resp.Precheck = precheck
+
+ if apierr != nil {
+ RespondError(w, apierr, resp)
+ }
+
+ ah.WriteJSON(w, r, resp)
+}
+
+// PrecheckLogin enables browser login page to display appropriate
+// login methods
+func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
+ ctx := context.Background()
+
+ email := r.URL.Query().Get("email")
+ sourceUrl := r.URL.Query().Get("ref")
+
+ resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
+ if apierr != nil {
+ RespondError(w, apierr, resp)
+ }
+
+ ah.Respond(w, resp)
+}
+
+func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
+ // this is the source url that initiated the login request
+ redirectUri := constants.GetDefaultSiteURL()
+ ctx := context.Background()
+
+ var apierr basemodel.BaseApiError
+
+ redirectOnError := func() {
+ ssoError := []byte("Login failed. Please contact your system administrator")
+ dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
+ base64.StdEncoding.Encode(dst, ssoError)
+
+ http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, string(dst)), http.StatusMovedPermanently)
+ }
+
+ if !ah.CheckFeature(model.SSO) {
+ zap.S().Errorf("[ReceiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
+ http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
+ return
+ }
+
+ err := r.ParseForm()
+ if err != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to process response - invalid response from IDP", err, r)
+ redirectOnError()
+ return
+ }
+
+ // the relay state is sent when a login request is submitted to
+ // Idp.
+ relayState := r.FormValue("RelayState")
+ zap.S().Debug("[ReceiveML] relay state", zap.String("relayState", relayState))
+
+ parsedState, err := url.Parse(relayState)
+ if err != nil || relayState == "" {
+ zap.S().Errorf("[ReceiveSAML] failed to process response - invalid response from IDP", err, r)
+ redirectOnError()
+ return
+ }
+
+ // upgrade redirect url from the relay state for better accuracy
+ redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
+
+ // derive domain id from relay state now
+ var domainIdStr string
+ for k, v := range parsedState.Query() {
+ if k == "domainId" && len(v) > 0 {
+ domainIdStr = strings.Replace(v[0], ":", "-", -1)
+ }
+ }
+
+ domainId, err := uuid.Parse(domainIdStr)
+ if err != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to process request- failed to parse domain id ifrom relay", zap.Error(err))
+ redirectOnError()
+ return
+ }
+
+ domain, apierr := ah.AppDao().GetDomain(ctx, domainId)
+ if (apierr != nil) || domain == nil {
+ zap.S().Errorf("[ReceiveSAML] failed to process request- invalid domain", domainIdStr, zap.Error(apierr))
+ redirectOnError()
+ return
+ }
+
+ sp, err := domain.PrepareSamlRequest(parsedState)
+ if err != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to prepare saml request for domain (%s): %v", domainId, err)
+ redirectOnError()
+ return
+ }
+
+ assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
+ if err != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domainId, err)
+ redirectOnError()
+ return
+ }
+
+ if assertionInfo.WarningInfo.InvalidTime {
+ zap.S().Errorf("[ReceiveSAML] expired saml response for organization (%s): %v", domainId, err)
+ redirectOnError()
+ return
+ }
+
+ email := assertionInfo.NameID
+
+ // user email found, now start preparing jwt response
+ userPayload, baseapierr := ah.AppDao().GetUserByEmail(ctx, email)
+ if baseapierr != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to find or register a new user for email %s and org %s", email, domainId, zap.Error(baseapierr.Err))
+ redirectOnError()
+ return
+ }
+
+ tokenStore, err := baseauth.GenerateJWTForUser(&userPayload.User)
+ if err != nil {
+ zap.S().Errorf("[ReceiveSAML] failed to generate access token for email %s and org %s", email, domainId, zap.Error(err))
+ redirectOnError()
+ return
+ }
+
+ userID := userPayload.User.Id
+ nextPage := fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
+ redirectUri,
+ tokenStore.AccessJwt,
+ userID,
+ tokenStore.RefreshJwt)
+
+ http.Redirect(w, r, nextPage, http.StatusMovedPermanently)
+}
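When SAML processing fails, redirectOnError sends the browser back to the login page with a base64-encoded ssoerror query parameter. A minimal sketch of that round trip, including the decode step the login page is expected to perform; it is shown in Go only to match the surrounding code, and the query escaping here is an assumption rather than something taken from the handler above:

package main

import (
	"encoding/base64"
	"fmt"
	"net/url"
)

func main() {
	// what redirectOnError encodes into the redirect URL
	ssoError := "Login failed. Please contact your system administrator"
	encoded := base64.StdEncoding.EncodeToString([]byte(ssoError))
	redirect := fmt.Sprintf("https://localhost:3301/login?ssoerror=%s", url.QueryEscape(encoded))

	// what the login page would do with the ssoerror parameter
	u, err := url.Parse(redirect)
	if err != nil {
		panic(err)
	}
	decoded, err := base64.StdEncoding.DecodeString(u.Query().Get("ssoerror"))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(decoded)) // Login failed. Please contact your system administrator
}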
diff --git a/ee/query-service/app/api/domains.go b/ee/query-service/app/api/domains.go
new file mode 100644
index 0000000000..6456928c75
--- /dev/null
+++ b/ee/query-service/app/api/domains.go
@@ -0,0 +1,90 @@
+package api
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "github.com/google/uuid"
+ "github.com/gorilla/mux"
+ "go.signoz.io/signoz/ee/query-service/model"
+)
+
+func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
+ orgId := mux.Vars(r)["orgId"]
+ domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId)
+ if apierr != nil {
+ RespondError(w, apierr, domains)
+ return
+ }
+ ah.Respond(w, domains)
+}
+
+func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
+ ctx := context.Background()
+
+ req := model.OrgDomain{}
+
+ if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ if err := req.ValidNew(); err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil {
+ RespondError(w, apierr, nil)
+ return
+ }
+
+ ah.Respond(w, &req)
+}
+
+func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
+ ctx := context.Background()
+
+ domainIdStr := mux.Vars(r)["id"]
+ domainId, err := uuid.Parse(domainIdStr)
+ if err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ req := model.OrgDomain{Id: domainId}
+ if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+ req.Id = domainId
+ if err := req.Valid(nil); err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ }
+
+ if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
+ RespondError(w, apierr, nil)
+ return
+ }
+
+ ah.Respond(w, &req)
+}
+
+func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
+ domainIdStr := mux.Vars(r)["id"]
+
+ domainId, err := uuid.Parse(domainIdStr)
+ if err != nil {
+ RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
+ return
+ }
+
+ apierr := ah.AppDao().DeleteDomain(context.Background(), domainId)
+ if apierr != nil {
+ RespondError(w, apierr, nil)
+ return
+ }
+ ah.Respond(w, nil)
+}
diff --git a/ee/query-service/app/api/featureFlags.go b/ee/query-service/app/api/featureFlags.go
new file mode 100644
index 0000000000..9c979d17ba
--- /dev/null
+++ b/ee/query-service/app/api/featureFlags.go
@@ -0,0 +1,10 @@
+package api
+
+import (
+ "net/http"
+)
+
+func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
+ featureSet := ah.FF().GetFeatureFlags()
+ ah.Respond(w, featureSet)
+}
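getFeatureFlags, like CheckFeature in api.go, goes through the FeatureLookup abstraction: a nil error means the feature is enabled, a non-nil error means it is gated. A toy sketch of that contract; the simplified interface shape, the map-backed implementation, and the SOME_PAID_FEATURE key are illustrative, not part of this patch:

package main

import (
	"errors"
	"fmt"
)

// FeatureLookup mirrors the calls used above: CheckFeature gates a single
// feature, GetFeatureFlags reports everything for /api/v1/featureFlags.
type FeatureLookup interface {
	CheckFeature(key string) error
	GetFeatureFlags() map[string]bool
}

type staticFlags map[string]bool

func (s staticFlags) CheckFeature(key string) error {
	if s[key] {
		return nil
	}
	return errors.New("feature unavailable: " + key)
}

func (s staticFlags) GetFeatureFlags() map[string]bool { return s }

func main() {
	var ff FeatureLookup = staticFlags{"SSO": true, "SOME_PAID_FEATURE": false}
	if err := ff.CheckFeature("SSO"); err == nil {
		fmt.Println("SSO enabled: loginPrecheck can return an SSO url")
	}
	if err := ff.CheckFeature("SOME_PAID_FEATURE"); err != nil {
		fmt.Println("gated:", err)
	}
}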
diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go
new file mode 100644
index 0000000000..e5f5b0ca0a
--- /dev/null
+++ b/ee/query-service/app/api/license.go
@@ -0,0 +1,40 @@
+package api
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go.signoz.io/signoz/ee/query-service/model"
+ "net/http"
+)
+
+func (ah *APIHandler) listLicenses(w http.ResponseWriter, r *http.Request) {
+ licenses, apiError := ah.LM().GetLicenses(context.Background())
+ if apiError != nil {
+ RespondError(w, apiError, nil)
+ }
+ ah.Respond(w, licenses)
+}
+
+func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
+ ctx := context.Background()
+ var l model.License
+
+ if err := json.NewDecoder(r.Body).Decode(&l); err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ if l.Key == "" {
+ RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
+ return
+ }
+
+ license, apiError := ah.LM().Activate(ctx, l.Key)
+ if apiError != nil {
+ RespondError(w, apiError, nil)
+ return
+ }
+
+ ah.Respond(w, license)
+}
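Both license routes above are registered with AdminAccess. A hedged sketch of activating a license against a running query service, assuming it listens on localhost:8080 and accepts an admin JWT as a bearer token; the endpoint path and the "key" field come from the handler above, while the placeholder values are not:

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// POST /api/v1/licenses with the license key, as applyLicense expects
	body := strings.NewReader(`{"key": "<license-key>"}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v1/licenses", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <admin-access-jwt>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}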
diff --git a/ee/query-service/app/api/response.go b/ee/query-service/app/api/response.go
new file mode 100644
index 0000000000..fef5f89798
--- /dev/null
+++ b/ee/query-service/app/api/response.go
@@ -0,0 +1,12 @@
+package api
+
+import (
+ "net/http"
+
+ baseapp "go.signoz.io/signoz/pkg/query-service/app"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+func RespondError(w http.ResponseWriter, apiErr basemodel.BaseApiError, data interface{}) {
+ baseapp.RespondError(w, apiErr, data)
+}
diff --git a/ee/query-service/app/db/reader.go b/ee/query-service/app/db/reader.go
new file mode 100644
index 0000000000..e948ee430b
--- /dev/null
+++ b/ee/query-service/app/db/reader.go
@@ -0,0 +1,28 @@
+package db
+
+import (
+ "github.com/ClickHouse/clickhouse-go/v2"
+
+ "github.com/jmoiron/sqlx"
+
+ basechr "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
+)
+
+type ClickhouseReader struct {
+ conn clickhouse.Conn
+ appdb *sqlx.DB
+ *basechr.ClickHouseReader
+}
+
+func NewDataConnector(localDB *sqlx.DB, promConfigPath string) *ClickhouseReader {
+ ch := basechr.NewReader(localDB, promConfigPath)
+ return &ClickhouseReader{
+ conn: ch.GetConn(),
+ appdb: localDB,
+ ClickHouseReader: ch,
+ }
+}
+
+func (r *ClickhouseReader) Start(readerReady chan bool) {
+ r.ClickHouseReader.Start(readerReady)
+}
diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go
new file mode 100644
index 0000000000..608926deb5
--- /dev/null
+++ b/ee/query-service/app/server.go
@@ -0,0 +1,442 @@
+package app
+
+import (
+ "context"
+ "fmt"
+ "net"
+ "net/http"
+ _ "net/http/pprof" // http profiler
+ "os"
+ "time"
+
+ "github.com/gorilla/handlers"
+ "github.com/gorilla/mux"
+ "github.com/jmoiron/sqlx"
+
+ "github.com/rs/cors"
+ "github.com/soheilhy/cmux"
+ "go.signoz.io/signoz/ee/query-service/app/api"
+ "go.signoz.io/signoz/ee/query-service/app/db"
+ "go.signoz.io/signoz/ee/query-service/dao"
+ "go.signoz.io/signoz/ee/query-service/interfaces"
+ licensepkg "go.signoz.io/signoz/ee/query-service/license"
+ "go.signoz.io/signoz/ee/query-service/usage"
+
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ baseconst "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/healthcheck"
+ basealm "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+ pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
+ rules "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+ "go.uber.org/zap"
+)
+
+type ServerOptions struct {
+ PromConfigPath string
+ HTTPHostPort string
+ PrivateHostPort string
+ // alert specific params
+ DisableRules bool
+ RuleRepoURL string
+}
+
+// Server runs HTTP api service
+type Server struct {
+ serverOptions *ServerOptions
+ conn net.Listener
+ ruleManager *rules.Manager
+ separatePorts bool
+
+ // public http router
+ httpConn net.Listener
+ httpServer *http.Server
+
+ // private http
+ privateConn net.Listener
+ privateHTTP *http.Server
+
+ // feature flags
+ featureLookup baseint.FeatureLookup
+
+ unavailableChannel chan healthcheck.Status
+}
+
+// HealthCheckStatus returns health check status channel a client can subscribe to
+func (s Server) HealthCheckStatus() chan healthcheck.Status {
+ return s.unavailableChannel
+}
+
+// NewServer creates and initializes Server
+func NewServer(serverOptions *ServerOptions) (*Server, error) {
+
+ modelDao, err := dao.InitDao("sqlite", baseconst.RELATIONAL_DATASOURCE_PATH)
+ if err != nil {
+ return nil, err
+ }
+
+ localDB, err := dashboards.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH)
+
+ if err != nil {
+ return nil, err
+ }
+
+ localDB.SetMaxOpenConns(10)
+
+ // initiate license manager
+ lm, err := licensepkg.StartManager("sqlite", localDB)
+ if err != nil {
+ return nil, err
+ }
+
+ // set license manager as feature flag provider in dao
+ modelDao.SetFlagProvider(lm)
+ readerReady := make(chan bool)
+
+ var reader interfaces.DataConnector
+ storage := os.Getenv("STORAGE")
+ if storage == "clickhouse" {
+ zap.S().Info("Using ClickHouse as datastore ...")
+ qb := db.NewDataConnector(localDB, serverOptions.PromConfigPath)
+ go qb.Start(readerReady)
+ reader = qb
+ } else {
+ return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage)
+ }
+
+ <-readerReady
+ rm, err := makeRulesManager(serverOptions.PromConfigPath,
+ baseconst.GetAlertManagerApiPrefix(),
+ serverOptions.RuleRepoURL,
+ localDB,
+ reader,
+ serverOptions.DisableRules)
+
+ if err != nil {
+ return nil, err
+ }
+
+ // start the usagemanager
+ usageManager, err := usage.New("sqlite", localDB, lm.GetRepo(), reader.GetConn())
+ if err != nil {
+ return nil, err
+ }
+ err = usageManager.Start()
+ if err != nil {
+ return nil, err
+ }
+
+ telemetry.GetInstance().SetReader(reader)
+
+ apiOpts := api.APIHandlerOptions{
+ DataConnector: reader,
+ AppDao: modelDao,
+ RulesManager: rm,
+ FeatureFlags: lm,
+ LicenseManager: lm,
+ }
+
+ apiHandler, err := api.NewAPIHandler(apiOpts)
+ if err != nil {
+ return nil, err
+ }
+
+ s := &Server{
+ // logger: logger,
+ // tracer: tracer,
+ ruleManager: rm,
+ serverOptions: serverOptions,
+ unavailableChannel: make(chan healthcheck.Status),
+ }
+
+ httpServer, err := s.createPublicServer(apiHandler)
+
+ if err != nil {
+ return nil, err
+ }
+
+ s.httpServer = httpServer
+
+ privateServer, err := s.createPrivateServer(apiHandler)
+ if err != nil {
+ return nil, err
+ }
+
+ s.privateHTTP = privateServer
+
+ return s, nil
+}
+
+func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
+
+ r := mux.NewRouter()
+
+ r.Use(setTimeoutMiddleware)
+ r.Use(s.analyticsMiddleware)
+ r.Use(loggingMiddlewarePrivate)
+
+ apiHandler.RegisterPrivateRoutes(r)
+
+ c := cors.New(cors.Options{
+ //todo(amol): find out a way to add exact domain or
+ // ip here for alert manager
+ AllowedOrigins: []string{"*"},
+ AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
+ AllowedHeaders: []string{"Accept", "Authorization", "Content-Type"},
+ })
+
+ handler := c.Handler(r)
+ handler = handlers.CompressHandler(handler)
+
+ return &http.Server{
+ Handler: handler,
+ }, nil
+}
+
+func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, error) {
+
+ r := mux.NewRouter()
+
+ r.Use(setTimeoutMiddleware)
+ r.Use(s.analyticsMiddleware)
+ r.Use(loggingMiddleware)
+
+ apiHandler.RegisterRoutes(r)
+ apiHandler.RegisterMetricsRoutes(r)
+ apiHandler.RegisterLogsRoutes(r)
+
+ c := cors.New(cors.Options{
+ AllowedOrigins: []string{"*"},
+ AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
+ AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"},
+ })
+
+ handler := c.Handler(r)
+
+ handler = handlers.CompressHandler(handler)
+
+ return &http.Server{
+ Handler: handler,
+ }, nil
+}
+
+// loggingMiddleware is used for logging public api calls
+func loggingMiddleware(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ route := mux.CurrentRoute(r)
+ path, _ := route.GetPathTemplate()
+ startTime := time.Now()
+ next.ServeHTTP(w, r)
+ zap.S().Info(path, "\ttimeTaken: ", time.Now().Sub(startTime))
+ })
+}
+
+// loggingMiddlewarePrivate is used for logging private api calls
+// from internal services like alert manager
+func loggingMiddlewarePrivate(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ route := mux.CurrentRoute(r)
+ path, _ := route.GetPathTemplate()
+ startTime := time.Now()
+ next.ServeHTTP(w, r)
+ zap.S().Info(path, "\tprivatePort: true", "\ttimeTaken: ", time.Now().Sub(startTime))
+ })
+}
+
+type loggingResponseWriter struct {
+ http.ResponseWriter
+ statusCode int
+}
+
+func NewLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter {
+ // WriteHeader(int) is not called if our response implicitly returns 200 OK, so
+ // we default to that status code.
+ return &loggingResponseWriter{w, http.StatusOK}
+}
+
+func (lrw *loggingResponseWriter) WriteHeader(code int) {
+ lrw.statusCode = code
+ lrw.ResponseWriter.WriteHeader(code)
+}
+
+// Flush implements the http.Flush interface.
+func (lrw *loggingResponseWriter) Flush() {
+ lrw.ResponseWriter.(http.Flusher).Flush()
+}
+
+func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ route := mux.CurrentRoute(r)
+ path, _ := route.GetPathTemplate()
+
+ lrw := NewLoggingResponseWriter(w)
+ next.ServeHTTP(lrw, r)
+
+ data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
+
+ if _, ok := telemetry.IgnoredPaths()[path]; !ok {
+ telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)
+ }
+
+ })
+}
+
+func setTimeoutMiddleware(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+ var cancel context.CancelFunc
+ // check if route is not excluded
+ url := r.URL.Path
+ if _, ok := baseconst.TimeoutExcludedRoutes[url]; !ok {
+ ctx, cancel = context.WithTimeout(r.Context(), baseconst.ContextTimeout*time.Second)
+ defer cancel()
+ }
+
+ r = r.WithContext(ctx)
+ next.ServeHTTP(w, r)
+ })
+}
+
+// initListeners initialises listeners of the server
+func (s *Server) initListeners() error {
+ // listen on public port
+ var err error
+ publicHostPort := s.serverOptions.HTTPHostPort
+ if publicHostPort == "" {
+ return fmt.Errorf("baseconst.HTTPHostPort is required")
+ }
+
+ s.httpConn, err = net.Listen("tcp", publicHostPort)
+ if err != nil {
+ return err
+ }
+
+ zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
+
+ // listen on private port to support internal services
+ privateHostPort := s.serverOptions.PrivateHostPort
+
+ if privateHostPort == "" {
+ return fmt.Errorf("baseconst.PrivateHostPort is required")
+ }
+
+ s.privateConn, err = net.Listen("tcp", privateHostPort)
+ if err != nil {
+ return err
+ }
+ zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
+
+ return nil
+}
+
+// Start listening on http and private http port concurrently
+func (s *Server) Start() error {
+
+ // initiate rule manager first
+ if !s.serverOptions.DisableRules {
+ s.ruleManager.Start()
+ } else {
+ zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE")
+ }
+
+ err := s.initListeners()
+ if err != nil {
+ return err
+ }
+
+ var httpPort int
+ if port, err := utils.GetPort(s.httpConn.Addr()); err == nil {
+ httpPort = port
+ }
+
+ go func() {
+ zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
+
+ switch err := s.httpServer.Serve(s.httpConn); err {
+ case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
+ // normal exit, nothing to do
+ default:
+ zap.S().Error("Could not start HTTP server", zap.Error(err))
+ }
+ s.unavailableChannel <- healthcheck.Unavailable
+ }()
+
+ go func() {
+ zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
+
+ err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
+ if err != nil {
+ zap.S().Error("Could not start pprof server", zap.Error(err))
+ }
+ }()
+
+ var privatePort int
+ if port, err := utils.GetPort(s.privateConn.Addr()); err == nil {
+ privatePort = port
+ }
+ fmt.Println("starting private http")
+ go func() {
+ zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
+
+ switch err := s.privateHTTP.Serve(s.privateConn); err {
+ case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
+ // normal exit, nothing to do
+ zap.S().Info("private http server closed")
+ default:
+ zap.S().Error("Could not start private HTTP server", zap.Error(err))
+ }
+
+ s.unavailableChannel <- healthcheck.Unavailable
+
+ }()
+
+ return nil
+}
+
+func makeRulesManager(
+ promConfigPath,
+ alertManagerURL string,
+ ruleRepoURL string,
+ db *sqlx.DB,
+ ch baseint.Reader,
+ disableRules bool) (*rules.Manager, error) {
+
+ // create engine
+ pqle, err := pqle.FromConfigPath(promConfigPath)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create pql engine : %v", err)
+ }
+
+ // notifier opts
+ notifierOpts := basealm.NotifierOptions{
+ QueueCapacity: 10000,
+ Timeout: 1 * time.Second,
+ AlertManagerURLs: []string{alertManagerURL},
+ }
+
+ // create manager opts
+ managerOpts := &rules.ManagerOptions{
+ NotifierOpts: notifierOpts,
+ Queriers: &rules.Queriers{
+ PqlEngine: pqle,
+ Ch: ch.GetConn(),
+ },
+ RepoURL: ruleRepoURL,
+ DBConn: db,
+ Context: context.Background(),
+ Logger: nil,
+ DisableRules: disableRules,
+ }
+
+ // create Manager
+ manager, err := rules.NewManager(managerOpts)
+ if err != nil {
+ return nil, fmt.Errorf("rule manager error: %v", err)
+ }
+
+ zap.S().Info("rules manager is ready")
+
+ return manager, nil
+}
diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go
new file mode 100644
index 0000000000..ba9bb141a5
--- /dev/null
+++ b/ee/query-service/constants/constants.go
@@ -0,0 +1,28 @@
+package constants
+
+import (
+ "os"
+)
+
+const (
+ DefaultSiteURL = "https://localhost:3301"
+)
+
+var LicenseSignozIo = "https://license.signoz.io/api/v1"
+
+func GetOrDefaultEnv(key string, fallback string) string {
+ v := os.Getenv(key)
+ if len(v) == 0 {
+ return fallback
+ }
+ return v
+}
+
+// constant functions that override env vars
+
+// GetDefaultSiteURL returns default site url, primarily
+// used to send saml request and allowing backend to
+// handle http redirect
+func GetDefaultSiteURL() string {
+ return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
+}
diff --git a/ee/query-service/dao/factory.go b/ee/query-service/dao/factory.go
new file mode 100644
index 0000000000..f623e17783
--- /dev/null
+++ b/ee/query-service/dao/factory.go
@@ -0,0 +1,18 @@
+package dao
+
+import (
+ "fmt"
+
+ "go.signoz.io/signoz/ee/query-service/dao/sqlite"
+)
+
+func InitDao(engine, path string) (ModelDao, error) {
+
+ switch engine {
+ case "sqlite":
+ return sqlite.InitDB(path)
+ default:
+ return nil, fmt.Errorf("qsdb type: %s is not supported in query service", engine)
+ }
+
+}
diff --git a/ee/query-service/dao/interface.go b/ee/query-service/dao/interface.go
new file mode 100644
index 0000000000..7e17dcb635
--- /dev/null
+++ b/ee/query-service/dao/interface.go
@@ -0,0 +1,33 @@
+package dao
+
+import (
+ "context"
+
+ "github.com/google/uuid"
+ "github.com/jmoiron/sqlx"
+ "go.signoz.io/signoz/ee/query-service/model"
+ basedao "go.signoz.io/signoz/pkg/query-service/dao"
+ baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type ModelDao interface {
+ basedao.ModelDao
+
+ // SetFlagProvider sets the feature lookup provider
+ SetFlagProvider(flags baseint.FeatureLookup)
+
+ DB() *sqlx.DB
+
+ // auth methods
+ PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, basemodel.BaseApiError)
+ CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
+
+ // org domain (auth domains) CRUD ops
+ ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError)
+ GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError)
+ CreateDomain(ctx context.Context, d *model.OrgDomain) basemodel.BaseApiError
+ UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError
+ DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
+ GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)
+}
diff --git a/ee/query-service/dao/sqlite/auth.go b/ee/query-service/dao/sqlite/auth.go
new file mode 100644
index 0000000000..13fd57259f
--- /dev/null
+++ b/ee/query-service/dao/sqlite/auth.go
@@ -0,0 +1,112 @@
+package sqlite
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "strings"
+
+ "go.signoz.io/signoz/ee/query-service/constants"
+ "go.signoz.io/signoz/ee/query-service/model"
+ baseconst "go.signoz.io/signoz/pkg/query-service/constants"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+ "go.uber.org/zap"
+)
+
+func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
+ domain, apierr := m.GetDomainByEmail(ctx, email)
+ if apierr != nil {
+ return false, apierr
+ }
+
+ if domain != nil && domain.SsoEnabled {
+ // sso is enabled, check if the user has admin role
+ userPayload, baseapierr := m.GetUserByEmail(ctx, email)
+
+ if baseapierr != nil || userPayload == nil {
+ return false, baseapierr
+ }
+
+ if userPayload.Role != baseconst.AdminGroup {
+ return false, model.BadRequest(fmt.Errorf("auth method not supported"))
+ }
+
+ }
+
+ return true, nil
+}
+
+// PrecheckLogin is called when the login or signup page is loaded
+// to check sso login is to be prompted
+func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, basemodel.BaseApiError) {
+
+ // assume user is valid unless proven otherwise
+ resp := &model.PrecheckResponse{IsUser: true, CanSelfRegister: false}
+
+ // check if email is a valid user
+ userPayload, baseApiErr := m.GetUserByEmail(ctx, email)
+ if baseApiErr != nil {
+ return resp, baseApiErr
+ }
+
+ if userPayload == nil {
+ resp.IsUser = false
+ }
+ ssoAvailable := true
+ err := m.checkFeature(model.SSO)
+ if err != nil {
+ switch err.(type) {
+ case basemodel.ErrFeatureUnavailable:
+ // do nothing, just skip sso
+ ssoAvailable = false
+ default:
+ zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
+ return resp, model.BadRequest(err)
+ }
+ }
+
+ if ssoAvailable {
+
+ // find domain from email
+ orgDomain, apierr := m.GetDomainByEmail(ctx, email)
+ if apierr != nil {
+ var emailDomain string
+ emailComponents := strings.Split(email, "@")
+ if len(emailComponents) > 0 {
+ emailDomain = emailComponents[1]
+ }
+ zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError())
+ return resp, apierr
+ }
+
+ if orgDomain != nil && orgDomain.SsoEnabled {
+ // SAML is enabled for this domain, let's prepare the SSO URL
+
+ if sourceUrl == "" {
+ sourceUrl = constants.GetDefaultSiteURL()
+ }
+
+ // parse source url that generated the login request
+ var err error
+ escapedUrl, _ := url.QueryUnescape(sourceUrl)
+ siteUrl, err := url.Parse(escapedUrl)
+ if err != nil {
+ zap.S().Errorf("failed to parse referer", err)
+ return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
+ }
+
+ // build the IdP URL that will authenticate the user
+ // the front-end will redirect user to this url
+ resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
+
+ if err != nil {
+ zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err)
+ return resp, model.InternalError(err)
+ }
+
+ // set SSO to true, as the url is generated correctly
+ resp.SSO = true
+ }
+ }
+ return resp, nil
+}
diff --git a/ee/query-service/dao/sqlite/domain.go b/ee/query-service/dao/sqlite/domain.go
new file mode 100644
index 0000000000..b98bc70cdb
--- /dev/null
+++ b/ee/query-service/dao/sqlite/domain.go
@@ -0,0 +1,183 @@
+package sqlite
+
+import (
+ "context"
+ "database/sql"
+ "encoding/json"
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/google/uuid"
+ "go.signoz.io/signoz/ee/query-service/model"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+ "go.uber.org/zap"
+)
+
+// StoredDomain represents stored database record for org domain
+type StoredDomain struct {
+ Id uuid.UUID `db:"id"`
+ Name string `db:"name"`
+ OrgId string `db:"org_id"`
+ Data string `db:"data"`
+ CreatedAt int64 `db:"created_at"`
+ UpdatedAt int64 `db:"updated_at"`
+}
+
+// GetDomain returns org domain for a given domain id
+func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError) {
+
+ stored := StoredDomain{}
+ err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE id=$1 LIMIT 1`, id)
+
+ if err != nil {
+ if err == sql.ErrNoRows {
+ return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
+ }
+ return nil, model.InternalError(err)
+ }
+
+ domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
+ if err := domain.LoadConfig(stored.Data); err != nil {
+ return domain, model.InternalError(err)
+ }
+ return domain, nil
+}
+
+// ListDomains gets the list of auth domains by org id
+func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError) {
+ domains := []model.OrgDomain{}
+
+ stored := []StoredDomain{}
+ err := m.DB().SelectContext(ctx, &stored, `SELECT * FROM org_domains WHERE org_id=$1`, orgId)
+
+ if err != nil {
+ if err == sql.ErrNoRows {
+ return []model.OrgDomain{}, nil
+ }
+ return nil, model.InternalError(err)
+ }
+
+ for _, s := range stored {
+ domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId}
+ if err := domain.LoadConfig(s.Data); err != nil {
+ zap.S().Errorf("ListDomains() failed", zap.Error(err))
+ }
+ domains = append(domains, domain)
+ }
+
+ return domains, nil
+}
+
+// CreateDomain creates a new auth domain
+func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
+
+ if domain.Id == uuid.Nil {
+ domain.Id = uuid.New()
+ }
+
+ if domain.OrgId == "" || domain.Name == "" {
+ return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name "))
+ }
+
+ configJson, err := json.Marshal(domain)
+ if err != nil {
+ zap.S().Errorf("failed to unmarshal domain config", zap.Error(err))
+ return model.InternalError(fmt.Errorf("domain creation failed"))
+ }
+
+ _, err = m.DB().ExecContext(ctx,
+ "INSERT INTO org_domains (id, name, org_id, data, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)",
+ domain.Id,
+ domain.Name,
+ domain.OrgId,
+ configJson,
+ time.Now().Unix(),
+ time.Now().Unix())
+
+ if err != nil {
+ zap.S().Errorf("failed to insert domain in db", zap.Error(err))
+ return model.InternalError(fmt.Errorf("domain creation failed"))
+ }
+
+ return nil
+}
+
+// UpdateDomain updates stored config params for a domain
+func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
+
+ if domain.Id == uuid.Nil {
+ zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
+ return model.InternalError(fmt.Errorf("domain update failed"))
+ }
+
+ configJson, err := json.Marshal(domain)
+ if err != nil {
+ zap.S().Errorf("domain update failed", zap.Error(err))
+ return model.InternalError(fmt.Errorf("domain update failed"))
+ }
+
+ _, err = m.DB().ExecContext(ctx,
+ "UPDATE org_domains SET data = $1, updated_at = $2 WHERE id = $3",
+ configJson,
+ time.Now().Unix(),
+ domain.Id)
+
+ if err != nil {
+ zap.S().Errorf("domain update failed", zap.Error(err))
+ return model.InternalError(fmt.Errorf("domain update failed"))
+ }
+
+ return nil
+}
+
+// DeleteDomain deletes an org domain
+func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
+
+ if id == uuid.Nil {
+ zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
+ return model.InternalError(fmt.Errorf("domain delete failed"))
+ }
+
+ _, err := m.DB().ExecContext(ctx,
+ "DELETE FROM org_domains WHERE id = $1",
+ id)
+
+ if err != nil {
+ zap.S().Errorf("domain delete failed", zap.Error(err))
+ return model.InternalError(fmt.Errorf("domain delete failed"))
+ }
+
+ return nil
+}
+
+func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError) {
+
+ if email == "" {
+ return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
+ }
+
+ components := strings.Split(email, "@")
+ if len(components) < 2 {
+ return nil, model.BadRequest(fmt.Errorf("invalid email address"))
+ }
+
+ parsedDomain := components[1]
+
+ stored := StoredDomain{}
+ err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE name=$1 LIMIT 1`, parsedDomain)
+
+ if err != nil {
+ if err == sql.ErrNoRows {
+ return nil, nil
+ }
+ return nil, model.InternalError(err)
+ }
+
+ domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
+ if err := domain.LoadConfig(stored.Data); err != nil {
+ return domain, model.InternalError(err)
+ }
+ return domain, nil
+}
diff --git a/ee/query-service/dao/sqlite/modelDao.go b/ee/query-service/dao/sqlite/modelDao.go
new file mode 100644
index 0000000000..156f6b30e7
--- /dev/null
+++ b/ee/query-service/dao/sqlite/modelDao.go
@@ -0,0 +1,63 @@
+package sqlite
+
+import (
+ "fmt"
+
+ "github.com/jmoiron/sqlx"
+ basedao "go.signoz.io/signoz/pkg/query-service/dao"
+ basedsql "go.signoz.io/signoz/pkg/query-service/dao/sqlite"
+ baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+)
+
+type modelDao struct {
+ *basedsql.ModelDaoSqlite
+ flags baseint.FeatureLookup
+}
+
+// SetFlagProvider sets the feature lookup provider
+func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) {
+ m.flags = flags
+}
+
+// CheckFeature confirms if a feature is available
+func (m *modelDao) checkFeature(key string) error {
+ if m.flags == nil {
+ return fmt.Errorf("flag provider not set")
+ }
+
+ return m.flags.CheckFeature(key)
+}
+
+// InitDB creates and extends base model DB repository
+func InitDB(dataSourceName string) (*modelDao, error) {
+ dao, err := basedsql.InitDB(dataSourceName)
+ if err != nil {
+ return nil, err
+ }
+ // set package variable so dependent base methods (e.g. AuthCache) will work
+ basedao.SetDB(dao)
+ m := &modelDao{ModelDaoSqlite: dao}
+
+ table_schema := `
+ PRAGMA foreign_keys = ON;
+ CREATE TABLE IF NOT EXISTS org_domains(
+ id TEXT PRIMARY KEY,
+ org_id TEXT NOT NULL,
+ name VARCHAR(50) NOT NULL UNIQUE,
+ created_at INTEGER NOT NULL,
+ updated_at INTEGER,
+ data TEXT NOT NULL,
+ FOREIGN KEY(org_id) REFERENCES organizations(id)
+ );`
+
+ _, err = m.DB().Exec(table_schema)
+ if err != nil {
+ return nil, fmt.Errorf("error in creating tables: %v", err.Error())
+ }
+
+ return m, nil
+}
+
+func (m *modelDao) DB() *sqlx.DB {
+ return m.ModelDaoSqlite.DB()
+}
diff --git a/ee/query-service/integrations/signozio/response.go b/ee/query-service/integrations/signozio/response.go
new file mode 100644
index 0000000000..c8812105f1
--- /dev/null
+++ b/ee/query-service/integrations/signozio/response.go
@@ -0,0 +1,20 @@
+package signozio
+
+type status string
+
+const (
+ statusSuccess status = "success"
+ statusError status = "error"
+)
+
+type ActivationResult struct {
+ Status status `json:"status"`
+ Data *ActivationResponse `json:"data,omitempty"`
+ ErrorType string `json:"errorType,omitempty"`
+ Error string `json:"error,omitempty"`
+}
+
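+// an example success payload from the license server (illustrative):
+//   {"status": "success", "data": {"ActivationId": "...", "PlanDetails": "<base64 encoded plan>"}}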
+type ActivationResponse struct {
+ ActivationId string `json:"ActivationId"`
+ PlanDetails string `json:"PlanDetails"`
+}
diff --git a/ee/query-service/integrations/signozio/signozio.go b/ee/query-service/integrations/signozio/signozio.go
new file mode 100644
index 0000000000..ac9d4128ab
--- /dev/null
+++ b/ee/query-service/integrations/signozio/signozio.go
@@ -0,0 +1,159 @@
+package signozio
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+
+ "github.com/pkg/errors"
+ "go.signoz.io/signoz/ee/query-service/constants"
+ "go.signoz.io/signoz/ee/query-service/model"
+ "go.uber.org/zap"
+)
+
+var C *Client
+
+const (
+ POST = "POST"
+ APPLICATION_JSON = "application/json"
+)
+
+type Client struct {
+ Prefix string
+}
+
+func New() *Client {
+ return &Client{
+ Prefix: constants.LicenseSignozIo,
+ }
+}
+
+func init() {
+ C = New()
+}
+
+// ActivateLicense sends key to license.signoz.io and gets activation data
+func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) {
+ licenseReq := map[string]string{
+ "key": key,
+ "siteId": siteId,
+ }
+
+ reqString, _ := json.Marshal(licenseReq)
+ httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
+
+ if err != nil {
+ zap.S().Errorf("failed to connect to license.signoz.io", err)
+ return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
+ }
+
+ httpBody, err := ioutil.ReadAll(httpResponse.Body)
+ if err != nil {
+ zap.S().Errorf("failed to read activation response from license.signoz.io", err)
+ return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
+ }
+
+ defer httpResponse.Body.Close()
+
+ // read api request result
+ result := ActivationResult{}
+ err = json.Unmarshal(httpBody, &result)
+ if err != nil {
+ zap.S().Errorf("failed to marshal activation response from license.signoz.io", err)
+ return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
+ }
+
+ switch httpResponse.StatusCode {
+ case 200, 201:
+ return result.Data, nil
+ case 400, 401:
+ return nil, model.BadRequest(fmt.Errorf("failed to activate: %s", result.Error))
+ default:
+ return nil, model.InternalError(fmt.Errorf("failed to activate: %s", result.Error))
+ }
+
+}
+
+// ValidateLicense validates the license key
+func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) {
+ validReq := map[string]string{
+ "activationId": activationId,
+ }
+
+ reqString, _ := json.Marshal(validReq)
+ response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString))
+
+ if err != nil {
+ return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
+ }
+
+ body, err := ioutil.ReadAll(response.Body)
+ if err != nil {
+ return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
+ }
+
+ defer response.Body.Close()
+
+ switch response.StatusCode {
+ case 200, 201:
+ a := ActivationResult{}
+ err = json.Unmarshal(body, &a)
+ if err != nil {
+ return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
+ }
+ return a.Data, nil
+ case 400, 401:
+ return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
+ "bad request error received from license.signoz.io"))
+ default:
+ return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
+ "internal error received from license.signoz.io"))
+ }
+
+}
+
+func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) {
+ req, err := http.NewRequestWithContext(ctx, POST, url, body)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Add("Content-Type", contentType)
+ return req, err
+
+}
+
+// SendUsage reports the usage of signoz to license server
+func SendUsage(ctx context.Context, usage *model.UsagePayload) *model.ApiError {
+ reqString, _ := json.Marshal(usage)
+ req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
+ if err != nil {
+ return model.BadRequest(errors.Wrap(err, "unable to create http request"))
+ }
+
+ res, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
+ }
+
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
+ }
+
+ defer res.Body.Close()
+
+ switch res.StatusCode {
+ case 200, 201:
+ return nil
+ case 400, 401:
+ return model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
+ "bad request error received from license.signoz.io"))
+ default:
+ return model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
+ "internal error received from license.signoz.io"))
+ }
+}
diff --git a/ee/query-service/interfaces/connector.go b/ee/query-service/interfaces/connector.go
new file mode 100644
index 0000000000..5428e421fa
--- /dev/null
+++ b/ee/query-service/interfaces/connector.go
@@ -0,0 +1,12 @@
+package interfaces
+
+import (
+ baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
+)
+
+// DataConnector defines methods for interacting
+// with o11y data stores, for example ClickHouse
+type DataConnector interface {
+ Start(readerReady chan bool)
+ baseint.Reader
+}
diff --git a/ee/query-service/license/db.go b/ee/query-service/license/db.go
new file mode 100644
index 0000000000..a82f0377e2
--- /dev/null
+++ b/ee/query-service/license/db.go
@@ -0,0 +1,127 @@
+package license
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "github.com/jmoiron/sqlx"
+
+ "go.signoz.io/signoz/ee/query-service/license/sqlite"
+ "go.signoz.io/signoz/ee/query-service/model"
+ "go.uber.org/zap"
+)
+
+// Repo is license repo. stores license keys in a secured DB
+type Repo struct {
+ db *sqlx.DB
+}
+
+// NewLicenseRepo initiates a new license repo
+func NewLicenseRepo(db *sqlx.DB) Repo {
+ return Repo{
+ db: db,
+ }
+}
+
+func (r *Repo) InitDB(engine string) error {
+ switch engine {
+ case "sqlite3", "sqlite":
+ return sqlite.InitDB(r.db)
+ default:
+ return fmt.Errorf("unsupported db")
+ }
+}
+
+func (r *Repo) GetLicenses(ctx context.Context) ([]model.License, error) {
+ licenses := []model.License{}
+
+ query := "SELECT key, activationId, planDetails, validationMessage FROM licenses"
+
+ err := r.db.Select(&licenses, query)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get licenses from db: %v", err)
+ }
+
+ return licenses, nil
+}
+
+// GetActiveLicense fetches the latest active license from DB
+func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, error) {
+ var err error
+ licenses := []model.License{}
+
+ query := "SELECT key, activationId, planDetails, validationMessage FROM licenses"
+
+ err = r.db.Select(&licenses, query)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get active licenses from db: %v", err)
+ }
+
+ var active *model.License
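+ // pick the license with the latest ValidFrom that is still valid;
+ // ValidUntil == -1 denotes a license with no expiry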
+ for i := range licenses {
+ l := licenses[i]
+ l.ParsePlan()
+ if active == nil &&
+ (l.ValidFrom != 0) &&
+ (l.ValidUntil == -1 || l.ValidUntil > time.Now().Unix()) {
+ active = &l
+ }
+ if active != nil &&
+ l.ValidFrom > active.ValidFrom &&
+ (l.ValidUntil == -1 || l.ValidUntil > time.Now().Unix()) {
+ active = &l
+ }
+ }
+
+ return active, nil
+}
+
+// InsertLicense inserts a new license in db
+func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
+
+ if l.Key == "" {
+ return fmt.Errorf("insert license failed: license key is required")
+ }
+
+ query := `INSERT INTO licenses
+ (key, planDetails, activationId, validationmessage)
+ VALUES ($1, $2, $3, $4)`
+
+ _, err := r.db.ExecContext(ctx,
+ query,
+ l.Key,
+ l.PlanDetails,
+ l.ActivationId,
+ l.ValidationMessage)
+
+ if err != nil {
+ zap.S().Errorf("error in inserting license data: ", zap.Error(err))
+ return fmt.Errorf("failed to insert license in db: %v", err)
+ }
+
+ return nil
+}
+
+// UpdatePlanDetails writes new plan details to the db
+func (r *Repo) UpdatePlanDetails(ctx context.Context,
+ key,
+ planDetails string) error {
+
+ if key == "" {
+ return fmt.Errorf("Update Plan Details failed: license key is required")
+ }
+
+ query := `UPDATE licenses
+ SET planDetails = $1,
+ updatedAt = $2
+ WHERE key = $3`
+
+ _, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key)
+
+ if err != nil {
+ zap.S().Errorf("error in updating license: ", zap.Error(err))
+ return fmt.Errorf("failed to update license in db: %v", err)
+ }
+
+ return nil
+}
diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go
new file mode 100644
index 0000000000..306fa5a8d1
--- /dev/null
+++ b/ee/query-service/license/manager.go
@@ -0,0 +1,295 @@
+package license
+
+import (
+ "context"
+ "fmt"
+ "sync/atomic"
+ "time"
+
+ "github.com/jmoiron/sqlx"
+
+ "sync"
+
+ validate "go.signoz.io/signoz/ee/query-service/integrations/signozio"
+ "go.signoz.io/signoz/ee/query-service/model"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
+ "go.uber.org/zap"
+)
+
+var LM *Manager
+
+// validate and update license every 24 hours
+var validationFrequency = 24 * 60 * time.Minute
+
+type Manager struct {
+ repo *Repo
+ mutex sync.Mutex
+
+ validatorRunning bool
+
+ // done signals the validator to stop; closing it lets validation
+ // shut down gracefully and protects against inconsistent updates
+ done chan struct{}
+
+ // terminated is closed once the validator goroutine has exited
+ terminated chan struct{}
+
+ // last time the license was validated
+ lastValidated int64
+
+ // keep track of validation failure attempts
+ failedAttempts uint64
+
+ // keep track of active license and features
+ activeLicense *model.License
+ activeFeatures basemodel.FeatureSet
+}
+
+func StartManager(dbType string, db *sqlx.DB) (*Manager, error) {
+
+ if LM != nil {
+ return LM, nil
+ }
+
+ repo := NewLicenseRepo(db)
+ err := repo.InitDB(dbType)
+
+ if err != nil {
+ return nil, fmt.Errorf("failed to initiate license repo: %v", err)
+ }
+
+ m := &Manager{
+ repo: &repo,
+ }
+
+ if err := m.start(); err != nil {
+ return m, err
+ }
+ LM = m
+ return m, nil
+}
+
+// start loads active license in memory and initiates validator
+func (lm *Manager) start() error {
+ err := lm.LoadActiveLicense()
+
+ return err
+}
+
+func (lm *Manager) Stop() {
+ close(lm.done)
+ <-lm.terminated
+}
+
+func (lm *Manager) SetActive(l *model.License) {
+ lm.mutex.Lock()
+ defer lm.mutex.Unlock()
+
+ if l == nil {
+ return
+ }
+
+ lm.activeLicense = l
+ lm.activeFeatures = l.FeatureSet
+ if !lm.validatorRunning {
+ // we want to make sure only one validator runs,
+ // we already have lock() so good to go
+ lm.validatorRunning = true
+ go lm.Validator(context.Background())
+ }
+
+}
+
+// LoadActiveLicense loads the most recent active license
+func (lm *Manager) LoadActiveLicense() error {
+ var err error
+ active, err := lm.repo.GetActiveLicense(context.Background())
+ if err != nil {
+ return err
+ }
+ if active != nil {
+ lm.SetActive(active)
+ } else {
+ zap.S().Info("No active license found.")
+ }
+
+ return nil
+}
+
+func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) {
+
+ licenses, err := lm.repo.GetLicenses(ctx)
+ if err != nil {
+ return nil, model.InternalError(err)
+ }
+
+ for _, l := range licenses {
+ l.ParsePlan()
+
+ if lm.activeLicense != nil && l.Key == lm.activeLicense.Key {
+ l.IsCurrent = true
+ }
+
+ if l.ValidUntil == -1 {
+ // subscriptions have no fixed end date; to show the user some
+ // validity we default to a one-year span
+ l.ValidUntil = l.ValidFrom + 31556926
+ }
+
+ response = append(response, l)
+ }
+
+ return
+}
+
+// Validator periodically re-validates the active license
+func (lm *Manager) Validator(ctx context.Context) {
+ defer close(lm.terminated)
+ tick := time.NewTicker(validationFrequency)
+ defer tick.Stop()
+
+ lm.Validate(ctx)
+
+ for {
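+ // check done first so shutdown is not delayed by a pending tick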
+ select {
+ case <-lm.done:
+ return
+ default:
+ select {
+ case <-lm.done:
+ return
+ case <-tick.C:
+ lm.Validate(ctx)
+ }
+ }
+
+ }
+}
+
+// Validate validates the current active license
+func (lm *Manager) Validate(ctx context.Context) (reterr error) {
+ zap.S().Info("License validation started")
+ if lm.activeLicense == nil {
+ return nil
+ }
+
+ defer func() {
+ lm.mutex.Lock()
+
+ lm.lastValidated = time.Now().Unix()
+ if reterr != nil {
+ zap.S().Errorf("License validation completed with error", reterr)
+ atomic.AddUint64(&lm.failedAttempts, 1)
+ telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
+ map[string]interface{}{"err": reterr.Error()})
+ } else {
+ zap.S().Info("License validation completed with no errors")
+ }
+
+ lm.mutex.Unlock()
+ }()
+
+ response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
+ if apiError != nil {
+ zap.S().Errorf("failed to validate license", apiError)
+ return apiError.Err
+ }
+
+ if response.PlanDetails == lm.activeLicense.PlanDetails {
+ // license plan hasn't changed, nothing to do
+ return nil
+ }
+
+ if response.PlanDetails != "" {
+
+ // copy and replace the active license record
+ l := model.License{
+ Key: lm.activeLicense.Key,
+ CreatedAt: lm.activeLicense.CreatedAt,
+ PlanDetails: response.PlanDetails,
+ ValidationMessage: lm.activeLicense.ValidationMessage,
+ ActivationId: lm.activeLicense.ActivationId,
+ }
+
+ if err := l.ParsePlan(); err != nil {
+ zap.S().Errorf("failed to parse updated license", zap.Error(err))
+ return err
+ }
+
+ // updated plan is parsable, check if plan has changed
+ if lm.activeLicense.PlanDetails != response.PlanDetails {
+ err := lm.repo.UpdatePlanDetails(ctx, lm.activeLicense.Key, response.PlanDetails)
+ if err != nil {
+ // unexpected db write issue but we can let the user continue
+ // and wait for update to work in next cycle.
+ zap.S().Errorf("failed to validate license", zap.Error(err))
+ }
+ }
+
+ // activate the updated license plan
+ lm.SetActive(&l)
+ }
+
+ return nil
+}
+
+// Activate activates a license key with signoz server
+func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
+ defer func() {
+ if errResponse != nil {
+ telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
+ map[string]interface{}{"err": errResponse.Err.Error()})
+ }
+ }()
+
+ response, apiError := validate.ActivateLicense(key, "")
+ if apiError != nil {
+ zap.S().Errorf("failed to activate license", zap.Error(apiError.Err))
+ return nil, apiError
+ }
+
+ l := &model.License{
+ Key: key,
+ ActivationId: response.ActivationId,
+ PlanDetails: response.PlanDetails,
+ }
+
+ // parse validity and features from the plan details
+ err := l.ParsePlan()
+
+ if err != nil {
+ zap.S().Errorf("failed to activate license", zap.Error(err))
+ return nil, model.InternalError(err)
+ }
+
+ // store the license before activating it
+ err = lm.repo.InsertLicense(ctx, l)
+ if err != nil {
+ zap.S().Errorf("failed to activate license", zap.Error(err))
+ return nil, model.InternalError(err)
+ }
+
+ // license is valid, activate it
+ lm.SetActive(l)
+ return l, nil
+}
+
+// CheckFeature will be internally used by backend routines
+// for feature gating
+func (lm *Manager) CheckFeature(featureKey string) error {
+ if _, ok := lm.activeFeatures[featureKey]; ok {
+ return nil
+ }
+ return basemodel.ErrFeatureUnavailable{Key: featureKey}
+}
+
+// GetFeatureFlags returns current active features
+func (lm *Manager) GetFeatureFlags() basemodel.FeatureSet {
+ return lm.activeFeatures
+}
+
+// GetRepo return the license repo
+func (lm *Manager) GetRepo() *Repo {
+ return lm.repo
+}
diff --git a/ee/query-service/license/sqlite/init.go b/ee/query-service/license/sqlite/init.go
new file mode 100644
index 0000000000..a03153659c
--- /dev/null
+++ b/ee/query-service/license/sqlite/init.go
@@ -0,0 +1,37 @@
+package sqlite
+
+import (
+ "fmt"
+ "github.com/jmoiron/sqlx"
+)
+
+func InitDB(db *sqlx.DB) error {
+ var err error
+ if db == nil {
+ return fmt.Errorf("invalid db connection")
+ }
+
+ table_schema := `CREATE TABLE IF NOT EXISTS licenses(
+ key TEXT PRIMARY KEY,
+ createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ planDetails TEXT,
+ activationId TEXT,
+ validationMessage TEXT,
+ lastValidated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+
+ CREATE TABLE IF NOT EXISTS sites(
+ uuid TEXT PRIMARY KEY,
+ alias VARCHAR(180) DEFAULT 'PROD',
+ url VARCHAR(300),
+ createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+ `
+
+ _, err = db.Exec(table_schema)
+ if err != nil {
+ return fmt.Errorf("Error in creating licenses table: %s", err.Error())
+ }
+ return nil
+}
diff --git a/ee/query-service/main.go b/ee/query-service/main.go
new file mode 100644
index 0000000000..e29b86797a
--- /dev/null
+++ b/ee/query-service/main.go
@@ -0,0 +1,90 @@
+package main
+
+import (
+ "context"
+ "flag"
+ "os"
+ "os/signal"
+ "syscall"
+
+ "go.signoz.io/signoz/ee/query-service/app"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ baseconst "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/version"
+
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+)
+
+func initZapLog() *zap.Logger {
+ config := zap.NewDevelopmentConfig()
+ config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
+ config.EncoderConfig.TimeKey = "timestamp"
+ config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
+ logger, _ := config.Build()
+ return logger
+}
+
+func main() {
+ var promConfigPath string
+
+ // disables rule execution but allows change to the rule definition
+ var disableRules bool
+
+ // the url used to build link in the alert messages in slack and other systems
+ var ruleRepoURL string
+
+ flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
+ flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
+ flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
+ flag.Parse()
+
+ loggerMgr := initZapLog()
+ zap.ReplaceGlobals(loggerMgr)
+ defer loggerMgr.Sync() // flushes buffer, if any
+
+ logger := loggerMgr.Sugar()
+ version.PrintVersion()
+
+ serverOptions := &app.ServerOptions{
+ HTTPHostPort: baseconst.HTTPHostPort,
+ PromConfigPath: promConfigPath,
+ PrivateHostPort: baseconst.PrivateHostPort,
+ DisableRules: disableRules,
+ RuleRepoURL: ruleRepoURL,
+ }
+
+ // Read the jwt secret key
+ auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET")
+
+ if len(auth.JwtSecret) == 0 {
+ zap.S().Warn("No JWT secret key is specified.")
+ } else {
+ zap.S().Info("No JWT secret key set successfully.")
+ }
+
+ server, err := app.NewServer(serverOptions)
+ if err != nil {
+ logger.Fatal("Failed to create server", zap.Error(err))
+ }
+
+ if err := server.Start(); err != nil {
+ logger.Fatal("Could not start servers", zap.Error(err))
+ }
+
+ if err := auth.InitAuthCache(context.Background()); err != nil {
+ logger.Fatal("Failed to initialize auth cache", zap.Error(err))
+ }
+
+ signalsChannel := make(chan os.Signal, 1)
+ signal.Notify(signalsChannel, os.Interrupt, syscall.SIGTERM)
+
+ for {
+ select {
+ case status := <-server.HealthCheckStatus():
+ logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
+ case <-signalsChannel:
+ logger.Fatal("Received OS Interrupt Signal ... ")
+ }
+ }
+}
diff --git a/ee/query-service/model/auth.go b/ee/query-service/model/auth.go
new file mode 100644
index 0000000000..8c3447a00d
--- /dev/null
+++ b/ee/query-service/model/auth.go
@@ -0,0 +1,21 @@
+package model
+
+import (
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+// PrecheckResponse contains login precheck response
+type PrecheckResponse struct {
+ SSO bool `json:"sso"`
+ SsoUrl string `json:"ssoUrl"`
+ CanSelfRegister bool `json:"canSelfRegister"`
+ IsUser bool `json:"isUser"`
+ SsoError string `json:"ssoError"`
+}
+
+// GettableInvitation overrides the base invitation object and adds the
+// precheck result to the response
+type GettableInvitation struct {
+ *basemodel.InvitationResponseObject
+ Precheck *PrecheckResponse `json:"precheck"`
+}
diff --git a/ee/query-service/model/domain.go b/ee/query-service/model/domain.go
new file mode 100644
index 0000000000..acde0e2194
--- /dev/null
+++ b/ee/query-service/model/domain.go
@@ -0,0 +1,142 @@
+package model
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "strings"
+
+ "github.com/google/uuid"
+ "github.com/pkg/errors"
+ saml2 "github.com/russellhaering/gosaml2"
+ "go.signoz.io/signoz/ee/query-service/saml"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type SSOType string
+
+const (
+ SAML SSOType = "SAML"
+ GoogleAuth SSOType = "GOOGLE_AUTH"
+)
+
+type SamlConfig struct {
+ SamlEntity string `json:"samlEntity"`
+ SamlIdp string `json:"samlIdp"`
+ SamlCert string `json:"samlCert"`
+}
+
+// OrgDomain identifies an org-owned web domain used for auth and other purposes
+type OrgDomain struct {
+ Id uuid.UUID `json:"id"`
+ Name string `json:"name"`
+ OrgId string `json:"orgId"`
+ SsoEnabled bool `json:"ssoEnabled"`
+ SsoType SSOType `json:"ssoType"`
+ SamlConfig *SamlConfig `json:"samlConfig"`
+ Org *basemodel.Organization
+}
+
+// Valid is used as a pipeline function to check whether an org domain
+// loaded from the db is valid
+func (od *OrgDomain) Valid(err error) error {
+ if err != nil {
+ return err
+ }
+
+ if od.Id == uuid.Nil || od.OrgId == "" {
+ return fmt.Errorf("both id and orgId are required")
+ }
+
+ return nil
+}
+
+// ValidNew checks if the org domain is valid for insertion in db
+func (od *OrgDomain) ValidNew() error {
+
+ if od.OrgId == "" {
+ return fmt.Errorf("orgId is required")
+ }
+
+ if od.Name == "" {
+ return fmt.Errorf("name is required")
+ }
+
+ return nil
+}
+
+// LoadConfig loads config params from json text
+func (od *OrgDomain) LoadConfig(jsondata string) error {
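+ // unmarshal into a copy so the receiver is left untouched if decoding fails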
+ d := *od
+ err := json.Unmarshal([]byte(jsondata), &d)
+ if err != nil {
+ return errors.Wrap(err, "failed to marshal json to OrgDomain{}")
+ }
+ *od = d
+ return nil
+}
+
+func (od *OrgDomain) GetSAMLEntityID() string {
+ if od.SamlConfig != nil {
+ return od.SamlConfig.SamlEntity
+ }
+ return ""
+}
+
+func (od *OrgDomain) GetSAMLIdpURL() string {
+ if od.SamlConfig != nil {
+ return od.SamlConfig.SamlIdp
+ }
+ return ""
+}
+
+func (od *OrgDomain) GetSAMLCert() string {
+ if od.SamlConfig != nil {
+ return od.SamlConfig.SamlCert
+ }
+ return ""
+}
+
+// PrepareSamlRequest builds a gosaml2 service provider request for this domain
+func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServiceProvider, error) {
+
+ // this is the url Idp will call after login completion
+ acs := fmt.Sprintf("%s://%s/%s",
+ siteUrl.Scheme,
+ siteUrl.Host,
+ "api/v1/complete/saml")
+
+ // this is the address of the calling url, useful to redirect user
+ sourceUrl := fmt.Sprintf("%s://%s%s",
+ siteUrl.Scheme,
+ siteUrl.Host,
+ siteUrl.Path)
+
+ // ideally this should be a unique ID for each installation, but since
+ // we don't have UI support for it, we default to the host. The issuer is
+ // the identifier of the service provider (signoz) on the identity provider
+ // (e.g. Azure, Okta). Azure requires this id to be configured on their
+ // side, while others do not seem to care about it.
+ // currently we default it to the host from window.location (received from the browser)
+ issuer := siteUrl.Host
+
+ return saml.PrepareRequest(issuer, acs, sourceUrl, od.GetSAMLEntityID(), od.GetSAMLIdpURL(), od.GetSAMLCert())
+}
+
+func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
+
+ sp, err := od.PrepareSamlRequest(siteUrl)
+ if err != nil {
+ return "", err
+ }
+
+ fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)
+
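+ // the relay state carries the caller's URL plus the domain id so the SAML
+ // callback can route the response back to the right org domain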
+ relayState := fmt.Sprintf("%s://%s%s?domainId=%s",
+ siteUrl.Scheme,
+ siteUrl.Host,
+ siteUrl.Path,
+ fmtDomainId)
+
+ return sp.BuildAuthURL(relayState)
+}
diff --git a/ee/query-service/model/errors.go b/ee/query-service/model/errors.go
new file mode 100644
index 0000000000..4c49f515c1
--- /dev/null
+++ b/ee/query-service/model/errors.go
@@ -0,0 +1,91 @@
+package model
+
+import (
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type ApiError struct {
+ Typ basemodel.ErrorType
+ Err error
+}
+
+func (a *ApiError) Type() basemodel.ErrorType {
+ return a.Typ
+}
+
+func (a *ApiError) ToError() error {
+ if a != nil {
+ return a.Err
+ }
+ return a.Err
+}
+
+func (a *ApiError) Error() string {
+ return a.Err.Error()
+}
+
+func (a *ApiError) IsNil() bool {
+ return a == nil || a.Err == nil
+}
+
+// NewApiError returns a ApiError object of given type
+func NewApiError(typ basemodel.ErrorType, err error) *ApiError {
+ return &ApiError{
+ Typ: typ,
+ Err: err,
+ }
+}
+
+// BadRequest returns a ApiError object of bad request
+func BadRequest(err error) *ApiError {
+ return &ApiError{
+ Typ: basemodel.ErrorBadData,
+ Err: err,
+ }
+}
+
+// InternalError returns a ApiError object of internal type
+func InternalError(err error) *ApiError {
+ return &ApiError{
+ Typ: basemodel.ErrorInternal,
+ Err: err,
+ }
+}
+
+var (
+ ErrorNone basemodel.ErrorType = ""
+ ErrorTimeout basemodel.ErrorType = "timeout"
+ ErrorCanceled basemodel.ErrorType = "canceled"
+ ErrorExec basemodel.ErrorType = "execution"
+ ErrorBadData basemodel.ErrorType = "bad_data"
+ ErrorInternal basemodel.ErrorType = "internal"
+ ErrorUnavailable basemodel.ErrorType = "unavailable"
+ ErrorNotFound basemodel.ErrorType = "not_found"
+ ErrorNotImplemented basemodel.ErrorType = "not_implemented"
+ ErrorUnauthorized basemodel.ErrorType = "unauthorized"
+ ErrorForbidden basemodel.ErrorType = "forbidden"
+ ErrorConflict basemodel.ErrorType = "conflict"
+ ErrorStreamingNotSupported basemodel.ErrorType = "streaming is not supported"
+)
+
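+// the literals above are placeholders; init() aliases them to the base model
+// error types so ee code can reference either package interchangeably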
+func init() {
+ ErrorNone = basemodel.ErrorNone
+ ErrorTimeout = basemodel.ErrorTimeout
+ ErrorCanceled = basemodel.ErrorCanceled
+ ErrorExec = basemodel.ErrorExec
+ ErrorBadData = basemodel.ErrorBadData
+ ErrorInternal = basemodel.ErrorInternal
+ ErrorUnavailable = basemodel.ErrorUnavailable
+ ErrorNotFound = basemodel.ErrorNotFound
+ ErrorNotImplemented = basemodel.ErrorNotImplemented
+ ErrorUnauthorized = basemodel.ErrorUnauthorized
+ ErrorForbidden = basemodel.ErrorForbidden
+ ErrorConflict = basemodel.ErrorConflict
+ ErrorStreamingNotSupported = basemodel.ErrorStreamingNotSupported
+}
+
+type ErrUnsupportedAuth struct{}
+
+func (errUnsupportedAuth ErrUnsupportedAuth) Error() string {
+ return "this authentication method not supported"
+}
diff --git a/ee/query-service/model/license.go b/ee/query-service/model/license.go
new file mode 100644
index 0000000000..e1e6a997da
--- /dev/null
+++ b/ee/query-service/model/license.go
@@ -0,0 +1,91 @@
+package model
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "time"
+
+ "github.com/pkg/errors"
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type License struct {
+ Key string `json:"key" db:"key"`
+ ActivationId string `json:"activationId" db:"activationId"`
+ CreatedAt time.Time `db:"created_at"`
+
+ // PlanDetails contains the encrypted plan info
+ PlanDetails string `json:"planDetails" db:"planDetails"`
+
+ // stores parsed license details
+ LicensePlan
+
+ FeatureSet basemodel.FeatureSet
+
+ // populated in case license has any errors
+ ValidationMessage string `db:"validationMessage"`
+
+ // used only for sending details to front-end
+ IsCurrent bool `json:"isCurrent"`
+}
+
+func (l *License) MarshalJSON() ([]byte, error) {
+
+ return json.Marshal(&struct {
+ Key string `json:"key" db:"key"`
+ ActivationId string `json:"activationId" db:"activationId"`
+ ValidationMessage string `db:"validationMessage"`
+ IsCurrent bool `json:"isCurrent"`
+ PlanKey string `json:"planKey"`
+ ValidFrom time.Time `json:"ValidFrom"`
+ ValidUntil time.Time `json:"ValidUntil"`
+ Status string `json:"status"`
+ }{
+ Key: l.Key,
+ ActivationId: l.ActivationId,
+ IsCurrent: l.IsCurrent,
+ PlanKey: l.PlanKey,
+ ValidFrom: time.Unix(l.ValidFrom, 0),
+ ValidUntil: time.Unix(l.ValidUntil, 0),
+ Status: l.Status,
+ ValidationMessage: l.ValidationMessage,
+ })
+}
+
+type LicensePlan struct {
+ PlanKey string `json:"planKey"`
+ ValidFrom int64 `json:"validFrom"`
+ ValidUntil int64 `json:"validUntil"`
+ Status string `json:"status"`
+}
+
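+// ParsePlan decodes PlanDetails, a base64 string wrapping a JSON document
+// such as {"planKey":"PRO_PLAN","validFrom":1640995200,"validUntil":-1,"status":"VALID"}
+// (illustrative), into LicensePlan and derives the active feature set.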
+func (l *License) ParsePlan() error {
+ l.LicensePlan = LicensePlan{}
+
+ planData, err := base64.StdEncoding.DecodeString(l.PlanDetails)
+ if err != nil {
+ return err
+ }
+
+ plan := LicensePlan{}
+ err = json.Unmarshal([]byte(planData), &plan)
+ if err != nil {
+ l.ValidationMessage = "failed to parse plan from license"
+ return errors.Wrap(err, "failed to parse plan from license")
+ }
+
+ l.LicensePlan = plan
+ l.ParseFeatures()
+ return nil
+}
+
+func (l *License) ParseFeatures() {
+ switch l.PlanKey {
+ case Pro:
+ l.FeatureSet = ProPlan
+ case Enterprise:
+ l.FeatureSet = EnterprisePlan
+ default:
+ l.FeatureSet = BasicPlan
+ }
+}
diff --git a/ee/query-service/model/plans.go b/ee/query-service/model/plans.go
new file mode 100644
index 0000000000..e68217730a
--- /dev/null
+++ b/ee/query-service/model/plans.go
@@ -0,0 +1,27 @@
+package model
+
+import (
+ basemodel "go.signoz.io/signoz/pkg/query-service/model"
+)
+
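+// feature keys and plan identifiers; each plan below maps to the feature set
+// used for feature gating (see License.ParseFeatures)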
+const SSO = "SSO"
+const Basic = "BASIC_PLAN"
+const Pro = "PRO_PLAN"
+const Enterprise = "ENTERPRISE_PLAN"
+const DisableUpsell = "DISABLE_UPSELL"
+
+var BasicPlan = basemodel.FeatureSet{
+ Basic: true,
+ SSO: false,
+ DisableUpsell: false,
+}
+
+var ProPlan = basemodel.FeatureSet{
+ Pro: true,
+ SSO: true,
+}
+
+var EnterprisePlan = basemodel.FeatureSet{
+ Enterprise: true,
+ SSO: true,
+}
diff --git a/ee/query-service/model/usage.go b/ee/query-service/model/usage.go
new file mode 100644
index 0000000000..7d6eec91cc
--- /dev/null
+++ b/ee/query-service/model/usage.go
@@ -0,0 +1,35 @@
+package model
+
+import (
+ "time"
+
+ "github.com/google/uuid"
+)
+
+type UsageSnapshot struct {
+ CurrentLogSizeBytes uint64 `json:"currentLogSizeBytes"`
+ CurrentLogSizeBytesColdStorage uint64 `json:"currentLogSizeBytesColdStorage"`
+ CurrentSpansCount uint64 `json:"currentSpansCount"`
+ CurrentSpansCountColdStorage uint64 `json:"currentSpansCountColdStorage"`
+ CurrentSamplesCount uint64 `json:"currentSamplesCount"`
+ CurrentSamplesCountColdStorage uint64 `json:"currentSamplesCountColdStorage"`
+}
+
+type UsageBase struct {
+ Id uuid.UUID `json:"id" db:"id"`
+ InstallationId uuid.UUID `json:"installationId" db:"installation_id"`
+ ActivationId uuid.UUID `json:"activationId" db:"activation_id"`
+ CreatedAt time.Time `json:"createdAt" db:"created_at"`
+ FailedSyncRequest int `json:"failedSyncRequest" db:"failed_sync_request_count"`
+}
+
+type UsagePayload struct {
+ UsageBase
+ Metrics UsageSnapshot `json:"metrics"`
+ SnapshotDate time.Time `json:"snapshotDate"`
+}
+
+type Usage struct {
+ UsageBase
+ Snapshot string `db:"snapshot"`
+}
diff --git a/ee/query-service/saml/request.go b/ee/query-service/saml/request.go
new file mode 100644
index 0000000000..01af7afe28
--- /dev/null
+++ b/ee/query-service/saml/request.go
@@ -0,0 +1,107 @@
+package saml
+
+import (
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/pem"
+ "fmt"
+ "strings"
+
+ saml2 "github.com/russellhaering/gosaml2"
+ dsig "github.com/russellhaering/goxmldsig"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.uber.org/zap"
+)
+
+func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) {
+ certStore := &dsig.MemoryX509CertificateStore{
+ Roots: []*x509.Certificate{},
+ }
+
+ certData, err := base64.StdEncoding.DecodeString(certString)
+ if err != nil {
+ return certStore, fmt.Errorf(fmt.Sprintf("failed to read certificate: %v", err))
+ }
+
+ idpCert, err := x509.ParseCertificate(certData)
+ if err != nil {
+ return certStore, fmt.Errorf(fmt.Sprintf("failed to prepare saml request, invalid cert: %s", err.Error()))
+ }
+
+ certStore.Roots = append(certStore.Roots, idpCert)
+
+ return certStore, nil
+}
+
+func LoadCertFromPem(certString string) (dsig.X509CertificateStore, error) {
+ certStore := &dsig.MemoryX509CertificateStore{
+ Roots: []*x509.Certificate{},
+ }
+
+ block, _ := pem.Decode([]byte(certString))
+ if block == nil {
+ return certStore, fmt.Errorf("no valid pem cert found")
+ }
+
+ idpCert, err := x509.ParseCertificate(block.Bytes)
+ if err != nil {
+ return certStore, fmt.Errorf(fmt.Sprintf("failed to parse pem cert: %s", err.Error()))
+ }
+
+ certStore.Roots = append(certStore.Roots, idpCert)
+
+ return certStore, nil
+}
+
+// PrepareRequest prepares a SAML service provider used to build the IdP authorization URL
+func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*saml2.SAMLServiceProvider, error) {
+ var certStore dsig.X509CertificateStore
+ if certString == "" {
+ return nil, fmt.Errorf("invalid certificate data")
+ }
+
+ var err error
+ if strings.Contains(certString, "-----BEGIN CERTIFICATE-----") {
+ certStore, err = LoadCertFromPem(certString)
+ } else {
+ certStore, err = LoadCertificateStore(certString)
+ }
+ // the certificate store could not be created, return the error
+ if err != nil {
+ return nil, err
+ }
+
+ randomKeyStore := dsig.RandomKeyStoreForTest()
+
+ // SIGNOZ_SAML_RETURN_URL env var would support overriding window.location
+ // as return destination after saml request is complete from IdP side.
+ // this var is also useful for development, as it is easy to override with backend endpoint
+ // e.g. http://localhost:8080/api/v1/complete/saml
+ acsUrl = constants.GetOrDefaultEnv("SIGNOZ_SAML_RETURN_URL", acsUrl)
+
+ sp := &saml2.SAMLServiceProvider{
+ IdentityProviderSSOURL: idp,
+ IdentityProviderIssuer: entity,
+ ServiceProviderIssuer: issuer,
+ AssertionConsumerServiceURL: acsUrl,
+ SignAuthnRequests: true,
+ AllowMissingAttributes: true,
+
+ // about cert stores - sender (signoz app) and receiver (idp):
+ // The random key store is the sender cert. The public cert store (IDPCertificateStore) configured on the org domain is the receiver cert (IdP provided).
+ // At the moment, neither the library we use nor the IdP cares much about the sender cert; it just adds an additional layer of security we can explore in future versions.
+ // The receiver (IdP) cert will be different for each org domain. Imagine a cloud setup where each company sets up its own domain that integrates with its IdP:
+ // @signoz.io
+ // @next.io
+ // Each of the above will have its own IdP setup and hence a separate public cert to decrypt the response.
+ // The way a SAML request travels is:
+ // SigNoz Backend -> IdP Login Screen -> SigNoz Backend -> SigNoz Frontend
+ // ---------------- | -------------------| -------------------------------------
+ // The dashed lines indicate request boundaries. The response from the IdP starts a new request, hence we need relay state to pass context around.
+
+ IDPCertificateStore: certStore,
+ SPKeyStore: randomKeyStore,
+ }
+ zap.S().Debugf("SAML request:", sp)
+ return sp, nil
+}
diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go
new file mode 100644
index 0000000000..34d931861c
--- /dev/null
+++ b/ee/query-service/usage/manager.go
@@ -0,0 +1,321 @@
+package usage
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "sync/atomic"
+ "time"
+
+ "github.com/ClickHouse/clickhouse-go/v2"
+ "github.com/google/uuid"
+ "github.com/jmoiron/sqlx"
+ "go.uber.org/zap"
+
+ licenseserver "go.signoz.io/signoz/ee/query-service/integrations/signozio"
+ "go.signoz.io/signoz/ee/query-service/license"
+ "go.signoz.io/signoz/ee/query-service/model"
+ "go.signoz.io/signoz/ee/query-service/usage/repository"
+ "go.signoz.io/signoz/pkg/query-service/utils/encryption"
+)
+
+const (
+ MaxRetries = 3
+ RetryInterval = 5 * time.Second
+ stateUnlocked uint32 = 0
+ stateLocked uint32 = 1
+)
+
+var (
+ // collect usage every hour
+ collectionFrequency = 1 * time.Hour
+
+ // send usage every 24 hours
+ uploadFrequency = 24 * time.Hour
+
+ locker = stateUnlocked
+)
+
+type Manager struct {
+ repository *repository.Repository
+
+ clickhouseConn clickhouse.Conn
+
+ licenseRepo *license.Repo
+
+ // done signals the usage routine to stop; closing it lets usage reporting
+ // shut down gracefully and protects against inconsistent updates
+ done chan struct{}
+
+ // terminated is closed once the UsageExporter goroutine has exited
+ terminated chan struct{}
+}
+
+func New(dbType string, db *sqlx.DB, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn) (*Manager, error) {
+ repo := repository.New(db)
+
+ err := repo.Init(dbType)
+ if err != nil {
+ return nil, fmt.Errorf("failed to initiate usage repo: %v", err)
+ }
+
+ m := &Manager{
+ repository: repo,
+ clickhouseConn: clickhouseConn,
+ licenseRepo: licenseRepo,
+ }
+ return m, nil
+}
+
+// Start uploads any pending snapshots, collects current usage if due, and starts the exporter routine
+func (lm *Manager) Start() error {
+ // compare locker with stateUnlocked; if they match the lock is acquired, otherwise return an error
+ if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) {
+ return fmt.Errorf("usage exporter is locked")
+ }
+
+ // check if license is present or not
+ license, err := lm.licenseRepo.GetActiveLicense(context.Background())
+ if err != nil {
+ return fmt.Errorf("failed to get active license")
+ }
+ if license == nil {
+ // we will not start the usage reporting if license is not present.
+ zap.S().Info("no license present, skipping usage reporting")
+ return nil
+ }
+
+ // upload previous snapshots if any
+ err = lm.UploadUsage(context.Background())
+ if err != nil {
+ return err
+ }
+
+ // collect a snapshot in case one wasn't collected within the last collectionFrequency
+ err = lm.CollectCurrentUsage(context.Background())
+ if err != nil {
+ return err
+ }
+
+ go lm.UsageExporter(context.Background())
+
+ return nil
+}
+
+// CollectCurrentUsage collects usage data if none has been collected recently
+func (lm *Manager) CollectCurrentUsage(ctx context.Context) error {
+ // check the DB for any snapshot with timestamp > t - collectionFrequency
+ ts := time.Now().Add(-collectionFrequency)
+ alreadyCreated, err := lm.repository.CheckSnapshotGtCreatedAt(ctx, ts)
+ if err != nil {
+ return err
+ }
+ if !alreadyCreated {
+ zap.S().Info("Collecting current usage")
+ exportError := lm.CollectAndStoreUsage(ctx)
+ if exportError != nil {
+ return exportError
+ }
+ } else {
+ zap.S().Info("Nothing to collect")
+ }
+ return nil
+}
+
+func (lm *Manager) UsageExporter(ctx context.Context) {
+ defer close(lm.terminated)
+
+ collectionTicker := time.NewTicker(collectionFrequency)
+ defer collectionTicker.Stop()
+
+ uploadTicker := time.NewTicker(uploadFrequency)
+ defer uploadTicker.Stop()
+
+ for {
+ select {
+ case <-lm.done:
+ return
+ case <-collectionTicker.C:
+ lm.CollectAndStoreUsage(ctx)
+ case <-uploadTicker.C:
+ lm.UploadUsage(ctx)
+ }
+ }
+}
+
+type TableSize struct {
+ Table string `ch:"table"`
+ DiskName string `ch:"disk_name"`
+ Rows uint64 `ch:"rows"`
+ UncompressedBytes uint64 `ch:"uncompressed_bytes"`
+}
+
+func (lm *Manager) CollectAndStoreUsage(ctx context.Context) error {
+ snap, err := lm.GetUsageFromClickHouse(ctx)
+ if err != nil {
+ return err
+ }
+
+ license, err := lm.licenseRepo.GetActiveLicense(ctx)
+ if err != nil {
+ return err
+ }
+
+ activationId, _ := uuid.Parse(license.ActivationId)
+ // TODO (nitya) : Add installation ID in the payload
+ payload := model.UsagePayload{
+ UsageBase: model.UsageBase{
+ ActivationId: activationId,
+ FailedSyncRequest: 0,
+ },
+ Metrics: *snap,
+ SnapshotDate: time.Now(),
+ }
+
+ err = lm.repository.InsertSnapshot(ctx, &payload)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (lm *Manager) GetUsageFromClickHouse(ctx context.Context) (*model.UsageSnapshot, error) {
+ tableSizes := []TableSize{}
+ snap := model.UsageSnapshot{}
+
+ // get usage from clickhouse
+ query := `
+ SELECT
+ table,
+ disk_name,
+ sum(rows) as rows,
+ sum(data_uncompressed_bytes) AS uncompressed_bytes
+ FROM system.parts
+ WHERE active AND (database in ('signoz_logs', 'signoz_metrics', 'signoz_traces')) AND (table in ('logs','samples_v2', 'signoz_index_v2'))
+ GROUP BY
+ table,
+ disk_name
+ ORDER BY table
+ `
+ err := lm.clickhouseConn.Select(ctx, &tableSizes, query)
+ if err != nil {
+ return nil, err
+ }
+
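+ // map each table to its usage metric: logs -> log bytes, samples_v2 -> metric
+ // samples, signoz_index_v2 -> spans; non-default disks count as cold storage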
+ for _, val := range tableSizes {
+ switch val.Table {
+ case "logs":
+ if val.DiskName == "default" {
+ snap.CurrentLogSizeBytes = val.UncompressedBytes
+ } else {
+ snap.CurrentLogSizeBytesColdStorage = val.UncompressedBytes
+ }
+ case "samples_v2":
+ if val.DiskName == "default" {
+ snap.CurrentSamplesCount = val.Rows
+ } else {
+ snap.CurrentSamplesCountColdStorage = val.Rows
+ }
+ case "signoz_index_v2":
+ if val.DiskName == "default" {
+ snap.CurrentSpansCount = val.Rows
+ } else {
+ snap.CurrentSpansCountColdStorage = val.Rows
+ }
+ }
+ }
+
+ return &snap, nil
+}
+
+func (lm *Manager) UploadUsage(ctx context.Context) error {
+ snapshots, err := lm.repository.GetSnapshotsNotSynced(ctx)
+ if err != nil {
+ return err
+ }
+
+ if len(snapshots) <= 0 {
+ zap.S().Info("no snapshots to upload, skipping.")
+ return nil
+ }
+
+ zap.S().Info("uploading snapshots")
+ for _, snap := range snapshots {
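+ // snapshots are stored encrypted; the first 32 chars of the activation id
+ // serve as the key, mirroring InsertSnapshot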
+ metricsBytes, err := encryption.Decrypt([]byte(snap.ActivationId.String()[:32]), []byte(snap.Snapshot))
+ if err != nil {
+ return err
+ }
+
+ metrics := model.UsageSnapshot{}
+ err = json.Unmarshal(metricsBytes, &metrics)
+ if err != nil {
+ return err
+ }
+
+ err = lm.UploadUsageWithExponentalBackOff(ctx, model.UsagePayload{
+ UsageBase: model.UsageBase{
+ Id: snap.Id,
+ InstallationId: snap.InstallationId,
+ ActivationId: snap.ActivationId,
+ FailedSyncRequest: snap.FailedSyncRequest,
+ },
+ SnapshotDate: snap.CreatedAt,
+ Metrics: metrics,
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
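+// UploadUsageWithExponentalBackOff tries to send a snapshot up to MaxRetries
+// times, backing off between attempts; once retries are exhausted the failure
+// is recorded in the snapshot's failed count instead of being returned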
+func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) error {
+ for i := 1; i <= MaxRetries; i++ {
+ apiErr := licenseserver.SendUsage(ctx, &payload)
+ if apiErr != nil && i == MaxRetries {
+ err := lm.repository.IncrementFailedRequestCount(ctx, payload.Id)
+ if err != nil {
+ zap.S().Errorf("failed to updated the failure count for snapshot in DB : ", zap.Error(err))
+ return err
+ }
+ zap.S().Errorf("retries stopped : %v", zap.Error(err))
+ // not returning error here since it is captured in the failed count
+ return nil
+ } else if apiErr != nil {
+ // back off before the next attempt; the delay grows with the attempt number
+ sleepDuration := RetryInterval * time.Duration(i)
+ zap.S().Errorf("failed to upload snapshot, retrying after %v secs: %v", sleepDuration.Seconds(), apiErr.Err)
+ time.Sleep(sleepDuration)
+
+ // update the failed request count
+ err := lm.repository.IncrementFailedRequestCount(ctx, payload.Id)
+ if err != nil {
+ zap.S().Errorf("failed to updated the failure count for snapshot in DB : %v", zap.Error(err))
+ return err
+ }
+ } else {
+ break
+ }
+ }
+
+ // update the database that it is synced
+ err := lm.repository.MoveToSynced(ctx, payload.Id)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (lm *Manager) Stop() {
+ close(lm.done)
+ atomic.StoreUint32(&locker, stateUnlocked)
+ <-lm.terminated
+}
diff --git a/ee/query-service/usage/repository/repository.go b/ee/query-service/usage/repository/repository.go
new file mode 100644
index 0000000000..99bd4c5796
--- /dev/null
+++ b/ee/query-service/usage/repository/repository.go
@@ -0,0 +1,126 @@
+package repository
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "time"
+
+ "github.com/google/uuid"
+ "github.com/jmoiron/sqlx"
+ "go.uber.org/zap"
+
+ "go.signoz.io/signoz/ee/query-service/model"
+ "go.signoz.io/signoz/ee/query-service/usage/sqlite"
+ "go.signoz.io/signoz/pkg/query-service/utils/encryption"
+)
+
+const (
+ MaxFailedSyncCount = 9 // a snapshot will be ignored if the max failed count is greater than or equal to 9
+)
+
+// Repository is usage Repository which stores usage snapshot in a secured DB
+type Repository struct {
+ db *sqlx.DB
+}
+
+// New initiates a new usage Repository
+func New(db *sqlx.DB) *Repository {
+ return &Repository{
+ db: db,
+ }
+}
+
+func (r *Repository) Init(engine string) error {
+ switch engine {
+ case "sqlite3", "sqlite":
+ return sqlite.InitDB(r.db)
+ default:
+ return fmt.Errorf("unsupported db")
+ }
+}
+
+func (r *Repository) InsertSnapshot(ctx context.Context, usage *model.UsagePayload) error {
+
+ snapshotBytes, err := json.Marshal(usage.Metrics)
+ if err != nil {
+ return err
+ }
+
+ usage.Id = uuid.New()
+
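+ // encrypt the snapshot using the first 32 chars of the activation id as the key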
+ encryptedSnapshot, err := encryption.Encrypt([]byte(usage.ActivationId.String()[:32]), snapshotBytes)
+ if err != nil {
+ return err
+ }
+
+ query := `INSERT INTO usage(id, activation_id, snapshot)
+ VALUES ($1, $2, $3)`
+ _, err = r.db.ExecContext(ctx,
+ query,
+ usage.Id,
+ usage.ActivationId,
+ string(encryptedSnapshot),
+ )
+ if err != nil {
+ zap.S().Errorf("error inserting usage data: %v", zap.Error(err))
+ return fmt.Errorf("failed to insert usage in db: %v", err)
+ }
+ return nil
+}
+
+func (r *Repository) MoveToSynced(ctx context.Context, id uuid.UUID) error {
+
+ query := `UPDATE usage
+ SET synced = 'true',
+ synced_at = $1
+ WHERE id = $2`
+
+ _, err := r.db.ExecContext(ctx, query, time.Now(), id)
+
+ if err != nil {
+ zap.S().Errorf("error in updating usage: %v", zap.Error(err))
+ return fmt.Errorf("failed to update usage in db: %v", err)
+ }
+
+ return nil
+}
+
+func (r *Repository) IncrementFailedRequestCount(ctx context.Context, id uuid.UUID) error {
+
+ query := `UPDATE usage SET failed_sync_request_count = failed_sync_request_count + 1 WHERE id = $1`
+ _, err := r.db.ExecContext(ctx, query, id)
+ if err != nil {
+ zap.S().Errorf("error in updating usage: %v", zap.Error(err))
+ return fmt.Errorf("failed to update usage in db: %v", err)
+ }
+
+ return nil
+}
+
+func (r *Repository) GetSnapshotsNotSynced(ctx context.Context) ([]*model.Usage, error) {
+ snapshots := []*model.Usage{}
+
+ query := `SELECT id,created_at, activation_id, snapshot, failed_sync_request_count from usage where synced!='true' and failed_sync_request_count < $1 order by created_at asc `
+
+ err := r.db.SelectContext(ctx, &snapshots, query, MaxFailedSyncCount)
+ if err != nil {
+ return nil, err
+ }
+
+ return snapshots, nil
+}
+
+// CheckSnapshotGtCreatedAt checks whether any snapshot was created after the provided timestamp
+func (r *Repository) CheckSnapshotGtCreatedAt(ctx context.Context, ts time.Time) (bool, error) {
+
+ var snapshots uint64
+ query := `SELECT COUNT(*) FROM usage WHERE created_at > $1`
+
+ err := r.db.QueryRowContext(ctx, query, ts).Scan(&snapshots)
+ if err != nil {
+ return false, err
+ }
+
+ return snapshots > 0, err
+}
diff --git a/ee/query-service/usage/sqlite/init.go b/ee/query-service/usage/sqlite/init.go
new file mode 100644
index 0000000000..4fefa644ae
--- /dev/null
+++ b/ee/query-service/usage/sqlite/init.go
@@ -0,0 +1,32 @@
+package sqlite
+
+import (
+ "fmt"
+
+ "github.com/jmoiron/sqlx"
+)
+
+func InitDB(db *sqlx.DB) error {
+ var err error
+ if db == nil {
+ return fmt.Errorf("invalid db connection")
+ }
+
+ table_schema := `CREATE TABLE IF NOT EXISTS usage(
+ id UUID PRIMARY KEY,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ activation_id UUID,
+ snapshot TEXT,
+ synced BOOLEAN DEFAULT 'false',
+ synced_at TIMESTAMP,
+ failed_sync_request_count INTEGER DEFAULT 0
+ );
+ `
+
+ _, err = db.Exec(table_schema)
+ if err != nil {
+ return fmt.Errorf("error in creating usage table: %v", err.Error())
+ }
+ return nil
+}
diff --git a/pkg/query-service/go.mod b/go.mod
similarity index 91%
rename from pkg/query-service/go.mod
rename to go.mod
index 5e608f0d8b..44107166d0 100644
--- a/pkg/query-service/go.mod
+++ b/go.mod
@@ -1,4 +1,4 @@
-module go.signoz.io/query-service
+module go.signoz.io/signoz
go 1.17
@@ -16,11 +16,13 @@ require (
github.com/minio/minio-go/v6 v6.0.57
github.com/oklog/oklog v0.3.2
github.com/pkg/errors v0.9.1
- github.com/prometheus/client_golang v0.9.0-pre1.0.20181001174001-0a8115f42e03
+ github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f
github.com/prometheus/common v0.0.0-20180518154759-7600349dcfe1
github.com/prometheus/prometheus v2.5.0+incompatible
- github.com/prometheus/tsdb v0.0.0-20181003080831-0ce41118ed20
github.com/rs/cors v1.7.0
+ github.com/russellhaering/gosaml2 v0.8.0
+ github.com/russellhaering/goxmldsig v1.2.0
+ github.com/sethvargo/go-password v0.2.0
github.com/smartystreets/goconvey v1.6.4
github.com/soheilhy/cmux v0.1.4
go.uber.org/zap v1.16.0
@@ -29,12 +31,16 @@ require (
)
require (
+ github.com/beevik/etree v1.1.0 // indirect
github.com/form3tech-oss/jwt-go v3.2.2+incompatible // indirect
+ github.com/jonboulle/clockwork v0.2.2 // indirect
github.com/klauspost/cpuid v1.2.3 // indirect
+ github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/minio/md5-simd v1.1.0 // indirect
github.com/minio/sha256-simd v0.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f // indirect
+ github.com/prometheus/client_golang v0.9.0-pre1.0.20181001174001-0a8115f42e03 // indirect
+ github.com/prometheus/tsdb v0.0.0-20181003080831-0ce41118ed20 // indirect
gopkg.in/ini.v1 v1.42.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)
@@ -49,13 +55,13 @@ require (
github.com/aws/aws-sdk-go v1.27.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
- github.com/cespare/xxhash v1.1.0 // indirect
+ github.com/cespare/xxhash v1.1.0
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
github.com/felixge/httpsnoop v1.0.1 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
- github.com/go-kit/kit v0.4.1-0.20170517165212-6964666de57c // indirect
+ github.com/go-kit/kit v0.4.1-0.20170517165212-6964666de57c
github.com/go-logfmt/logfmt v0.5.0 // indirect
github.com/go-stack/stack v1.8.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
@@ -86,7 +92,6 @@ require (
github.com/hashicorp/serf v0.8.1-0.20161007004122-1d4fa605f6ff // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
- github.com/kr/text v0.2.0 // indirect
github.com/lib/pq v1.10.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect
github.com/miekg/dns v1.0.4 // indirect
@@ -97,7 +102,7 @@ require (
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223 // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v0.3.1-0.20170117200651-66bb6560562f // indirect
- github.com/opentracing/opentracing-go v1.1.0 // indirect
+ github.com/opentracing/opentracing-go v1.1.0
github.com/pascaldekloe/goe v0.1.0 // indirect
github.com/paulmach/orb v0.4.0 // indirect
github.com/peterbourgon/diskv v2.0.2-0.20180312054125-0646ccaebea1+incompatible // indirect
@@ -120,7 +125,7 @@ require (
go.uber.org/atomic v1.6.0 // indirect
go.uber.org/multierr v1.5.0 // indirect
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519
- golang.org/x/net v0.0.0-20211013171255-e13a2654a71e // indirect
+ golang.org/x/net v0.0.0-20211013171255-e13a2654a71e
golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08 // indirect
@@ -133,10 +138,9 @@ require (
google.golang.org/grpc v1.41.0
google.golang.org/grpc/examples v0.0.0-20210803221256-6ba56c814be7 // indirect
google.golang.org/protobuf v1.27.1 // indirect
- gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/fsnotify/fsnotify.v1 v1.4.7 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
- gopkg.in/yaml.v2 v2.4.0 // indirect
+ gopkg.in/yaml.v2 v2.4.0
k8s.io/api v0.0.0-20180628040859-072894a440bd // indirect
k8s.io/client-go v8.0.0+incompatible // indirect
)
diff --git a/pkg/query-service/go.sum b/go.sum
similarity index 98%
rename from pkg/query-service/go.sum
rename to go.sum
index e0db02686a..c56a128ef8 100644
--- a/pkg/query-service/go.sum
+++ b/go.sum
@@ -70,6 +70,8 @@ github.com/auth0/go-jwt-middleware v1.0.1/go.mod h1:YSeUX3z6+TF2H+7padiEqNJ73Zy9
github.com/aws/aws-sdk-go v1.13.44-0.20180507225419-00862f899353/go.mod h1:ZRmQr0FajVIyZ4ZzBYKG5P3ZqPz9IHG41ZoMu1ADI3k=
github.com/aws/aws-sdk-go v1.27.0 h1:0xphMHGMLBrPMfxR2AmVjZKcMEESEgWF8Kru94BNByk=
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
+github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
+github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/beorn7/perks v0.0.0-20160229213445-3ac7bf7a47d1/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
@@ -292,10 +294,10 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfC
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w=
github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
+github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ=
+github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
-github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
-github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@@ -310,8 +312,9 @@ github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgo
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
@@ -320,6 +323,8 @@ github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/mattermost/xml-roundtrip-validator v0.1.0 h1:RXbVD2UAl7A7nOTR4u7E3ILa4IbtvKBHw64LDsmu9hU=
+github.com/mattermost/xml-roundtrip-validator v0.1.0/go.mod h1:qccnGMcpgwcNaBnxqpJpWWUiPNr5H3O8eDgGV9gT5To=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.8 h1:gDp86IdQsN/xWjIEmr9MF6o9mpksUgh0fu+9ByFxzIU=
@@ -344,7 +349,6 @@ github.com/mkevac/debugcharts v0.0.0-20191222103121-ae1c48aa8615/go.mod h1:Ad7oe
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
@@ -373,6 +377,7 @@ github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.14 h1:+fL8AQEZtz/ijeNnpduH0bROTu0O3NZAlPjQxGn8LwE=
github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.1-0.20161029093637-248dadf4e906/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@@ -394,8 +399,15 @@ github.com/prometheus/tsdb v0.0.0-20181003080831-0ce41118ed20 h1:Jh/eKJuru9z9u3r
github.com/prometheus/tsdb v0.0.0-20181003080831-0ce41118ed20/go.mod h1:lFf/o1J2a31WmWQbxYXfY1azJK5Xp5D8hwKMnVMBTGU=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
+github.com/russellhaering/gosaml2 v0.8.0 h1:rm1Gc09/UoEsKGTSFvg8VCHJLY3wrP4BWjC+1ov0qCo=
+github.com/russellhaering/gosaml2 v0.8.0/go.mod h1:byViER/1YPUa0Puj9ROZblpoq2jsE7h/CJmitzX0geU=
+github.com/russellhaering/goxmldsig v1.2.0 h1:Y6GTTc9Un5hCxSzVz4UIWQ/zuVwDvzJk80guqzwx6Vg=
+github.com/russellhaering/goxmldsig v1.2.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/samuel/go-zookeeper v0.0.0-20161028232340-1d7be4effb13/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da h1:p3Vo3i64TCLY7gIfzeQaUJ+kppEO5WQG3cL8iE8tGHU=
@@ -405,6 +417,8 @@ github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUt
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/segmentio/backo-go v1.0.0 h1:kbOAtGJY2DqOR0jfRkYEorx/b18RgtepGtY3+Cpe6qA=
github.com/segmentio/backo-go v1.0.0/go.mod h1:kJ9mm9YmoWSkk+oQ+5Cj8DEoRCX2JT6As4kEtIIOp1M=
+github.com/sethvargo/go-password v0.2.0 h1:BTDl4CC/gjf/axHMaDQtw507ogrXLci6XRiLc7i/UHI=
+github.com/sethvargo/go-password v0.2.0/go.mod h1:Ym4Mr9JXLBycr02MFuVQ/0JHidNetSgbzutTr3zsYXE=
github.com/shirou/gopsutil v2.19.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
diff --git a/pkg/query-service/Dockerfile b/pkg/query-service/Dockerfile
index eec478aaef..0f70af1ea5 100644
--- a/pkg/query-service/Dockerfile
+++ b/pkg/query-service/Dockerfile
@@ -11,7 +11,7 @@ RUN export GOOS=$(echo ${TARGETPLATFORM} | cut -d / -f1) && \
export GOARCH=$(echo ${TARGETPLATFORM} | cut -d / -f2)
# Prepare and enter src directory
-WORKDIR /go/src/github.com/signoz/signoz/pkg/query-service
+WORKDIR /go/src/github.com/signoz/signoz
# Cache dependencies
ADD go.mod .
@@ -20,8 +20,10 @@ RUN go mod download -x
# Add the sources and proceed with build
ADD . .
-RUN go build -tags timetzdata -a -ldflags "-linkmode external -extldflags '-static' -s -w $LD_FLAGS" -o ./bin/query-service ./main.go
-RUN chmod +x ./bin/query-service
+RUN cd pkg/query-service \
+ && go build -tags timetzdata -a -o ./bin/query-service \
+ -ldflags "-linkmode external -extldflags '-static' -s -w $LD_FLAGS" \
+ && chmod +x ./bin/query-service
# use a minimal alpine image
@@ -39,7 +41,8 @@ WORKDIR /root
# copy the binary from builder
COPY --from=builder /go/src/github.com/signoz/signoz/pkg/query-service/bin/query-service .
-COPY config/prometheus.yml /root/config/prometheus.yml
+# copy prometheus YAML config
+COPY pkg/query-service/config/prometheus.yml /root/config/prometheus.yml
# run the binary
ENTRYPOINT ["./query-service"]
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index bae3e3934d..d52b4c49ff 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -39,11 +39,11 @@ import (
"github.com/jmoiron/sqlx"
promModel "github.com/prometheus/common/model"
- "go.signoz.io/query-service/app/logs"
- "go.signoz.io/query-service/constants"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/utils"
+ "go.signoz.io/signoz/pkg/query-service/app/logs"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
diff --git a/pkg/query-service/app/dashboards/model.go b/pkg/query-service/app/dashboards/model.go
index 1b74857d41..4969a18728 100644
--- a/pkg/query-service/app/dashboards/model.go
+++ b/pkg/query-service/app/dashboards/model.go
@@ -10,7 +10,7 @@ import (
"github.com/google/uuid"
"github.com/gosimple/slug"
"github.com/jmoiron/sqlx"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
diff --git a/pkg/query-service/app/dashboards/provision.go b/pkg/query-service/app/dashboards/provision.go
index 84441995cf..bc0bc39059 100644
--- a/pkg/query-service/app/dashboards/provision.go
+++ b/pkg/query-service/app/dashboards/provision.go
@@ -5,7 +5,7 @@ import (
"io/ioutil"
"os"
- "go.signoz.io/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/constants"
"go.uber.org/zap"
)
diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go
index 4ee4702028..1d648f4651 100644
--- a/pkg/query-service/app/http_handler.go
+++ b/pkg/query-service/app/http_handler.go
@@ -18,20 +18,20 @@ import (
jsoniter "github.com/json-iterator/go"
_ "github.com/mattn/go-sqlite3"
"github.com/prometheus/prometheus/promql"
- "go.signoz.io/query-service/app/dashboards"
- "go.signoz.io/query-service/app/logs"
- "go.signoz.io/query-service/app/metrics"
- "go.signoz.io/query-service/app/parser"
- "go.signoz.io/query-service/auth"
- "go.signoz.io/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/app/logs"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics"
+ "go.signoz.io/signoz/pkg/query-service/app/parser"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/constants"
- "go.signoz.io/query-service/dao"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/interfaces"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/rules"
- "go.signoz.io/query-service/telemetry"
- "go.signoz.io/query-service/version"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
+ "go.signoz.io/signoz/pkg/query-service/version"
"go.uber.org/zap"
)
@@ -53,26 +53,45 @@ type APIHandler struct {
// queryParser queryParser
basePath string
apiPrefix string
- reader *interfaces.Reader
- relationalDB dao.ModelDao
+ reader interfaces.Reader
+ appDao dao.ModelDao
alertManager am.Manager
ruleManager *rules.Manager
+ featureFlags interfaces.FeatureLookup
ready func(http.HandlerFunc) http.HandlerFunc
}
+type APIHandlerOpts struct {
+
+ // Reader is the business data reader, e.g. ClickHouse
+ Reader interfaces.Reader
+
+ // AppDao is the DAO layer used for CRUD on app objects such as dashboards and alerts
+ AppDao dao.ModelDao
+
+ // RuleManager handles rule CRUD operations
+ RuleManager *rules.Manager
+
+ // FeatureFlags is the feature flag querier
+ FeatureFlags interfaces.FeatureLookup
+}
+
// NewAPIHandler returns an APIHandler
-func NewAPIHandler(reader *interfaces.Reader, relationalDB dao.ModelDao, ruleManager *rules.Manager) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
alertManager, err := am.New("")
if err != nil {
return nil, err
}
+
aH := &APIHandler{
- reader: reader,
- relationalDB: relationalDB,
+ reader: opts.Reader,
+ appDao: opts.AppDao,
alertManager: alertManager,
- ruleManager: ruleManager,
+ ruleManager: opts.RuleManager,
+ featureFlags: opts.FeatureFlags,
}
+
aH.ready = aH.testReady
dashboards.LoadDashboardFiles()
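
A minimal sketch of how a caller could construct the handler through the new options struct; the surrounding function and the concrete reader, DAO, rule-manager, and feature-flag values are assumptions, only the `APIHandlerOpts` fields and the `NewAPIHandler` signature come from the hunk above.

```go
// sketch: building an APIHandler from the options struct introduced above,
// assumed to live in the same package as NewAPIHandler
func newHandler(
	reader interfaces.Reader,
	appDao dao.ModelDao,
	rm *rules.Manager,
	flags interfaces.FeatureLookup,
) (*APIHandler, error) {
	return NewAPIHandler(APIHandlerOpts{
		Reader:       reader,
		AppDao:       appDao,
		RuleManager:  rm,
		FeatureFlags: flags,
	})
}
```
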
@@ -128,12 +147,12 @@ type response struct {
Error string `json:"error,omitempty"`
}
-func respondError(w http.ResponseWriter, apiErr *model.ApiError, data interface{}) {
+func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interface{}) {
json := jsoniter.ConfigCompatibleWithStandardLibrary
b, err := json.Marshal(&response{
Status: statusError,
- ErrorType: apiErr.Typ,
- Error: apiErr.Err.Error(),
+ ErrorType: apiErr.Type(),
+ Error: apiErr.Error(),
Data: data,
})
if err != nil {
@@ -143,7 +162,7 @@ func respondError(w http.ResponseWriter, apiErr *model.ApiError, data interface{
}
var code int
- switch apiErr.Typ {
+ switch apiErr.Type() {
case model.ErrorBadData:
code = http.StatusBadRequest
case model.ErrorExec:
@@ -189,6 +208,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
zap.S().Error("msg", "error writing response", "bytesWritten", n, "err", err)
}
}
+
func (aH *APIHandler) RegisterMetricsRoutes(router *mux.Router) {
subRouter := router.PathPrefix("/api/v2/metrics").Subrouter()
subRouter.HandleFunc("/query_range", ViewAccess(aH.queryRangeMetricsV2)).Methods(http.MethodPost)
@@ -197,7 +217,7 @@ func (aH *APIHandler) RegisterMetricsRoutes(router *mux.Router) {
subRouter.HandleFunc("/autocomplete/tagValue", ViewAccess(aH.metricAutocompleteTagValue)).Methods(http.MethodGet)
}
-func (aH *APIHandler) respond(w http.ResponseWriter, data interface{}) {
+func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)
}
@@ -211,7 +231,7 @@ func ViewAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
user, err := auth.GetUserFromRequest(r)
if err != nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
@@ -219,7 +239,7 @@ func ViewAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
}
if !(auth.IsViewer(user) || auth.IsEditor(user) || auth.IsAdmin(user)) {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to viewers/editors/admins."),
}, nil)
@@ -233,14 +253,14 @@ func EditAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
user, err := auth.GetUserFromRequest(r)
if err != nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
if !(auth.IsEditor(user) || auth.IsAdmin(user)) {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to editors/admins."),
}, nil)
@@ -254,7 +274,7 @@ func SelfAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
user, err := auth.GetUserFromRequest(r)
if err != nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
@@ -262,7 +282,7 @@ func SelfAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
}
id := mux.Vars(r)["id"]
if !(auth.IsSelfAccessRequest(user, id) || auth.IsAdmin(user)) {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible for self access or to the admins."),
}, nil)
@@ -276,14 +296,14 @@ func AdminAccess(f func(http.ResponseWriter, *http.Request)) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
user, err := auth.GetUserFromRequest(r)
if err != nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
}
if !auth.IsAdmin(user) {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to admins only"),
}, nil)
@@ -399,10 +419,10 @@ func (aH *APIHandler) getRule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
ruleResponse, err := aH.ruleManager.GetRule(id)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
- aH.respond(w, ruleResponse)
+ aH.Respond(w, ruleResponse)
}
func (aH *APIHandler) metricAutocompleteMetricName(w http.ResponseWriter, r *http.Request) {
@@ -412,30 +432,30 @@ func (aH *APIHandler) metricAutocompleteMetricName(w http.ResponseWriter, r *htt
limit = 0 // no limit
}
- metricNameList, apiErrObj := (*aH.reader).GetMetricAutocompleteMetricNames(r.Context(), matchText, limit)
+ metricNameList, apiErrObj := aH.reader.GetMetricAutocompleteMetricNames(r.Context(), matchText, limit)
if apiErrObj != nil {
- respondError(w, apiErrObj, nil)
+ RespondError(w, apiErrObj, nil)
return
}
- aH.respond(w, metricNameList)
+ aH.Respond(w, metricNameList)
}
func (aH *APIHandler) metricAutocompleteTagKey(w http.ResponseWriter, r *http.Request) {
metricsAutocompleteTagKeyParams, apiErrorObj := parser.ParseMetricAutocompleteTagParams(r)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- tagKeyList, apiErrObj := (*aH.reader).GetMetricAutocompleteTagKey(r.Context(), metricsAutocompleteTagKeyParams)
+ tagKeyList, apiErrObj := aH.reader.GetMetricAutocompleteTagKey(r.Context(), metricsAutocompleteTagKeyParams)
if apiErrObj != nil {
- respondError(w, apiErrObj, nil)
+ RespondError(w, apiErrObj, nil)
return
}
- aH.respond(w, tagKeyList)
+ aH.Respond(w, tagKeyList)
}
func (aH *APIHandler) metricAutocompleteTagValue(w http.ResponseWriter, r *http.Request) {
@@ -443,22 +463,22 @@ func (aH *APIHandler) metricAutocompleteTagValue(w http.ResponseWriter, r *http.
if len(metricsAutocompleteTagValueParams.TagKey) == 0 {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("tagKey not present in params")}
- respondError(w, apiErrObj, nil)
+ RespondError(w, apiErrObj, nil)
return
}
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- tagValueList, apiErrObj := (*aH.reader).GetMetricAutocompleteTagValue(r.Context(), metricsAutocompleteTagValueParams)
+ tagValueList, apiErrObj := aH.reader.GetMetricAutocompleteTagValue(r.Context(), metricsAutocompleteTagValueParams)
if apiErrObj != nil {
- respondError(w, apiErrObj, nil)
+ RespondError(w, apiErrObj, nil)
return
}
- aH.respond(w, tagValueList)
+ aH.Respond(w, tagValueList)
}
func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
@@ -466,7 +486,7 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
if apiErrorObj != nil {
zap.S().Errorf(apiErrorObj.Err.Error())
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
@@ -499,7 +519,7 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
wg.Add(1)
go func(name, query string) {
defer wg.Done()
- seriesList, err := (*aH.reader).GetMetricResult(r.Context(), query)
+ seriesList, err := aH.reader.GetMetricResult(r.Context(), query)
for _, series := range seriesList {
series.QueryName = name
}
@@ -564,7 +584,7 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
Query: query.Query,
}
- promResult, _, err := (*aH.reader).GetQueryRangeResult(r.Context(), &queryModel)
+ promResult, _, err := aH.reader.GetQueryRangeResult(r.Context(), &queryModel)
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
return
@@ -610,7 +630,7 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
case model.QUERY_BUILDER:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
if runQueries.Err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: runQueries.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: runQueries.Err}, nil)
return
}
seriesList, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
@@ -624,13 +644,13 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
var query bytes.Buffer
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
queries[name] = query.String()
@@ -640,20 +660,20 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
default:
err = fmt.Errorf("invalid query type")
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, errQuriesByName)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, errQuriesByName)
return
}
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErrObj, errQuriesByName)
+ RespondError(w, apiErrObj, errQuriesByName)
return
}
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == model.QUERY_VALUE &&
len(seriesList) > 1 &&
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.QUERY_BUILDER ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == model.CLICKHOUSE) {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
return
}
@@ -662,20 +682,20 @@ func (aH *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request
Result []*model.Series `json:"result"`
}
resp := ResponseFormat{ResultType: "matrix", Result: seriesList}
- aH.respond(w, resp)
+ aH.Respond(w, resp)
}
func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) {
rules, err := aH.ruleManager.ListRuleStates()
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
// todo(amol): need to add sorter
- aH.respond(w, rules)
+ aH.Respond(w, rules)
}
func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
@@ -683,12 +703,12 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
allDashboards, err := dashboards.GetDashboards()
if err != nil {
- respondError(w, err, nil)
+ RespondError(w, err, nil)
return
}
tagsFromReq, ok := r.URL.Query()["tags"]
if !ok || len(tagsFromReq) == 0 || tagsFromReq[0] == "" {
- aH.respond(w, allDashboards)
+ aH.Respond(w, allDashboards)
return
}
@@ -725,7 +745,7 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
filteredDashboards = append(filteredDashboards, dash)
}
- aH.respond(w, filteredDashboards)
+ aH.Respond(w, filteredDashboards)
}
func (aH *APIHandler) deleteDashboard(w http.ResponseWriter, r *http.Request) {
@@ -734,11 +754,11 @@ func (aH *APIHandler) deleteDashboard(w http.ResponseWriter, r *http.Request) {
err := dashboards.DeleteDashboard(uuid)
if err != nil {
- respondError(w, err, nil)
+ RespondError(w, err, nil)
return
}
- aH.respond(w, nil)
+ aH.Respond(w, nil)
}
@@ -746,19 +766,19 @@ func (aH *APIHandler) queryDashboardVars(w http.ResponseWriter, r *http.Request)
query := r.URL.Query().Get("query")
if query == "" {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("query is required")}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("query is required")}, nil)
return
}
if strings.Contains(strings.ToLower(query), "alter table") {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("query shouldn't alter data")}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("query shouldn't alter data")}, nil)
return
}
- dashboardVars, err := (*aH.reader).QueryDashboardVars(r.Context(), query)
+ dashboardVars, err := aH.reader.QueryDashboardVars(r.Context(), query)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
- aH.respond(w, dashboardVars)
+ aH.Respond(w, dashboardVars)
}
func (aH *APIHandler) updateDashboard(w http.ResponseWriter, r *http.Request) {
@@ -768,22 +788,22 @@ func (aH *APIHandler) updateDashboard(w http.ResponseWriter, r *http.Request) {
var postData map[string]interface{}
err := json.NewDecoder(r.Body).Decode(&postData)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
return
}
err = dashboards.IsPostDataSane(&postData)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
return
}
dashboard, apiError := dashboards.UpdateDashboard(uuid, postData)
if apiError != nil {
- respondError(w, apiError, nil)
+ RespondError(w, apiError, nil)
return
}
- aH.respond(w, dashboard)
+ aH.Respond(w, dashboard)
}
@@ -794,11 +814,11 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
dashboard, apiError := dashboards.GetDashboard(uuid)
if apiError != nil {
- respondError(w, apiError, nil)
+ RespondError(w, apiError, nil)
return
}
- aH.respond(w, dashboard)
+ aH.Respond(w, dashboard)
}
@@ -808,24 +828,24 @@ func (aH *APIHandler) createDashboards(w http.ResponseWriter, r *http.Request) {
err := json.NewDecoder(r.Body).Decode(&postData)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, "Error reading request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, "Error reading request body")
return
}
err = dashboards.IsPostDataSane(&postData)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, "Error reading request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, "Error reading request body")
return
}
dash, apiErr := dashboards.CreateDashboard(postData)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.respond(w, dash)
+ aH.Respond(w, dash)
}
@@ -835,7 +855,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body in test rule API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
@@ -844,7 +864,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) {
alertCount, apiRrr := aH.ruleManager.TestNotification(ctx, string(body))
if apiRrr != nil {
- respondError(w, apiRrr, nil)
+ RespondError(w, apiRrr, nil)
return
}
@@ -852,7 +872,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) {
"alertCount": alertCount,
"message": "notification sent",
}
- aH.respond(w, response)
+ aH.Respond(w, response)
}
func (aH *APIHandler) deleteRule(w http.ResponseWriter, r *http.Request) {
@@ -862,11 +882,11 @@ func (aH *APIHandler) deleteRule(w http.ResponseWriter, r *http.Request) {
err := aH.ruleManager.DeleteRule(id)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
- aH.respond(w, "rule successfully deleted")
+ aH.Respond(w, "rule successfully deleted")
}
@@ -878,18 +898,18 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("msg: error in getting req body of patch rule API\n", "\t error:", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
gettableRule, err := aH.ruleManager.PatchRule(string(body), id)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
- aH.respond(w, gettableRule)
+ aH.Respond(w, gettableRule)
}
func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
@@ -899,48 +919,48 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("msg: error in getting req body of edit rule API\n", "\t error:", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
err = aH.ruleManager.EditRule(string(body), id)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
- aH.respond(w, "rule successfully edited")
+ aH.Respond(w, "rule successfully edited")
}
func (aH *APIHandler) getChannel(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
- channel, apiErrorObj := (*aH.reader).GetChannel(id)
+ channel, apiErrorObj := aH.reader.GetChannel(id)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, channel)
+ aH.Respond(w, channel)
}
func (aH *APIHandler) deleteChannel(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
- apiErrorObj := (*aH.reader).DeleteChannel(id)
+ apiErrorObj := aH.reader.DeleteChannel(id)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, "notification channel successfully deleted")
+ aH.Respond(w, "notification channel successfully deleted")
}
func (aH *APIHandler) listChannels(w http.ResponseWriter, r *http.Request) {
- channels, apiErrorObj := (*aH.reader).GetChannels()
+ channels, apiErrorObj := aH.reader.GetChannels()
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, channels)
+ aH.Respond(w, channels)
}
// testChannels sends test alert to all registered channels
@@ -950,24 +970,24 @@ func (aH *APIHandler) testChannel(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of testChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of testChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
// send alert
apiErrorObj := aH.alertManager.TestReceiver(receiver)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, "test alert sent")
+ aH.Respond(w, "test alert sent")
}
func (aH *APIHandler) editChannel(w http.ResponseWriter, r *http.Request) {
@@ -978,25 +998,25 @@ func (aH *APIHandler) editChannel(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of editChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of editChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
- _, apiErrorObj := (*aH.reader).EditChannel(receiver, id)
+ _, apiErrorObj := aH.reader.EditChannel(receiver, id)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, nil)
+ aH.Respond(w, nil)
}
@@ -1006,25 +1026,25 @@ func (aH *APIHandler) createChannel(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body of createChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
receiver := &am.Receiver{}
if err := json.Unmarshal(body, receiver); err != nil { // Parse []byte to go struct pointer
zap.S().Errorf("Error in parsing req body of createChannel API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
- _, apiErrorObj := (*aH.reader).CreateChannel(receiver)
+ _, apiErrorObj := aH.reader.CreateChannel(receiver)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
- aH.respond(w, nil)
+ aH.Respond(w, nil)
}
@@ -1034,17 +1054,17 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("Error in getting req body for create rule API\n", err)
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
err = aH.ruleManager.CreateRule(string(body))
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
- aH.respond(w, "rule successfully added")
+ aH.Respond(w, "rule successfully added")
}
@@ -1056,7 +1076,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
query, apiErrorObj := parseQueryRangeRequest(r)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
@@ -1066,7 +1086,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
if to := r.FormValue("timeout"); to != "" {
var cancel context.CancelFunc
timeout, err := parseMetricsDuration(to)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
@@ -1074,10 +1094,10 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
defer cancel()
}
- res, qs, apiError := (*aH.reader).GetQueryRangeResult(ctx, query)
+ res, qs, apiError := aH.reader.GetQueryRangeResult(ctx, query)
if apiError != nil {
- respondError(w, apiError, nil)
+ RespondError(w, apiError, nil)
return
}
@@ -1088,11 +1108,11 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
if res.Err != nil {
switch res.Err.(type) {
case promql.ErrQueryCanceled:
- respondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
case promql.ErrQueryTimeout:
- respondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
}
- respondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
@@ -1101,7 +1121,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
Stats: qs,
}
- aH.respond(w, response_data)
+ aH.Respond(w, response_data)
}
@@ -1110,7 +1130,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
queryParams, apiErrorObj := parseInstantQueryMetricsRequest(r)
if apiErrorObj != nil {
- respondError(w, apiErrorObj, nil)
+ RespondError(w, apiErrorObj, nil)
return
}
@@ -1120,7 +1140,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
if to := r.FormValue("timeout"); to != "" {
var cancel context.CancelFunc
timeout, err := parseMetricsDuration(to)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
@@ -1128,10 +1148,10 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
defer cancel()
}
- res, qs, apiError := (*aH.reader).GetInstantQueryMetricsResult(ctx, queryParams)
+ res, qs, apiError := aH.reader.GetInstantQueryMetricsResult(ctx, queryParams)
if apiError != nil {
- respondError(w, apiError, nil)
+ RespondError(w, apiError, nil)
return
}
@@ -1142,11 +1162,11 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
if res.Err != nil {
switch res.Err.(type) {
case promql.ErrQueryCanceled:
- respondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorCanceled, Err: res.Err}, nil)
case promql.ErrQueryTimeout:
- respondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorTimeout, Err: res.Err}, nil)
}
- respondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
+ RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
@@ -1155,7 +1175,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
Stats: qs,
}
- aH.respond(w, response_data)
+ aH.Respond(w, response_data)
}
@@ -1164,18 +1184,18 @@ func (aH *APIHandler) submitFeedback(w http.ResponseWriter, r *http.Request) {
var postData map[string]interface{}
err := json.NewDecoder(r.Body).Decode(&postData)
if err != nil {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading request body")
return
}
message, ok := postData["message"]
if !ok {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("message not present in request body")}, "Error reading message from request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("message not present in request body")}, "Error reading message from request body")
return
}
messageStr := fmt.Sprintf("%s", message)
if len(messageStr) == 0 {
- respondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("empty message in request body")}, "empty message in request body")
+ RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("empty message in request body")}, "empty message in request body")
return
}
@@ -1192,72 +1212,72 @@ func (aH *APIHandler) submitFeedback(w http.ResponseWriter, r *http.Request) {
func (aH *APIHandler) getTopOperations(w http.ResponseWriter, r *http.Request) {
query, err := parseGetTopOperationsRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetTopOperations(r.Context(), query)
+ result, apiErr := aH.reader.GetTopOperations(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getUsage(w http.ResponseWriter, r *http.Request) {
query, err := parseGetUsageRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, err := (*aH.reader).GetUsage(r.Context(), query)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ result, err := aH.reader.GetUsage(r.Context(), query)
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServiceOverviewRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetServiceOverview(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ result, apiErr := aH.reader.GetServiceOverview(r.Context(), query)
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
- result, apiErr := (*aH.reader).GetTopLevelOperations(r.Context())
+ result, apiErr := aH.reader.GetTopLevelOperations(r.Context())
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServicesRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetServices(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ result, apiErr := aH.reader.GetServices(r.Context(), query)
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
@@ -1267,32 +1287,32 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data)
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) dependencyGraph(w http.ResponseWriter, r *http.Request) {
query, err := parseGetServicesRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, err := (*aH.reader).GetDependencyGraph(r.Context(), query)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ result, err := aH.reader.GetDependencyGraph(r.Context(), query)
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
- result, err := (*aH.reader).GetServicesList(r.Context())
- if aH.handleError(w, err, http.StatusBadRequest) {
+ result, err := aH.reader.GetServicesList(r.Context())
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
@@ -1301,232 +1321,232 @@ func (aH *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
traceId := vars["traceId"]
- result, err := (*aH.reader).SearchTraces(r.Context(), traceId)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ result, err := aH.reader.SearchTraces(r.Context(), traceId)
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) listErrors(w http.ResponseWriter, r *http.Request) {
query, err := parseListErrorsRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).ListErrors(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ result, apiErr := aH.reader.ListErrors(r.Context(), query)
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) countErrors(w http.ResponseWriter, r *http.Request) {
query, err := parseCountErrorsRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).CountErrors(r.Context(), query)
+ result, apiErr := aH.reader.CountErrors(r.Context(), query)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getErrorFromErrorID(w http.ResponseWriter, r *http.Request) {
query, err := parseGetErrorRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetErrorFromErrorID(r.Context(), query)
+ result, apiErr := aH.reader.GetErrorFromErrorID(r.Context(), query)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getNextPrevErrorIDs(w http.ResponseWriter, r *http.Request) {
query, err := parseGetErrorRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetNextPrevErrorIDs(r.Context(), query)
+ result, apiErr := aH.reader.GetNextPrevErrorIDs(r.Context(), query)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getErrorFromGroupID(w http.ResponseWriter, r *http.Request) {
query, err := parseGetErrorRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetErrorFromGroupID(r.Context(), query)
+ result, apiErr := aH.reader.GetErrorFromGroupID(r.Context(), query)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getSpanFilters(w http.ResponseWriter, r *http.Request) {
query, err := parseSpanFilterRequestBody(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetSpanFilters(r.Context(), query)
+ result, apiErr := aH.reader.GetSpanFilters(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getFilteredSpans(w http.ResponseWriter, r *http.Request) {
query, err := parseFilteredSpansRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetFilteredSpans(r.Context(), query)
+ result, apiErr := aH.reader.GetFilteredSpans(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getFilteredSpanAggregates(w http.ResponseWriter, r *http.Request) {
query, err := parseFilteredSpanAggregatesRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetFilteredSpansAggregates(r.Context(), query)
+ result, apiErr := aH.reader.GetFilteredSpansAggregates(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getTagFilters(w http.ResponseWriter, r *http.Request) {
query, err := parseTagFilterRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetTagFilters(r.Context(), query)
+ result, apiErr := aH.reader.GetTagFilters(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getTagValues(w http.ResponseWriter, r *http.Request) {
query, err := parseTagValueRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetTagValues(r.Context(), query)
+ result, apiErr := aH.reader.GetTagValues(r.Context(), query)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
ttlParams, err := parseTTLParams(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
// Context is not used here as TTL is long duration DB operation
- result, apiErr := (*aH.reader).SetTTL(context.Background(), ttlParams)
+ result, apiErr := aH.reader.SetTTL(context.Background(), ttlParams)
if apiErr != nil {
if apiErr.Typ == model.ErrorConflict {
- aH.handleError(w, apiErr.Err, http.StatusConflict)
+ aH.HandleError(w, apiErr.Err, http.StatusConflict)
} else {
- aH.handleError(w, apiErr.Err, http.StatusInternalServerError)
+ aH.HandleError(w, apiErr.Err, http.StatusInternalServerError)
}
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
ttlParams, err := parseGetTTL(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- result, apiErr := (*aH.reader).GetTTL(r.Context(), ttlParams)
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ result, apiErr := aH.reader.GetTTL(r.Context(), ttlParams)
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getDisks(w http.ResponseWriter, r *http.Request) {
- result, apiErr := (*aH.reader).GetDisks(context.Background())
- if apiErr != nil && aH.handleError(w, apiErr.Err, http.StatusInternalServerError) {
+ result, apiErr := aH.reader.GetDisks(context.Background())
+ if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
- aH.writeJSON(w, r, result)
+ aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
version := version.GetVersion()
- aH.writeJSON(w, r, map[string]string{"version": version})
+ aH.WriteJSON(w, r, map[string]string{"version": version, "ee": "N"})
}
// inviteUser is used to invite a user. It is used by an admin api.
func (aH *APIHandler) inviteUser(w http.ResponseWriter, r *http.Request) {
req, err := parseInviteRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
ctx := auth.AttachJwtToContext(context.Background(), r)
resp, err := auth.Invite(ctx, req)
if err != nil {
- respondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
+ RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}
- aH.writeJSON(w, r, resp)
+ aH.WriteJSON(w, r, resp)
}
// getInvite returns the invite object details for the given invite token. We do not need to
@@ -1536,10 +1556,10 @@ func (aH *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
resp, err := auth.GetInvite(context.Background(), token)
if err != nil {
- respondError(w, &model.ApiError{Err: err, Typ: model.ErrorNotFound}, nil)
+ RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorNotFound}, nil)
return
}
- aH.writeJSON(w, r, resp)
+ aH.WriteJSON(w, r, resp)
}
// revokeInvite is used to revoke an invite.
@@ -1548,10 +1568,10 @@ func (aH *APIHandler) revokeInvite(w http.ResponseWriter, r *http.Request) {
ctx := auth.AttachJwtToContext(context.Background(), r)
if err := auth.RevokeInvite(ctx, email); err != nil {
- respondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
+ RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}
- aH.writeJSON(w, r, map[string]string{"data": "invite revoked successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "invite revoked successfully"})
}
// listPendingInvites is used to list the pending invites.
@@ -1560,7 +1580,7 @@ func (aH *APIHandler) listPendingInvites(w http.ResponseWriter, r *http.Request)
ctx := context.Background()
invites, err := dao.DB().GetInvites(ctx)
if err != nil {
- respondError(w, err, nil)
+ RespondError(w, err, nil)
return
}
@@ -1571,7 +1591,7 @@ func (aH *APIHandler) listPendingInvites(w http.ResponseWriter, r *http.Request)
org, apiErr := dao.DB().GetOrg(ctx, inv.OrgId)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
}
resp = append(resp, &model.InvitationResponseObject{
Name: inv.Name,
@@ -1582,27 +1602,32 @@ func (aH *APIHandler) listPendingInvites(w http.ResponseWriter, r *http.Request)
Organization: org.Name,
})
}
- aH.writeJSON(w, r, resp)
+ aH.WriteJSON(w, r, resp)
+}
+
+// Register extends registerUser for non-internal packages
+func (aH *APIHandler) Register(w http.ResponseWriter, r *http.Request) {
+ aH.registerUser(w, r)
}
func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
req, err := parseRegisterRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
- apiErr := auth.Register(context.Background(), req)
+ _, apiErr := auth.Register(context.Background(), req)
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, map[string]string{"data": "user registered successfully"})
+ aH.Respond(w, nil)
}
func (aH *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req, err := parseLoginRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
@@ -1619,7 +1644,7 @@ func (aH *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
// }
resp, err := auth.Login(context.Background(), req)
- if aH.handleError(w, err, http.StatusUnauthorized) {
+ if aH.HandleError(w, err, http.StatusUnauthorized) {
return
}
@@ -1630,21 +1655,21 @@ func (aH *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
// HttpOnly: true,
// })
- aH.writeJSON(w, r, resp)
+ aH.WriteJSON(w, r, resp)
}
func (aH *APIHandler) listUsers(w http.ResponseWriter, r *http.Request) {
users, err := dao.DB().GetUsers(context.Background())
if err != nil {
zap.S().Debugf("[listUsers] Failed to query list of users, err: %v", err)
- respondError(w, err, nil)
+ RespondError(w, err, nil)
return
}
// mask the password hash
for i := range users {
users[i].Password = ""
}
- aH.writeJSON(w, r, users)
+ aH.WriteJSON(w, r, users)
}
func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) {
@@ -1654,11 +1679,11 @@ func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) {
user, err := dao.DB().GetUser(ctx, id)
if err != nil {
zap.S().Debugf("[getUser] Failed to query user, err: %v", err)
- respondError(w, err, "Failed to get user")
+ RespondError(w, err, "Failed to get user")
return
}
if user == nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorInternal,
Err: errors.New("User not found"),
}, nil)
@@ -1667,7 +1692,7 @@ func (aH *APIHandler) getUser(w http.ResponseWriter, r *http.Request) {
// No need to send password hash for the user object.
user.Password = ""
- aH.writeJSON(w, r, user)
+ aH.WriteJSON(w, r, user)
}
// editUser only changes the user's Name and ProfilePictureURL. It is intentionally designed
@@ -1676,7 +1701,7 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
update, err := parseUserRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
@@ -1684,7 +1709,7 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
old, apiErr := dao.DB().GetUser(ctx, id)
if apiErr != nil {
zap.S().Debugf("[editUser] Failed to query user, err: %v", err)
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
@@ -1705,10 +1730,10 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
ProfilePirctureURL: old.ProfilePirctureURL,
})
if apiErr != nil {
- respondError(w, apiErr, nil)
+ RespondError(w, apiErr, nil)
return
}
- aH.writeJSON(w, r, map[string]string{"data": "user updated successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "user updated successfully"})
}
func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
@@ -1720,12 +1745,12 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
user, apiErr := dao.DB().GetUser(ctx, id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to get user's group")
+ RespondError(w, apiErr, "Failed to get user's group")
return
}
if user == nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
Err: errors.New("User not found"),
}, nil)
@@ -1734,17 +1759,17 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
adminGroup, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
- respondError(w, apiErr, "Failed to get admin group")
+ RespondError(w, apiErr, "Failed to get admin group")
return
}
adminUsers, apiErr := dao.DB().GetUsersByGroup(ctx, adminGroup.Id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to get admin group users")
+ RespondError(w, apiErr, "Failed to get admin group users")
return
}
if user.GroupId == adminGroup.Id && len(adminUsers) == 1 {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorInternal,
Err: errors.New("cannot delete the last admin user")}, nil)
return
@@ -1752,10 +1777,10 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
err := dao.DB().DeleteUser(ctx, id)
if err != nil {
- respondError(w, err, "Failed to delete user")
+ RespondError(w, err, "Failed to delete user")
return
}
- aH.writeJSON(w, r, map[string]string{"data": "user deleted successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "user deleted successfully"})
}
func (aH *APIHandler) getRole(w http.ResponseWriter, r *http.Request) {
@@ -1763,11 +1788,11 @@ func (aH *APIHandler) getRole(w http.ResponseWriter, r *http.Request) {
user, err := dao.DB().GetUser(context.Background(), id)
if err != nil {
- respondError(w, err, "Failed to get user's group")
+ RespondError(w, err, "Failed to get user's group")
return
}
if user == nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
Err: errors.New("No user found"),
}, nil)
@@ -1775,36 +1800,36 @@ func (aH *APIHandler) getRole(w http.ResponseWriter, r *http.Request) {
}
group, err := dao.DB().GetGroup(context.Background(), user.GroupId)
if err != nil {
- respondError(w, err, "Failed to get group")
+ RespondError(w, err, "Failed to get group")
return
}
- aH.writeJSON(w, r, &model.UserRole{UserId: id, GroupName: group.Name})
+ aH.WriteJSON(w, r, &model.UserRole{UserId: id, GroupName: group.Name})
}
func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
req, err := parseUserRoleRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
ctx := context.Background()
newGroup, apiErr := dao.DB().GetGroupByName(ctx, req.GroupName)
if apiErr != nil {
- respondError(w, apiErr, "Failed to get user's group")
+ RespondError(w, apiErr, "Failed to get user's group")
return
}
if newGroup == nil {
- respondError(w, apiErr, "Specified group is not present")
+ RespondError(w, apiErr, "Specified group is not present")
return
}
user, apiErr := dao.DB().GetUser(ctx, id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch user group")
+ RespondError(w, apiErr, "Failed to fetch user group")
return
}
@@ -1812,12 +1837,12 @@ func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
if user.GroupId == auth.AuthCacheObj.AdminGroupId {
adminUsers, apiErr := dao.DB().GetUsersByGroup(ctx, auth.AuthCacheObj.AdminGroupId)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch adminUsers")
+ RespondError(w, apiErr, "Failed to fetch adminUsers")
return
}
if len(adminUsers) == 1 {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Err: errors.New("Cannot demote the last admin"),
Typ: model.ErrorInternal}, nil)
return
@@ -1826,41 +1851,41 @@ func (aH *APIHandler) editRole(w http.ResponseWriter, r *http.Request) {
apiErr = dao.DB().UpdateUserGroup(context.Background(), user.Id, newGroup.Id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to add user to group")
+ RespondError(w, apiErr, "Failed to add user to group")
return
}
- aH.writeJSON(w, r, map[string]string{"data": "user group updated successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "user group updated successfully"})
}
func (aH *APIHandler) getOrgs(w http.ResponseWriter, r *http.Request) {
orgs, apiErr := dao.DB().GetOrgs(context.Background())
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch orgs from the DB")
+ RespondError(w, apiErr, "Failed to fetch orgs from the DB")
return
}
- aH.writeJSON(w, r, orgs)
+ aH.WriteJSON(w, r, orgs)
}
func (aH *APIHandler) getOrg(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
org, apiErr := dao.DB().GetOrg(context.Background(), id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch org from the DB")
+ RespondError(w, apiErr, "Failed to fetch org from the DB")
return
}
- aH.writeJSON(w, r, org)
+ aH.WriteJSON(w, r, org)
}
func (aH *APIHandler) editOrg(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
req, err := parseEditOrgRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
req.Id = id
if apiErr := dao.DB().EditOrg(context.Background(), req); apiErr != nil {
- respondError(w, apiErr, "Failed to update org in the DB")
+ RespondError(w, apiErr, "Failed to update org in the DB")
return
}
@@ -1872,82 +1897,82 @@ func (aH *APIHandler) editOrg(w http.ResponseWriter, r *http.Request) {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_ORG_SETTINGS, data)
- aH.writeJSON(w, r, map[string]string{"data": "org updated successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "org updated successfully"})
}
func (aH *APIHandler) getOrgUsers(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
users, apiErr := dao.DB().GetUsersByOrg(context.Background(), id)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch org users from the DB")
+ RespondError(w, apiErr, "Failed to fetch org users from the DB")
return
}
// mask the password hash
for i := range users {
users[i].Password = ""
}
- aH.writeJSON(w, r, users)
+ aH.WriteJSON(w, r, users)
}
func (aH *APIHandler) getResetPasswordToken(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
resp, err := auth.CreateResetPasswordToken(context.Background(), id)
if err != nil {
- respondError(w, &model.ApiError{
+ RespondError(w, &model.ApiError{
Typ: model.ErrorInternal,
Err: err}, "Failed to create reset token entry in the DB")
return
}
- aH.writeJSON(w, r, resp)
+ aH.WriteJSON(w, r, resp)
}
func (aH *APIHandler) resetPassword(w http.ResponseWriter, r *http.Request) {
req, err := parseResetPasswordRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
if err := auth.ResetPassword(context.Background(), req); err != nil {
zap.S().Debugf("resetPassword failed, err: %v\n", err)
- if aH.handleError(w, err, http.StatusInternalServerError) {
+ if aH.HandleError(w, err, http.StatusInternalServerError) {
return
}
}
- aH.writeJSON(w, r, map[string]string{"data": "password reset successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "password reset successfully"})
}
func (aH *APIHandler) changePassword(w http.ResponseWriter, r *http.Request) {
req, err := parseChangePasswordRequest(r)
- if aH.handleError(w, err, http.StatusBadRequest) {
+ if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
if err := auth.ChangePassword(context.Background(), req); err != nil {
- if aH.handleError(w, err, http.StatusInternalServerError) {
+ if aH.HandleError(w, err, http.StatusInternalServerError) {
return
}
}
- aH.writeJSON(w, r, map[string]string{"data": "password changed successfully"})
+ aH.WriteJSON(w, r, map[string]string{"data": "password changed successfully"})
}
// func (aH *APIHandler) getApplicationPercentiles(w http.ResponseWriter, r *http.Request) {
// // vars := mux.Vars(r)
// query, err := parseApplicationPercentileRequest(r)
-// if aH.handleError(w, err, http.StatusBadRequest) {
+// if aH.HandleError(w, err, http.StatusBadRequest) {
// return
// }
-// result, err := (*aH.reader).GetApplicationPercentiles(context.Background(), query)
-// if aH.handleError(w, err, http.StatusBadRequest) {
+// result, err := aH.reader.GetApplicationPercentiles(context.Background(), query)
+// if aH.HandleError(w, err, http.StatusBadRequest) {
// return
// }
-// aH.writeJSON(w, r, result)
+// aH.WriteJSON(w, r, result)
// }
-func (aH *APIHandler) handleError(w http.ResponseWriter, err error, statusCode int) bool {
+func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode int) bool {
if err == nil {
return false
}
@@ -1967,7 +1992,7 @@ func (aH *APIHandler) handleError(w http.ResponseWriter, err error, statusCode i
return true
}
-func (aH *APIHandler) writeJSON(w http.ResponseWriter, r *http.Request, response interface{}) {
+func (aH *APIHandler) WriteJSON(w http.ResponseWriter, r *http.Request, response interface{}) {
marshall := json.Marshal
if prettyPrint := r.FormValue("pretty"); prettyPrint != "" && prettyPrint != "false" {
marshall = func(v interface{}) ([]byte, error) {
@@ -1990,64 +2015,64 @@ func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router) {
}
func (aH *APIHandler) logFields(w http.ResponseWriter, r *http.Request) {
- fields, apiErr := (*aH.reader).GetLogFields(r.Context())
+ fields, apiErr := aH.reader.GetLogFields(r.Context())
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch fields from the DB")
+ RespondError(w, apiErr, "Failed to fetch fields from the DB")
return
}
- aH.writeJSON(w, r, fields)
+ aH.WriteJSON(w, r, fields)
}
func (aH *APIHandler) logFieldUpdate(w http.ResponseWriter, r *http.Request) {
field := model.UpdateField{}
if err := json.NewDecoder(r.Body).Decode(&field); err != nil {
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErr, "Failed to decode payload")
+ RespondError(w, apiErr, "Failed to decode payload")
return
}
err := logs.ValidateUpdateFieldPayload(&field)
if err != nil {
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErr, "Incorrect payload")
+ RespondError(w, apiErr, "Incorrect payload")
return
}
- apiErr := (*aH.reader).UpdateLogField(r.Context(), &field)
+ apiErr := aH.reader.UpdateLogField(r.Context(), &field)
if apiErr != nil {
- respondError(w, apiErr, "Failed to update filed in the DB")
+ RespondError(w, apiErr, "Failed to update field in the DB")
return
}
- aH.writeJSON(w, r, field)
+ aH.WriteJSON(w, r, field)
}
func (aH *APIHandler) getLogs(w http.ResponseWriter, r *http.Request) {
params, err := logs.ParseLogFilterParams(r)
if err != nil {
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErr, "Incorrect params")
+ RespondError(w, apiErr, "Incorrect params")
return
}
- res, apiErr := (*aH.reader).GetLogs(r.Context(), params)
+ res, apiErr := aH.reader.GetLogs(r.Context(), params)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch logs from the DB")
+ RespondError(w, apiErr, "Failed to fetch logs from the DB")
return
}
- aH.writeJSON(w, r, map[string]interface{}{"results": res})
+ aH.WriteJSON(w, r, map[string]interface{}{"results": res})
}
func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
params, err := logs.ParseLogFilterParams(r)
if err != nil {
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErr, "Incorrect params")
+ RespondError(w, apiErr, "Incorrect params")
return
}
// create the client
client := &model.LogsTailClient{Name: r.RemoteAddr, Logs: make(chan *model.GetLogsResponse, 1000), Done: make(chan *bool), Error: make(chan error), Filter: *params}
- go (*aH.reader).TailLogs(r.Context(), client)
+ go aH.reader.TailLogs(r.Context(), client)
w.Header().Set("Connection", "keep-alive")
w.Header().Set("Content-Type", "text/event-stream")
@@ -2058,7 +2083,7 @@ func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
flusher, ok := w.(http.Flusher)
if !ok {
err := model.ApiError{Typ: model.ErrorStreamingNotSupported, Err: nil}
- respondError(w, &err, "streaming is not supported")
+ RespondError(w, &err, "streaming is not supported")
return
}
@@ -2084,14 +2109,14 @@ func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) {
params, err := logs.ParseLogAggregateParams(r)
if err != nil {
apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
- respondError(w, apiErr, "Incorrect params")
+ RespondError(w, apiErr, "Incorrect params")
return
}
- res, apiErr := (*aH.reader).AggregateLogs(r.Context(), params)
+ res, apiErr := aH.reader.AggregateLogs(r.Context(), params)
if apiErr != nil {
- respondError(w, apiErr, "Failed to fetch logs aggregate from the DB")
+ RespondError(w, apiErr, "Failed to fetch logs aggregate from the DB")
return
}
- aH.writeJSON(w, r, res)
+ aH.WriteJSON(w, r, res)
}
diff --git a/pkg/query-service/app/logs/parser.go b/pkg/query-service/app/logs/parser.go
index bf2ccd6418..1ccb0dbc54 100644
--- a/pkg/query-service/app/logs/parser.go
+++ b/pkg/query-service/app/logs/parser.go
@@ -7,8 +7,8 @@ import (
"strconv"
"strings"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
var operatorMapping = map[string]string{
diff --git a/pkg/query-service/app/logs/parser_test.go b/pkg/query-service/app/logs/parser_test.go
index ff47632a4b..439e323739 100644
--- a/pkg/query-service/app/logs/parser_test.go
+++ b/pkg/query-service/app/logs/parser_test.go
@@ -4,7 +4,7 @@ import (
"testing"
. "github.com/smartystreets/goconvey/convey"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
var correctQueriesTest = []struct {
diff --git a/pkg/query-service/app/logs/validator.go b/pkg/query-service/app/logs/validator.go
index 0a27a11b15..d4a1e42234 100644
--- a/pkg/query-service/app/logs/validator.go
+++ b/pkg/query-service/app/logs/validator.go
@@ -4,8 +4,8 @@ import (
"fmt"
"regexp"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
func ValidateUpdateFieldPayload(field *model.UpdateField) error {
diff --git a/pkg/query-service/app/metrics/query_builder.go b/pkg/query-service/app/metrics/query_builder.go
index 72d7ee183d..e0a696f509 100644
--- a/pkg/query-service/app/metrics/query_builder.go
+++ b/pkg/query-service/app/metrics/query_builder.go
@@ -6,8 +6,8 @@ import (
"strings"
"github.com/SigNoz/govaluate"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
diff --git a/pkg/query-service/app/metrics/query_builder_test.go b/pkg/query-service/app/metrics/query_builder_test.go
index f6f9944605..a9cf780ae4 100644
--- a/pkg/query-service/app/metrics/query_builder_test.go
+++ b/pkg/query-service/app/metrics/query_builder_test.go
@@ -4,7 +4,7 @@ import (
"testing"
. "github.com/smartystreets/goconvey/convey"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
func TestBuildQuery(t *testing.T) {
diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go
index e7291e67a9..2fed317973 100644
--- a/pkg/query-service/app/parser.go
+++ b/pkg/query-service/app/parser.go
@@ -12,9 +12,9 @@ import (
"github.com/gorilla/mux"
promModel "github.com/prometheus/common/model"
- "go.signoz.io/query-service/auth"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
var allowedFunctions = []string{"count", "ratePerSec", "sum", "avg", "min", "max", "p50", "p90", "p95", "p99"}
diff --git a/pkg/query-service/app/parser/metrics.go b/pkg/query-service/app/parser/metrics.go
index 639d1b9f0b..b13ff6d534 100644
--- a/pkg/query-service/app/parser/metrics.go
+++ b/pkg/query-service/app/parser/metrics.go
@@ -6,8 +6,8 @@ import (
"net/http"
"strings"
- "go.signoz.io/query-service/app/metrics"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
func validateQueryRangeParamsV2(qp *model.QueryRangeParamsV2) error {
diff --git a/pkg/query-service/app/parser_test.go b/pkg/query-service/app/parser_test.go
index 6fa049a05e..3e78263696 100644
--- a/pkg/query-service/app/parser_test.go
+++ b/pkg/query-service/app/parser_test.go
@@ -8,8 +8,8 @@ import (
"github.com/smartystreets/assertions/should"
. "github.com/smartystreets/goconvey/convey"
- "go.signoz.io/query-service/app/metrics"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
func TestParseFilterSingleFilter(t *testing.T) {
diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go
index 2a04302ef7..2b46ae8fed 100644
--- a/pkg/query-service/app/server.go
+++ b/pkg/query-service/app/server.go
@@ -15,17 +15,17 @@ import (
"github.com/rs/cors"
"github.com/soheilhy/cmux"
- "go.signoz.io/query-service/app/clickhouseReader"
- "go.signoz.io/query-service/app/dashboards"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/dao"
- "go.signoz.io/query-service/healthcheck"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/interfaces"
- pqle "go.signoz.io/query-service/pqlEngine"
- "go.signoz.io/query-service/rules"
- "go.signoz.io/query-service/telemetry"
- "go.signoz.io/query-service/utils"
+ "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ "go.signoz.io/signoz/pkg/query-service/healthcheck"
+ am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
+ "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
@@ -97,7 +97,11 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
telemetry.GetInstance().SetReader(reader)
- apiHandler, err := NewAPIHandler(&reader, dao.DB(), rm)
+ apiHandler, err := NewAPIHandler(APIHandlerOpts{
+ Reader: reader,
+ AppDao: dao.DB(),
+ RuleManager: rm,
+ })
if err != nil {
return nil, err
}
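
NewAPIHandler now takes an options struct instead of positional arguments, so an out-of-tree (EE) build can supply its own dependencies without the call signature changing again. A minimal sketch of such wiring, assuming APIHandlerOpts exposes only the Reader, AppDao and RuleManager fields shown above; the package name and newHandler helper are hypothetical:

```go
package eebootstrap // hypothetical

import (
	"github.com/gorilla/mux"

	baseapp "go.signoz.io/signoz/pkg/query-service/app"
	"go.signoz.io/signoz/pkg/query-service/dao"
	"go.signoz.io/signoz/pkg/query-service/interfaces"
	"go.signoz.io/signoz/pkg/query-service/rules"
)

// newHandler builds the community APIHandler with externally supplied deps.
func newHandler(reader interfaces.Reader, rm *rules.Manager) (*baseapp.APIHandler, error) {
	h, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
		Reader:      reader,   // ClickHouse-backed reader, as created in NewServer
		AppDao:      dao.DB(), // model DAO; replaceable via dao.SetDB (see below)
		RuleManager: rm,
	})
	if err != nil {
		return nil, err
	}
	// Route registration still happens on the handler, e.g. for the logs API:
	router := mux.NewRouter()
	h.RegisterLogsRoutes(router)
	return h, nil
}
```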
diff --git a/pkg/query-service/auth/auth.go b/pkg/query-service/auth/auth.go
index 1a5775334a..ccb994b126 100644
--- a/pkg/query-service/auth/auth.go
+++ b/pkg/query-service/auth/auth.go
@@ -8,9 +8,11 @@ import (
"github.com/golang-jwt/jwt"
"github.com/google/uuid"
"github.com/pkg/errors"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/dao"
- "go.signoz.io/query-service/model"
+
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
"golang.org/x/crypto/bcrypt"
)
@@ -119,7 +121,7 @@ func GetInvite(ctx context.Context, token string) (*model.InvitationResponseObje
}, nil
}
-func validateInvite(ctx context.Context, req *RegisterRequest) (*model.InvitationObject, error) {
+func ValidateInvite(ctx context.Context, req *RegisterRequest) (*model.InvitationObject, error) {
invitation, err := dao.DB().GetInviteFromEmail(ctx, req.Email)
if err != nil {
return nil, errors.Wrap(err.Err, "Failed to read from DB")
@@ -207,65 +209,43 @@ type RegisterRequest struct {
Email string `json:"email"`
Password string `json:"password"`
InviteToken string `json:"token"`
+
+ // reference URL to track where the register request is coming from
+ SourceUrl string `json:"sourceUrl"`
}
-// Register registers a new user. For the first register request, it doesn't need an invite token
-// and also the first registration is an enforced ADMIN registration. Every subsequent request will
-// need an invite token to go through.
-func Register(ctx context.Context, req *RegisterRequest) *model.ApiError {
+func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*model.User, *model.ApiError) {
- zap.S().Debugf("Got a register request for email: %v\n", req.Email)
-
- // TODO(Ahsan): We should optimize it, shouldn't make an extra DB call everytime to know if
- // this is the first register request.
- users, apiErr := dao.DB().GetUsers(ctx)
- if apiErr != nil {
- zap.S().Debugf("GetUser failed, err: %v\n", apiErr.Err)
- return apiErr
+ if req.Email == "" {
+ return nil, model.BadRequest(model.ErrEmailRequired{})
}
- var groupName, orgId string
-
- // If there are no user, then this first user is granted Admin role. Also, an org is created
- // based on the request. Any other user can't use any other org name, if they do then
- // registration will fail because of foreign key violation while create user.
- // TODO(Ahsan): We need to re-work this logic for the case of multi-tenant system.
- if len(users) == 0 {
- org, apiErr := dao.DB().CreateOrg(ctx, &model.Organization{Name: req.OrgName})
- if apiErr != nil {
- zap.S().Debugf("CreateOrg failed, err: %v\n", apiErr.Err)
- return apiErr
- }
- groupName = constants.AdminGroup
- orgId = org.Id
+ if req.Password == "" {
+ return nil, model.BadRequest(model.ErrPasswordRequired{})
}
- if len(users) > 0 {
- inv, err := validateInvite(ctx, req)
- if err != nil {
- return &model.ApiError{Err: err, Typ: model.ErrorUnauthorized}
- }
- org, apiErr := dao.DB().GetOrgByName(ctx, req.OrgName)
- if apiErr != nil {
- zap.S().Debugf("GetOrgByName failed, err: %v\n", apiErr.Err)
- return apiErr
- }
+ groupName := constants.AdminGroup
- groupName = inv.Role
- if org != nil {
- orgId = org.Id
- }
+ org, apierr := dao.DB().CreateOrg(ctx,
+ &model.Organization{Name: req.OrgName})
+ if apierr != nil {
+ zap.S().Debugf("CreateOrg failed, err: %v\n", apierr.ToError())
+ return nil, apierr
}
group, apiErr := dao.DB().GetGroupByName(ctx, groupName)
if apiErr != nil {
zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
- return apiErr
+ return nil, apiErr
}
- hash, err := passwordHash(req.Password)
+ var hash string
+ var err error
+
+ hash, err = passwordHash(req.Password)
if err != nil {
- return &model.ApiError{Err: err, Typ: model.ErrorUnauthorized}
+ zap.S().Errorf("failed to generate password hash when registering a user: %v", err)
+ return nil, model.InternalError(model.ErrSignupFailed{})
}
user := &model.User{
@@ -276,17 +256,118 @@ func Register(ctx context.Context, req *RegisterRequest) *model.ApiError {
CreatedAt: time.Now().Unix(),
ProfilePirctureURL: "", // Currently unused
GroupId: group.Id,
- OrgId: orgId,
+ OrgId: org.Id,
+ }
+
+ return dao.DB().CreateUser(ctx, user)
+}
+
+// RegisterInvitedUser handles registering an invited user
+func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword bool) (*model.User, *model.ApiError) {
+
+ if req.InviteToken == "" {
+ return nil, model.BadRequest(fmt.Errorf("invite token is required"))
+ }
+
+ if !nopassword && req.Password == "" {
+ return nil, model.BadRequest(model.ErrPasswordRequired{})
+ }
+
+ invite, err := ValidateInvite(ctx, req)
+ if err != nil {
+ zap.S().Errorf("failed to validate invite token: %v", err)
+ return nil, model.BadRequest(model.ErrSignupFailed{})
+ }
+
+ // Check if the user email already exists. This is defensive but required,
+ // as deleting the invitation and creating the user don't happen in the
+ // same transaction at the end of this function.
+ userPayload, apierr := dao.DB().GetUserByEmail(ctx, invite.Email)
+ if apierr != nil {
+ zap.S().Debugf("failed to get user by email: %v", apierr.Err)
+ return nil, apierr
+ }
+
+ if userPayload != nil {
+ // user already exists
+ return &userPayload.User, nil
+ }
+
+ if invite.OrgId == "" {
+ zap.S().Errorf("failed to find org in the invite")
+ return nil, model.InternalError(fmt.Errorf("invalid invite, org not found"))
+ }
+
+ if invite.Role == "" {
+ // if role is not provided, default to viewer
+ invite.Role = constants.ViewerGroup
+ }
+
+ group, apiErr := dao.DB().GetGroupByName(ctx, invite.Role)
+ if apiErr != nil {
+ zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
+ return nil, model.InternalError(model.ErrSignupFailed{})
+ }
+
+ var hash string
+
+ // check if the password is non-empty; in the SSO case it can be empty
+ if req.Password != "" {
+ hash, err = passwordHash(req.Password)
+ if err != nil {
+ zap.S().Errorf("failed to generate password hash when registering a user: %v", err)
+ return nil, model.InternalError(model.ErrSignupFailed{})
+ }
+ } else {
+ hash, err = passwordHash(utils.GeneratePassowrd())
+ if err != nil {
+ zap.S().Errorf("failed to generate password hash when registering a user: %v", err)
+ return nil, model.InternalError(model.ErrSignupFailed{})
+ }
+ }
+
+ user := &model.User{
+ Id: uuid.NewString(),
+ Name: req.Name,
+ Email: req.Email,
+ Password: hash,
+ CreatedAt: time.Now().Unix(),
+ ProfilePirctureURL: "", // Currently unused
+ GroupId: group.Id,
+ OrgId: invite.OrgId,
}
// TODO(Ahsan): Ideally create user and delete invitation should happen in a txn.
- _, apiErr = dao.DB().CreateUser(ctx, user)
+ user, apiErr = dao.DB().CreateUser(ctx, user)
if apiErr != nil {
zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
- return apiErr
+ return nil, apiErr
}
- return dao.DB().DeleteInvitation(ctx, user.Email)
+ apiErr = dao.DB().DeleteInvitation(ctx, user.Email)
+ if apiErr != nil {
+ zap.S().Debugf("delete invitation failed, err: %v\n", apiErr.Err)
+ return nil, apiErr
+ }
+
+ return user, nil
+}
+
+// Register registers a new user. For the first register request, it doesn't need an invite token
+// and also the first registration is an enforced ADMIN registration. Every subsequent request will
+// need an invite token to go through.
+func Register(ctx context.Context, req *RegisterRequest) (*model.User, *model.ApiError) {
+ users, err := dao.DB().GetUsers(ctx)
+ if err != nil {
+ return nil, model.InternalError(fmt.Errorf("failed to get user count"))
+ }
+
+ switch len(users) {
+ case 0:
+ return RegisterFirstUser(ctx, req)
+ default:
+ return RegisterInvitedUser(ctx, req, false)
+ }
}
// Login method returns access and refresh tokens on successful login, else it errors out.
@@ -299,39 +380,15 @@ func Login(ctx context.Context, request *model.LoginRequest) (*model.LoginRespon
return nil, err
}
- accessJwtExpiry := time.Now().Add(JwtExpiry).Unix()
-
- token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
- "id": user.Id,
- "gid": user.GroupId,
- "email": user.Email,
- "exp": accessJwtExpiry,
- })
-
- accessJwt, err := token.SignedString([]byte(JwtSecret))
+ userjwt, err := GenerateJWTForUser(&user.User)
if err != nil {
- return nil, errors.Errorf("failed to encode jwt: %v", err)
- }
-
- refreshJwtExpiry := time.Now().Add(JwtRefresh).Unix()
- token = jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
- "id": user.Id,
- "gid": user.GroupId,
- "email": user.Email,
- "exp": refreshJwtExpiry,
- })
-
- refreshJwt, err := token.SignedString([]byte(JwtSecret))
- if err != nil {
- return nil, errors.Errorf("failed to encode jwt: %v", err)
+ zap.S().Debugf("Failed to generate JWT against login creds, %v", err)
+ return nil, err
}
return &model.LoginResponse{
- AccessJwt: accessJwt,
- AccessJwtExpiry: accessJwtExpiry,
- RefreshJwt: refreshJwt,
- RefreshJwtExpiry: refreshJwtExpiry,
- UserId: user.Id,
+ UserJwtObject: userjwt,
+ UserId: user.User.Id,
}, nil
}
@@ -375,3 +432,35 @@ func passwordMatch(hash, password string) bool {
}
return true
}
+
+func GenerateJWTForUser(user *model.User) (model.UserJwtObject, error) {
+ j := model.UserJwtObject{}
+ var err error
+ j.AccessJwtExpiry = time.Now().Add(JwtExpiry).Unix()
+
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
+ "id": user.Id,
+ "gid": user.GroupId,
+ "email": user.Email,
+ "exp": j.AccessJwtExpiry,
+ })
+
+ j.AccessJwt, err = token.SignedString([]byte(JwtSecret))
+ if err != nil {
+ return j, errors.Errorf("failed to encode jwt: %v", err)
+ }
+
+ j.RefreshJwtExpiry = time.Now().Add(JwtRefresh).Unix()
+ token = jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
+ "id": user.Id,
+ "gid": user.GroupId,
+ "email": user.Email,
+ "exp": j.RefreshJwtExpiry,
+ })
+
+ j.RefreshJwt, err = token.SignedString([]byte(JwtSecret))
+ if err != nil {
+ return j, errors.Errorf("failed to encode jwt: %v", err)
+ }
+ return j, nil
+}
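
Register now returns the created user, and token generation is factored into GenerateJWTForUser, so callers can mint tokens immediately after sign-up. A sketch of that flow; registerAndIssueTokens and the package name are hypothetical, while the Register, GenerateJWTForUser and LoginResponse signatures match this diff:

```go
package eebootstrap // hypothetical

import (
	"context"

	"go.signoz.io/signoz/pkg/query-service/auth"
	"go.signoz.io/signoz/pkg/query-service/model"
)

// registerAndIssueTokens registers a user and immediately issues JWTs for them.
func registerAndIssueTokens(ctx context.Context, req *auth.RegisterRequest) (*model.LoginResponse, *model.ApiError) {
	user, apiErr := auth.Register(ctx, req)
	if apiErr != nil {
		return nil, apiErr
	}
	userjwt, err := auth.GenerateJWTForUser(user)
	if err != nil {
		return nil, model.InternalError(err)
	}
	return &model.LoginResponse{
		UserJwtObject: userjwt,
		UserId:        user.Id,
	}, nil
}
```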
diff --git a/pkg/query-service/auth/jwt.go b/pkg/query-service/auth/jwt.go
index 35ed509048..705b7892ad 100644
--- a/pkg/query-service/auth/jwt.go
+++ b/pkg/query-service/auth/jwt.go
@@ -8,7 +8,7 @@ import (
jwtmiddleware "github.com/auth0/go-jwt-middleware"
"github.com/golang-jwt/jwt"
"github.com/pkg/errors"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
"google.golang.org/grpc/metadata"
)
diff --git a/pkg/query-service/auth/rbac.go b/pkg/query-service/auth/rbac.go
index d45a06e5d5..b07fbde5f7 100644
--- a/pkg/query-service/auth/rbac.go
+++ b/pkg/query-service/auth/rbac.go
@@ -7,9 +7,9 @@ import (
"regexp"
"github.com/pkg/errors"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/dao"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
type Group struct {
diff --git a/pkg/query-service/auth/utils.go b/pkg/query-service/auth/utils.go
index 4de59ea2c0..df96057229 100644
--- a/pkg/query-service/auth/utils.go
+++ b/pkg/query-service/auth/utils.go
@@ -5,8 +5,8 @@ import (
"encoding/hex"
"github.com/pkg/errors"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
var (
diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go
index 3c6e8f6317..6e3af77a5e 100644
--- a/pkg/query-service/constants/constants.go
+++ b/pkg/query-service/constants/constants.go
@@ -4,7 +4,7 @@ import (
"os"
"strconv"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
const (
diff --git a/pkg/query-service/dao/factory.go b/pkg/query-service/dao/factory.go
index 710520421b..a8b13cd295 100644
--- a/pkg/query-service/dao/factory.go
+++ b/pkg/query-service/dao/factory.go
@@ -4,7 +4,7 @@ import (
"fmt"
"github.com/pkg/errors"
- "go.signoz.io/query-service/dao/sqlite"
+ "go.signoz.io/signoz/pkg/query-service/dao/sqlite"
)
var db ModelDao
@@ -24,6 +24,11 @@ func InitDao(engine, path string) error {
return nil
}
+// SetDB is used by the EE build to set the model DAO
+func SetDB(m ModelDao) {
+ db = m
+}
+
func DB() ModelDao {
if db == nil {
// Should never reach here
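
SetDB lets a wrapping build swap in its own ModelDao after the community initialization has run. A hypothetical bootstrap using it; newEnterpriseDao is a stand-in for a real wrapper constructor:

```go
package eebootstrap // hypothetical

import "go.signoz.io/signoz/pkg/query-service/dao"

// newEnterpriseDao is a stand-in; a real EE build would return a wrapper type
// that embeds the community DAO and so still satisfies dao.ModelDao.
func newEnterpriseDao(base dao.ModelDao) dao.ModelDao { return base }

func initDao(engine, path string) error {
	// Initialize the community DAO first.
	if err := dao.InitDao(engine, path); err != nil {
		return err
	}
	// Replace the package-level DAO that all handlers resolve via dao.DB().
	dao.SetDB(newEnterpriseDao(dao.DB()))
	return nil
}
```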
diff --git a/pkg/query-service/dao/interface.go b/pkg/query-service/dao/interface.go
index bf105ba30e..40ba17988e 100644
--- a/pkg/query-service/dao/interface.go
+++ b/pkg/query-service/dao/interface.go
@@ -3,7 +3,7 @@ package dao
import (
"context"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
type ModelDao interface {
diff --git a/pkg/query-service/dao/sqlite/connection.go b/pkg/query-service/dao/sqlite/connection.go
index b66ddeec12..0af6cdeb8b 100644
--- a/pkg/query-service/dao/sqlite/connection.go
+++ b/pkg/query-service/dao/sqlite/connection.go
@@ -6,9 +6,9 @@ import (
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/telemetry"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
"go.uber.org/zap"
)
@@ -88,6 +88,11 @@ func InitDB(dataSourceName string) (*ModelDaoSqlite, error) {
return mds, nil
}
+// DB returns the underlying database connection
+func (mds *ModelDaoSqlite) DB() *sqlx.DB {
+ return mds.db
+}
+
// initializeOrgPreferences initializes in-memory telemetry settings. It is planned to have
// multiple orgs in the system. In case of multiple orgs, there will be separate instance
// of in-memory telemetry for each of the org, having their own settings. As of now, we only
diff --git a/pkg/query-service/dao/sqlite/rbac.go b/pkg/query-service/dao/sqlite/rbac.go
index 9c74f812c2..d803f03cdd 100644
--- a/pkg/query-service/dao/sqlite/rbac.go
+++ b/pkg/query-service/dao/sqlite/rbac.go
@@ -7,8 +7,8 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/telemetry"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/telemetry"
)
func (mds *ModelDaoSqlite) CreateInviteEntry(ctx context.Context,
@@ -291,6 +291,13 @@ func (mds *ModelDaoSqlite) GetUser(ctx context.Context,
func (mds *ModelDaoSqlite) GetUserByEmail(ctx context.Context,
email string) (*model.UserPayload, *model.ApiError) {
+ if email == "" {
+ return nil, &model.ApiError{
+ Typ: model.ErrorBadData,
+ Err: fmt.Errorf("empty email address"),
+ }
+ }
+
users := []model.UserPayload{}
query := `select
u.id,
diff --git a/pkg/query-service/integrations/alertManager/manager.go b/pkg/query-service/integrations/alertManager/manager.go
index 21b58174f9..3b7df3ce56 100644
--- a/pkg/query-service/integrations/alertManager/manager.go
+++ b/pkg/query-service/integrations/alertManager/manager.go
@@ -5,11 +5,12 @@ import (
"bytes"
"encoding/json"
"fmt"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/model"
- "go.uber.org/zap"
"net/http"
neturl "net/url"
+
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.uber.org/zap"
)
const contentType = "application/json"
diff --git a/pkg/query-service/integrations/alertManager/model.go b/pkg/query-service/integrations/alertManager/model.go
index 19371a9bfd..2bbac4cf9e 100644
--- a/pkg/query-service/integrations/alertManager/model.go
+++ b/pkg/query-service/integrations/alertManager/model.go
@@ -2,8 +2,9 @@ package alertManager
import (
"fmt"
- "go.signoz.io/query-service/utils/labels"
"time"
+
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
)
// Receiver configuration provides configuration on how to contact a receiver.
diff --git a/pkg/query-service/interfaces/featureLookup.go b/pkg/query-service/interfaces/featureLookup.go
new file mode 100644
index 0000000000..0f7ab49a03
--- /dev/null
+++ b/pkg/query-service/interfaces/featureLookup.go
@@ -0,0 +1,10 @@
+package interfaces
+
+import (
+ "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type FeatureLookup interface {
+ CheckFeature(f string) error
+ GetFeatureFlags() model.FeatureSet
+}
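
FeatureLookup is the interface handlers can depend on to gate functionality. A minimal in-memory implementation backed by a model.FeatureSet might look like the sketch below; the features package and Lookup type are hypothetical, and ErrFeatureUnavailable is the error type added in model/errors.go later in this diff:

```go
package features // hypothetical

import (
	"go.signoz.io/signoz/pkg/query-service/interfaces"
	"go.signoz.io/signoz/pkg/query-service/model"
)

// Lookup answers feature checks from a static FeatureSet, e.g. model.BasicPlan.
type Lookup struct {
	flags model.FeatureSet
}

var _ interfaces.FeatureLookup = (*Lookup)(nil)

func NewLookup(flags model.FeatureSet) *Lookup {
	return &Lookup{flags: flags}
}

// CheckFeature returns nil when the flag is enabled, a typed error otherwise.
func (l *Lookup) CheckFeature(f string) error {
	if l.flags[f] {
		return nil
	}
	return model.ErrFeatureUnavailable{Key: f}
}

// GetFeatureFlags exposes the full set, e.g. for serving flags to the UI.
func (l *Lookup) GetFeatureFlags() model.FeatureSet {
	return l.flags
}
```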
diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go
index 4f4da853c6..37b5ec68aa 100644
--- a/pkg/query-service/interfaces/interface.go
+++ b/pkg/query-service/interfaces/interface.go
@@ -7,8 +7,8 @@ import (
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/stats"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/model"
+ am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
type Reader interface {
diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go
index e23ff8785b..24e9d06cb8 100644
--- a/pkg/query-service/main.go
+++ b/pkg/query-service/main.go
@@ -7,10 +7,10 @@ import (
"os/signal"
"syscall"
- "go.signoz.io/query-service/app"
- "go.signoz.io/query-service/auth"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/version"
+ "go.signoz.io/signoz/pkg/query-service/app"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/version"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
diff --git a/pkg/query-service/model/auth.go b/pkg/query-service/model/auth.go
index e5d3e7bfbf..cbfac290eb 100644
--- a/pkg/query-service/model/auth.go
+++ b/pkg/query-service/model/auth.go
@@ -32,12 +32,16 @@ type LoginRequest struct {
RefreshToken string `json:"refreshToken"`
}
-type LoginResponse struct {
+type UserJwtObject struct {
AccessJwt string `json:"accessJwt"`
AccessJwtExpiry int64 `json:"accessJwtExpiry"`
RefreshJwt string `json:"refreshJwt"`
RefreshJwtExpiry int64 `json:"refreshJwtExpiry"`
- UserId string `json:"userId"`
+}
+
+type LoginResponse struct {
+ UserJwtObject
+ UserId string `json:"userId"`
}
type ChangePasswordRequest struct {
diff --git a/pkg/query-service/model/db.go b/pkg/query-service/model/db.go
index 0c198fbf28..222f2bcc11 100644
--- a/pkg/query-service/model/db.go
+++ b/pkg/query-service/model/db.go
@@ -8,6 +8,7 @@ type Organization struct {
HasOptedUpdates bool `json:"hasOptedUpdates" db:"has_opted_updates"`
}
+// InvitationObject represents the invite token object stored in the DB
type InvitationObject struct {
Id string `json:"id" db:"id"`
Email string `json:"email" db:"email"`
diff --git a/pkg/query-service/model/errors.go b/pkg/query-service/model/errors.go
new file mode 100644
index 0000000000..d0b95d279c
--- /dev/null
+++ b/pkg/query-service/model/errors.go
@@ -0,0 +1,36 @@
+package model
+
+import "fmt"
+
+// custom errors related to registration
+type ErrFeatureUnavailable struct {
+ Key string
+}
+
+func (errFeatureUnavailable ErrFeatureUnavailable) Error() string {
+ return fmt.Sprintf("feature unavailable: %s", errFeatureUnavailable.Key)
+}
+
+type ErrEmailRequired struct{}
+
+func (errEmailRequired ErrEmailRequired) Error() string {
+ return "email is required"
+}
+
+type ErrPasswordRequired struct{}
+
+func (errPasswordRequired ErrPasswordRequired) Error() string {
+ return "password is required"
+}
+
+type ErrSignupFailed struct{}
+
+func (errSignupFailed ErrSignupFailed) Error() string {
+ return "failed to register user"
+}
+
+type ErrNoOrgFound struct{}
+
+func (errNoOrgFound ErrNoOrgFound) Error() string {
+ return "no org found"
+}
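
These typed errors pair with the ApiError constructors added below in model/response.go: callers can branch on the concrete error type to choose a status. A hedged sketch, with requireFeature being a hypothetical helper:

```go
package features // hypothetical

import (
	"errors"

	"go.signoz.io/signoz/pkg/query-service/interfaces"
	"go.signoz.io/signoz/pkg/query-service/model"
)

// requireFeature converts a feature-gate failure into an ApiError.
func requireFeature(fl interfaces.FeatureLookup, key string) *model.ApiError {
	err := fl.CheckFeature(key)
	if err == nil {
		return nil
	}
	var unavailable model.ErrFeatureUnavailable
	if errors.As(err, &unavailable) {
		// Known, user-facing condition: the plan simply lacks the feature.
		return model.BadRequest(err)
	}
	return model.InternalError(err)
}
```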
diff --git a/pkg/query-service/model/featureSet.go b/pkg/query-service/model/featureSet.go
new file mode 100644
index 0000000000..1b59450a44
--- /dev/null
+++ b/pkg/query-service/model/featureSet.go
@@ -0,0 +1,9 @@
+package model
+
+type FeatureSet map[string]bool
+
+const Basic = "BASIC_PLAN"
+
+var BasicPlan = FeatureSet{
+ Basic: true,
+}
diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go
index a78f93d8c0..a54675c111 100644
--- a/pkg/query-service/model/response.go
+++ b/pkg/query-service/model/response.go
@@ -12,10 +12,40 @@ import (
"k8s.io/apimachinery/pkg/labels"
)
+type BaseApiError interface {
+ Type() ErrorType
+ ToError() error
+ Error() string
+ IsNil() bool
+}
+
type ApiError struct {
Typ ErrorType
Err error
}
+
+func (a *ApiError) Type() ErrorType {
+ return a.Typ
+}
+
+func (a *ApiError) ToError() error {
+ if a != nil {
+ return a.Err
+ }
+ return a
+}
+
+func (a *ApiError) Error() string {
+ if a == nil || a.Err == nil {
+ return ""
+ }
+ return a.Err.Error()
+}
+
+func (a *ApiError) IsNil() bool {
+ return a == nil || a.Err == nil
+}
+
type ErrorType string
const (
@@ -34,6 +64,22 @@ const (
ErrorStreamingNotSupported ErrorType = "streaming is not supported"
)
+// BadRequest returns an ApiError object for a bad request
+func BadRequest(err error) *ApiError {
+ return &ApiError{
+ Typ: ErrorBadData,
+ Err: err,
+ }
+}
+
+// InternalError returns an ApiError object of internal type
+func InternalError(err error) *ApiError {
+ return &ApiError{
+ Typ: ErrorInternal,
+ Err: err,
+ }
+}
+
type QueryDataV2 struct {
ResultType promql.ValueType `json:"resultType"`
Result promql.Value `json:"result"`
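
Together with the exported RespondError, the new constructors keep handler error paths short. A hypothetical handler sketch written against the query-service app package; getSomething is illustrative only, and GetOrg is reused here purely as an example DAO call:

```go
package app // sketch against the query-service app package

import (
	"errors"
	"net/http"

	"github.com/gorilla/mux"

	"go.signoz.io/signoz/pkg/query-service/dao"
	"go.signoz.io/signoz/pkg/query-service/model"
)

func (aH *APIHandler) getSomething(w http.ResponseWriter, r *http.Request) {
	id := mux.Vars(r)["id"]
	if id == "" {
		RespondError(w, model.BadRequest(errors.New("id is required")), nil)
		return
	}
	org, apiErr := dao.DB().GetOrg(r.Context(), id)
	if apiErr != nil {
		RespondError(w, model.InternalError(apiErr.ToError()), nil)
		return
	}
	aH.WriteJSON(w, r, org)
}
```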
diff --git a/pkg/query-service/pqlEngine/engine.go b/pkg/query-service/pqlEngine/engine.go
index 47bde314ee..659683986d 100644
--- a/pkg/query-service/pqlEngine/engine.go
+++ b/pkg/query-service/pqlEngine/engine.go
@@ -13,7 +13,7 @@ import (
pql "github.com/prometheus/prometheus/promql"
pstorage "github.com/prometheus/prometheus/storage"
premote "github.com/prometheus/prometheus/storage/remote"
- "go.signoz.io/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
)
type PqlEngine struct {
diff --git a/pkg/query-service/rules/alerting.go b/pkg/query-service/rules/alerting.go
index b7655733d0..ea82af76b8 100644
--- a/pkg/query-service/rules/alerting.go
+++ b/pkg/query-service/rules/alerting.go
@@ -3,12 +3,13 @@ package rules
import (
"encoding/json"
"fmt"
- "github.com/pkg/errors"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/utils/labels"
"net/url"
"strings"
"time"
+
+ "github.com/pkg/errors"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
)
// this file contains common structs and methods used by
diff --git a/pkg/query-service/rules/apiParams.go b/pkg/query-service/rules/apiParams.go
index 1d488c026d..bf4c41a17a 100644
--- a/pkg/query-service/rules/apiParams.go
+++ b/pkg/query-service/rules/apiParams.go
@@ -4,14 +4,15 @@ import (
"context"
"encoding/json"
"fmt"
- "github.com/pkg/errors"
- "go.signoz.io/query-service/model"
- "go.uber.org/zap"
"time"
"unicode/utf8"
- "go.signoz.io/query-service/utils/times"
- "go.signoz.io/query-service/utils/timestamp"
+ "github.com/pkg/errors"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.uber.org/zap"
+
+ "go.signoz.io/signoz/pkg/query-service/utils/times"
+ "go.signoz.io/signoz/pkg/query-service/utils/timestamp"
yaml "gopkg.in/yaml.v2"
)
diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go
index 93ae662c99..2437355daa 100644
--- a/pkg/query-service/rules/manager.go
+++ b/pkg/query-service/rules/manager.go
@@ -4,13 +4,14 @@ import (
"context"
"encoding/json"
"fmt"
- "github.com/google/uuid"
"sort"
"strconv"
"strings"
"sync"
"time"
+ "github.com/google/uuid"
+
"github.com/go-kit/log"
"go.uber.org/zap"
@@ -19,9 +20,9 @@ import (
"github.com/pkg/errors"
// opentracing "github.com/opentracing/opentracing-go"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/utils/labels"
+ am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
)
// namespace for prom metrics
@@ -241,7 +242,7 @@ func (m *Manager) EditRule(ruleStr string, id string) error {
func (m *Manager) editTask(rule *PostableRule, taskName string) error {
m.mtx.Lock()
defer m.mtx.Unlock()
-
+
zap.S().Debugf("msg:", "editing a rule task", "\t task name:", taskName)
newTask, err := m.prepareTask(false, rule, taskName)
diff --git a/pkg/query-service/rules/manager_test.go b/pkg/query-service/rules/manager_test.go
deleted file mode 100644
index e7b059dda9..0000000000
--- a/pkg/query-service/rules/manager_test.go
+++ /dev/null
@@ -1,155 +0,0 @@
-package rules
-
-import (
- "context"
- "fmt"
- "os"
- "os/signal"
- "syscall"
-
- "github.com/jmoiron/sqlx"
- _ "github.com/mattn/go-sqlite3"
- "go.signoz.io/query-service/app/clickhouseReader"
- am "go.signoz.io/query-service/integrations/alertManager"
- "go.signoz.io/query-service/model"
- pqle "go.signoz.io/query-service/pqlEngine"
- "go.signoz.io/query-service/utils/value"
- "go.uber.org/zap"
- "go.uber.org/zap/zapcore"
- "net/url"
- "testing"
- "time"
-)
-
-func initZapLog() *zap.Logger {
- config := zap.NewDevelopmentConfig()
- config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
- config.EncoderConfig.TimeKey = "timestamp"
- config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
- logger, _ := config.Build()
- return logger
-}
-
-func TestRules(t *testing.T) {
- fmt.Println("starting test TestRules..")
- loggerMgr := initZapLog()
- zap.ReplaceGlobals(loggerMgr)
- defer loggerMgr.Sync() // flushes buffer, if any
-
- logger := loggerMgr.Sugar()
-
- configFile := "../config/prometheus.yml"
- // create engine
- pqle, err := pqle.FromConfigPath(configFile)
- if err != nil {
- fmt.Println("failed to create pql:", err)
- t.Errorf("failed to create pql engine : %v", err)
- }
-
- // create db conn
- db, err := sqlx.Open("sqlite3", "../signoz.db")
- if err != nil {
- fmt.Println("failed to create db conn:", err)
- t.Errorf("failed to create db conn: %v", err)
- }
-
- // create ch reader
- ch := clickhouseReader.NewReader(db, configFile)
-
- // notifier opts
- notifierOpts := am.NotifierOptions{
- QueueCapacity: 10000,
- Timeout: 1 * time.Second,
- AlertManagerURLs: []string{"http://localhost:9093/api/"},
- }
-
- externalURL, _ := url.Parse("http://signoz.io")
-
- // create manager opts
- managerOpts := &ManagerOptions{
- NotifierOpts: notifierOpts,
- Queriers: &Queriers{
- PqlEngine: pqle,
- Ch: ch,
- },
- ExternalURL: externalURL,
- Conn: db,
- Context: context.Background(),
- Logger: nil,
- }
-
- // create Manager
- manager, err := NewManager(managerOpts)
- if err != nil {
- fmt.Println("manager error:", err)
- t.Errorf("manager error: %v", err)
- }
- fmt.Println("manager is ready:", manager)
-
- manager.run()
-
- // test rules
- // create promql rule
- /* promql rule
- postableRule := PostableRule{
- Alert: "test alert 1 - promql",
- RuleType: RuleTypeProm,
- EvalWindow: 5 * time.Minute,
- Frequency: 30 * time.Second,
- RuleCondition: RuleCondition{
- CompositeMetricQuery: &model.CompositeMetricQuery{
- QueryType: model.PROM,
- PromQueries: map[string]*model.PromQuery{
- "A": &model.PromQuery{Query: `sum(signoz_latency_count{span_kind="SPAN_KIND_SERVER"}) by (service_name) > 100`},
- },
- },
- },
- Labels: map[string]string{},
- Annotations: map[string]string{},
- }*/
- // create builder rule
- metricQuery := &model.MetricQuery{
- QueryName: "A",
- MetricName: "signoz_latency_count",
- TagFilters: &model.FilterSet{Operation: "AND", Items: []model.FilterItem{
- {Key: "span_kind", Value: "SPAN_KIND_SERVER", Operation: "neq"},
- }},
- GroupingTags: []string{"service_name"},
- AggregateOperator: model.RATE_SUM,
- Expression: "A",
- }
-
- postableRule := PostableRule{
- Alert: "test alert 2 - builder",
- RuleType: RuleTypeThreshold,
- EvalWindow: 5 * time.Minute,
- Frequency: 30 * time.Second,
- RuleCondition: RuleCondition{
- Target: value.Float64(500),
- CompareOp: TargetIsMore,
- CompositeMetricQuery: &model.CompositeMetricQuery{
- QueryType: model.QUERY_BUILDER,
- BuilderQueries: map[string]*model.MetricQuery{
- "A": metricQuery,
- },
- },
- },
- Labels: map[string]string{"host": "server1"},
- Annotations: map[string]string{},
- }
- err = manager.addTask(&postableRule, postableRule.Alert)
- if err != nil {
- fmt.Println("failed to add rule: ", err)
- t.Errorf("failed to add rule")
- }
-
- signalsChannel := make(chan os.Signal, 1)
- signal.Notify(signalsChannel, os.Interrupt, syscall.SIGTERM)
-
- for {
- select {
- case <-signalsChannel:
- logger.Fatal("Received OS Interrupt Signal ... ")
- }
- }
-}
diff --git a/pkg/query-service/rules/promRule.go b/pkg/query-service/rules/promRule.go
index 761ca8ddee..d4654becd2 100644
--- a/pkg/query-service/rules/promRule.go
+++ b/pkg/query-service/rules/promRule.go
@@ -3,18 +3,19 @@ package rules
import (
"context"
"fmt"
- "github.com/go-kit/log"
- "github.com/go-kit/log/level"
- "go.uber.org/zap"
"sync"
"time"
+ "github.com/go-kit/log"
+ "github.com/go-kit/log/level"
+ "go.uber.org/zap"
+
plabels "github.com/prometheus/prometheus/pkg/labels"
pql "github.com/prometheus/prometheus/promql"
- "go.signoz.io/query-service/model"
- qslabels "go.signoz.io/query-service/utils/labels"
- "go.signoz.io/query-service/utils/times"
- "go.signoz.io/query-service/utils/timestamp"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ qslabels "go.signoz.io/signoz/pkg/query-service/utils/labels"
+ "go.signoz.io/signoz/pkg/query-service/utils/times"
+ "go.signoz.io/signoz/pkg/query-service/utils/timestamp"
yaml "gopkg.in/yaml.v2"
)
diff --git a/pkg/query-service/rules/queriers.go b/pkg/query-service/rules/queriers.go
index c2444cff7a..2739e04280 100644
--- a/pkg/query-service/rules/queriers.go
+++ b/pkg/query-service/rules/queriers.go
@@ -2,7 +2,7 @@ package rules
import (
"github.com/ClickHouse/clickhouse-go/v2"
- pqle "go.signoz.io/query-service/pqlEngine"
+ pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
)
// Queriers register the options for querying metrics or event sources
diff --git a/pkg/query-service/rules/resultTypes.go b/pkg/query-service/rules/resultTypes.go
index 9a36a9759f..e7e67bc7bd 100644
--- a/pkg/query-service/rules/resultTypes.go
+++ b/pkg/query-service/rules/resultTypes.go
@@ -5,7 +5,7 @@ import (
"fmt"
"strconv"
- "go.signoz.io/query-service/utils/labels"
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
)
// common result format of query
diff --git a/pkg/query-service/rules/rule.go b/pkg/query-service/rules/rule.go
index 9a2ac1bad0..8228f70c8f 100644
--- a/pkg/query-service/rules/rule.go
+++ b/pkg/query-service/rules/rule.go
@@ -2,8 +2,9 @@ package rules
import (
"context"
- "go.signoz.io/query-service/utils/labels"
"time"
+
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
diff --git a/pkg/query-service/rules/ruleTask.go b/pkg/query-service/rules/ruleTask.go
index 4075d9888e..46d3a0917a 100644
--- a/pkg/query-service/rules/ruleTask.go
+++ b/pkg/query-service/rules/ruleTask.go
@@ -3,12 +3,13 @@ package rules
import (
"context"
"fmt"
- opentracing "github.com/opentracing/opentracing-go"
- "go.signoz.io/query-service/utils/labels"
- "go.uber.org/zap"
"sort"
"sync"
"time"
+
+ opentracing "github.com/opentracing/opentracing-go"
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
+ "go.uber.org/zap"
)
// RuleTask holds a rule (with composite queries)
diff --git a/pkg/query-service/rules/templates.go b/pkg/query-service/rules/templates.go
index 3d9aa518d8..955a7b3761 100644
--- a/pkg/query-service/rules/templates.go
+++ b/pkg/query-service/rules/templates.go
@@ -14,7 +14,7 @@ import (
html_template "html/template"
text_template "text/template"
- "go.signoz.io/query-service/utils/times"
+ "go.signoz.io/signoz/pkg/query-service/utils/times"
)
// this file contains all the methods and structs
diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go
index 5234e88a72..0ce8d9317b 100644
--- a/pkg/query-service/rules/thresholdRule.go
+++ b/pkg/query-service/rules/thresholdRule.go
@@ -3,21 +3,22 @@ package rules
import (
"context"
"fmt"
- "go.uber.org/zap"
"math"
"reflect"
"sort"
"sync"
"time"
+ "go.uber.org/zap"
+
"github.com/ClickHouse/clickhouse-go/v2"
- "go.signoz.io/query-service/app/metrics"
- "go.signoz.io/query-service/constants"
- qsmodel "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/utils/labels"
- "go.signoz.io/query-service/utils/times"
- "go.signoz.io/query-service/utils/timestamp"
- "go.signoz.io/query-service/utils/value"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ qsmodel "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/utils/labels"
+ "go.signoz.io/signoz/pkg/query-service/utils/times"
+ "go.signoz.io/signoz/pkg/query-service/utils/timestamp"
+ "go.signoz.io/signoz/pkg/query-service/utils/value"
yaml "gopkg.in/yaml.v2"
)
diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go
index 62560d97f9..b5e9e501c6 100644
--- a/pkg/query-service/telemetry/telemetry.go
+++ b/pkg/query-service/telemetry/telemetry.go
@@ -11,10 +11,10 @@ import (
"time"
ph "github.com/posthog/posthog-go"
- "go.signoz.io/query-service/constants"
- "go.signoz.io/query-service/interfaces"
- "go.signoz.io/query-service/model"
- "go.signoz.io/query-service/version"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/version"
"gopkg.in/segmentio/analytics-go.v3"
)
@@ -27,6 +27,9 @@ const (
TELEMETRY_EVENT_HEART_BEAT = "Heart Beat"
TELEMETRY_EVENT_ORG_SETTINGS = "Org Settings"
DEFAULT_SAMPLING = 0.1
+ TELEMETRY_LICENSE_CHECK_FAILED = "License Check Failed"
+ TELEMETRY_LICENSE_UPDATED = "License Updated"
+ TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
)
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
diff --git a/pkg/query-service/tests/auth_test.go b/pkg/query-service/tests/auth_test.go
index 91e298555c..6989ceccb1 100644
--- a/pkg/query-service/tests/auth_test.go
+++ b/pkg/query-service/tests/auth_test.go
@@ -9,8 +9,8 @@ import (
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
- "go.signoz.io/query-service/auth"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
func invite(t *testing.T, email string) *model.InviteResponse {
diff --git a/pkg/query-service/tests/cold_storage_test.go b/pkg/query-service/tests/cold_storage_test.go
index a72e48e770..f2ff0784bc 100644
--- a/pkg/query-service/tests/cold_storage_test.go
+++ b/pkg/query-service/tests/cold_storage_test.go
@@ -9,7 +9,7 @@ import (
"time"
"github.com/stretchr/testify/require"
- "go.signoz.io/query-service/model"
+ "go.signoz.io/signoz/pkg/query-service/model"
)
const (
diff --git a/pkg/query-service/utils/encryption/encryption.go b/pkg/query-service/utils/encryption/encryption.go
new file mode 100644
index 0000000000..0a0207c31a
--- /dev/null
+++ b/pkg/query-service/utils/encryption/encryption.go
@@ -0,0 +1,45 @@
+package encryption
+
+import (
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rand"
+ "encoding/base64"
+ "errors"
+ "io"
+)
+
+func Encrypt(key, text []byte) ([]byte, error) {
+ block, err := aes.NewCipher(key)
+ if err != nil {
+ return nil, err
+ }
+ b := base64.StdEncoding.EncodeToString(text)
+ ciphertext := make([]byte, aes.BlockSize+len(b))
+ iv := ciphertext[:aes.BlockSize]
+ if _, err := io.ReadFull(rand.Reader, iv); err != nil {
+ return nil, err
+ }
+ cfb := cipher.NewCFBEncrypter(block, iv)
+ cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b))
+ return ciphertext, nil
+}
+
+func Decrypt(key, text []byte) ([]byte, error) {
+ block, err := aes.NewCipher(key)
+ if err != nil {
+ return nil, err
+ }
+ if len(text) < aes.BlockSize {
+ return nil, errors.New("ciphertext too short")
+ }
+ iv := text[:aes.BlockSize]
+ text = text[aes.BlockSize:]
+ cfb := cipher.NewCFBDecrypter(block, iv)
+ cfb.XORKeyStream(text, text)
+ data, err := base64.StdEncoding.DecodeString(string(text))
+ if err != nil {
+ return nil, err
+ }
+ return data, nil
+}
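
The new encryption helper wraps AES in CFB mode: Encrypt base64-encodes the plaintext, prepends a random IV to the ciphertext, and XORs the remainder of the buffer; Decrypt splits the IV back off and reverses both steps. A minimal usage sketch, assuming the package is imported from go.signoz.io/signoz/pkg/query-service/utils/encryption (consistent with the module path used elsewhere in this series) and using an illustrative 32-byte key:

package main

import (
	"fmt"
	"log"

	"go.signoz.io/signoz/pkg/query-service/utils/encryption"
)

func main() {
	// aes.NewCipher only accepts 16-, 24- or 32-byte keys (AES-128/192/256).
	key := []byte("0123456789abcdef0123456789abcdef") // 32 bytes, illustrative only

	ciphertext, err := encryption.Encrypt(key, []byte("license-secret"))
	if err != nil {
		log.Fatal(err)
	}

	plaintext, err := encryption.Decrypt(key, ciphertext)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(plaintext)) // prints: license-secret
}

Because the IV is drawn from crypto/rand, encrypting the same input twice yields different ciphertexts, while Decrypt always recovers the original bytes.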
diff --git a/pkg/query-service/utils/pass.go b/pkg/query-service/utils/pass.go
new file mode 100644
index 0000000000..381d3fe1e7
--- /dev/null
+++ b/pkg/query-service/utils/pass.go
@@ -0,0 +1,10 @@
+package utils
+
+import (
+ "github.com/sethvargo/go-password/password"
+)
+
+func GeneratePassowrd() string {
+ res, _ := password.Generate(64, 10, 10, false, false)
+ return res
+}
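
GeneratePassowrd (spelled as above) delegates to sethvargo/go-password, whose Generate signature is (length, numDigits, numSymbols int, noUpper, allowRepeat bool); the call therefore requests a 64-character password with 10 digits and 10 symbols, uppercase allowed and repeated characters disallowed, and discards the returned error. A hedged usage sketch, assuming the package import path go.signoz.io/signoz/pkg/query-service/utils:

package main

import (
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/utils"
)

func main() {
	// Prints a random 64-character password; an empty string would mean the
	// underlying generator returned an error that the helper swallowed.
	fmt.Println(utils.GeneratePassowrd())
}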
From 90d1a870279220596939d2c4b189f91114986bbb Mon Sep 17 00:00:00 2001
From: Nityananda Gohain
Date: Fri, 7 Oct 2022 11:48:22 +0530
Subject: [PATCH 17/28] fix: usage collection frequency updated (#1617)
---
ee/query-service/usage/manager.go | 10 ++--------
1 file changed, 2 insertions(+), 8 deletions(-)
diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go
index 34d931861c..59f16b1a3d 100644
--- a/ee/query-service/usage/manager.go
+++ b/ee/query-service/usage/manager.go
@@ -27,17 +27,11 @@ const (
)
var (
- // // collect usage every hour
- // collectionFrequency = 1 * time.Hour
-
- // // send usage every 24 hour
- // uploadFrequency = 24 * time.Hour
-
// collect usage every hour
- collectionFrequency = 5 * time.Second
+ collectionFrequency = 1 * time.Hour
// send usage every 24 hour
- uploadFrequency = 30 * time.Second
+ uploadFrequency = 24 * time.Hour
locker = stateUnlocked
)
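
The restored values feed the usage manager's tickers (the UsageExporter hunk in a later patch of this series shows the collect and upload cases). A minimal sketch of that pattern, with the loop body assumed from the method names used there:

func (lm *Manager) usageLoop(ctx context.Context) {
	collectTicker := time.NewTicker(collectionFrequency) // hourly collection
	uploadTicker := time.NewTicker(uploadFrequency)      // daily upload
	defer collectTicker.Stop()
	defer uploadTicker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-collectTicker.C:
			lm.CollectAndStoreUsage(ctx)
		case <-uploadTicker.C:
			lm.UploadUsage(ctx)
		}
	}
}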
From 90d0c72aa28797473f31e0d5e897f6205e6eadb9 Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Fri, 7 Oct 2022 14:23:01 +0530
Subject: [PATCH 18/28] ci(push): 👷 make ee query-service build default (#1616)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Prashant Shahi
---
.github/workflows/push.yaml | 37 ++++++++++++++++++++++++++++++++++++-
1 file changed, 36 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/push.yaml b/.github/workflows/push.yaml
index 2f39f28af1..b497db5001 100644
--- a/.github/workflows/push.yaml
+++ b/.github/workflows/push.yaml
@@ -11,6 +11,41 @@ on:
jobs:
image-build-and-push-query-service:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v1
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+ with:
+ version: latest
+ - name: Login to DockerHub
+ uses: docker/login-action@v1
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - uses: benjlevesque/short-sha@v1.2
+ id: short-sha
+ - name: Get branch name
+ id: branch-name
+ uses: tj-actions/branch-names@v5.1
+ - name: Set docker tag environment
+ run: |
+ if [ '${{ steps.branch-name.outputs.is_tag }}' == 'true' ]; then
+ tag="${{ steps.branch-name.outputs.tag }}"
+ tag="${tag:1}"
+ echo "DOCKER_TAG=${tag}-oss" >> $GITHUB_ENV
+ elif [ '${{ steps.branch-name.outputs.current_branch }}' == 'main' ]; then
+ echo "DOCKER_TAG=latest-oss" >> $GITHUB_ENV
+ else
+ echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}-oss" >> $GITHUB_ENV
+ fi
+ - name: Build and push docker image
+ run: make build-push-query-service
+
+ image-build-and-push-ee-query-service:
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -43,7 +78,7 @@ jobs:
echo "DOCKER_TAG=${{ steps.branch-name.outputs.current_branch }}" >> $GITHUB_ENV
fi
- name: Build and push docker image
- run: make build-push-query-service
+ run: make build-push-ee-query-service
image-build-and-push-frontend:
runs-on: ubuntu-latest
From f2f2069835d643243a8aa06a6021903efb28a707 Mon Sep 17 00:00:00 2001
From: Srikanth Chekuri
Date: Fri, 7 Oct 2022 19:00:27 +0530
Subject: [PATCH 19/28] chore: bump SigNoz/prometheus to v1.9.74 (#1620)
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index 44107166d0..74fca9afe3 100644
--- a/go.mod
+++ b/go.mod
@@ -145,4 +145,4 @@ require (
k8s.io/client-go v8.0.0+incompatible // indirect
)
-replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.73
+replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.74
diff --git a/go.sum b/go.sum
index c56a128ef8..6babcdcb67 100644
--- a/go.sum
+++ b/go.sum
@@ -57,8 +57,8 @@ github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8
github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFmafKx32bynV6QrzViL/s+ZDvQxH1E4=
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
-github.com/SigNoz/prometheus v1.9.73 h1:f6PjQrJGoCot9iJp/tWoKwgy0HTIqicYave4K3fT9ro=
-github.com/SigNoz/prometheus v1.9.73/go.mod h1:Y4J9tGDmacMC+EcOTp+EIAn2C1sN+9kE+idyVKadiVM=
+github.com/SigNoz/prometheus v1.9.74 h1:/AcKVZ80Cg4FQ/quMDLO4Ejyeb3KDjgUKveiN+OXVL8=
+github.com/SigNoz/prometheus v1.9.74/go.mod h1:Y4J9tGDmacMC+EcOTp+EIAn2C1sN+9kE+idyVKadiVM=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
From fe0a4ab0cb4cbb3993c027595db44e08c4d6ce35 Mon Sep 17 00:00:00 2001
From: Nityananda Gohain
Date: Fri, 7 Oct 2022 20:06:01 +0530
Subject: [PATCH 20/28] Fix/delete old snapshot (#1621)
* fix: remove old snapshots
---
ee/query-service/usage/manager.go | 2 ++
ee/query-service/usage/repository/repository.go | 13 +++++++++++++
2 files changed, 15 insertions(+)
diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go
index 59f16b1a3d..067b65e0b4 100644
--- a/ee/query-service/usage/manager.go
+++ b/ee/query-service/usage/manager.go
@@ -139,6 +139,8 @@ func (lm *Manager) UsageExporter(ctx context.Context) {
lm.CollectAndStoreUsage(ctx)
case <-uploadTicker.C:
lm.UploadUsage(ctx)
+ // remove the old snapshots
+ lm.repository.DropOldSnapshots(ctx)
}
}
}
diff --git a/ee/query-service/usage/repository/repository.go b/ee/query-service/usage/repository/repository.go
index 99bd4c5796..57bf5388b6 100644
--- a/ee/query-service/usage/repository/repository.go
+++ b/ee/query-service/usage/repository/repository.go
@@ -17,6 +17,7 @@ import (
const (
MaxFailedSyncCount = 9 // a snapshot will be ignored if the max failed count is greater than or equal to 9
+ SnapShotLife = 3 * 24 * time.Hour
)
// Repository is usage Repository which stores usage snapshot in a secured DB
@@ -111,6 +112,18 @@ func (r *Repository) GetSnapshotsNotSynced(ctx context.Context) ([]*model.Usage,
return snapshots, nil
}
+func (r *Repository) DropOldSnapshots(ctx context.Context) error {
+ query := `delete from usage where created_at <= $1`
+
+ _, err := r.db.ExecContext(ctx, query, time.Now().Add(-(SnapShotLife)))
+ if err != nil {
+ zap.S().Errorf("failed to remove old snapshots from db: %v", err)
+ return err
+ }
+
+ return nil
+}
+
// CheckSnapshotGtCreatedAt checks if there is any snapshot greater than the provided timestamp
func (r *Repository) CheckSnapshotGtCreatedAt(ctx context.Context, ts time.Time) (bool, error) {
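
DropOldSnapshots computes a cutoff of now minus SnapShotLife (three days) and removes everything at or before it in a single parameterized delete. A self-contained sketch of the same retention step against a plain database/sql handle; the table and column names come from the hunk above, while the `?` placeholder is a generic stand-in for the `$1` style used by the repository's own DB layer:

package usageretention

import (
	"context"
	"database/sql"
	"time"
)

// SnapShotLife mirrors the constant introduced above: snapshots older than
// three days become eligible for deletion.
const SnapShotLife = 3 * 24 * time.Hour

// DropOldSnapshots deletes every usage snapshot created at or before
// time.Now() minus SnapShotLife, leaving newer snapshots untouched.
func DropOldSnapshots(ctx context.Context, db *sql.DB) error {
	cutoff := time.Now().Add(-SnapShotLife)
	_, err := db.ExecContext(ctx, "delete from usage where created_at <= ?", cutoff)
	return err
}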
From 1374444f366617a4d1faf780b88e303d6f86660a Mon Sep 17 00:00:00 2001
From: Ankit Nayan
Date: Tue, 11 Oct 2022 00:43:54 +0530
Subject: [PATCH 21/28] chore: analytics
---
.../app/clickhouseReader/reader.go | 34 +++++++++++++++++++
pkg/query-service/config/prometheus.yml | 2 +-
pkg/query-service/interfaces/interface.go | 2 +-
pkg/query-service/model/response.go | 11 ++++++
pkg/query-service/telemetry/telemetry.go | 13 +++++++
5 files changed, 60 insertions(+), 2 deletions(-)
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index d52b4c49ff..009a372add 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -2999,6 +2999,40 @@ func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Contex
return totalLogLines, nil
}
+func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error) {
+
+ queryStr := fmt.Sprintf("select tagMap['service.name'] as serviceName, tagMap['deployment.environment'] as env, tagMap['telemetry.sdk.language'] as language from %s.%s where timestamp > toUnixTimestamp(now()-toIntervalMinute(%d));", r.traceDB, r.indexTable, 1)
+
+ tagTelemetryDataList := []model.TagTelemetryData{}
+ err := r.db.Select(ctx, &tagTelemetryDataList, queryStr)
+
+ if err != nil {
+ zap.S().Info(queryStr)
+ zap.S().Debug("Error in processing sql query: ", err)
+ return nil, err
+ }
+
+ tagsInfo := model.TagsInfo{
+ Languages: make(map[string]interface{}),
+ }
+
+ for _, tagTelemetryData := range tagTelemetryDataList {
+
+ if len(tagTelemetryData.ServiceName) != 0 && strings.Contains(tagTelemetryData.ServiceName, "prod") {
+ tagsInfo.Env = tagTelemetryData.ServiceName
+ }
+ if len(tagTelemetryData.Env) != 0 && strings.Contains(tagTelemetryData.Env, "prod") {
+ tagsInfo.Env = tagTelemetryData.Env
+ }
+ if len(tagTelemetryData.Language) != 0 {
+ tagsInfo.Languages[tagTelemetryData.Language] = struct{}{}
+ }
+
+ }
+
+ return &tagsInfo, nil
+}
+
func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) {
// response will contain top level fields from the otel log model
response := model.GetFieldsResponse{
diff --git a/pkg/query-service/config/prometheus.yml b/pkg/query-service/config/prometheus.yml
index 88ee92961b..0d5024bdc4 100644
--- a/pkg/query-service/config/prometheus.yml
+++ b/pkg/query-service/config/prometheus.yml
@@ -22,4 +22,4 @@ rule_files:
scrape_configs: []
remote_read:
- - url: tcp://localhost:9000/?database=signoz_metrics
+ - url: tcp://stagingapp.signoz.io:9000/?database=signoz_metrics
diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go
index 37b5ec68aa..4a83b16c80 100644
--- a/pkg/query-service/interfaces/interface.go
+++ b/pkg/query-service/interfaces/interface.go
@@ -61,7 +61,7 @@ type Reader interface {
GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error)
GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
-
+ GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error)
// Logs
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError
diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go
index a54675c111..23654fb899 100644
--- a/pkg/query-service/model/response.go
+++ b/pkg/query-service/model/response.go
@@ -542,3 +542,14 @@ func (s *ServiceItem) MarshalJSON() ([]byte, error) {
type DashboardVar struct {
VariableValues []interface{} `json:"variableValues"`
}
+
+type TagsInfo struct {
+ Languages map[string]interface{} `json:"languages"`
+ Env string `json:"env"`
+}
+
+type TagTelemetryData struct {
+ ServiceName string `json:"serviceName" ch:"serviceName"`
+ Env string `json:"env" ch:"env"`
+ Language string `json:"language" ch:"language"`
+}
diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go
index b5e9e501c6..6c6af545f7 100644
--- a/pkg/query-service/telemetry/telemetry.go
+++ b/pkg/query-service/telemetry/telemetry.go
@@ -30,6 +30,8 @@ const (
TELEMETRY_LICENSE_CHECK_FAILED = "License Check Failed"
TELEMETRY_LICENSE_UPDATED = "License Updated"
TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
+ TELEMETRY_EVENT_ENVIRONMENT = "Environment"
+ TELEMETRY_EVENT_LANGUAGE = "Language"
)
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
@@ -70,6 +72,7 @@ type Telemetry struct {
}
func createTelemetry() {
+
telemetry = &Telemetry{
operator: analytics.New(api_key),
phOperator: ph.New(ph_api_key),
@@ -89,6 +92,16 @@ func createTelemetry() {
for {
select {
case <-ticker.C:
+ tagsInfo, _ := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background())
+
+ if len(tagsInfo.Env) != 0 {
+ telemetry.SendEvent(TELEMETRY_EVENT_ENVIRONMENT, map[string]interface{}{"value": tagsInfo.Env})
+ }
+
+ for language := range tagsInfo.Languages {
+ telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": language})
+ }
+
totalSpans, _ := telemetry.reader.GetTotalSpans(context.Background())
spansInLastHeartBeatInterval, _ := telemetry.reader.GetSpansInLastHeartBeatInterval(context.Background())
getSamplesInfoInLastHeartBeatInterval, _ := telemetry.reader.GetSamplesInfoInLastHeartBeatInterval(context.Background())
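
One caveat in this hunk: the error from GetTagsInfoInLastHeartBeatInterval is discarded, so a failed ClickHouse query leaves tagsInfo nil and the following tagsInfo.Env access would panic inside the heartbeat goroutine. A defensive variant of the same block, shown only as a sketch with the identifiers taken from the diff above:

tagsInfo, err := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background())
if err == nil && tagsInfo != nil {
	if tagsInfo.Env != "" {
		telemetry.SendEvent(TELEMETRY_EVENT_ENVIRONMENT, map[string]interface{}{"value": tagsInfo.Env})
	}
	for language := range tagsInfo.Languages {
		telemetry.SendEvent(TELEMETRY_EVENT_LANGUAGE, map[string]interface{}{"language": language})
	}
}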
From 4e38f1dcc0a834a34c3b29f4c50cbff657058c38 Mon Sep 17 00:00:00 2001
From: Palash Gupta
Date: Tue, 11 Oct 2022 15:48:58 +0530
Subject: [PATCH 22/28] chore: free plan config is updated (#1625)
* chore: free plan config is updated
* fix: solved empty state issue with no auth domains
Co-authored-by: Amol
---
frontend/src/constants/app.ts | 3 ++
.../AuthDomains/index.tsx | 33 ++++++++++++++-----
2 files changed, 27 insertions(+), 9 deletions(-)
diff --git a/frontend/src/constants/app.ts b/frontend/src/constants/app.ts
index 35ae663592..68bfe983db 100644
--- a/frontend/src/constants/app.ts
+++ b/frontend/src/constants/app.ts
@@ -8,3 +8,6 @@ export const DEFAULT_AUTH0_APP_REDIRECTION_PATH = ROUTES.APPLICATION;
export const IS_SIDEBAR_COLLAPSED = 'isSideBarCollapsed';
export const INVITE_MEMBERS_HASH = '#invite-team-members';
+
+export const SIGNOZ_UPGRADE_PLAN_URL =
+ 'https://upgrade.signoz.io/upgrade-from-app';
diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx
index ad283027ac..221ba963dc 100644
--- a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx
+++ b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx
@@ -4,6 +4,7 @@ import { ColumnsType } from 'antd/lib/table';
import deleteDomain from 'api/SAML/deleteDomain';
import listAllDomain from 'api/SAML/listAllDomain';
import updateDomain from 'api/SAML/updateDomain';
+import { SIGNOZ_UPGRADE_PLAN_URL } from 'constants/app';
import { FeatureKeys } from 'constants/featureKeys';
import useFeatureFlag from 'hooks/useFeatureFlag';
import React, { useCallback, useState } from 'react';
@@ -30,6 +31,21 @@ function AuthDomains(): JSX.Element {
const SSOFlag = useFeatureFlag(FeatureKeys.SSO);
+ const notEntripriseData: SAMLDomain[] = [
+ {
+ id: v4(),
+ name: '',
+ ssoEnabled: false,
+ orgId: (org || [])[0].id || '',
+ samlConfig: {
+ samlCert: '',
+ samlEntity: '',
+ samlIdp: '',
+ },
+ ssoType: 'SAML',
+ },
+ ];
+
const { data, isLoading, refetch } = useQuery(['saml'], {
queryFn: () =>
listAllDomain({
@@ -90,10 +106,10 @@ function AuthDomains(): JSX.Element {
const onEditHandler = useCallback(
(record: SAMLDomain) => (): void => {
- setIsEditModalOpen(true);
+ onOpenHandler(setIsEditModalOpen)();
setCurrentDomain(record);
},
- [],
+ [onOpenHandler],
);
const onDeleteHandler = useCallback(
@@ -128,7 +144,7 @@ function AuthDomains(): JSX.Element {
);
const onClickLicenseHandler = useCallback(() => {
- window.open('http://signoz.io/pricing');
+ window.open(SIGNOZ_UPGRADE_PLAN_URL);
}, []);
const columns: ColumnsType = [
@@ -171,10 +187,7 @@ function AuthDomains(): JSX.Element {
if (!SSOFlag) {
return (
{
- setCurrentDomain(record);
- onOpenHandler(setIsSettingsOpen)();
- }}
+ onClick={onClickLicenseHandler}
type="link"
icon={ }
>
@@ -235,7 +248,7 @@ function AuthDomains(): JSX.Element {
record.name + v4()}
- dataSource={[]}
+ dataSource={!SSOFlag ? notEntripriseData : []}
columns={columns}
tableLayout="fixed"
/>
@@ -243,6 +256,8 @@ function AuthDomains(): JSX.Element {
);
}
+ const tableData = SSOFlag ? data?.payload || [] : notEntripriseData;
+
return (
<>
Date: Tue, 11 Oct 2022 18:35:05 +0530
Subject: [PATCH 23/28] chore: rateLimit added
---
pkg/query-service/telemetry/telemetry.go | 25 ++++++++++++++++++++++++
1 file changed, 25 insertions(+)
diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go
index 6c6af545f7..793c02b8ab 100644
--- a/pkg/query-service/telemetry/telemetry.go
+++ b/pkg/query-service/telemetry/telemetry.go
@@ -43,6 +43,12 @@ const HEART_BEAT_DURATION = 6 * time.Hour
// const HEART_BEAT_DURATION = 10 * time.Second
+const RATE_LIMIT_CHECK_DURATION = 1 * time.Minute
+const RATE_LIMIT_VALUE = 60
+
+// const RATE_LIMIT_CHECK_DURATION = 20 * time.Second
+// const RATE_LIMIT_VALUE = 5
+
var telemetry *Telemetry
var once sync.Once
@@ -69,6 +75,7 @@ type Telemetry struct {
companyDomain string
minRandInt int
maxRandInt int
+ rateLimits map[string]int8
}
func createTelemetry() {
@@ -77,6 +84,7 @@ func createTelemetry() {
operator: analytics.New(api_key),
phOperator: ph.New(ph_api_key),
ipAddress: getOutboundIP(),
+ rateLimits: make(map[string]int8),
}
telemetry.minRandInt = 0
telemetry.maxRandInt = int(1 / DEFAULT_SAMPLING)
@@ -87,7 +95,18 @@ func createTelemetry() {
telemetry.SetTelemetryEnabled(constants.IsTelemetryEnabled())
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data)
+
ticker := time.NewTicker(HEART_BEAT_DURATION)
+ rateLimitTicker := time.NewTicker(RATE_LIMIT_CHECK_DURATION)
+
+ go func() {
+ for {
+ select {
+ case <-rateLimitTicker.C:
+ telemetry.rateLimits = make(map[string]int8)
+ }
+ }
+ }()
go func() {
for {
select {
@@ -199,6 +218,12 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}) {
return
}
+ if a.rateLimits[event] < RATE_LIMIT_VALUE {
+ a.rateLimits[event] += 1
+ } else {
+ return
+ }
+
// zap.S().Info(data)
properties := analytics.NewProperties()
properties.Set("version", version.GetVersion())
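
The rate limiter added here is a per-event counter map that a dedicated goroutine wipes every RATE_LIMIT_CHECK_DURATION; once an event name reaches RATE_LIMIT_VALUE sends within a window, further sends of that event are dropped until the next reset. A self-contained sketch of the same pattern; the mutex is an addition for illustration, since the patch above resets and increments the map from two goroutines without synchronization:

package main

import (
	"fmt"
	"sync"
	"time"
)

const (
	rateLimitCheckDuration = 1 * time.Minute
	rateLimitValue         = 60
)

type rateLimiter struct {
	mu     sync.Mutex
	counts map[string]int
}

func newRateLimiter() *rateLimiter {
	rl := &rateLimiter{counts: make(map[string]int)}
	go func() {
		ticker := time.NewTicker(rateLimitCheckDuration)
		defer ticker.Stop()
		for range ticker.C {
			rl.mu.Lock()
			rl.counts = make(map[string]int) // start a fresh window
			rl.mu.Unlock()
		}
	}()
	return rl
}

// Allow reports whether another event of this name may be sent in the
// current window, incrementing the per-event counter when it is allowed.
func (rl *rateLimiter) Allow(event string) bool {
	rl.mu.Lock()
	defer rl.mu.Unlock()
	if rl.counts[event] >= rateLimitValue {
		return false
	}
	rl.counts[event]++
	return true
}

func main() {
	rl := newRateLimiter()
	sent, dropped := 0, 0
	for i := 0; i < 65; i++ {
		if rl.Allow("Heart Beat") {
			sent++
		} else {
			dropped++
		}
	}
	fmt.Println("sent:", sent, "dropped:", dropped) // sent: 60 dropped: 5
}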
From 57f4f098f779512485841d2af06c965ac6dfe688 Mon Sep 17 00:00:00 2001
From: Palash Gupta
Date: Tue, 11 Oct 2022 19:38:22 +0530
Subject: [PATCH 24/28] feat: onsubmit is updated (#1628)
---
frontend/src/container/Login/index.tsx | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/frontend/src/container/Login/index.tsx b/frontend/src/container/Login/index.tsx
index b08ff0f353..0fd6673844 100644
--- a/frontend/src/container/Login/index.tsx
+++ b/frontend/src/container/Login/index.tsx
@@ -117,6 +117,12 @@ function Login({
try {
event.preventDefault();
event.persist();
+
+ if (!precheckComplete) {
+ onNextHandler();
+ return;
+ }
+
setIsLoading(true);
const response = await loginApi({
@@ -171,6 +177,7 @@ function Login({
};
const { sso, canSelfRegister } = precheckResult;
+
return (
From 134c5dc1d235b3197c4a5f6dca0df355f239bf25 Mon Sep 17 00:00:00 2001
From: Nityananda Gohain
Date: Wed, 12 Oct 2022 12:04:36 +0530
Subject: [PATCH 25/28] fix: disable usage collection (#1631)
---
ee/query-service/app/server.go | 11 -----------
1 file changed, 11 deletions(-)
diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go
index 608926deb5..7002af3f41 100644
--- a/ee/query-service/app/server.go
+++ b/ee/query-service/app/server.go
@@ -20,7 +20,6 @@ import (
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/interfaces"
licensepkg "go.signoz.io/signoz/ee/query-service/license"
- "go.signoz.io/signoz/ee/query-service/usage"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
@@ -118,16 +117,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
return nil, err
}
- // start the usagemanager
- usageManager, err := usage.New("sqlite", localDB, lm.GetRepo(), reader.GetConn())
- if err != nil {
- return nil, err
- }
- err = usageManager.Start()
- if err != nil {
- return nil, err
- }
-
telemetry.GetInstance().SetReader(reader)
apiOpts := api.APIHandlerOptions{
From acd15af82313d9e8801664b7251e46d4929799ad Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Thu, 13 Oct 2022 08:58:06 +0530
Subject: [PATCH 26/28] ci(e2e): 👷 ee build for query-service (#1633)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Prashant Shahi
---
.github/workflows/e2e-k3s.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/e2e-k3s.yaml b/.github/workflows/e2e-k3s.yaml
index f25b8afdbe..6b14a9c975 100644
--- a/.github/workflows/e2e-k3s.yaml
+++ b/.github/workflows/e2e-k3s.yaml
@@ -16,7 +16,7 @@ jobs:
uses: actions/checkout@v2
- name: Build query-service image
- run: make build-query-service-amd64
+ run: make build-ee-query-service-amd64
- name: Build frontend image
run: make build-frontend-amd64
From f580bedb1c5bc973cac95c97195deb5660519699 Mon Sep 17 00:00:00 2001
From: Palash Gupta
Date: Thu, 13 Oct 2022 14:20:25 +0530
Subject: [PATCH 27/28] 1627 login: onsubmit is added (#1635)
* feat: onsubmit is updated
* chore: precheckComplete handler is updated
---
frontend/src/container/Login/index.tsx | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/frontend/src/container/Login/index.tsx b/frontend/src/container/Login/index.tsx
index 0fd6673844..61276ea3c3 100644
--- a/frontend/src/container/Login/index.tsx
+++ b/frontend/src/container/Login/index.tsx
@@ -111,6 +111,8 @@ function Login({
setFunc(value);
};
+ const { sso, canSelfRegister } = precheckResult;
+
const onSubmitHandler: React.FormEventHandler = async (
event,
) => {
@@ -123,6 +125,11 @@ function Login({
return;
}
+ if (precheckComplete && sso) {
+ window.location.href = precheckResult.ssoUrl || '';
+ return;
+ }
+
setIsLoading(true);
const response = await loginApi({
@@ -176,8 +183,6 @@ function Login({
);
};
- const { sso, canSelfRegister } = precheckResult;
-
return (
From 6a3c1c10fb46b999a4b918412cb574c3a66d06b3 Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Thu, 13 Oct 2022 15:25:37 +0545
Subject: [PATCH 28/28] chore(release): 📌 pin versions: SigNoz 0.11.2, OtelCollector 0.55.3
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Prashant Shahi
---
deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++----
deploy/docker/clickhouse-setup/docker-compose-core.yaml | 4 ++--
deploy/docker/clickhouse-setup/docker-compose-prod.yaml | 4 ++--
deploy/docker/clickhouse-setup/docker-compose.yaml | 8 ++++----
pkg/query-service/tests/test-deploy/docker-compose.yaml | 4 ++--
5 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 07f8bbf3c2..5bc37de791 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -40,7 +40,7 @@ services:
condition: on-failure
query-service:
- image: signoz/query-service:0.11.1
+ image: signoz/query-service:0.11.2
command: ["-config=/root/config/prometheus.yml"]
# ports:
# - "6060:6060" # pprof port
@@ -70,7 +70,7 @@ services:
- clickhouse
frontend:
- image: signoz/frontend:0.11.1
+ image: signoz/frontend:0.11.2
deploy:
restart_policy:
condition: on-failure
@@ -83,7 +83,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:
@@ -111,7 +111,7 @@ services:
- clickhouse
otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index da338d1dd7..a7d265d3f5 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -41,7 +41,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: otel-collector
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-config.yaml"]
# user: root # required for reading docker container logs
volumes:
@@ -67,7 +67,7 @@ services:
otel-collector-metrics:
container_name: otel-collector-metrics
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
diff --git a/deploy/docker/clickhouse-setup/docker-compose-prod.yaml b/deploy/docker/clickhouse-setup/docker-compose-prod.yaml
index 56967c2ab0..2aa522026e 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-prod.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-prod.yaml
@@ -2,7 +2,7 @@ version: "2.4"
services:
query-service:
- image: signoz/query-service:0.11.1
+ image: signoz/query-service:0.11.2
container_name: query-service
command: ["-config=/root/config/prometheus.yml"]
# ports:
@@ -32,7 +32,7 @@ services:
condition: service_healthy
frontend:
- image: signoz/frontend:0.11.1
+ image: signoz/frontend:0.11.2
container_name: frontend
restart: on-failure
depends_on:
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index e5af18b0b9..964a835e26 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -39,7 +39,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
- image: signoz/query-service:0.11.1
+ image: signoz/query-service:0.11.2
container_name: query-service
command: ["-config=/root/config/prometheus.yml"]
# ports:
@@ -69,7 +69,7 @@ services:
condition: service_healthy
frontend:
- image: signoz/frontend:0.11.1
+ image: signoz/frontend:0.11.2
container_name: frontend
restart: on-failure
depends_on:
@@ -81,7 +81,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:
@@ -107,7 +107,7 @@ services:
condition: service_healthy
otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index 660ddc88d6..294fed787d 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -62,7 +62,7 @@ services:
condition: service_healthy
otel-collector:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:
@@ -78,7 +78,7 @@ services:
condition: service_healthy
otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.55.1
+ image: signoz/signoz-otel-collector:0.55.3
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml