diff --git a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
index ae294e2fb8..68e8f0edce 100644
--- a/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/OnboardingContainer.tsx
@@ -6,9 +6,11 @@ import { ArrowRightOutlined } from '@ant-design/icons';
import { Button, Card, Typography } from 'antd';
import getIngestionData from 'api/settings/getIngestionData';
import cx from 'classnames';
+import ROUTES from 'constants/routes';
import FullScreenHeader from 'container/FullScreenHeader/FullScreenHeader';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { useIsDarkMode } from 'hooks/useDarkMode';
+import history from 'lib/history';
import { useEffect, useState } from 'react';
import { useQuery } from 'react-query';
import { useEffectOnce } from 'react-use';
@@ -21,9 +23,11 @@ import {
} from './context/OnboardingContext';
import { DataSourceType } from './Steps/DataSource/DataSource';
import {
+ defaultApplicationDataSource,
defaultAwsServices,
defaultInfraMetricsType,
defaultLogsType,
+ moduleRouteMap,
} from './utils/dataSourceUtils';
import {
APM_STEPS,
@@ -89,6 +93,7 @@ export default function Onboarding(): JSX.Element {
const [current, setCurrent] = useState(0);
const isDarkMode = useIsDarkMode();
const { trackEvent } = useAnalytics();
+ const { location } = history;
const {
selectedDataSource,
@@ -191,12 +196,16 @@ export default function Onboarding(): JSX.Element {
}
} else if (selectedModule?.id === ModulesMap.APM) {
handleAPMSteps();
+
+ if (!selectedDataSource) {
+ updateSelectedDataSource(defaultApplicationDataSource);
+ }
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [selectedModule, selectedDataSource, selectedEnvironment, selectedMethod]);
- const handleNext = (): void => {
+ const handleNextStep = (): void => {
if (activeStep <= 3) {
const nextStep = activeStep + 1;
@@ -217,12 +226,36 @@ export default function Onboarding(): JSX.Element {
}
};
+ const handleNext = (): void => {
+ if (activeStep <= 3) {
+ handleNextStep();
+ history.replace(moduleRouteMap[selectedModule.id as ModulesMap]);
+ }
+ };
+
const handleModuleSelect = (module: ModuleProps): void => {
setSelectedModule(module);
updateSelectedModule(module);
updateSelectedDataSource(null);
};
+ useEffect(() => {
+ if (location.pathname === ROUTES.GET_STARTED_APPLICATION_MONITORING) {
+ handleModuleSelect(useCases.APM);
+ updateSelectedDataSource(defaultApplicationDataSource);
+ handleNextStep();
+ } else if (
+ location.pathname === ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING
+ ) {
+ handleModuleSelect(useCases.InfrastructureMonitoring);
+ handleNextStep();
+ } else if (location.pathname === ROUTES.GET_STARTED_LOGS_MANAGEMENT) {
+ handleModuleSelect(useCases.LogsManagement);
+ handleNextStep();
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
return (
{activeStep === 1 && (
@@ -285,6 +318,7 @@ export default function Onboarding(): JSX.Element {
setActiveStep(activeStep - 1);
setSelectedModule(useCases.APM);
resetProgress();
+ history.push(ROUTES.GET_STARTED);
}}
selectedModule={selectedModule}
selectedModuleSteps={selectedModuleSteps}
diff --git a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
index b7ebbcfde8..3654b21fd7 100644
--- a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
@@ -5,6 +5,7 @@ import {
CloseCircleTwoTone,
LoadingOutlined,
} from '@ant-design/icons';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import Header from 'container/OnboardingContainer/common/Header/Header';
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
@@ -72,6 +73,9 @@ export default function LogsConnectionStatus(): JSX.Element {
reduceTo: 'sum',
offset: 0,
pageSize: 100,
+ timeAggregation: '',
+ spaceAggregation: '',
+ functions: [],
},
],
queryFormulas: [],
@@ -84,6 +88,7 @@ export default function LogsConnectionStatus(): JSX.Element {
const { data, isFetching, error, isError } = useGetExplorerQueryRange(
requestData,
PANEL_TYPES.LIST,
+ DEFAULT_ENTITY_VERSION,
{
keepPreviousData: true,
refetchInterval: pollingInterval,
diff --git a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts
index 7b851feac9..77f1210858 100644
--- a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts
+++ b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts
@@ -1,6 +1,15 @@
-import { ModuleProps, ModulesMap } from '../OnboardingContainer';
+import ROUTES from 'constants/routes';
+
+import { ModuleProps } from '../OnboardingContainer';
import { DataSourceType } from '../Steps/DataSource/DataSource';
+export enum ModulesMap {
+ APM = 'APM',
+ LogsManagement = 'LogsManagement',
+ InfrastructureMonitoring = 'InfrastructureMonitoring',
+ AwsMonitoring = 'AwsMonitoring',
+}
+
export const frameworksMap = {
APM: {
java: [
@@ -81,174 +90,174 @@ const supportedLanguages = [
{
name: 'java',
id: 'java',
- imgURL: `Logos/java.png`,
+ imgURL: `/Logos/java.png`,
},
{
name: 'python',
id: 'python',
- imgURL: `Logos/python.png`,
+ imgURL: `/Logos/python.png`,
},
{
name: 'go',
id: 'go',
- imgURL: `Logos/go.png`,
+ imgURL: `/Logos/go.png`,
},
{
name: 'javascript',
id: 'javascript',
- imgURL: `Logos/javascript.png`,
+ imgURL: `/Logos/javascript.png`,
},
{
name: 'rails',
id: 'rails',
- imgURL: `Logos/rails.png`,
+ imgURL: `/Logos/rails.png`,
},
{
name: '.NET',
id: 'dotnet',
- imgURL: `Logos/dotnet.png`,
+ imgURL: `/Logos/dotnet.png`,
},
{
name: 'rust',
id: 'rust',
- imgURL: `Logos/rust.png`,
+ imgURL: `/Logos/rust.png`,
},
{
name: 'elixir',
id: 'elixir',
- imgURL: `Logos/elixir.png`,
+ imgURL: `/Logos/elixir.png`,
},
{
name: 'swift',
id: 'swift',
- imgURL: `Logos/swift.png`,
+ imgURL: `/Logos/swift.png`,
},
];
export const defaultLogsType = {
name: 'Kubernetes Pod Logs',
id: 'kubernetes',
- imgURL: `Logos/kubernetes.svg`,
+ imgURL: `/Logos/kubernetes.svg`,
};
const supportedLogsTypes = [
{
name: 'Kubernetes Pod Logs',
id: 'kubernetes',
- imgURL: `Logos/kubernetes.svg`,
+ imgURL: `/Logos/kubernetes.svg`,
},
{
name: 'Docker Container Logs',
id: 'docker',
- imgURL: `Logos/docker.svg`,
+ imgURL: `/Logos/docker.svg`,
},
{
name: 'SysLogs',
id: 'syslogs',
- imgURL: `Logos/syslogs.svg`,
+ imgURL: `/Logos/syslogs.svg`,
},
{
name: 'Application Logs',
id: 'application_logs',
- imgURL: `Logos/software-window.svg`,
+ imgURL: `/Logos/software-window.svg`,
},
{
name: 'FluentBit',
id: 'fluentBit',
- imgURL: `Logos/fluent-bit.png`,
+ imgURL: `/Logos/fluent-bit.png`,
},
{
name: 'FluentD',
id: 'fluentD',
- imgURL: `Logos/fluentd.png`,
+ imgURL: `/Logos/fluentd.png`,
},
{
name: 'LogStash',
id: 'logStash',
- imgURL: `Logos/logstash.svg`,
+ imgURL: `/Logos/logstash.svg`,
},
{
name: 'Heroku',
id: 'heroku',
- imgURL: `Logos/heroku.png`,
+ imgURL: `/Logos/heroku.png`,
},
{
name: 'Vercel',
id: 'vercel',
- imgURL: `Logos/vercel.png`,
+ imgURL: `/Logos/vercel.png`,
},
{
name: 'HTTP',
id: 'http',
- imgURL: `Logos/http.png`,
+ imgURL: `/Logos/http.png`,
},
{
name: 'Cloudwatch',
id: 'cloudwatch',
- imgURL: `Logos/cloudwatch.png`,
+ imgURL: `/Logos/cloudwatch.png`,
},
];
export const defaultInfraMetricsType = {
name: 'Kubernetes Infra Metrics',
id: 'kubernetesInfraMetrics',
- imgURL: `Logos/kubernetes.svg`,
+ imgURL: `/Logos/kubernetes.svg`,
};
const supportedInfraMetrics = [
{
name: 'Kubernetes Infra Metrics',
id: 'kubernetesInfraMetrics',
- imgURL: `Logos/kubernetes.svg`,
+ imgURL: `/Logos/kubernetes.svg`,
},
{
name: 'HostMetrics',
id: 'hostMetrics',
- imgURL: `Logos/software-window.svg`,
+ imgURL: `/Logos/software-window.svg`,
},
{
name: 'Other Metrics',
id: 'otherMetrics',
- imgURL: `Logos/cmd-terminal.svg`,
+ imgURL: `/Logos/cmd-terminal.svg`,
},
];
export const defaultAwsServices = {
- name: 'EC2 - Application Logs',
+ name: 'EC2 - App/Server Logs',
id: 'awsEc2ApplicationLogs',
- imgURL: `Logos/ec2.svg`,
+ imgURL: `/Logos/ec2.svg`,
};
const supportedAwsServices = [
{
name: 'EC2 - App/Server Logs',
id: 'awsEc2ApplicationLogs',
- imgURL: `Logos/ec2.svg`,
+ imgURL: `/Logos/ec2.svg`,
},
{
name: 'EC2 - Infra Metrics',
id: 'awsEc2InfrastructureMetrics',
- imgURL: `Logos/ec2.svg`,
+ imgURL: `/Logos/ec2.svg`,
},
{
name: 'ECS - EC2',
id: 'awsEcsEc2',
- imgURL: `Logos/ecs.svg`,
+ imgURL: `/Logos/ecs.svg`,
},
{
name: 'ECS - Fargate',
id: 'awsEcsFargate',
- imgURL: `Logos/ecs.svg`,
+ imgURL: `/Logos/ecs.svg`,
},
{
name: 'ECS - External',
id: 'awsEcsExternal',
- imgURL: `Logos/ecs.svg`,
+ imgURL: `/Logos/ecs.svg`,
},
{
name: 'EKS',
id: 'awsEks',
- imgURL: `Logos/eks.svg`,
+ imgURL: `/Logos/eks.svg`,
},
];
@@ -320,3 +329,11 @@ export const hasFrameworks = ({
return true;
};
+
+export const moduleRouteMap = {
+ [ModulesMap.APM]: ROUTES.GET_STARTED_APPLICATION_MONITORING,
+ [ModulesMap.LogsManagement]: ROUTES.GET_STARTED_LOGS_MANAGEMENT,
+ [ModulesMap.InfrastructureMonitoring]:
+ ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING,
+ [ModulesMap.AwsMonitoring]: ROUTES.GET_STARTED_AWS_MONITORING,
+};
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/PreviewIntervalSelector/components/LogsCountInInterval/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/PreviewIntervalSelector/components/LogsCountInInterval/index.tsx
index 63ee3ff3c0..b388bc0e7a 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/PreviewIntervalSelector/components/LogsCountInInterval/index.tsx
+++ b/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/PreviewIntervalSelector/components/LogsCountInInterval/index.tsx
@@ -1,5 +1,6 @@
import './styles.scss';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import {
initialFilters,
initialQueriesMap,
@@ -26,12 +27,15 @@ function LogsCountInInterval({
return q;
}, [filter]);
- const result = useGetQueryRange({
- graphType: PANEL_TYPES.TABLE,
- query,
- selectedTime: 'GLOBAL_TIME',
- globalSelectedInterval: timeInterval,
- });
+ const result = useGetQueryRange(
+ {
+ graphType: PANEL_TYPES.TABLE,
+ query,
+ selectedTime: 'GLOBAL_TIME',
+ globalSelectedInterval: timeInterval,
+ },
+ DEFAULT_ENTITY_VERSION,
+ );
if (!result.isFetched) {
return null;
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/Preview/hooks/useSampleLogs.ts b/frontend/src/container/PipelinePage/PipelineListsView/Preview/hooks/useSampleLogs.ts
index 3789856771..457e3bd976 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/Preview/hooks/useSampleLogs.ts
+++ b/frontend/src/container/PipelinePage/PipelineListsView/Preview/hooks/useSampleLogs.ts
@@ -1,3 +1,4 @@
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import {
initialFilters,
initialQueriesMap,
@@ -42,12 +43,15 @@ const useSampleLogs = ({
return q;
}, [count, filter]);
- const response = useGetQueryRange({
- graphType: PANEL_TYPES.LIST,
- query,
- selectedTime: 'GLOBAL_TIME',
- globalSelectedInterval: timeInterval,
- });
+ const response = useGetQueryRange(
+ {
+ graphType: PANEL_TYPES.LIST,
+ query,
+ selectedTime: 'GLOBAL_TIME',
+ globalSelectedInterval: timeInterval,
+ },
+ DEFAULT_ENTITY_VERSION,
+ );
const { isFetching: isLoading, data } = response;
diff --git a/frontend/src/container/QueryBuilder/QueryBuilder.interfaces.ts b/frontend/src/container/QueryBuilder/QueryBuilder.interfaces.ts
index ef18d8ce39..5ebfd47da7 100644
--- a/frontend/src/container/QueryBuilder/QueryBuilder.interfaces.ts
+++ b/frontend/src/container/QueryBuilder/QueryBuilder.interfaces.ts
@@ -27,4 +27,6 @@ export type QueryBuilderProps = {
filterConfigs?: Partial
;
queryComponents?: { renderOrderBy?: (props: OrderByFilterProps) => ReactNode };
isListViewPanel?: boolean;
+ showFunctions?: boolean;
+ version: string;
};
diff --git a/frontend/src/container/QueryBuilder/QueryBuilder.styles.scss b/frontend/src/container/QueryBuilder/QueryBuilder.styles.scss
index b23521ad68..dbb7a962ef 100644
--- a/frontend/src/container/QueryBuilder/QueryBuilder.styles.scss
+++ b/frontend/src/container/QueryBuilder/QueryBuilder.styles.scss
@@ -152,7 +152,7 @@
}
::-webkit-scrollbar {
- height: 1rem;
+ height: 0.2rem;
width: 0.2rem;
}
}
diff --git a/frontend/src/container/QueryBuilder/QueryBuilder.tsx b/frontend/src/container/QueryBuilder/QueryBuilder.tsx
index 0bdc321c1e..f95049a2d1 100644
--- a/frontend/src/container/QueryBuilder/QueryBuilder.tsx
+++ b/frontend/src/container/QueryBuilder/QueryBuilder.tsx
@@ -25,6 +25,8 @@ export const QueryBuilder = memo(function QueryBuilder({
filterConfigs = {},
queryComponents,
isListViewPanel = false,
+ showFunctions = false,
+ version,
}: QueryBuilderProps): JSX.Element {
const {
currentQuery,
@@ -46,7 +48,7 @@ export const QueryBuilder = memo(function QueryBuilder({
useEffect(() => {
if (currentDataSource !== initialDataSource || newPanelType !== panelType) {
- if (panelType === PANEL_TYPES.BAR) {
+ if (newPanelType === PANEL_TYPES.BAR) {
handleSetConfig(PANEL_TYPES.BAR, DataSource.METRICS);
return;
}
@@ -170,6 +172,8 @@ export const QueryBuilder = memo(function QueryBuilder({
: listViewLogFilterConfigs
}
queryComponents={queryComponents}
+ showFunctions={showFunctions}
+ version={version}
isListViewPanel
/>
)}
@@ -188,6 +192,8 @@ export const QueryBuilder = memo(function QueryBuilder({
query={query}
filterConfigs={filterConfigs}
queryComponents={queryComponents}
+ showFunctions={showFunctions}
+ version={version}
/>
))}
diff --git a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
index 5f25edb29b..b6f9ebf894 100644
--- a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
+++ b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
@@ -39,6 +39,7 @@ export function Formula({
query,
filterConfigs,
formula,
+ entityVersion: '',
});
const [isCollapse, setIsCollapsed] = useState(false);
@@ -146,6 +147,7 @@ export function Formula({
void;
+ onCloneQuery?: (type: string, query: IBuilderQuery) => void;
onToggleVisibility: () => void;
onCollapseEntity: () => void;
+ onQueryFunctionsUpdates?: (functions: QueryFunctionProps[]) => void;
showDeleteButton: boolean;
isListViewPanel?: boolean;
}
export default function QBEntityOptions({
+ query,
+ isMetricsDataSource,
isCollapsed,
+ showFunctions,
entityType,
entityData,
onDelete,
+ onCloneQuery,
onToggleVisibility,
onCollapseEntity,
showDeleteButton,
- isListViewPanel = false,
+ onQueryFunctionsUpdates,
+ isListViewPanel,
}: QBEntityOptionsProps): JSX.Element {
+ const handleCloneEntity = (): void => {
+ if (isFunction(onCloneQuery)) {
+ onCloneQuery(entityType, entityData);
+ }
+ };
+
return (
@@ -46,6 +78,15 @@ export default function QBEntityOptions({
>
{entityData.disabled ? : }
+
+ {entityType === 'query' && (
+
+
+
+
+
+ )}
+
{entityData.queryName}
+
+ {showFunctions &&
+ isMetricsDataSource &&
+ query &&
+ onQueryFunctionsUpdates && (
+
+ )}
@@ -77,4 +128,9 @@ export default function QBEntityOptions({
QBEntityOptions.defaultProps = {
isListViewPanel: false,
+ query: undefined,
+ isMetricsDataSource: false,
+ onQueryFunctionsUpdates: undefined,
+ showFunctions: false,
+ onCloneQuery: noop,
};
diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.interfaces.ts b/frontend/src/container/QueryBuilder/components/Query/Query.interfaces.ts
index fba34ef6cd..05713395ff 100644
--- a/frontend/src/container/QueryBuilder/components/Query/Query.interfaces.ts
+++ b/frontend/src/container/QueryBuilder/components/Query/Query.interfaces.ts
@@ -7,4 +7,6 @@ export type QueryProps = {
query: IBuilderQuery;
queryVariant: 'static' | 'dropdown';
isListViewPanel?: boolean;
+ showFunctions?: boolean;
+ version: string;
} & Pick;
diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.tsx b/frontend/src/container/QueryBuilder/components/Query/Query.tsx
index 0c90142cb2..1bb761fde7 100644
--- a/frontend/src/container/QueryBuilder/components/Query/Query.tsx
+++ b/frontend/src/container/QueryBuilder/components/Query/Query.tsx
@@ -2,8 +2,9 @@
import './Query.styles.scss';
import { Col, Input, Row } from 'antd';
+import { ENTITY_VERSION_V4 } from 'constants/app';
// ** Constants
-import { PANEL_TYPES } from 'constants/queryBuilder';
+import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
// ** Components
import {
@@ -38,9 +39,11 @@ import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { transformToUpperCase } from 'utils/transformToUpperCase';
import QBEntityOptions from '../QBEntityOptions/QBEntityOptions';
+import SpaceAggregationOptions from '../SpaceAggregationOptions/SpaceAggregationOptions';
// ** Types
import { QueryProps } from './Query.interfaces';
+// eslint-disable-next-line sonarjs/cognitive-complexity
export const Query = memo(function Query({
index,
queryVariant,
@@ -48,14 +51,17 @@ export const Query = memo(function Query({
filterConfigs,
queryComponents,
isListViewPanel = false,
+ showFunctions = false,
+ version,
}: QueryProps): JSX.Element {
- const { panelType, currentQuery } = useQueryBuilder();
+ const { panelType, currentQuery, cloneQuery } = useQueryBuilder();
const { pathname } = useLocation();
const [isCollapse, setIsCollapsed] = useState(false);
const {
operators,
+ spaceAggregationOptions,
isMetricsDataSource,
isTracePanelType,
listOfAdditionalFilters,
@@ -63,8 +69,16 @@ export const Query = memo(function Query({
handleChangeQueryData,
handleChangeDataSource,
handleChangeOperator,
+ handleSpaceAggregationChange,
handleDeleteQuery,
- } = useQueryOperations({ index, query, filterConfigs, isListViewPanel });
+ handleQueryFunctionsUpdates,
+ } = useQueryOperations({
+ index,
+ query,
+ filterConfigs,
+ isListViewPanel,
+ entityVersion: version,
+ });
const handleChangeAggregateEvery = useCallback(
(value: IBuilderQuery['stepInterval']) => {
@@ -192,13 +206,17 @@ export const Query = memo(function Query({
-
+
-
+
@@ -225,7 +243,11 @@ export const Query = memo(function Query({
-
+
@@ -257,7 +279,11 @@ export const Query = memo(function Query({
-
+
@@ -279,23 +305,36 @@ export const Query = memo(function Query({
}, [
panelType,
query,
- filterConfigs?.limit?.isHidden,
- filterConfigs?.having?.isHidden,
handleChangeLimit,
+ version,
handleChangeHavingFilter,
renderOrderByFilter,
renderAggregateEveryFilter,
+ filterConfigs?.limit?.isHidden,
+ filterConfigs?.having?.isHidden,
]);
+ const disableOperatorSelector =
+ !query?.aggregateAttribute.key || query?.aggregateAttribute.key === '';
+
+ const isVersionV4 = version && version === ENTITY_VERSION_V4;
+
return (
1}
isListViewPanel={isListViewPanel}
/>
@@ -322,23 +361,42 @@ export const Query = memo(function Query({
{isMetricsDataSource && (
-
-
-
+ {version && version === 'v3' && (
+
+
+
+ )}
+
+
+ {version &&
+ version === ENTITY_VERSION_V4 &&
+ operators &&
+ Array.isArray(operators) &&
+ operators.length > 0 && (
+
+
+
+ )}
)}
-
+
+
{isMetricsDataSource && (
@@ -379,16 +437,40 @@ export const Query = memo(function Query({
)}
{!isListViewPanel && (
-
+
-
+ {isVersionV4 && isMetricsDataSource ? (
+
+ ) : (
+
+ )}
+
{panelType === PANEL_TYPES.VALUE ? (
-
+
+ {isVersionV4 && isMetricsDataSource && (
+
+
+
+ )}
+
+
+
+
) : (
)}
+
+ {isVersionV4 && isMetricsDataSource && panelType === PANEL_TYPES.TABLE && (
+
+
+
+
+
+
+
+
+
+
+
+ )}
)}
diff --git a/frontend/src/container/QueryBuilder/components/QueryFunctions/Function.tsx b/frontend/src/container/QueryBuilder/components/QueryFunctions/Function.tsx
new file mode 100644
index 0000000000..1374c2bb1a
--- /dev/null
+++ b/frontend/src/container/QueryBuilder/components/QueryFunctions/Function.tsx
@@ -0,0 +1,89 @@
+/* eslint-disable react/jsx-props-no-spreading */
+import { Button, Flex, Input, Select } from 'antd';
+import cx from 'classnames';
+import {
+ queryFunctionOptions,
+ queryFunctionsTypesConfig,
+} from 'constants/queryFunctionOptions';
+import { useIsDarkMode } from 'hooks/useDarkMode';
+import { debounce, isNil } from 'lodash-es';
+import { X } from 'lucide-react';
+import { QueryFunctionProps } from 'types/api/queryBuilder/queryBuilderData';
+
+interface FunctionProps {
+ funcData: QueryFunctionProps;
+ index: any;
+ handleUpdateFunctionArgs: any;
+ handleUpdateFunctionName: any;
+ handleDeleteFunction: any;
+}
+
+export default function Function({
+ funcData,
+ index,
+ handleUpdateFunctionArgs,
+ handleUpdateFunctionName,
+ handleDeleteFunction,
+}: FunctionProps): JSX.Element {
+ const isDarkMode = useIsDarkMode();
+ const { showInput } = queryFunctionsTypesConfig[funcData.name];
+
+ let functionValue;
+
+ const hasValue = !isNil(
+ funcData.args && funcData.args.length > 0 && funcData.args[0],
+ );
+
+ if (hasValue) {
+ // eslint-disable-next-line prefer-destructuring
+ functionValue = funcData.args[0];
+ }
+
+ const debouncedHandleUpdateFunctionArgs = debounce(
+ handleUpdateFunctionArgs,
+ 500,
+ );
+
+ return (
+
+ {
+ handleUpdateFunctionName(funcData, index, value);
+ }}
+ dropdownStyle={{
+ minWidth: 200,
+ borderRadius: '4px',
+ border: isDarkMode
+ ? '1px solid var(--bg-slate-400)'
+ : '1px solid var(--bg-vanilla-300)',
+ boxShadow: `4px 10px 16px 2px rgba(0, 0, 0, 0.20)`,
+ }}
+ placement="bottomRight"
+ options={queryFunctionOptions}
+ />
+
+ {showInput && (
+ {
+ debouncedHandleUpdateFunctionArgs(funcData, index, event.target.value);
+ }}
+ />
+ )}
+
+ {
+ handleDeleteFunction(funcData, index);
+ }}
+ >
+
+
+
+ );
+}
diff --git a/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.styles.scss b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.styles.scss
new file mode 100644
index 0000000000..8eb6bf2ffd
--- /dev/null
+++ b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.styles.scss
@@ -0,0 +1,151 @@
+.query-functions-container {
+ display: flex;
+ margin: 0 12px;
+ justify-content: center;
+ align-items: center;
+
+ .function-btn,
+ .add-function-btn {
+ display: flex;
+ gap: 8px;
+
+ cursor: pointer;
+ border-radius: 3px !important;
+ }
+
+ .function-btn {
+ border-top-right-radius: 0px !important;
+ border-bottom-right-radius: 0px !important;
+
+ .function-icon {
+ height: 18px;
+ width: 18px;
+ }
+ }
+
+ .add-function-btn {
+ border-top-left-radius: 0px !important;
+ border-bottom-left-radius: 0px !important;
+
+ background-color: var(--bg-slate-500) !important;
+ opacity: 0.8;
+
+ &:disabled {
+ opacity: 0.4;
+ }
+ }
+
+ &.hasFunctions {
+ .function-btn {
+ border-top-right-radius: 3px !important;
+ border-bottom-right-radius: 3px !important;
+ margin-right: 8px;
+ }
+
+ .add-function-btn {
+ border-top-left-radius: 3px !important;
+ border-bottom-left-radius: 3px !important;
+ margin-left: 8px;
+ }
+ }
+}
+
+.query-functions-list {
+ display: flex;
+ gap: 8px;
+
+ .query-function {
+ position: relative;
+
+ &::before {
+ content: '';
+ height: 1px;
+ width: 8px;
+ position: absolute;
+ left: -8px;
+ top: 16px;
+ z-index: 0;
+ color: var(--bg-sakura-500);
+ background-color: var(--bg-sakura-500);
+ }
+
+ &::after {
+ content: '';
+ height: 1px;
+ width: 8px;
+ position: absolute;
+ right: -8px;
+ top: 16px;
+ z-index: 0;
+ color: var(--bg-sakura-500);
+ background-color: var(--bg-sakura-500);
+ }
+
+ .query-function-name-selector {
+ border-top-left-radius: 3px;
+ border-bottom-left-radius: 3px;
+
+ .ant-select-selector {
+ border: none;
+ background: var(--bg-ink-200);
+ }
+
+ &.showInput {
+ .ant-select-selector {
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
+ }
+ }
+ }
+
+ .query-function-value {
+ width: 55px;
+ border-left: 0;
+ background: var(--bg-ink-200);
+ border-radius: 0;
+ border: 1px solid transparent;
+
+ &:focus {
+ border-color: transparent !important;
+ }
+ }
+
+ .query-function-delete-btn {
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+
+ border: none !important;
+
+ border-top-left-radius: 0px !important;
+ border-bottom-left-radius: 0px !important;
+ min-width: 24px !important;
+ }
+ }
+}
+
+.lightMode {
+ .query-functions-container {
+ .add-function-btn {
+ background-color: var(--bg-vanilla-100) !important;
+ }
+ }
+
+ .query-functions-list {
+ .query-function {
+ border: 1px solid var(--bg-vanilla-300);
+ .query-function-name-selector {
+ .ant-select-selector {
+ background: var(--bg-vanilla-100);
+ }
+ }
+
+ .query-function-value {
+ background: var(--bg-vanilla-100);
+
+ &:focus {
+ border-color: transparent !important;
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx
new file mode 100644
index 0000000000..68c2701982
--- /dev/null
+++ b/frontend/src/container/QueryBuilder/components/QueryFunctions/QueryFunctions.tsx
@@ -0,0 +1,181 @@
+import './QueryFunctions.styles.scss';
+
+import { Button, Tooltip } from 'antd';
+import cx from 'classnames';
+import { useIsDarkMode } from 'hooks/useDarkMode';
+import { cloneDeep, pullAt } from 'lodash-es';
+import { Plus } from 'lucide-react';
+import { useState } from 'react';
+import { QueryFunctionProps } from 'types/api/queryBuilder/queryBuilderData';
+import { QueryFunctionsTypes } from 'types/common/queryBuilder';
+
+import Function from './Function';
+
+const defaultFunctionStruct: QueryFunctionProps = {
+ name: QueryFunctionsTypes.CUTOFF_MIN,
+ args: [],
+};
+
+interface QueryFunctionsProps {
+ queryFunctions: QueryFunctionProps[];
+ onChange: (functions: QueryFunctionProps[]) => void;
+}
+
+// SVG component
+function FunctionIcon({
+ fillColor = 'white',
+ className,
+}: {
+ fillColor: string;
+ className: string;
+}): JSX.Element {
+ return (
+
+
+
+
+
+
+ );
+}
+
+export default function QueryFunctions({
+ queryFunctions,
+ onChange,
+}: QueryFunctionsProps): JSX.Element {
+ const [functions, setFunctions] = useState<QueryFunctionProps[]>(
+ queryFunctions,
+ );
+
+ const isDarkMode = useIsDarkMode();
+
+ const handleAddNewFunction = (): void => {
+ const updatedFunctionsArr = [
+ ...functions,
+ {
+ ...defaultFunctionStruct,
+ },
+ ];
+
+ setFunctions(updatedFunctionsArr);
+
+ onChange(updatedFunctionsArr);
+ };
+
+ const handleDeleteFunction = (
+ queryFunction: QueryFunctionProps,
+ index: number,
+ ): void => {
+ const clonedFunctions = cloneDeep(functions);
+ pullAt(clonedFunctions, index);
+
+ setFunctions(clonedFunctions);
+ onChange(clonedFunctions);
+ };
+
+ const handleUpdateFunctionName = (
+ func: QueryFunctionProps,
+ index: number,
+ value: string,
+ ): void => {
+ const updateFunctions = cloneDeep(functions);
+
+ if (updateFunctions && updateFunctions.length > 0 && updateFunctions[index]) {
+ updateFunctions[index].name = value;
+ setFunctions(updateFunctions);
+ onChange(updateFunctions);
+ }
+ };
+
+ const handleUpdateFunctionArgs = (
+ func: QueryFunctionProps,
+ index: number,
+ value: string,
+ ): void => {
+ const updateFunctions = cloneDeep(functions);
+
+ if (updateFunctions && updateFunctions.length > 0 && updateFunctions[index]) {
+ updateFunctions[index].args = [value];
+ setFunctions(updateFunctions);
+ onChange(updateFunctions);
+ }
+ };
+
+ return (
+ 0 ? 'hasFunctions' : '',
+ )}
+ >
+
+
+
+
+
+ {functions.map((func, index) => (
+
+ ))}
+
+
+
= 3
? 'Functions are in early access. You can add a maximum of 3 functions as of now.'
+ : ''
+ }
+ placement="right"
+ >
+ = 3}
+ onClick={handleAddNewFunction}
+ >
+
+
+
+
+ );
+}
diff --git a/frontend/src/container/QueryBuilder/components/SpaceAggregationOptions/SpaceAggregationOptions.tsx b/frontend/src/container/QueryBuilder/components/SpaceAggregationOptions/SpaceAggregationOptions.tsx
new file mode 100644
index 0000000000..9ed6f93c45
--- /dev/null
+++ b/frontend/src/container/QueryBuilder/components/SpaceAggregationOptions/SpaceAggregationOptions.tsx
@@ -0,0 +1,67 @@
+import { Select } from 'antd';
+import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
+import { useEffect, useState } from 'react';
+import { MetricAggregateOperator } from 'types/common/queryBuilder';
+
+interface SpaceAggregationOptionsProps {
+ panelType: PANEL_TYPES | null;
+ selectedValue: string | undefined;
+ aggregatorAttributeType: ATTRIBUTE_TYPES | null;
+ disabled: boolean;
+ onSelect: (value: string) => void;
+ operators: any[];
+}
+
+export default function SpaceAggregationOptions({
+ panelType,
+ selectedValue,
+ aggregatorAttributeType = ATTRIBUTE_TYPES.GAUGE,
+ disabled,
+ onSelect,
+ operators,
+}: SpaceAggregationOptionsProps): JSX.Element {
+ const placeHolderText = panelType === PANEL_TYPES.VALUE ? 'Sum' : 'Sum By';
+ const [defaultValue, setDefaultValue] = useState(
+ selectedValue || placeHolderText,
+ );
+
+ useEffect(() => {
+ if (!selectedValue) {
+ if (
+ aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
+ aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
+ ) {
+ setDefaultValue(MetricAggregateOperator.P90);
+ onSelect(MetricAggregateOperator.P90);
+ } else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
+ setDefaultValue(MetricAggregateOperator.SUM);
+ onSelect(MetricAggregateOperator.SUM);
+ } else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
+ setDefaultValue(MetricAggregateOperator.AVG);
+ onSelect(MetricAggregateOperator.AVG);
+ }
+ }
+
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [aggregatorAttributeType]);
+
+ return (
+
+
+ {operators.map((operator) => (
+
+ {operator.label} {panelType !== PANEL_TYPES.VALUE ? ' By' : ''}
+
+ ))}
+
+
+ );
+}
diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
index f27336e96a..492329f69b 100644
--- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx
@@ -111,7 +111,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
debouncedValue,
query.aggregateOperator,
query.dataSource,
- ])?.payload.attributeKeys || [],
+ ])?.payload?.attributeKeys || [],
[debouncedValue, query.aggregateOperator, query.dataSource, queryClient],
);
diff --git a/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.interfaces.ts b/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.interfaces.ts
index d4c9ebbf88..1bdb838aa6 100644
--- a/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.interfaces.ts
+++ b/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.interfaces.ts
@@ -1,6 +1,7 @@
import { Having, IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export type HavingFilterProps = {
+ entityVersion: string;
query: IBuilderQuery;
onChange: (having: Having[]) => void;
};
diff --git a/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.tsx b/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.tsx
index 4f638496f4..7d11d018cc 100644
--- a/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.tsx
+++ b/frontend/src/container/QueryBuilder/filters/HavingFilter/HavingFilter.tsx
@@ -1,4 +1,5 @@
import { Select } from 'antd';
+import { ENTITY_VERSION_V4 } from 'constants/app';
// ** Constants
import { HAVING_OPERATORS, initialHavingValues } from 'constants/queryBuilder';
import { HavingFilterTag } from 'container/QueryBuilder/components';
@@ -22,6 +23,7 @@ import { getHavingObject, isValidHavingValue } from '../utils';
import { HavingFilterProps } from './HavingFilter.interfaces';
export function HavingFilter({
+ entityVersion,
query,
onChange,
}: HavingFilterProps): JSX.Element {
@@ -48,10 +50,18 @@ export function HavingFilter({
[query],
);
- const columnName = useMemo(
- () => `${query.aggregateOperator.toUpperCase()}(${aggregatorAttribute})`,
- [query, aggregatorAttribute],
- );
+ const columnName = useMemo(() => {
+ if (
+ query &&
+ query.dataSource === DataSource.METRICS &&
+ query.spaceAggregation &&
+ entityVersion === ENTITY_VERSION_V4
+ ) {
+ return `${query.spaceAggregation.toUpperCase()}(${aggregatorAttribute})`;
+ }
+
+ return `${query.aggregateOperator.toUpperCase()}(${aggregatorAttribute})`;
+ }, [query, aggregatorAttribute, entityVersion]);
const aggregatorOptions: SelectOption[] = useMemo(
() => [{ label: columnName, value: columnName }],
@@ -211,7 +221,7 @@ export function HavingFilter({
disabled={isMetricsDataSource && !query.aggregateAttribute.key}
style={{ width: '100%' }}
notFoundContent={currentFormValue.value.length === 0 ? undefined : null}
- placeholder="Count(operation) > 5"
+ placeholder="GroupBy(operation) > 5"
onDeselect={handleDeselect}
onChange={handleChange}
onSelect={handleSelect}
diff --git a/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx b/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx
index 5732b22712..24cf6b1730 100644
--- a/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx
+++ b/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx
@@ -1,5 +1,6 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
// Constants
import {
HAVING_OPERATORS,
@@ -31,6 +32,7 @@ describe('Having filter behaviour', () => {
,
);
@@ -49,6 +51,7 @@ describe('Having filter behaviour', () => {
,
);
@@ -62,7 +65,11 @@ describe('Having filter behaviour', () => {
test('Is having filter is enable', () => {
const mockFn = jest.fn();
const { unmount } = render(
- ,
+ ,
);
const input = screen.getByRole('combobox');
@@ -80,7 +87,11 @@ describe('Having filter behaviour', () => {
const optionTestTitle = 'havingOption';
const { unmount } = render(
- ,
+ ,
);
// get input
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
index 32bf8fbd40..a7dcef96c3 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
@@ -1,9 +1,8 @@
-import {
- SelectOptionContainer,
- TagContainer,
- TagLabel,
- TagValue,
-} from './style';
+import './QueryBuilderSearch.styles.scss';
+
+import { Tooltip } from 'antd';
+
+import { TagContainer, TagLabel, TagValue } from './style';
import { getOptionType } from './utils';
function OptionRenderer({
@@ -16,21 +15,25 @@ function OptionRenderer({
return (
{optionType ? (
-
- {value}
-
-
- Type:
- {optionType}
-
-
- Data type:
- {dataType}
-
+
+
+
{value}
+
+
+ Type:
+ {optionType}
+
+
+ Data type:
+ {dataType}
+
+
-
+
) : (
-
{label}
+
+ {label}
+
)}
);
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss
index 8fd979fa8e..a6f5fcaf37 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss
@@ -1,3 +1,16 @@
+.selectOptionContainer {
+ display: flex;
+ gap: 8px;
+ justify-content: space-between;
+ align-items: center;
+ overflow-x: auto;
+
+ &::-webkit-scrollbar {
+ width: 0.2rem;
+ height: 0.2rem;
+ }
+}
+
.lightMode {
.query-builder-search {
.ant-select-dropdown {
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
index 46d535737a..85c7ea2c64 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx
@@ -59,6 +59,7 @@ function QueryBuilderSearch({
updateTag,
handleClearTag,
handleKeyDown,
+ handleOnBlur,
handleSearch,
handleSelect,
tags,
@@ -260,6 +261,7 @@ function QueryBuilderSearch({
notFoundContent={isFetching ?
: null}
suffixIcon={suffixIcon}
showAction={['focus']}
+ onBlur={handleOnBlur}
>
{options.map((option) => (
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
index fd6d5f209e..5e010ff34a 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
@@ -16,19 +16,12 @@ export const StyledCheckOutlined = styled(CheckOutlined)`
float: right;
`;
-export const SelectOptionContainer = styled.div`
- display: flex;
- gap: 8px;
- justify-content: space-between;
- align-items: center;
- overflow-x: auto;
-`;
-
export const TagContainer = styled(Tag)`
&&& {
+ display: inline-block;
border-radius: 3px;
- padding: 0.3rem 0.3rem;
- font-weight: 400;
+ padding: 0.1rem 0.2rem;
+ font-weight: 300;
font-size: 0.6rem;
}
`;
diff --git a/frontend/src/container/QueryTable/QueryTable.tsx b/frontend/src/container/QueryTable/QueryTable.tsx
index 34b146f6da..2fa1d05f65 100644
--- a/frontend/src/container/QueryTable/QueryTable.tsx
+++ b/frontend/src/container/QueryTable/QueryTable.tsx
@@ -22,7 +22,8 @@ export function QueryTable({
...props
}: QueryTableProps): JSX.Element {
const { isDownloadEnabled = false, fileName = '' } = downloadOption || {};
- const { servicename } = useParams();
+ const { servicename: encodedServiceName } = useParams();
+ const servicename = decodeURIComponent(encodedServiceName);
const { loading } = props;
const { columns: newColumns, dataSource: newDataSource } = useMemo(() => {
if (columns && dataSource) {
diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx
index 5213513dc8..f592f6a540 100644
--- a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx
+++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx
@@ -1,6 +1,7 @@
import { WarningFilled } from '@ant-design/icons';
import { Flex, Typography } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
+import { ENTITY_VERSION_V4 } from 'constants/app';
import { MAX_RPS_LIMIT } from 'constants/global';
import ResourceAttributesFilter from 'container/ResourceAttributesFilter';
import { useGetQueriesRange } from 'hooks/queryBuilder/useGetQueriesRange';
@@ -35,7 +36,7 @@ function ServiceMetricTable({
const { data: licenseData, isFetching } = useLicense();
const isCloudUserVal = isCloudUser();
- const queries = useGetQueriesRange(queryRangeRequestData, {
+ const queries = useGetQueriesRange(queryRangeRequestData, ENTITY_VERSION_V4, {
queryKey: [
`GetMetricsQueryRange-${queryRangeRequestData[0].selectedTime}-${globalSelectedInterval}`,
maxTime,
diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts
index 46f94acd87..352e144ade 100644
--- a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts
+++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricsQuery.ts
@@ -166,11 +166,17 @@ export const serviceMetricsQuery = (
operationPrSecondAdditionalItems,
];
- const aggregateOperators = [
- MetricAggregateOperator.HIST_QUANTILE_99,
- MetricAggregateOperator.SUM_RATE,
- MetricAggregateOperator.SUM_RATE,
- MetricAggregateOperator.SUM_RATE,
+ const timeAggregateOperators = [
+ MetricAggregateOperator.EMPTY,
+ MetricAggregateOperator.RATE,
+ MetricAggregateOperator.RATE,
+ MetricAggregateOperator.RATE,
+ ];
+ const spaceAggregateOperators = [
+ MetricAggregateOperator.P99,
+ MetricAggregateOperator.SUM,
+ MetricAggregateOperator.SUM,
+ MetricAggregateOperator.SUM,
];
const disabled = [false, true, true, false];
@@ -201,7 +207,8 @@ export const serviceMetricsQuery = (
additionalItems,
disabled,
legends,
- aggregateOperators,
+ timeAggregateOperators,
+ spaceAggregateOperators,
expressions,
legendFormulas,
groupBy,
diff --git a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx
index 9b897d2a9a..ed6f10b10a 100644
--- a/frontend/src/container/SideNav/menuItems.tsx
+++ b/frontend/src/container/SideNav/menuItems.tsx
@@ -16,6 +16,7 @@ import {
ScrollText,
Settings,
Slack,
+ // Unplug,
UserPlus,
} from 'lucide-react';
@@ -89,6 +90,11 @@ const menuItems: SidebarItem[] = [
label: 'Alerts',
icon: ,
},
+ // {
+ // key: ROUTES.INTEGRATIONS_INSTALLED,
+ // label: 'Integrations',
+ // icon: ,
+ // },
{
key: ROUTES.ALL_ERROR,
label: 'Exceptions',
@@ -121,6 +127,7 @@ export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record = {
[ROUTES.TRACES_EXPLORER]: ROUTES.TRACE,
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACE,
[ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
+ [ROUTES.INTEGRATIONS_BASE]: ROUTES.INTEGRATIONS_INSTALLED,
};
export default menuItems;
diff --git a/frontend/src/container/TimeSeriesView/index.tsx b/frontend/src/container/TimeSeriesView/index.tsx
index 4acffd7e61..2dd009746d 100644
--- a/frontend/src/container/TimeSeriesView/index.tsx
+++ b/frontend/src/container/TimeSeriesView/index.tsx
@@ -1,3 +1,4 @@
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
@@ -49,6 +50,7 @@ function TimeSeriesViewContainer({
dataSource,
},
},
+ DEFAULT_ENTITY_VERSION,
{
queryKey: [
REACT_QUERY_KEY.GET_QUERY_RANGE,
diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts
index bc77afe7d6..3618686c95 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/config.ts
+++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts
@@ -81,6 +81,10 @@ export const routesToSkip = [
ROUTES.ALL_CHANNELS,
ROUTES.USAGE_EXPLORER,
ROUTES.GET_STARTED,
+ ROUTES.GET_STARTED_APPLICATION_MONITORING,
+ ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING,
+ ROUTES.GET_STARTED_LOGS_MANAGEMENT,
+ ROUTES.GET_STARTED_AWS_MONITORING,
ROUTES.VERSION,
ROUTES.ALL_DASHBOARD,
ROUTES.ORG_SETTINGS,
diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss
index 73688ad109..bd4cc3cdb1 100644
--- a/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss
+++ b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss
@@ -58,8 +58,6 @@
.date-time-root {
.ant-popover-inner {
- width: 532px;
- min-height: 334px;
border-radius: 4px !important;
border: 1px solid var(--bg-slate-400);
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2) !important;
@@ -129,12 +127,20 @@
}
.relative-date-time {
- width: 307px;
display: flex;
flex-direction: column;
gap: 35px;
padding: 13px 14px;
+ &.date-picker {
+ width: 480px;
+ height: 430px;
+ }
+
+ &.relative-times {
+ width: 320px;
+ }
+
.relative-date-time-section {
display: flex;
gap: 6px;
diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts
index becd3fed7b..6231505580 100644
--- a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts
+++ b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts
@@ -53,7 +53,7 @@ export const Options: Option[] = [
{ value: '1day', label: 'Last 1 day' },
{ value: '3days', label: 'Last 3 days' },
{ value: '1week', label: 'Last 1 week' },
- { value: 'custom', label: 'Custom...' },
+ { value: 'custom', label: 'Custom' },
];
export interface Option {
@@ -117,6 +117,10 @@ export const routesToSkip = [
ROUTES.ALL_CHANNELS,
ROUTES.USAGE_EXPLORER,
ROUTES.GET_STARTED,
+ ROUTES.GET_STARTED_APPLICATION_MONITORING,
+ ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING,
+ ROUTES.GET_STARTED_LOGS_MANAGEMENT,
+ ROUTES.GET_STARTED_AWS_MONITORING,
ROUTES.VERSION,
ROUTES.ALL_DASHBOARD,
ROUTES.ORG_SETTINGS,
@@ -135,6 +139,9 @@ export const routesToSkip = [
ROUTES.TRACES_EXPLORER,
ROUTES.TRACES_SAVE_VIEWS,
ROUTES.SHORTCUTS,
+ ROUTES.INTEGRATIONS_BASE,
+ ROUTES.INTEGRATIONS_INSTALLED,
+ ROUTES.INTEGRATIONS_MARKETPLACE,
];
export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS];
diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx
index 6e5c0c5b48..3ef5125ad7 100644
--- a/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx
@@ -44,7 +44,6 @@ import { DateTimeRangeType } from '../CustomDateTimeModal';
import {
getDefaultOption,
getOptions,
- LexicalContext,
LocalStorageTimeRange,
Time,
TimeRange,
@@ -319,22 +318,12 @@ function DateTimeSelection({
onLastRefreshHandler();
};
- const onCustomDateHandler = (
- dateTimeRange: DateTimeRangeType,
- lexicalContext?: LexicalContext,
- ): void => {
+ const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => {
if (dateTimeRange !== null) {
const [startTimeMoment, endTimeMoment] = dateTimeRange;
if (startTimeMoment && endTimeMoment) {
- let startTime = startTimeMoment;
- let endTime = endTimeMoment;
- if (
- lexicalContext &&
- lexicalContext === LexicalContext.CUSTOM_DATE_PICKER
- ) {
- startTime = startTime.startOf('day');
- endTime = endTime.endOf('day');
- }
+ const startTime = startTimeMoment;
+ const endTime = endTimeMoment;
setCustomDTPickerVisible(false);
updateTimeInterval('custom', [
startTime.toDate().getTime(),
diff --git a/frontend/src/container/TraceDetail/SelectedSpanDetails/config.ts b/frontend/src/container/TraceDetail/SelectedSpanDetails/config.ts
index 46d0a5fc90..145b79f2ff 100644
--- a/frontend/src/container/TraceDetail/SelectedSpanDetails/config.ts
+++ b/frontend/src/container/TraceDetail/SelectedSpanDetails/config.ts
@@ -49,13 +49,16 @@ export const getTraceToLogsQuery = (
limit: null,
aggregateAttribute: initialAutocompleteData,
aggregateOperator: LogsAggregatorOperator.NOOP,
+ timeAggregation: '',
+ spaceAggregation: '',
+ functions: [],
expression: 'A',
groupBy: [],
having: [],
legend: '',
orderBy: [],
queryName: 'A',
- reduceTo: 'min',
+ reduceTo: 'avg',
stepInterval: getStep({
start: minTime,
end: maxTime,
diff --git a/frontend/src/container/TracesExplorer/ListView/index.tsx b/frontend/src/container/TracesExplorer/ListView/index.tsx
index 4f18bb3a27..f78be8c4de 100644
--- a/frontend/src/container/TracesExplorer/ListView/index.tsx
+++ b/frontend/src/container/TracesExplorer/ListView/index.tsx
@@ -1,4 +1,5 @@
import { ResizeTable } from 'components/ResizeTable';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
@@ -62,6 +63,7 @@ function ListView(): JSX.Element {
selectColumns: options?.selectColumns,
},
},
+ DEFAULT_ENTITY_VERSION,
{
queryKey: [
REACT_QUERY_KEY.GET_QUERY_RANGE,
diff --git a/frontend/src/container/TracesExplorer/QuerySection/index.tsx b/frontend/src/container/TracesExplorer/QuerySection/index.tsx
index 279f57b5df..0bd9515720 100644
--- a/frontend/src/container/TracesExplorer/QuerySection/index.tsx
+++ b/frontend/src/container/TracesExplorer/QuerySection/index.tsx
@@ -53,6 +53,7 @@ function QuerySection(): JSX.Element {
}}
filterConfigs={filterConfigs}
queryComponents={queryComponents}
+ version="v3" // setting this to v3 as we this is rendered in logs explorer
actions={
diff --git a/frontend/src/container/TracesExplorer/TableView/index.tsx b/frontend/src/container/TracesExplorer/TableView/index.tsx
index 11f27f6201..c6544f8eee 100644
--- a/frontend/src/container/TracesExplorer/TableView/index.tsx
+++ b/frontend/src/container/TracesExplorer/TableView/index.tsx
@@ -1,4 +1,5 @@
import { Space } from 'antd';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { QueryTable } from 'container/QueryTable';
@@ -27,6 +28,7 @@ function TableView(): JSX.Element {
dataSource: 'traces',
},
},
+ DEFAULT_ENTITY_VERSION,
{
queryKey: [
REACT_QUERY_KEY.GET_QUERY_RANGE,
diff --git a/frontend/src/container/TracesExplorer/TracesView/index.tsx b/frontend/src/container/TracesExplorer/TracesView/index.tsx
index 21fa41431c..2093881e01 100644
--- a/frontend/src/container/TracesExplorer/TracesView/index.tsx
+++ b/frontend/src/container/TracesExplorer/TracesView/index.tsx
@@ -1,5 +1,6 @@
import { Typography } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
@@ -41,6 +42,7 @@ function TracesView(): JSX.Element {
pagination: paginationQueryData,
},
},
+ DEFAULT_ENTITY_VERSION,
{
queryKey: [
REACT_QUERY_KEY.GET_QUERY_RANGE,
diff --git a/frontend/src/container/TracesTableComponent/TracesTableComponent.styles.scss b/frontend/src/container/TracesTableComponent/TracesTableComponent.styles.scss
index e1ff9ba437..0787972eec 100644
--- a/frontend/src/container/TracesTableComponent/TracesTableComponent.styles.scss
+++ b/frontend/src/container/TracesTableComponent/TracesTableComponent.styles.scss
@@ -1,65 +1,73 @@
.traces-table {
- position: relative;
- display: flex;
- flex-direction: column;
- height: 100%;
+ position: relative;
+ display: flex;
+ flex-direction: column;
+ height: 100%;
- .resize-table {
- height: calc(90% - 5px);
- overflow: scroll;
-
- .ant-table-wrapper .ant-table-tbody >tr >td {
- border: none;
- background-color: transparent;
- color: var(--bg-vanilla-100);
- font-size: 14px;
- font-style: normal;
- font-weight: 400;
- line-height: 18px;
- padding: 10px 8px;
- font-family: Inter;
- cursor: pointer;
- }
+ .resize-table {
+ height: calc(100% - 40px);
+ overflow: scroll;
+ overflow-x: hidden;
- .ant-table-wrapper .ant-table-thead > tr > th {
- font-family: Inter;
- color: var(--bg-vanilla-100);
- background-color: transparent;
- border: none;
- border-bottom: 0.5px solid var(--bg-slate-400);
- font-size: 14px;
- font-style: normal;
- font-weight: 600;
- line-height: 22px;
- letter-spacing: 0.5px;
- padding: 8px;
- }
+ &::-webkit-scrollbar {
+ width: 0.2rem;
+ height: 0.2rem;
+ }
- .ant-table-wrapper .ant-table-thead > tr > th::before {
- display: none;
- }
- }
+ .ant-table-wrapper .ant-table-tbody > tr > td {
+ border: none;
+ background-color: transparent;
+ color: var(--bg-vanilla-100);
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px;
+ padding: 10px 8px;
+ font-family: Inter;
+ cursor: pointer;
+ }
- .controller {
- position: absolute;
- bottom: 5px;
- right: 10px;
- }
+ .ant-table-wrapper .ant-table-thead > tr > th {
+ font-family: Inter;
+ color: var(--bg-vanilla-100);
+ background-color: transparent;
+ border: none;
+ border-bottom: 0.5px solid var(--bg-slate-400);
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 600;
+ line-height: 22px;
+ letter-spacing: 0.5px;
+ padding: 8px;
+ }
+
+ .ant-table-wrapper .ant-table-thead > tr > th::before {
+ display: none;
+ }
+ }
+
+ .controller {
+ display: flex;
+ align-items: center;
+ height: 40px;
+ justify-content: end;
+ padding: 0 8px;
+ }
}
.lightMode {
- .traces-table {
- .resize-table {
- .ant-table-wrapper .ant-table-tbody >tr >td {
- background-color: var(--bg-vanilla-100);
- color: var(--bg-ink-500);
- border-color: rgba(0, 0, 0, 0.06);
- }
- .ant-table-wrapper .ant-table-thead > tr > th {
- background-color: var(--bg-vanilla-300);
- color: var(--bg-ink-500);
- border-color: rgba(0, 0, 0, 0.06);
- }
- }
- }
-}
\ No newline at end of file
+ .traces-table {
+ .resize-table {
+ .ant-table-wrapper .ant-table-tbody > tr > td {
+ background-color: var(--bg-vanilla-100);
+ color: var(--bg-ink-500);
+ border-color: rgba(0, 0, 0, 0.06);
+ }
+ .ant-table-wrapper .ant-table-thead > tr > th {
+ background-color: var(--bg-vanilla-300);
+ color: var(--bg-ink-500);
+ border-color: rgba(0, 0, 0, 0.06);
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/TracesTableComponent/TracesTableComponent.tsx b/frontend/src/container/TracesTableComponent/TracesTableComponent.tsx
index a59303780c..7d2e8e7498 100644
--- a/frontend/src/container/TracesTableComponent/TracesTableComponent.tsx
+++ b/frontend/src/container/TracesTableComponent/TracesTableComponent.tsx
@@ -30,6 +30,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
function TracesTableComponent({
selectedTracesFields,
query,
+ version,
selectedTime,
}: TracesTableComponentProps): JSX.Element {
const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector<
@@ -59,6 +60,7 @@ function TracesTableComponent({
},
variables: getDashboardVariables(selectedDashboard?.data.variables),
},
+ version,
{
queryKey: [
REACT_QUERY_KEY.GET_QUERY_RANGE,
@@ -160,6 +162,7 @@ function TracesTableComponent({
export type TracesTableComponentProps = {
selectedTracesFields: Widgets['selectedTracesFields'];
query: Query;
+ version: string;
selectedTime?: timePreferance;
};
diff --git a/frontend/src/hooks/Integrations/useGetAllIntegrations.ts b/frontend/src/hooks/Integrations/useGetAllIntegrations.ts
new file mode 100644
index 0000000000..c32bbd19e7
--- /dev/null
+++ b/frontend/src/hooks/Integrations/useGetAllIntegrations.ts
@@ -0,0 +1,13 @@
+import { getAllIntegrations } from 'api/Integrations/getAllIntegrations';
+import { AxiosError, AxiosResponse } from 'axios';
+import { useQuery, UseQueryResult } from 'react-query';
+import { AllIntegrationsProps } from 'types/api/integrations/types';
+
+export const useGetAllIntegrations = (): UseQueryResult<
+ AxiosResponse<AllIntegrationsProps>,
+ AxiosError
+> =>
+ useQuery<AxiosResponse<AllIntegrationsProps>, AxiosError>({
+ queryKey: ['Integrations'],
+ queryFn: () => getAllIntegrations(),
+ });
diff --git a/frontend/src/hooks/Integrations/useGetIntegration.ts b/frontend/src/hooks/Integrations/useGetIntegration.ts
new file mode 100644
index 0000000000..05cad6c40d
--- /dev/null
+++ b/frontend/src/hooks/Integrations/useGetIntegration.ts
@@ -0,0 +1,18 @@
+import { getIntegration } from 'api/Integrations/getIntegration';
+import { AxiosError, AxiosResponse } from 'axios';
+import { useQuery, UseQueryResult } from 'react-query';
+import {
+ GetIntegrationPayloadProps,
+ GetIntegrationProps,
+} from 'types/api/integrations/types';
+
+export const useGetIntegration = ({
+ integrationId,
+}: GetIntegrationPayloadProps): UseQueryResult<
+ AxiosResponse<GetIntegrationProps>,
+ AxiosError
+> =>
+ useQuery<AxiosResponse<GetIntegrationProps>, AxiosError>({
+ queryKey: ['Integration', integrationId],
+ queryFn: () => getIntegration({ integrationId }),
+ });
diff --git a/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts b/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts
new file mode 100644
index 0000000000..af58f63996
--- /dev/null
+++ b/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts
@@ -0,0 +1,20 @@
+import { getIntegrationStatus } from 'api/Integrations/getIntegrationStatus';
+import { AxiosError, AxiosResponse } from 'axios';
+import { useQuery, UseQueryResult } from 'react-query';
+import {
+ GetIntegrationPayloadProps,
+ GetIntegrationStatusProps,
+} from 'types/api/integrations/types';
+
+export const useGetIntegrationStatus = ({
+ integrationId,
+ enabled,
+}: GetIntegrationPayloadProps): UseQueryResult<
+ AxiosResponse<GetIntegrationStatusProps>,
+ AxiosError
+> =>
+ useQuery<AxiosResponse<GetIntegrationStatusProps>, AxiosError>({
+ queryKey: ['Integration', integrationId, Date.now()],
+ queryFn: () => getIntegrationStatus({ integrationId }),
+ enabled,
+ });
diff --git a/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx b/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx
index ec1b861664..68e1bc7ae4 100644
--- a/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx
+++ b/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx
@@ -72,6 +72,9 @@ function KeyboardHotkeysProvider({
shortcutKey = shortcutKey + isAltKey + isShiftKey + isMetaKey;
if (shortcuts.current[shortcutKey]) {
+ event.preventDefault();
+ event.stopImmediatePropagation();
+
shortcuts.current[shortcutKey]();
}
};
diff --git a/frontend/src/hooks/logs/useCopyLogLink.ts b/frontend/src/hooks/logs/useCopyLogLink.ts
index 35b4293f51..b663aa750c 100644
--- a/frontend/src/hooks/logs/useCopyLogLink.ts
+++ b/frontend/src/hooks/logs/useCopyLogLink.ts
@@ -11,11 +11,8 @@ import {
useMemo,
useState,
} from 'react';
-import { useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { useCopyToClipboard } from 'react-use';
-import { AppState } from 'store/reducers';
-import { GlobalReducer } from 'types/reducer/globalTime';
import { HIGHLIGHTED_DELAY } from './configs';
import { LogTimeRange, UseCopyLogLink } from './types';
@@ -25,9 +22,6 @@ export const useCopyLogLink = (logId?: string): UseCopyLogLink => {
const { pathname } = useLocation();
const [, setCopy] = useCopyToClipboard();
const { notifications } = useNotifications();
- const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
- (state) => state.globalTime,
- );
const { queryData: timeRange } = useUrlQueryData(
QueryParams.timeRange,
@@ -70,8 +64,8 @@ export const useCopyLogLink = (logId?: string): UseCopyLogLink => {
urlQuery.delete(QueryParams.timeRange);
urlQuery.set(QueryParams.activeLogId, `"${logId}"`);
urlQuery.set(QueryParams.timeRange, range);
- urlQuery.set(QueryParams.startTime, minTime.toString());
- urlQuery.set(QueryParams.endTime, maxTime.toString());
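+ // use the time range persisted in the URL so the copied link reproduces the same window, independent of the global time picker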
+ urlQuery.set(QueryParams.startTime, timeRange?.start.toString() || '');
+ urlQuery.set(QueryParams.endTime, timeRange?.end.toString() || '');
const link = `${window.location.origin}${pathname}?${urlQuery.toString()}`;
@@ -80,16 +74,7 @@ export const useCopyLogLink = (logId?: string): UseCopyLogLink => {
message: 'Copied to clipboard',
});
},
- [
- logId,
- timeRange,
- urlQuery,
- minTime,
- maxTime,
- pathname,
- setCopy,
- notifications,
- ],
+ [logId, timeRange, urlQuery, pathname, setCopy, notifications],
);
useEffect(() => {
diff --git a/frontend/src/hooks/queryBuilder/useAutoComplete.ts b/frontend/src/hooks/queryBuilder/useAutoComplete.ts
index dad262757a..6ac51eb45d 100644
--- a/frontend/src/hooks/queryBuilder/useAutoComplete.ts
+++ b/frontend/src/hooks/queryBuilder/useAutoComplete.ts
@@ -1,3 +1,4 @@
+import { OPERATORS } from 'constants/queryBuilder';
import {
getRemovePrefixFromKey,
getTagToken,
@@ -10,7 +11,7 @@ import { KeyboardEvent, useCallback, useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { useFetchKeysAndValues } from './useFetchKeysAndValues';
-import { useOptions } from './useOptions';
+import { useOptions, WHERE_CLAUSE_CUSTOM_SUFFIX } from './useOptions';
import { useSetCurrentKeyAndOperator } from './useSetCurrentKeyAndOperator';
import { useTag } from './useTag';
import { useTagValidation } from './useTagValidation';
@@ -98,6 +99,23 @@ export const useAutoComplete = (
[handleAddTag, handleClearTag, isMulti, isValidTag, searchValue, tags],
);
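+ // commit any in-progress search text as a tag when the input loses focus; body CONTAINS filters get WHERE_CLAUSE_CUSTOM_SUFFIX appended so they are recognised as the custom where-clause option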
+ const handleOnBlur = (event: React.FocusEvent): void => {
+ event.preventDefault();
+ if (searchValue) {
+ if (
+ key &&
+ !operator &&
+ whereClauseConfig?.customKey === 'body' &&
+ whereClauseConfig.customOp === OPERATORS.CONTAINS
+ ) {
+ const value = `${searchValue}${WHERE_CLAUSE_CUSTOM_SUFFIX}`;
+ handleAddTag(value);
+ return;
+ }
+ handleAddTag(searchValue);
+ }
+ };
+
const options = useOptions(
key,
keys,
@@ -117,6 +135,7 @@ export const useAutoComplete = (
handleClearTag,
handleSelect,
handleKeyDown,
+ handleOnBlur,
options,
tags,
searchValue,
@@ -133,6 +152,7 @@ interface IAutoComplete {
handleClearTag: (value: string) => void;
handleSelect: (value: string) => void;
handleKeyDown: (event: React.KeyboardEvent) => void;
+ handleOnBlur: (event: React.FocusEvent) => void;
options: Option[];
tags: string[];
searchValue: string;
diff --git a/frontend/src/hooks/queryBuilder/useCreateAlerts.tsx b/frontend/src/hooks/queryBuilder/useCreateAlerts.tsx
index 6cf7da047c..71089875a9 100644
--- a/frontend/src/hooks/queryBuilder/useCreateAlerts.tsx
+++ b/frontend/src/hooks/queryBuilder/useCreateAlerts.tsx
@@ -1,5 +1,6 @@
import { getQueryRangeFormat } from 'api/dashboard/queryRangeFormat';
import { SOMETHING_WENT_WRONG } from 'constants/api';
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useNotifications } from 'hooks/useNotifications';
@@ -45,7 +46,9 @@ const useCreateAlerts = (widget?: Widgets): VoidFunction => {
history.push(
`${ROUTES.ALERTS_NEW}?${QueryParams.compositeQuery}=${encodeURIComponent(
JSON.stringify(updatedQuery),
- )}&${QueryParams.panelTypes}=${widget.panelTypes}`,
+ )}&${QueryParams.panelTypes}=${widget.panelTypes}&version=${
+ selectedDashboard?.data.version || DEFAULT_ENTITY_VERSION
+ }`,
);
},
onError: () => {
@@ -59,6 +62,7 @@ const useCreateAlerts = (widget?: Widgets): VoidFunction => {
notifications,
queryRangeMutation,
selectedDashboard?.data.variables,
+ selectedDashboard?.data.version,
widget,
]);
};
diff --git a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
index bd85d8f799..cdcfb3e0c7 100644
--- a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
@@ -15,6 +15,7 @@ import { useQueryBuilder } from './useQueryBuilder';
export const useGetExplorerQueryRange = (
requestData: Query | null,
panelType: PANEL_TYPES | null,
+ version: string,
options?: UseQueryOptions<SuccessResponse<MetricRangePayloadProps>, Error>,
params?: Record<string, unknown>,
isDependentOnQB = true,
@@ -47,6 +48,7 @@ export const useGetExplorerQueryRange = (
query: requestData || initialQueriesMap.metrics,
params,
},
+ version,
{
...options,
retry: false,
diff --git a/frontend/src/hooks/queryBuilder/useGetQueriesRange.ts b/frontend/src/hooks/queryBuilder/useGetQueriesRange.ts
index d7ec04d336..3c92bf8d21 100644
--- a/frontend/src/hooks/queryBuilder/useGetQueriesRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetQueriesRange.ts
@@ -15,6 +15,7 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
export const useGetQueriesRange = (
requestData: GetQueryResultsProps[],
+ version: string,
options: UseQueryOptions<SuccessResponse<MetricRangePayloadProps>, Error>,
): UseQueryResult<SuccessResponse<MetricRangePayloadProps>, Error>[] => {
const queryKey = useMemo(() => {
@@ -26,7 +27,7 @@ export const useGetQueriesRange = (
const queryData = requestData.map((request, index) => ({
queryFn: async (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
- GetMetricQueryRange(request),
+ GetMetricQueryRange(request, version),
...options,
queryKey: [...queryKey, index] as QueryKey,
}));
diff --git a/frontend/src/hooks/queryBuilder/useGetQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetQueryRange.ts
index e832be1c4c..334ee7f628 100644
--- a/frontend/src/hooks/queryBuilder/useGetQueryRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetQueryRange.ts
@@ -11,10 +11,15 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
type UseGetQueryRange = (
requestData: GetQueryResultsProps,
+ version: string,
options?: UseQueryOptions<SuccessResponse<MetricRangePayloadProps>, Error>,
) => UseQueryResult<SuccessResponse<MetricRangePayloadProps>, Error>;
-export const useGetQueryRange: UseGetQueryRange = (requestData, options) => {
+export const useGetQueryRange: UseGetQueryRange = (
+ requestData,
+ version,
+ options,
+) => {
const newRequestData: GetQueryResultsProps = useMemo(
() => ({
...requestData,
@@ -39,7 +44,8 @@ export const useGetQueryRange: UseGetQueryRange = (requestData, options) => {
}, [options?.queryKey, newRequestData]);
return useQuery<SuccessResponse<MetricRangePayloadProps>, Error>({
- queryFn: async ({ signal }) => GetMetricQueryRange(newRequestData, signal),
+ queryFn: async ({ signal }) =>
+ GetMetricQueryRange(requestData, version, signal),
...options,
queryKey,
});
diff --git a/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts
index f3ecd21101..5e5ec70e39 100644
--- a/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts
@@ -18,6 +18,7 @@ export const useGetWidgetQueryRange = (
graphType,
selectedTime,
}: Pick<GetQueryResultsProps, 'graphType' | 'selectedTime'>,
+ version: string,
options?: UseQueryOptions<SuccessResponse<MetricRangePayloadProps>, Error>,
): UseQueryResult<SuccessResponse<MetricRangePayloadProps>, Error> => {
const { selectedTime: globalSelectedInterval } = useSelector<
@@ -37,6 +38,7 @@ export const useGetWidgetQueryRange = (
query: stagedQuery || initialQueriesMap.metrics,
variables: getDashboardVariables(selectedDashboard?.data.variables),
},
+ version,
{
enabled: !!stagedQuery,
retry: false,
diff --git a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
index 799640da4e..7766122a45 100644
--- a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
+++ b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
@@ -1,16 +1,24 @@
+import { ENTITY_VERSION_V4 } from 'constants/app';
import { LEGEND } from 'constants/global';
import {
+ ATTRIBUTE_TYPES,
initialAutocompleteData,
initialQueryBuilderFormValuesMap,
mapOfFormulaToFilters,
mapOfQueryFilters,
PANEL_TYPES,
} from 'constants/queryBuilder';
+import {
+ metricsGaugeSpaceAggregateOperatorOptions,
+ metricsHistogramSpaceAggregateOperatorOptions,
+ metricsSumSpaceAggregateOperatorOptions,
+} from 'constants/queryBuilderOperators';
import {
listViewInitialLogQuery,
listViewInitialTraceQuery,
} from 'container/NewDashboard/ComponentsSlider/constants';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
+import { getMetricsOperatorsByAttributeType } from 'lib/newQueryBuilder/getMetricsOperatorsByAttributeType';
import { getOperatorsBySourceAndPanelType } from 'lib/newQueryBuilder/getOperatorsBySourceAndPanelType';
import { findDataTypeOfOperator } from 'lib/query/findDataTypeOfOperator';
import { useCallback, useEffect, useState } from 'react';
@@ -18,13 +26,14 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
import {
IBuilderFormula,
IBuilderQuery,
+ QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import {
HandleChangeFormulaData,
HandleChangeQueryData,
UseQueryOperations,
} from 'types/common/operations.types';
-import { DataSource } from 'types/common/queryBuilder';
+import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
import { getFormatedLegend } from 'utils/getFormatedLegend';
@@ -34,6 +43,7 @@ export const useQueryOperations: UseQueryOperations = ({
filterConfigs,
formula,
isListViewPanel = false,
+ entityVersion,
}) => {
const {
handleSetQueryData,
@@ -46,6 +56,9 @@ export const useQueryOperations: UseQueryOperations = ({
} = useQueryBuilder();
const [operators, setOperators] = useState<SelectOption<string, string>[]>([]);
+ const [spaceAggregationOptions, setSpaceAggregationOptions] = useState<
+ SelectOption<string, string>[]
+ >([]);
const { dataSource, aggregateOperator } = query;
@@ -104,6 +117,7 @@ export const useQueryOperations: UseQueryOperations = ({
const newQuery: IBuilderQuery = {
...query,
aggregateOperator: value,
+ timeAggregation: value,
having: [],
limit: null,
...(shouldResetAggregateAttribute
@@ -116,6 +130,52 @@ export const useQueryOperations: UseQueryOperations = ({
[index, query, handleSetQueryData],
);
+ const handleSpaceAggregationChange = useCallback(
+ (value: string): void => {
+ const newQuery: IBuilderQuery = {
+ ...query,
+ spaceAggregation: value,
+ };
+
+ handleSetQueryData(index, newQuery);
+ },
+ [index, query, handleSetQueryData],
+ );
+
+ const handleMetricAggregateAtributeTypes = useCallback(
+ (aggregateAttribute: BaseAutocompleteData): any => {
+ const newOperators = getMetricsOperatorsByAttributeType({
+ dataSource: DataSource.METRICS,
+ panelType: panelType || PANEL_TYPES.TIME_SERIES,
+ aggregateAttributeType:
+ (aggregateAttribute.type as ATTRIBUTE_TYPES) || ATTRIBUTE_TYPES.GAUGE,
+ });
+
+ switch (aggregateAttribute.type) {
+ case ATTRIBUTE_TYPES.SUM:
+ setSpaceAggregationOptions(metricsSumSpaceAggregateOperatorOptions);
+ break;
+ case ATTRIBUTE_TYPES.GAUGE:
+ setSpaceAggregationOptions(metricsGaugeSpaceAggregateOperatorOptions);
+ break;
+
+ case ATTRIBUTE_TYPES.HISTOGRAM:
+ setSpaceAggregationOptions(metricsHistogramSpaceAggregateOperatorOptions);
+ break;
+
+ case ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM:
+ setSpaceAggregationOptions(metricsHistogramSpaceAggregateOperatorOptions);
+ break;
+ default:
+ setSpaceAggregationOptions(metricsGaugeSpaceAggregateOperatorOptions);
+ break;
+ }
+
+ setOperators(newOperators);
+ },
+ [panelType],
+ );
+
const handleChangeAggregatorAttribute = useCallback(
(value: BaseAutocompleteData): void => {
const newQuery: IBuilderQuery = {
@@ -124,9 +184,34 @@ export const useQueryOperations: UseQueryOperations = ({
having: [],
};
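+ // for metrics on the v4 query range, default the time aggregation from the metric type (Rate for Sum, Avg for Gauge) and clear any previously selected space aggregation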
+ if (
+ newQuery.dataSource === DataSource.METRICS &&
+ entityVersion === ENTITY_VERSION_V4
+ ) {
+ handleMetricAggregateAtributeTypes(newQuery.aggregateAttribute);
+
+ if (newQuery.aggregateAttribute.type === ATTRIBUTE_TYPES.SUM) {
+ newQuery.aggregateOperator = MetricAggregateOperator.RATE;
+ newQuery.timeAggregation = MetricAggregateOperator.RATE;
+ } else if (newQuery.aggregateAttribute.type === ATTRIBUTE_TYPES.GAUGE) {
+ newQuery.aggregateOperator = MetricAggregateOperator.AVG;
+ newQuery.timeAggregation = MetricAggregateOperator.AVG;
+ } else {
+ newQuery.timeAggregation = '';
+ }
+
+ newQuery.spaceAggregation = '';
+ }
+
handleSetQueryData(index, newQuery);
},
- [index, query, handleSetQueryData],
+ [
+ query,
+ entityVersion,
+ handleSetQueryData,
+ index,
+ handleMetricAggregateAtributeTypes,
+ ],
);
const handleChangeDataSource = useCallback(
@@ -203,6 +288,21 @@ export const useQueryOperations: UseQueryOperations = ({
[formula, handleSetFormulaData, index],
);
+ const handleQueryFunctionsUpdates = useCallback(
+ (functions: QueryFunctionProps[]): void => {
+ const newQuery: IBuilderQuery = {
+ ...query,
+ };
+
+ if (newQuery.dataSource === DataSource.METRICS) {
+ newQuery.functions = functions;
+ }
+
+ handleSetQueryData(index, newQuery);
+ },
+ [query, handleSetQueryData, index],
+ );
+
const isMetricsDataSource = query.dataSource === DataSource.METRICS;
const isTracePanelType = panelType === PANEL_TYPES.TRACE;
@@ -210,15 +310,26 @@ export const useQueryOperations: UseQueryOperations = ({
useEffect(() => {
if (initialDataSource && dataSource !== initialDataSource) return;
- const initialOperators = getOperatorsBySourceAndPanelType({
- dataSource,
- panelType: panelType || PANEL_TYPES.TIME_SERIES,
- });
+ if (
+ dataSource === DataSource.METRICS &&
+ query &&
+ query.aggregateAttribute &&
+ entityVersion === ENTITY_VERSION_V4
+ ) {
+ handleMetricAggregateAtributeTypes(query.aggregateAttribute);
+ } else {
+ const initialOperators = getOperatorsBySourceAndPanelType({
+ dataSource,
+ panelType: panelType || PANEL_TYPES.TIME_SERIES,
+ });
- if (JSON.stringify(operators) === JSON.stringify(initialOperators)) return;
+ if (JSON.stringify(operators) === JSON.stringify(initialOperators)) return;
- setOperators(initialOperators);
- }, [dataSource, initialDataSource, panelType, operators]);
+ setOperators(initialOperators);
+ }
+
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [dataSource, initialDataSource, panelType, operators, entityVersion]);
useEffect(() => {
const additionalFilters = getNewListOfAdditionalFilters(dataSource, true);
@@ -236,13 +347,16 @@ export const useQueryOperations: UseQueryOperations = ({
isTracePanelType,
isMetricsDataSource,
operators,
+ spaceAggregationOptions,
listOfAdditionalFilters,
handleChangeOperator,
+ handleSpaceAggregationChange,
handleChangeAggregatorAttribute,
handleChangeDataSource,
handleDeleteQuery,
handleChangeQueryData,
listOfAdditionalFormulaFilters,
handleChangeFormulaData,
+ handleQueryFunctionsUpdates,
};
};
diff --git a/frontend/src/hooks/useLogsData.ts b/frontend/src/hooks/useLogsData.ts
index 6105c03cbf..bad1e53200 100644
--- a/frontend/src/hooks/useLogsData.ts
+++ b/frontend/src/hooks/useLogsData.ts
@@ -1,3 +1,4 @@
+import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { QueryParams } from 'constants/query';
import {
initialQueryBuilderFormValues,
@@ -126,6 +127,7 @@ export const useLogsData = ({
const { data, isFetching } = useGetExplorerQueryRange(
requestData,
panelType,
+ DEFAULT_ENTITY_VERSION,
{
keepPreviousData: true,
enabled: !isLimit && !!requestData,
diff --git a/frontend/src/lib/dashboard/getQueryResults.ts b/frontend/src/lib/dashboard/getQueryResults.ts
index ac012ce3a7..64b749e45c 100644
--- a/frontend/src/lib/dashboard/getQueryResults.ts
+++ b/frontend/src/lib/dashboard/getQueryResults.ts
@@ -18,11 +18,16 @@ import { prepareQueryRangePayload } from './prepareQueryRangePayload';
export async function GetMetricQueryRange(
props: GetQueryResultsProps,
+ version: string,
signal?: AbortSignal,
): Promise> {
const { legendMap, queryPayload } = prepareQueryRangePayload(props);
- const response = await getMetricsQueryRange(queryPayload, signal);
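+ // default to the v3 query_range API when the caller does not pass an explicit version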
+ const response = await getMetricsQueryRange(
+ queryPayload,
+ version || 'v3',
+ signal,
+ );
if (response.statusCode >= 400) {
throw new Error(
diff --git a/frontend/src/lib/logql/parser.ts b/frontend/src/lib/logql/parser.ts
index 46fe02930b..a067a586ea 100644
--- a/frontend/src/lib/logql/parser.ts
+++ b/frontend/src/lib/logql/parser.ts
@@ -144,8 +144,6 @@ export const parseQuery = (queryString) => {
];
}
}
-
- // console.log(parsedRaw);
return parsedRaw;
};
diff --git a/frontend/src/lib/newQueryBuilder/getMetricsOperatorsByAttributeType.ts b/frontend/src/lib/newQueryBuilder/getMetricsOperatorsByAttributeType.ts
new file mode 100644
index 0000000000..79e110e084
--- /dev/null
+++ b/frontend/src/lib/newQueryBuilder/getMetricsOperatorsByAttributeType.ts
@@ -0,0 +1,31 @@
+import {
+ ATTRIBUTE_TYPES,
+ metricsOperatorsByType,
+ PANEL_TYPES,
+} from 'constants/queryBuilder';
+import { metricsEmptyTimeAggregateOperatorOptions } from 'constants/queryBuilderOperators';
+import { DataSource } from 'types/common/queryBuilder';
+import { SelectOption } from 'types/common/select';
+
+type GetQueryOperatorsParams = {
+ dataSource: DataSource;
+ panelType: PANEL_TYPES;
+ aggregateAttributeType: ATTRIBUTE_TYPES;
+};
+
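+// Sum and Gauge metrics get their type-specific time aggregation operators; other metric types fall back to the empty time-aggregate operator options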
+export const getMetricsOperatorsByAttributeType = ({
+ dataSource,
+ aggregateAttributeType,
+}: GetQueryOperatorsParams): SelectOption<string, string>[] => {
+ if (dataSource === DataSource.METRICS && aggregateAttributeType) {
+ if (aggregateAttributeType === ATTRIBUTE_TYPES.SUM) {
+ return metricsOperatorsByType.Sum;
+ }
+
+ if (aggregateAttributeType === ATTRIBUTE_TYPES.GAUGE) {
+ return metricsOperatorsByType.Gauge;
+ }
+ }
+
+ return metricsEmptyTimeAggregateOperatorOptions;
+};
diff --git a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
index dae5bbdfd5..50f6c5fbc4 100644
--- a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
+++ b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
@@ -6,6 +6,7 @@ import './uPlotLib.styles.scss';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { FullViewProps } from 'container/GridCardLayout/GridCard/FullView/types';
+import { saveLegendEntriesToLocalStorage } from 'container/GridCardLayout/GridCard/FullView/utils';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { Dimensions } from 'hooks/useDimensions';
import { convertValue } from 'lib/getConvertedValue';
@@ -203,6 +204,11 @@ export const getUPlotChartOptions = ({
newGraphVisibilityStates.fill(false);
newGraphVisibilityStates[index + 1] = true;
}
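+ // persist the updated legend selection so it is restored the next time this panel is rendered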
+ saveLegendEntriesToLocalStorage({
+ options: self,
+ graphVisibilityState: newGraphVisibilityStates,
+ name: id || '',
+ });
return newGraphVisibilityStates;
});
}
diff --git a/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts b/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts
index 56a6f1e333..7dfbbe9b47 100644
--- a/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts
+++ b/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts
@@ -18,8 +18,9 @@ function onClickPlugin(opts: OnClickPluginOpts): uPlot.Plugin {
const mouseY = event.offsetY + 40;
// Convert pixel positions to data values
- const xValue = u.posToVal(mouseX, 'x');
- const yValue = u.posToVal(mouseY, 'y');
+ // do not use mouseX and mouseY here as it offsets the timestamp as well
+ const xValue = u.posToVal(event.offsetX, 'x');
+ const yValue = u.posToVal(event.offsetY, 'y');
opts.onClick(xValue, yValue, mouseX, mouseY);
};
diff --git a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts
index 713bf7958d..4ec3677dfb 100644
--- a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts
+++ b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts
@@ -18,6 +18,7 @@ interface UplotTooltipDataProps {
value: number;
tooltipValue: string;
textContent: string;
+ queryName: string;
}
const generateTooltipContent = (
@@ -26,6 +27,7 @@ const generateTooltipContent = (
idx: number,
yAxisUnit?: string,
series?: uPlot.Options['series'],
+ isBillingUsageGraphs?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): HTMLElement => {
const container = document.createElement('div');
@@ -35,6 +37,7 @@ const generateTooltipContent = (
let tooltipTitle = '';
const formattedData: Record<string, UplotTooltipDataProps> = {};
+ const duplicatedLegendLabels: Record<string, boolean> = {};
function sortTooltipContentBasedOnValue(
tooltipDataObj: Record<string, UplotTooltipDataProps>,
@@ -47,18 +50,50 @@ const generateTooltipContent = (
if (Array.isArray(series) && series.length > 0) {
series.forEach((item, index) => {
if (index === 0) {
- tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY HH:mm:ss');
+ if (isBillingUsageGraphs) {
+ tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY');
+ } else {
+ tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY HH:mm:ss');
+ }
} else if (item.show) {
- const { metric = {}, queryName = '', legend = '' } =
- seriesList[index - 1] || {};
+ const {
+ metric = {},
+ queryName = '',
+ legend = '',
+ quantity = [],
+ unit = '',
+ } = seriesList[index - 1] || {};
const value = data[index][idx];
+ const dataIngested = quantity[idx];
const label = getLabelName(metric, queryName || '', legend || '');
const color = generateColor(label, themeColors.chartcolors);
+ let tooltipItemLabel = label;
+
if (Number.isFinite(value)) {
const tooltipValue = getToolTipValue(value, yAxisUnit);
+ const dataIngestedFormated = getToolTipValue(dataIngested);
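+ // if another series already used this legend label, prefix both entries with their query names so the tooltip rows stay distinguishable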
+ if (
+ duplicatedLegendLabels[label] ||
+ Object.prototype.hasOwnProperty.call(formattedData, label)
+ ) {
+ duplicatedLegendLabels[label] = true;
+ const tempDataObj = formattedData[label];
+
+ if (tempDataObj) {
+ const newLabel = `${tempDataObj.queryName}: ${tempDataObj.label}`;
+
+ tempDataObj.textContent = `${newLabel} : ${tempDataObj.tooltipValue}`;
+
+ formattedData[newLabel] = tempDataObj;
+
+ delete formattedData[label];
+ }
+
+ tooltipItemLabel = `${queryName}: ${label}`;
+ }
const dataObj = {
show: item.show || false,
@@ -69,11 +104,15 @@ const generateTooltipContent = (
focus: item?._focus || false,
value,
tooltipValue,
- textContent: `${label} : ${tooltipValue}`,
+ queryName,
+ textContent: isBillingUsageGraphs
+ ? `${tooltipItemLabel} : $${tooltipValue} - ${dataIngestedFormated} ${unit}`
+ : `${tooltipItemLabel} : ${tooltipValue}`,
};
tooltipCount += 1;
- formattedData[label] = dataObj;
+
+ formattedData[tooltipItemLabel] = dataObj;
}
}
});
@@ -143,6 +182,7 @@ const generateTooltipContent = (
const tooltipPlugin = (
apiResponse: MetricRangePayloadProps | undefined,
yAxisUnit?: string,
+ isBillingUsageGraphs?: boolean,
): any => {
let over: HTMLElement;
let bound: HTMLElement;
@@ -203,6 +243,7 @@ const tooltipPlugin = (
idx,
yAxisUnit,
u.series,
+ isBillingUsageGraphs,
);
overlay.appendChild(content);
placement(overlay, anchor, 'right', 'start', { bound });
diff --git a/frontend/src/mocks-server/handlers.ts b/frontend/src/mocks-server/handlers.ts
index 25564363e4..af25738aa7 100644
--- a/frontend/src/mocks-server/handlers.ts
+++ b/frontend/src/mocks-server/handlers.ts
@@ -11,6 +11,10 @@ export const handlers = [
res(ctx.status(200), ctx.json(queryRangeSuccessResponse)),
),
+ rest.post('http://localhost/api/v4/query_range', (req, res, ctx) =>
+ res(ctx.status(200), ctx.json(queryRangeSuccessResponse)),
+ ),
+
rest.post('http://localhost/api/v1/services', (req, res, ctx) =>
res(ctx.status(200), ctx.json(serviceSuccessResponse)),
),
diff --git a/frontend/src/pages/Billing/BillingPage.styles.scss b/frontend/src/pages/Billing/BillingPage.styles.scss
index ced1d4d055..bb6bd3b529 100644
--- a/frontend/src/pages/Billing/BillingPage.styles.scss
+++ b/frontend/src/pages/Billing/BillingPage.styles.scss
@@ -2,4 +2,6 @@
display: flex;
width: 100%;
color: #fff;
+ justify-content: center;
+ align-items: center;
}
diff --git a/frontend/src/pages/ChannelsEdit/index.tsx b/frontend/src/pages/ChannelsEdit/index.tsx
index 8a578c06e0..9925c84849 100644
--- a/frontend/src/pages/ChannelsEdit/index.tsx
+++ b/frontend/src/pages/ChannelsEdit/index.tsx
@@ -81,6 +81,15 @@ function ChannelsEdit(): JSX.Element {
};
}
+ if (value && 'email_configs' in value) {
+ const emailConfig = value.email_configs[0];
+ channel = emailConfig;
+ return {
+ type: ChannelType.Email,
+ channel,
+ };
+ }
+
if (value && 'webhook_configs' in value) {
const webhookConfig = value.webhook_configs[0];
channel = webhookConfig;
diff --git a/frontend/src/pages/Integrations/Header.tsx b/frontend/src/pages/Integrations/Header.tsx
new file mode 100644
index 0000000000..f6b8592762
--- /dev/null
+++ b/frontend/src/pages/Integrations/Header.tsx
@@ -0,0 +1,37 @@
+import './Integrations.styles.scss';
+
+import { Color } from '@signozhq/design-tokens';
+import { Input, Typography } from 'antd';
+import { Search } from 'lucide-react';
+import { Dispatch, SetStateAction } from 'react';
+
+interface HeaderProps {
+ searchTerm: string;
+ setSearchTerm: Dispatch<SetStateAction<string>>;
+}
+
+function Header(props: HeaderProps): JSX.Element {
+ const { searchTerm, setSearchTerm } = props;
+
+ const handleSearch = (e: React.ChangeEvent<HTMLInputElement>): void => {
+ setSearchTerm(e.target.value);
+ };
+ return (
+
+ Integrations
+
+ Manage Integrations for this workspace
+
+
+ }
+ value={searchTerm}
+ onChange={handleSearch}
+ className="integrations-search-input"
+ />
+
+ );
+}
+
+export default Header;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx
new file mode 100644
index 0000000000..6083489b58
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContent.tsx
@@ -0,0 +1,79 @@
+import './IntegrationDetailPage.styles.scss';
+
+import { Button, Tabs, TabsProps, Typography } from 'antd';
+import { Drum, Hammer, Table2 } from 'lucide-react';
+import { IntegrationDetailedProps } from 'types/api/integrations/types';
+
+import Configure from './IntegrationDetailContentTabs/Configure';
+import DataCollected from './IntegrationDetailContentTabs/DataCollected';
+import Overview from './IntegrationDetailContentTabs/Overview';
+
+interface IntegrationDetailContentProps {
+ activeDetailTab: string;
+ integrationData: IntegrationDetailedProps;
+}
+
+function IntegrationDetailContent(
+ props: IntegrationDetailContentProps,
+): JSX.Element {
+ const { activeDetailTab, integrationData } = props;
+ const items: TabsProps['items'] = [
+ {
+ key: 'overview',
+ label: (
+ }
+ >
+ Overview
+
+ ),
+ children: (
+
+ ),
+ },
+ {
+ key: 'configuration',
+ label: (
+ }
+ >
+ Configure
+
+ ),
+ children: ,
+ },
+ {
+ key: 'dataCollected',
+ label: (
+ }
+ >
+ Data Collected
+
+ ),
+ children: (
+
+ ),
+ },
+ ];
+ return (
+
+
+
+ );
+}
+
+export default IntegrationDetailContent;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx
new file mode 100644
index 0000000000..ede3b41137
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Configure.tsx
@@ -0,0 +1,48 @@
+import './IntegrationDetailContentTabs.styles.scss';
+
+import { Button, Tooltip, Typography } from 'antd';
+import cx from 'classnames';
+import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
+import { useState } from 'react';
+
+interface ConfigurationProps {
+ configuration: Array<{ title: string; instructions: string }>;
+}
+
+function Configure(props: ConfigurationProps): JSX.Element {
+ // TODO Markdown renderer support once instructions are ready
+ const { configuration } = props;
+ const [selectedConfigStep, setSelectedConfigStep] = useState(0);
+
+ const handleMenuClick = (index: number): void => {
+ setSelectedConfigStep(index);
+ };
+ return (
+
+
+ {configuration.map((config, index) => (
+
+ handleMenuClick(index)}
+ >
+ {config.title}
+
+
+ ))}
+
+
+
+
+
+ );
+}
+
+export default Configure;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx
new file mode 100644
index 0000000000..a3c387dc3a
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/DataCollected.tsx
@@ -0,0 +1,85 @@
+import './IntegrationDetailContentTabs.styles.scss';
+
+import { Table, Typography } from 'antd';
+import { BarChart2, ScrollText } from 'lucide-react';
+
+interface DataCollectedProps {
+ logsData: Array;
+ metricsData: Array;
+}
+
+function DataCollected(props: DataCollectedProps): JSX.Element {
+ const { logsData, metricsData } = props;
+ const logsColumns = [
+ {
+ title: 'Name',
+ dataIndex: 'name',
+ key: 'name',
+ },
+ {
+ title: 'Path',
+ dataIndex: 'path',
+ key: 'path',
+ },
+ {
+ title: 'Type',
+ dataIndex: 'type',
+ key: 'type',
+ },
+ ];
+
+ const metricsColumns = [
+ {
+ title: 'Name',
+ dataIndex: 'name',
+ key: 'name',
+ },
+ {
+ title: 'Type',
+ dataIndex: 'type',
+ key: 'type',
+ },
+ {
+ title: 'Unit',
+ dataIndex: 'unit',
+ key: 'unit',
+ },
+ ];
+
+ return (
+
+
+
+
+ Logs
+
+
+ index % 2 === 0 ? 'table-row-dark' : ''
+ }
+ dataSource={logsData}
+ pagination={{ pageSize: 3 }}
+ className="logs-section-table"
+ />
+
+
+
+
+ Metrics
+
+
+ index % 2 === 0 ? 'table-row-dark' : ''
+ }
+ dataSource={metricsData}
+ pagination={{ pageSize: 3 }}
+ className="metrics-section-table"
+ />
+
+
+ );
+}
+
+export default DataCollected;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss
new file mode 100644
index 0000000000..8340d0d4c0
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/IntegrationDetailContentTabs.styles.scss
@@ -0,0 +1,296 @@
+.integration-detail-overview {
+ display: flex;
+
+ .integration-detail-overview-left-container {
+ display: flex;
+ flex-direction: column;
+ width: 25%;
+ gap: 26px;
+ border-right: 1px solid var(--bg-slate-500);
+ padding: 16px 0;
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 11px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 16px; /* 145.455% */
+ letter-spacing: 0.44px;
+ text-transform: uppercase;
+
+ .integration-detail-overview-category {
+ display: flex;
+ flex-direction: column;
+
+ .heading {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 11px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 16px; /* 145.455% */
+ letter-spacing: 0.44px;
+ text-transform: uppercase;
+ }
+
+ .category-tabs {
+ display: flex;
+ gap: 6px;
+ flex-flow: wrap;
+ margin-top: 12px;
+
+ .category-tab {
+ padding: 2px 8px;
+ border-radius: 4px;
+ border: 1px solid rgba(173, 127, 88, 0.2);
+ background: rgba(173, 127, 88, 0.1);
+ color: var(--bg-sienna-400);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ text-transform: none;
+ }
+ }
+ }
+
+ .integration-detail-overview-assets {
+ display: flex;
+ flex-direction: column;
+
+ .heading {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 11px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 16px; /* 145.455% */
+ letter-spacing: 0.44px;
+ text-transform: uppercase;
+ }
+
+ .assets-list {
+ margin-left: 5px;
+ margin-top: 12px;
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 22px; /* 157.143% */
+ letter-spacing: -0.07px;
+ padding-inline-start: 16px !important;
+ text-transform: none;
+ }
+ }
+ }
+
+ .integration-detail-overview-right-container {
+ width: 75%;
+ padding: 16px 0 0 16px;
+ max-height: 600px;
+ overflow-y: auto;
+ }
+}
+
+.integration-data-collected {
+ display: flex;
+ flex-direction: column;
+ gap: 32px;
+ margin-top: 8px;
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+
+ .logs-section {
+ display: flex;
+ flex-direction: column;
+ gap: 8px;
+
+ .table-row-dark {
+ background: rgba(255, 255, 255, 0.01);
+ }
+
+ .logs-section-table {
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-400);
+ background: var(--bg-ink-400);
+
+ .ant-table-thead {
+ text-transform: uppercase;
+ }
+ .ant-table-cell {
+ background: unset !important;
+ border-bottom: none !important;
+ }
+
+ .ant-table-cell::before {
+ background-color: unset !important;
+ }
+ }
+
+ .logs-heading {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ padding: 4px 6px;
+ }
+ }
+
+ .metrics-section {
+ display: flex;
+ flex-direction: column;
+ gap: 8px;
+
+ .table-row-dark {
+ background: rgba(255, 255, 255, 0.01);
+ }
+
+ .metrics-section-table {
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-400);
+ background: var(--bg-ink-400);
+
+ .ant-table-thead {
+ text-transform: uppercase;
+ }
+
+ .ant-table-cell {
+ background: unset !important;
+ border-bottom: none !important;
+ }
+
+ .ant-table-cell::before {
+ background-color: unset !important;
+ }
+ }
+
+ .metrics-heading {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ padding: 4px 6px;
+ }
+ }
+}
+
+.integration-detail-configure {
+ display: flex;
+
+ .configure-menu {
+ display: flex;
+ flex-direction: column;
+ width: 25%;
+ padding: 16px 16px 0px 0px;
+ border-right: 1px solid var(--bg-slate-500);
+ gap: 8px;
+
+ .configure-menu-item {
+ padding: 4px 8px;
+ text-align: start;
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px; /* 128.571% */
+ }
+
+ .configure-menu-item:hover {
+ background-color: rgba(255, 255, 255, 0.08);
+ }
+
+ .active {
+ color: rgba(255, 255, 255, 0.85);
+ background-color: rgba(255, 255, 255, 0.08);
+ }
+ }
+
+ .markdown-container {
+ width: 75%;
+ padding: 16px 0px 0px 16px;
+ max-height: 600px;
+ overflow-y: auto;
+ }
+}
+
+.lightMode {
+ .integration-detail-overview {
+ .integration-detail-overview-left-container {
+ border-right: 1px solid var(--bg-vanilla-400);
+
+ color: var(--bg-slate-100);
+
+ .integration-detail-overview-category {
+ .heading {
+ color: var(--bg-slate-100);
+ }
+ .category-tabs {
+ .category-tab {
+ border: 1px solid var(--bg-sienna-600);
+ background: rgba(173, 127, 88, 0.1);
+ color: var(--bg-sienna-500);
+ }
+ }
+ }
+
+ .integration-detail-overview-assets {
+ .heading {
+ color: var(--bg-slate-100);
+ }
+ .assets-list {
+ color: var(--bg-slate-100);
+ }
+ }
+ }
+ }
+
+ .integration-data-collected {
+ color: var(--bg-vanilla-400);
+
+ .logs-section {
+ .table-row-dark {
+ background: rgba(255, 255, 255, 0.01);
+ }
+
+ .logs-section-table {
+ border: 1px solid var(--bg-vanilla-400);
+ background: var(--bg-vanilla-300);
+ }
+ }
+
+ .metrics-section {
+ .table-row-dark {
+ background: rgba(255, 255, 255, 0.01);
+ }
+
+ .metrics-section-table {
+ border: 1px solid var(--bg-vanilla-400);
+ background: var(--bg-vanilla-300);
+ }
+ }
+ }
+
+ .integration-detail-configure {
+ .configure-menu {
+ border-right: 1px solid var(--bg-vanilla-400);
+
+ .configure-menu-item {
+ color: var(--bg-vanilla-100);
+ }
+ .configure-menu-item:hover {
+ background-color: var(--bg-vanilla-200);
+ }
+
+ .active {
+ color: rgba(255, 255, 255, 0.85);
+ background-color: var(--bg-vanilla-200);
+ }
+ }
+ }
+}
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Overview.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Overview.tsx
new file mode 100644
index 0000000000..5160115e12
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailContentTabs/Overview.tsx
@@ -0,0 +1,63 @@
+import './IntegrationDetailContentTabs.styles.scss';
+
+import { Typography } from 'antd';
+import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
+
+interface OverviewProps {
+ categories: string[];
+ assets: {
+ logs: {
+ pipelines: Array;
+ };
+ dashboards: Array;
+ alerts: Array;
+ };
+ overviewContent: string;
+}
+
+function Overview(props: OverviewProps): JSX.Element {
+ const { categories, assets, overviewContent } = props;
+ const assetsCount = [
+ assets?.logs?.pipelines?.length || 0,
+ assets?.dashboards?.length || 0,
+ assets?.alerts?.length || 0,
+ ];
+
+ const assetLabelMap = ['Pipelines', 'Dashboards', 'Alerts'];
+ return (
+
+
+
+
Category
+
+ {categories.map((category) => (
+
+ {category}
+
+ ))}
+
+
+
+
Assets
+
+ {assetsCount.map((count, index) => {
+ if (count === 0) {
+ return undefined;
+ }
+ return (
+
+ {count} {assetLabelMap[index]}
+
+ );
+ })}
+
+
+
+
+
+
+
+ );
+}
+
+export default Overview;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx
new file mode 100644
index 0000000000..34f5e612bf
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailHeader.tsx
@@ -0,0 +1,190 @@
+/* eslint-disable no-nested-ternary */
+import './IntegrationDetailPage.styles.scss';
+
+import { Button, Modal, Typography } from 'antd';
+import installIntegration from 'api/Integrations/installIntegration';
+import { SOMETHING_WENT_WRONG } from 'constants/api';
+import dayjs from 'dayjs';
+import { useNotifications } from 'hooks/useNotifications';
+import { ArrowLeftRight, Check } from 'lucide-react';
+import { useState } from 'react';
+import { useMutation } from 'react-query';
+import { IntegrationStatusProps } from 'types/api/integrations/types';
+
+import TestConnection, { ConnectionStates } from './TestConnection';
+
+interface IntegrationDetailHeaderProps {
+ id: string;
+ title: string;
+ description: string;
+ icon: string;
+ refetchIntegrationDetails: () => void;
+ connectionState: ConnectionStates;
+ connectionData: IntegrationStatusProps['connection_status'];
+}
+function IntegrationDetailHeader(
+ props: IntegrationDetailHeaderProps,
+): JSX.Element {
+ const {
+ id,
+ title,
+ icon,
+ description,
+ connectionState,
+ connectionData,
+ refetchIntegrationDetails,
+ } = props;
+ const [isModalOpen, setIsModalOpen] = useState(false);
+
+ const { notifications } = useNotifications();
+
+ const showModal = (): void => {
+ setIsModalOpen(true);
+ };
+
+ const handleOk = (): void => {
+ setIsModalOpen(false);
+ };
+
+ const handleCancel = (): void => {
+ setIsModalOpen(false);
+ };
+
+ const { mutate, isLoading: isInstallLoading } = useMutation(
+ installIntegration,
+ {
+ onSuccess: () => {
+ refetchIntegrationDetails();
+ },
+ onError: () => {
+ notifications.error({
+ message: SOMETHING_WENT_WRONG,
+ });
+ },
+ },
+ );
+
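+ // show whichever signal (logs or metrics) received data most recently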
+ let latestData: {
+ last_received_ts_ms: number | null;
+ last_received_from: string | null;
+ } = {
+ last_received_ts_ms: null,
+ last_received_from: null,
+ };
+
+ if (
+ connectionData.logs?.last_received_ts_ms &&
+ connectionData.metrics?.last_received_ts_ms
+ ) {
+ if (
+ connectionData.logs.last_received_ts_ms >
+ connectionData.metrics.last_received_ts_ms
+ ) {
+ latestData = {
+ last_received_ts_ms: connectionData.logs.last_received_ts_ms,
+ last_received_from: connectionData.logs.last_received_from,
+ };
+ } else {
+ latestData = {
+ last_received_ts_ms: connectionData.metrics.last_received_ts_ms,
+ last_received_from: connectionData.metrics.last_received_from,
+ };
+ }
+ } else if (connectionData.logs?.last_received_ts_ms) {
+ latestData = {
+ last_received_ts_ms: connectionData.logs.last_received_ts_ms,
+ last_received_from: connectionData.logs.last_received_from,
+ };
+ } else if (connectionData.metrics?.last_received_ts_ms) {
+ latestData = {
+ last_received_ts_ms: connectionData.metrics.last_received_ts_ms,
+ last_received_from: connectionData.metrics.last_received_from,
+ };
+ }
+ return (
+
+
+
+
+
+
+
+ {title}
+ {description}
+
+
+
}
+ disabled={isInstallLoading}
+ onClick={(): void => {
+ if (connectionState === ConnectionStates.NotInstalled) {
+ mutate({ integration_id: id, config: {} });
+ } else {
+ showModal();
+ }
+ }}
+ >
+ {connectionState === ConnectionStates.NotInstalled
+ ? `Connect ${title}`
+ : `Test Connection`}
+
+
+
+ {connectionState !== ConnectionStates.NotInstalled && (
+
+ )}
+
+
}}
+ cancelButtonProps={{ style: { display: 'none' } }}
+ >
+
+
+ {connectionState === ConnectionStates.Connected ||
+ connectionState === ConnectionStates.NoDataSinceLong ? (
+ <>
+
+
+ Last recieved from
+
+
+ {latestData.last_received_from}
+
+
+
+
+ Last recieved at
+
+
+ {latestData.last_received_ts_ms
+ ? dayjs(latestData.last_received_ts_ms).format('DD MMM YYYY HH:mm')
+ : ''}
+
+
+ >
+ ) : connectionState === ConnectionStates.TestingConnection ? (
+
+
+ After adding the {title} integration, you need to manually configure
+ your Redis data source to start sending data to SigNoz.
+
+
+ The status bar above would turn green if we are successfully receiving
+ the data.
+
+
+ ) : null}
+
+
+
+ );
+}
+
+export default IntegrationDetailHeader;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss
new file mode 100644
index 0000000000..d9982c3aab
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss
@@ -0,0 +1,652 @@
+.integration-detail-content {
+ display: flex;
+ flex-direction: column;
+ gap: 16px;
+ margin: 12px 0px 20px 0px;
+
+ .error-container {
+ display: flex;
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+ align-items: center;
+ justify-content: center;
+ flex-direction: column;
+
+ .error-content {
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ height: 300px;
+ gap: 15px;
+
+ .error-btns {
+ display: flex;
+ flex-direction: row;
+ gap: 16px;
+ align-items: center;
+
+ .retry-btn {
+ display: flex;
+ align-items: center;
+ }
+
+ .contact-support {
+ display: flex;
+ align-items: center;
+ gap: 4px;
+ cursor: pointer;
+
+ .text {
+ color: var(--text-robin-400);
+ font-weight: 500;
+ }
+ }
+ }
+
+ .error-state-svg {
+ height: 40px;
+ width: 40px;
+ }
+ }
+ }
+
+ .loading-integration-details {
+ display: flex;
+ height: 400px;
+ justify-content: center;
+ align-items: center;
+ }
+
+ .all-integrations-btn {
+ width: fit-content;
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ height: 24px;
+ padding-left: 0px;
+ color: #c0c1c3;
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px; /* 128.571% */
+ }
+
+ .all-integrations-btn:hover {
+ &.ant-btn-text {
+ background-color: unset !important;
+ }
+ }
+
+ .integration-connection-header {
+ display: flex;
+ flex-direction: column;
+ padding: 16px;
+ gap: 12px;
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+
+ .integration-detail-header {
+ display: flex;
+ gap: 10px;
+ justify-content: space-between;
+
+ .image-container {
+ height: 40px;
+ width: 40px;
+ flex-shrink: 0;
+ border-radius: 2px;
+ border: 1px solid var(--bg-ink-50);
+ background: var(--bg-ink-300);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+
+ .image {
+ height: 24px;
+ width: 24px;
+ }
+ }
+ .details {
+ display: flex;
+ flex-direction: column;
+ .heading {
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ }
+
+ .description {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px; /* 150% */
+ }
+ }
+
+ .configure-btn {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ align-self: flex-start;
+ gap: 2px;
+ flex-shrink: 0;
+ min-width: 143px;
+ height: 30px;
+ padding: 6px;
+ border-radius: 2px;
+ border: 1px solid var(--bg-ink-50);
+ background: var(--bg-robin-500);
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 10px; /* 83.333% */
+ letter-spacing: 0.12px;
+ }
+ }
+
+ .connection-container {
+ padding: 0 18px;
+ height: 37px;
+ display: flex;
+ align-items: center;
+
+ .connection-text {
+ margin: 0px;
+ padding: 0px 0px 0px 10px;
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 22px; /* 157.143% */
+ letter-spacing: -0.07px;
+ }
+ }
+
+ .testingConnection {
+ border-radius: 4px;
+ border: 1px solid rgba(255, 205, 86, 0.1);
+ background: rgba(255, 205, 86, 0.1);
+ color: var(--bg-amber-400);
+ }
+
+ .connected {
+ border-radius: 4px;
+ border: 1px solid rgba(37, 225, 146, 0.1);
+ background: rgba(37, 225, 146, 0.1);
+ color: var(--bg-forest-400);
+ }
+
+ .connectionFailed {
+ border-radius: 4px;
+ border: 1px solid rgba(218, 85, 101, 0.2);
+ background: rgba(218, 85, 101, 0.06);
+ color: var(--bg-cherry-500);
+ }
+
+ .noDataSinceLong {
+ border-radius: 4px;
+ border: 1px solid rgba(78, 116, 248, 0.1);
+ background: rgba(78, 116, 248, 0.1);
+ color: var(--bg-robin-400);
+ }
+ }
+
+ .integration-detail-container {
+ border-radius: 6px;
+ padding: 10px 16px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400, #121317);
+
+ .integration-tab-btns {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ padding: 8px 8px 18px 8px !important;
+
+ .typography {
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ }
+ }
+
+ .integration-tab-btns:hover {
+ &.ant-btn-text {
+ background-color: unset !important;
+ }
+ }
+
+ .ant-tabs-nav-list {
+ gap: 24px;
+ }
+
+ .ant-tabs-nav {
+ padding: 0px !important;
+ }
+
+ .ant-tabs-tab {
+ padding: 0 !important;
+ }
+
+ .ant-tabs-tab + .ant-tabs-tab {
+ margin: 0px !important;
+ }
+ }
+
+ .uninstall-integration-bar {
+ display: flex;
+ padding: 16px;
+ border-radius: 4px;
+ border: 1px solid rgba(218, 85, 101, 0.2);
+ background: rgba(218, 85, 101, 0.06);
+
+ .unintall-integration-bar-text {
+ display: flex;
+ flex-direction: column;
+ gap: 6px;
+
+ .heading {
+ color: var(--bg-cherry-500);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: normal;
+ letter-spacing: -0.07px;
+ }
+
+ .subtitle {
+ color: var(--bg-cherry-300);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 22px; /* 157.143% */
+ letter-spacing: -0.07px;
+ }
+ }
+
+ .uninstall-integration-btn {
+ border-radius: 2px;
+ background: var(--Accent---Secondary-Cherry, #da5565);
+ border-color: unset !important;
+ padding: 9px 13px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ color: var(--bg-ink-300);
+ text-align: center;
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 13.3px; /* 110.833% */
+ }
+
+ .uninstall-integration-btn:hover {
+ &.ant-btn-default {
+ color: var(--bg-ink-300) !important;
+ }
+ }
+ }
+}
+
+.remove-integration-modal {
+ .ant-modal-content {
+ width: 400px;
+ min-height: 200px;
+ flex-shrink: 0;
+ border-radius: 4px;
+ border: 1px solid var(--bg-slate-500);
+ box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
+ background: var(--bg-ink-400);
+ }
+
+ .ant-modal-footer {
+ margin-top: 28px;
+ }
+
+ .ant-modal-header {
+ background: unset;
+ margin-bottom: 8px;
+ }
+
+ .ant-modal-title {
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ }
+
+ .remove-integration-text {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ }
+}
+
+.test-connection-modal {
+ .ant-modal-content {
+ width: 512px;
+ min-height: 170px;
+ flex-shrink: 0;
+ border-radius: 4px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+ box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
+
+ .ant-modal-header {
+ margin-bottom: 16px;
+ }
+
+ .ant-modal-body {
+ border-top: 1px solid var(--bg-slate-500);
+ padding-top: 16px;
+ }
+
+ .ant-modal-footer {
+ margin-top: 25px;
+ display: flex;
+ flex-direction: row-reverse;
+
+ .understandBtn {
+ border-radius: 2px;
+ border: 1px solid var(--bg-slate-400);
+ background: var(--bg-ink-300);
+ box-shadow: none;
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 10px; /* 83.333% */
+ letter-spacing: 0.12px;
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ width: 131px;
+ height: 30px;
+ padding: 6px;
+ flex-shrink: 0;
+ }
+ }
+ }
+
+ .ant-modal-header {
+ background: unset;
+ }
+
+ .connection-content {
+ display: flex;
+ flex-direction: column;
+ gap: 16px;
+
+ .connection-container {
+ padding: 0 10px;
+ height: 37px;
+ display: flex;
+ align-items: center;
+
+ .connection-text {
+ margin: 0px;
+ padding: 0px 0px 0px 10px;
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 22px; /* 157.143% */
+ letter-spacing: -0.07px;
+ }
+ }
+
+ .data-test-connection {
+ display: flex;
+ flex-direction: column;
+ gap: 16px;
+ }
+ .data-info {
+ display: flex;
+ justify-content: space-between;
+
+ .last-data {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 22px; /* 157.143% */
+ letter-spacing: -0.07px;
+ }
+
+ .last-value {
+ color: var(--bg-vanilla-100);
+ font-family: 'Space Mono';
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px; /* 150% */
+ }
+ }
+ .testingConnection {
+ border-radius: 4px;
+ border: 1px solid rgba(255, 205, 86, 0.1);
+ background: rgba(255, 205, 86, 0.1);
+ color: var(--bg-amber-400);
+ }
+
+ .connected {
+ border-radius: 4px;
+ border: 1px solid rgba(37, 225, 146, 0.1);
+ background: rgba(37, 225, 146, 0.1);
+ color: var(--bg-forest-400);
+ }
+
+ .connectionFailed {
+ border-radius: 4px;
+ border: 1px solid rgba(218, 85, 101, 0.2);
+ background: rgba(218, 85, 101, 0.06);
+ color: var(--bg-cherry-500);
+ }
+
+ .noDataSinceLong {
+ border-radius: 4px;
+ border: 1px solid rgba(78, 116, 248, 0.1);
+ background: rgba(78, 116, 248, 0.1);
+ color: var(--bg-robin-400);
+ }
+ }
+}
+
+.lightMode {
+ .integration-detail-content {
+ .error-container {
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+
+ .error-content {
+ .error-btns {
+ .contact-support {
+ .text {
+ color: var(--text-robin-400);
+ font-weight: 500;
+ }
+ }
+ }
+ }
+ }
+
+ .all-integrations-btn {
+ color: var(--bg-slate-300);
+ }
+
+ .all-integrations-btn:hover {
+ &.ant-btn-text {
+ background-color: unset !important;
+ }
+ }
+
+ .integration-connection-header {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+
+ .integration-detail-header {
+ .image-container {
+ border: 1.111px solid var(--bg-vanilla-300);
+ background: var(--bg-vanilla-100);
+ }
+ .details {
+ .heading {
+ color: var(--bg-ink-500);
+ }
+
+ .description {
+ color: var(--bg-slate-200);
+ }
+ }
+ }
+
+ .testingConnection {
+ border: 1px solid rgba(255, 205, 86, 0.4);
+ background: rgba(255, 205, 86, 0.2);
+ color: var(--bg-amber-600);
+ }
+
+ .connected {
+ border: 1px solid rgba(37, 225, 146, 0.1);
+ background: rgba(37, 225, 146, 0.1);
+ color: var(--bg-forest-600);
+ }
+
+ .noDataSinceLong {
+ border: 1px solid rgba(78, 116, 248, 0.1);
+ background: rgba(78, 116, 248, 0.1);
+ color: var(--bg-robin-400);
+ }
+ }
+
+ .integration-detail-container {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+
+ .integration-tab-btns {
+ .typography {
+ color: var(--bg-ink-500);
+ }
+ }
+ }
+
+ .uninstall-integration-bar {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+
+ .unintall-integration-bar-text {
+ .heading {
+ color: var(--bg-ink-500);
+ }
+
+ .subtitle {
+ color: var(--bg-slate-100);
+ }
+ }
+
+ .uninstall-integration-btn {
+ background: var(--bg-cherry-500, #e5484d);
+ border-color: none !important;
+ color: var(--bg-vanilla-100);
+ }
+
+ .uninstall-integration-btn:hover {
+ &.ant-btn-default {
+ color: var(--bg-vanilla-300) !important;
+ }
+ }
+ }
+ }
+
+ .remove-integration-modal {
+ .ant-modal-content {
+ border: 1px solid var(--bg-vanilla-400);
+ box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
+ background: var(--bg-vanilla-100);
+ }
+
+ .ant-modal-title {
+ color: var(--bg-ink-500);
+ }
+
+ .remove-integration-text {
+ color: var(--bg-slate-400);
+ }
+ }
+
+ .test-connection-modal {
+ .ant-modal-content {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+ box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
+
+ .ant-modal-body {
+ border-top: 1px solid var(--bg-vanilla-400);
+ }
+
+ .ant-modal-footer {
+ .understandBtn {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-200);
+ color: var(--bg-slate-400);
+ }
+ }
+ }
+
+ .connection-content {
+ .data-info {
+ .last-data {
+ color: var(--bg-slate-400);
+ }
+
+ .last-value {
+ color: var(--bg-slate-100);
+ }
+ }
+ .testingConnection {
+ border: 1px solid rgba(255, 205, 86, 0.4);
+ background: rgba(255, 205, 86, 0.2);
+ color: var(--bg-amber-600);
+ }
+
+ .connected {
+ border: 1px solid rgba(37, 225, 146, 0.1);
+ background: rgba(37, 225, 146, 0.1);
+ color: var(--bg-forest-600);
+ }
+
+ .noDataSinceLong {
+ border: 1px solid rgba(78, 116, 248, 0.1);
+ background: rgba(78, 116, 248, 0.1);
+ color: var(--bg-robin-400);
+ }
+ }
+ }
+}
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx
new file mode 100644
index 0000000000..3d498a07d8
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx
@@ -0,0 +1,156 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+/* eslint-disable no-nested-ternary */
+import './IntegrationDetailPage.styles.scss';
+
+import { Color } from '@signozhq/design-tokens';
+import { Button, Typography } from 'antd';
+import { useGetIntegration } from 'hooks/Integrations/useGetIntegration';
+import { useGetIntegrationStatus } from 'hooks/Integrations/useGetIntegrationStatus';
+import { defaultTo } from 'lodash-es';
+import { ArrowLeft, MoveUpRight, RotateCw } from 'lucide-react';
+import { useEffect } from 'react';
+import { isCloudUser } from 'utils/app';
+
+import { handleContactSupport } from '../utils';
+import IntegrationDetailContent from './IntegrationDetailContent';
+import IntegrationDetailHeader from './IntegrationDetailHeader';
+import IntergrationsUninstallBar from './IntegrationsUninstallBar';
+import { ConnectionStates } from './TestConnection';
+import { getConnectionStatesFromConnectionStatus } from './utils';
+
+interface IntegrationDetailPageProps {
+ selectedIntegration: string;
+ setSelectedIntegration: (id: string | null) => void;
+ activeDetailTab: string;
+}
+
+function IntegrationDetailPage(props: IntegrationDetailPageProps): JSX.Element {
+ const { selectedIntegration, setSelectedIntegration, activeDetailTab } = props;
+
+ const {
+ data,
+ isLoading,
+ isFetching,
+ refetch,
+ isRefetching,
+ isError,
+ } = useGetIntegration({
+ integrationId: selectedIntegration,
+ });
+
+ const {
+ data: integrationStatus,
+ refetch: refetchStatus,
+ isLoading: isStatusLoading,
+ } = useGetIntegrationStatus({
+ integrationId: selectedIntegration,
+ enabled: false,
+ });
+
+ const loading = isLoading || isFetching || isRefetching || isStatusLoading;
+ const integrationData = data?.data.data;
+
+ const connectionStatus = getConnectionStatesFromConnectionStatus(
+ integrationData?.installation,
+ defaultTo(
+ integrationStatus?.data.data.connection_status,
+ defaultTo(integrationData?.connection_status, { logs: null, metrics: null }),
+ ),
+ );
+
+ useEffect(() => {
+ // fetch the status once on load, then keep polling it every 5 seconds
+ refetchStatus();
+ const timer = setInterval(() => {
+ refetchStatus();
+ }, 5000);
+
+ return (): void => {
+ clearInterval(timer);
+ };
+ }, [refetchStatus]);
+
+ return (
+
+
}
+ className="all-integrations-btn"
+ onClick={(): void => {
+ setSelectedIntegration(null);
+ }}
+ >
+ All Integrations
+
+
+ {loading ? (
+
+ Please wait while we load the integration details...
+
+ ) : isError ? (
+
+
+
+
+ Something went wrong :/ Please retry or contact support.
+
+
+
=> refetch()}
+ icon={ }
+ >
+ Retry
+
+
handleContactSupport(isCloudUser())}
+ >
+ Contact Support
+
+
+
+
+
+
+ ) : (
+ integrationData && (
+ <>
+
+
+
+ {connectionStatus !== ConnectionStates.NotInstalled && (
+
+ )}
+ >
+ )
+ )}
+
+ );
+}
+
+export default IntegrationDetailPage;
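
A note on the status polling above: the status query is created with `enabled: false`, so react-query never fires it on its own; the effect triggers the first fetch on mount and then re-polls every 5 seconds, clearing the interval on unmount. A minimal sketch of the same pattern, with a placeholder fetcher (an assumption, not the real API module):

import { useEffect } from 'react';
import { useQuery } from 'react-query';

// Placeholder for the real status API call (assumption, not the actual module).
declare function fetchStatus(integrationId: string): Promise<unknown>;

function useManuallyPolledStatus(integrationId: string): void {
	// enabled: false -> the query only runs when refetch() is called explicitly
	const { refetch } = useQuery(
		['integration-status', integrationId],
		() => fetchStatus(integrationId),
		{ enabled: false },
	);

	useEffect(() => {
		refetch(); // fetch once on mount
		const timer = setInterval(() => refetch(), 5000); // then poll every 5s
		return (): void => clearInterval(timer); // stop polling on unmount
	}, [refetch]);
}
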
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx
new file mode 100644
index 0000000000..41e985abf8
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/IntegrationsUninstallBar.tsx
@@ -0,0 +1,89 @@
+import './IntegrationDetailPage.styles.scss';
+
+import { Button, Modal, Typography } from 'antd';
+import unInstallIntegration from 'api/Integrations/uninstallIntegration';
+import { SOMETHING_WENT_WRONG } from 'constants/api';
+import { useNotifications } from 'hooks/useNotifications';
+import { X } from 'lucide-react';
+import { useState } from 'react';
+import { useMutation } from 'react-query';
+
+interface IntergrationsUninstallBarProps {
+ integrationTitle: string;
+ integrationId: string;
+ refetchIntegrationDetails: () => void;
+}
+function IntergrationsUninstallBar(
+ props: IntergrationsUninstallBarProps,
+): JSX.Element {
+ const { integrationTitle, integrationId, refetchIntegrationDetails } = props;
+ const { notifications } = useNotifications();
+ const [isModalOpen, setIsModalOpen] = useState(false);
+
+ const {
+ mutate: uninstallIntegration,
+ isLoading: isUninstallLoading,
+ } = useMutation(unInstallIntegration, {
+ onSuccess: () => {
+ refetchIntegrationDetails();
+ setIsModalOpen(false);
+ },
+ onError: () => {
+ notifications.error({
+ message: SOMETHING_WENT_WRONG,
+ });
+ },
+ });
+
+ const showModal = (): void => {
+ setIsModalOpen(true);
+ };
+
+ const handleOk = (): void => {
+ uninstallIntegration({
+ integration_id: integrationId,
+ });
+ };
+
+ const handleCancel = (): void => {
+ setIsModalOpen(false);
+ };
+ return (
+
+
+ Remove Integration
+
+ Removing the {integrationTitle} integration would make your workspace stop
+ listening for data from {integrationTitle} instances.
+
+
+
}
+ onClick={(): void => showModal()}
+ >
+ Remove from SigNoz
+
+
+
+ Removing this integration makes SigNoz stop listening for data from{' '}
+ {integrationTitle} instances. You would still have to manually remove the
+ configuration in your code to stop sending data.
+
+
+
+ );
+}
+
+export default IntergrationsUninstallBar;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/TestConnection.tsx b/frontend/src/pages/Integrations/IntegrationDetailPage/TestConnection.tsx
new file mode 100644
index 0000000000..e593e121e1
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/TestConnection.tsx
@@ -0,0 +1,35 @@
+import './IntegrationDetailPage.styles.scss';
+
+import cx from 'classnames';
+
+export enum ConnectionStates {
+ Connected = 'connected',
+ TestingConnection = 'testingConnection',
+ NoDataSinceLong = 'noDataSinceLong',
+ NotInstalled = 'notInstalled',
+}
+
+const ConnectionStatesLabelMap = {
+ [ConnectionStates.Connected]: 'This integration is working properly',
+ [ConnectionStates.TestingConnection]: 'Listening for data...',
+ [ConnectionStates.NoDataSinceLong]:
+ 'This integration has not received data in a while :/',
+ [ConnectionStates.NotInstalled]: '',
+};
+
+interface TestConnectionProps {
+ connectionState: ConnectionStates;
+}
+
+function TestConnection(props: TestConnectionProps): JSX.Element {
+ const { connectionState } = props;
+ return (
+
+
+ {ConnectionStatesLabelMap[connectionState]}
+
+
+ );
+}
+
+export default TestConnection;
diff --git a/frontend/src/pages/Integrations/IntegrationDetailPage/utils.ts b/frontend/src/pages/Integrations/IntegrationDetailPage/utils.ts
new file mode 100644
index 0000000000..43a4f76a5e
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationDetailPage/utils.ts
@@ -0,0 +1,55 @@
+import dayjs from 'dayjs';
+import { isNull, isUndefined } from 'lodash-es';
+
+import { ConnectionStates } from './TestConnection';
+
+export function getConnectionStatesFromConnectionStatus(
+ installation:
+ | {
+ installed_at: string;
+ }
+ | null
+ | undefined,
+ connection_status: {
+ logs:
+ | {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ }
+ | null
+ | undefined;
+ metrics:
+ | {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ }
+ | null
+ | undefined;
+ },
+): ConnectionStates {
+ if (isNull(installation) || isUndefined(installation)) {
+ return ConnectionStates.NotInstalled;
+ }
+ if (
+ (isNull(connection_status.logs) || isUndefined(connection_status.logs)) &&
+ (isNull(connection_status.metrics) || isUndefined(connection_status.metrics))
+ ) {
+ const installationDate = dayjs(installation.installed_at);
+ if (installationDate.isBefore(dayjs().subtract(7, 'days'))) {
+ return ConnectionStates.NoDataSinceLong;
+ }
+ return ConnectionStates.TestingConnection;
+ }
+
+ const logsDate = dayjs(connection_status.logs?.last_received_ts_ms);
+ const metricsDate = dayjs(connection_status.metrics?.last_received_ts_ms);
+
+ if (
+ logsDate.isBefore(dayjs().subtract(7, 'days')) &&
+ metricsDate.isBefore(dayjs().subtract(7, 'days'))
+ ) {
+ return ConnectionStates.NoDataSinceLong;
+ }
+
+ return ConnectionStates.Connected;
+}
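
The helper above is effectively a small state machine: no installation record maps to NotInstalled; installed but neither logs nor metrics received yet maps to TestingConnection, degrading to NoDataSinceLong once the installation is older than 7 days; any signal newer than 7 days maps to Connected. A hedged usage sketch; the payloads below are made up for illustration:

import dayjs from 'dayjs';

import { getConnectionStatesFromConnectionStatus } from './utils';

const emptyStatus = { logs: null, metrics: null };

// Never installed -> ConnectionStates.NotInstalled
getConnectionStatesFromConnectionStatus(null, emptyStatus);

// Installed two days ago, nothing received yet -> ConnectionStates.TestingConnection
getConnectionStatesFromConnectionStatus(
	{ installed_at: dayjs().subtract(2, 'days').toISOString() },
	emptyStatus,
);

// Logs received just now (hypothetical host) -> ConnectionStates.Connected
getConnectionStatesFromConnectionStatus(
	{ installed_at: dayjs().subtract(30, 'days').toISOString() },
	{
		logs: { last_received_ts_ms: Date.now(), last_received_from: 'host-a' },
		metrics: null,
	},
);
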
diff --git a/frontend/src/pages/Integrations/Integrations.styles.scss b/frontend/src/pages/Integrations/Integrations.styles.scss
new file mode 100644
index 0000000000..794b596407
--- /dev/null
+++ b/frontend/src/pages/Integrations/Integrations.styles.scss
@@ -0,0 +1,228 @@
+.integrations-container {
+ margin-top: 24px;
+ display: flex;
+ justify-content: center;
+ width: 100%;
+
+ .integrations-content {
+ width: calc(100% - 30px);
+ max-width: 736px;
+
+ .integrations-header {
+ .title {
+ color: var(--bg-vanilla-100);
+ font-size: var(--font-size-lg);
+ font-style: normal;
+ line-height: 28px; /* 155.556% */
+ letter-spacing: -0.09px;
+ font-family: Inter;
+ font-weight: 500;
+ }
+
+ .subtitle {
+ color: var(--bg-vanilla-400);
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ font-family: Inter;
+ font-weight: 400;
+ }
+
+ .integrations-search-input {
+ margin-top: 1rem;
+ border-radius: 2px;
+ border: 1px solid var(--bg-slate-400);
+ background: var(--bg-ink-300);
+
+ .ant-input {
+ background-color: unset;
+ }
+ }
+ }
+
+ .integrations-list {
+ margin-top: 16px;
+
+ .error-container {
+ display: flex;
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+ align-items: center;
+ justify-content: center;
+ flex-direction: column;
+
+ .error-content {
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ height: 300px;
+ gap: 15px;
+
+ .error-btns {
+ display: flex;
+ flex-direction: row;
+ gap: 16px;
+ align-items: center;
+
+ .retry-btn {
+ display: flex;
+ align-items: center;
+ }
+
+ .contact-support {
+ display: flex;
+ align-items: center;
+ gap: 4px;
+ cursor: pointer;
+
+ .text {
+ color: var(--text-robin-400);
+ font-weight: 500;
+ }
+ }
+ }
+
+ .error-state-svg {
+ height: 40px;
+ width: 40px;
+ }
+ }
+ }
+
+ .ant-list-items {
+ gap: 16px;
+ display: flex;
+ flex-direction: column;
+ }
+
+ .integrations-list-item {
+ display: flex;
+ gap: 10px;
+ padding: 16px;
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
+ cursor: pointer;
+
+ .list-item-image-container {
+ height: 40px;
+ width: 40px;
+ flex-shrink: 0;
+ border-radius: 2px;
+ border: 1px solid var(--bg-ink-50);
+ background: var(--bg-ink-300);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+
+ .list-item-image {
+ height: 24px;
+ width: 24px;
+ }
+ }
+
+ .list-item-details {
+ display: flex;
+ flex-direction: column;
+
+ .heading {
+ color: var(--bg-vanilla-100);
+ font-family: Inter;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ margin-bottom: 8px;
+ }
+
+ .description {
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px; /* 150% */
+ }
+ }
+
+ .configure-btn {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ align-self: flex-start;
+ gap: 2px;
+ flex-shrink: 0;
+ width: 78px;
+ height: 24px;
+ padding: 6px 1px;
+ border-radius: 2px;
+ border: 1px solid #303540;
+ background: var(--bg-ink-200);
+ box-shadow: none;
+ color: var(--bg-vanilla-400);
+ font-family: Inter;
+ font-size: 12px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 10px; /* 83.333% */
+ letter-spacing: 0.12px;
+ }
+ }
+ }
+ }
+}
+
+.lightMode {
+ .integrations-container {
+ .integrations-content {
+ .integrations-header {
+ .title {
+ color: var(--bg-ink-500);
+ }
+ .subtitle {
+ color: var(--bg-slate-200);
+ }
+ .integrations-search-input {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+ }
+ }
+
+ .integrations-list {
+ .error-container {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+ }
+
+ .integrations-list-item {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-100);
+
+ .list-item-image-container {
+ border: 1.111px solid var(--bg-vanilla-300);
+ background: var(--bg-vanilla-100);
+ }
+
+ .list-item-details {
+ .heading {
+ color: var(--bg-ink-500);
+ }
+
+ .description {
+ color: var(--bg-slate-200);
+ }
+ }
+
+ .configure-btn {
+ border: 1px solid rgba(53, 59, 76, 0.2);
+ background: var(--bg-vanilla-200);
+ color: var(--bg-ink-500);
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/pages/Integrations/Integrations.tsx b/frontend/src/pages/Integrations/Integrations.tsx
new file mode 100644
index 0000000000..6d25a20a6f
--- /dev/null
+++ b/frontend/src/pages/Integrations/Integrations.tsx
@@ -0,0 +1,41 @@
+import './Integrations.styles.scss';
+
+import { useState } from 'react';
+
+import Header from './Header';
+import IntegrationDetailPage from './IntegrationDetailPage/IntegrationDetailPage';
+import IntegrationsList from './IntegrationsList';
+
+function Integrations(): JSX.Element {
+ const [selectedIntegration, setSelectedIntegration] = useState<string | null>(
+ null,
+ );
+
+ const [activeDetailTab, setActiveDetailTab] = useState<string | null>(null);
+
+ const [searchTerm, setSearchTerm] = useState('');
+ return (
+
+
+ {selectedIntegration && activeDetailTab ? (
+
+ ) : (
+ <>
+
+
+ >
+ )}
+
+
+ );
+}
+
+export default Integrations;
diff --git a/frontend/src/pages/Integrations/IntegrationsList.tsx b/frontend/src/pages/Integrations/IntegrationsList.tsx
new file mode 100644
index 0000000000..47cd76bb68
--- /dev/null
+++ b/frontend/src/pages/Integrations/IntegrationsList.tsx
@@ -0,0 +1,120 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+import './Integrations.styles.scss';
+
+import { Color } from '@signozhq/design-tokens';
+import { Button, List, Typography } from 'antd';
+import { useGetAllIntegrations } from 'hooks/Integrations/useGetAllIntegrations';
+import { MoveUpRight, RotateCw } from 'lucide-react';
+import { Dispatch, SetStateAction, useMemo } from 'react';
+import { isCloudUser } from 'utils/app';
+
+import { handleContactSupport } from './utils';
+
+interface IntegrationsListProps {
+ setSelectedIntegration: (id: string) => void;
+ setActiveDetailTab: Dispatch<SetStateAction<string | null>>;
+ searchTerm: string;
+}
+
+function IntegrationsList(props: IntegrationsListProps): JSX.Element {
+ const { setSelectedIntegration, searchTerm, setActiveDetailTab } = props;
+
+ const {
+ data,
+ isFetching,
+ isLoading,
+ isRefetching,
+ isError,
+ refetch,
+ } = useGetAllIntegrations();
+
+ const filteredDataList = useMemo(() => {
+ if (data?.data.data.integrations) {
+ return data?.data.data.integrations.filter((item) =>
+ item.title.toLowerCase().includes(searchTerm.toLowerCase()),
+ );
+ }
+ return [];
+ }, [data?.data.data.integrations, searchTerm]);
+
+ const loading = isLoading || isFetching || isRefetching;
+
+ return (
+
+ {!loading && isError && (
+
+
+
+
+ Something went wrong :/ Please retry or contact support.
+
+
+
=> refetch()}
+ icon={ }
+ >
+ Retry
+
+
handleContactSupport(isCloudUser())}
+ >
+ Contact Support
+
+
+
+
+
+
+ )}
+ {!isError && (
+
(
+ {
+ setSelectedIntegration(item.id);
+ setActiveDetailTab('overview');
+ }}
+ >
+
+
+
+
+
+ {item.title}
+
+ {item.description}
+
+
+
+ {
+ event.stopPropagation();
+ setSelectedIntegration(item.id);
+ setActiveDetailTab('configuration');
+ }}
+ >
+ Configure
+
+
+ )}
+ />
+ )}
+
+ );
+}
+
+export default IntegrationsList;
diff --git a/frontend/src/pages/Integrations/index.ts b/frontend/src/pages/Integrations/index.ts
new file mode 100644
index 0000000000..806360c344
--- /dev/null
+++ b/frontend/src/pages/Integrations/index.ts
@@ -0,0 +1,3 @@
+import Integrations from './Integrations';
+
+export default Integrations;
diff --git a/frontend/src/pages/Integrations/utils.ts b/frontend/src/pages/Integrations/utils.ts
new file mode 100644
index 0000000000..81c70b6091
--- /dev/null
+++ b/frontend/src/pages/Integrations/utils.ts
@@ -0,0 +1,9 @@
+import history from 'lib/history';
+
+export const handleContactSupport = (isCloudUser: boolean): void => {
+ if (isCloudUser) {
+ history.push('/support');
+ } else {
+ window.open('https://signoz.io/slack', '_blank');
+ }
+};
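
handleContactSupport keeps the two support paths in one place: cloud tenants are routed to the in-app /support page, self-hosted users get the community Slack opened in a new tab. Minimal usage sketch:

import { isCloudUser } from 'utils/app';

import { handleContactSupport } from './utils';

// Typically wired to a "Contact Support" click handler.
const onContactSupportClick = (): void => handleContactSupport(isCloudUser());
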
diff --git a/frontend/src/pages/IntegrationsMarketPlace/IntegrationsMarketPlace.tsx b/frontend/src/pages/IntegrationsMarketPlace/IntegrationsMarketPlace.tsx
new file mode 100644
index 0000000000..c63f8a659a
--- /dev/null
+++ b/frontend/src/pages/IntegrationsMarketPlace/IntegrationsMarketPlace.tsx
@@ -0,0 +1,9 @@
+function IntegrationsMarketPlace(): JSX.Element {
+ return (
+
+
IntegrationsMarketPlace
+
+ );
+}
+
+export default IntegrationsMarketPlace;
diff --git a/frontend/src/pages/IntegrationsMarketPlace/index.ts b/frontend/src/pages/IntegrationsMarketPlace/index.ts
new file mode 100644
index 0000000000..6c088880e7
--- /dev/null
+++ b/frontend/src/pages/IntegrationsMarketPlace/index.ts
@@ -0,0 +1,3 @@
+import IntegrationsMarketPlace from './IntegrationsMarketPlace';
+
+export default IntegrationsMarketPlace;
diff --git a/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.styles.scss b/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.styles.scss
new file mode 100644
index 0000000000..4ff58bea40
--- /dev/null
+++ b/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.styles.scss
@@ -0,0 +1,27 @@
+.integrations-module-container {
+ .ant-tabs-nav {
+ padding: 0 16px;
+ margin-bottom: 0px;
+
+ &::before {
+ border-bottom: 1px solid var(--bg-slate-400) !important;
+ }
+ }
+
+ .tab-item {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ gap: 8px;
+ }
+}
+
+.lightMode {
+ .integrations-module-container {
+ .ant-tabs-nav {
+ &::before {
+ border-bottom: 1px solid var(--bg-vanilla-400) !important;
+ }
+ }
+ }
+}
diff --git a/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.tsx b/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.tsx
new file mode 100644
index 0000000000..bdcf05b2de
--- /dev/null
+++ b/frontend/src/pages/IntegrationsModulePage/IntegrationsModulePage.tsx
@@ -0,0 +1,21 @@
+import './IntegrationsModulePage.styles.scss';
+
+import RouteTab from 'components/RouteTab';
+import { TabRoutes } from 'components/RouteTab/types';
+import history from 'lib/history';
+import { useLocation } from 'react-use';
+
+import { installedIntegrations } from './constants';
+
+function IntegrationsModulePage(): JSX.Element {
+ const { pathname } = useLocation();
+
+ const routes: TabRoutes[] = [installedIntegrations];
+ return (
+
+
+
+ );
+}
+
+export default IntegrationsModulePage;
diff --git a/frontend/src/pages/IntegrationsModulePage/constants.tsx b/frontend/src/pages/IntegrationsModulePage/constants.tsx
new file mode 100644
index 0000000000..d0100798a8
--- /dev/null
+++ b/frontend/src/pages/IntegrationsModulePage/constants.tsx
@@ -0,0 +1,15 @@
+import { TabRoutes } from 'components/RouteTab/types';
+import ROUTES from 'constants/routes';
+import { Compass } from 'lucide-react';
+import Integrations from 'pages/Integrations';
+
+export const installedIntegrations: TabRoutes = {
+ Component: Integrations,
+ name: (
+
+ Integrations
+
+ ),
+ route: ROUTES.INTEGRATIONS_INSTALLED,
+ key: ROUTES.INTEGRATIONS_INSTALLED,
+};
diff --git a/frontend/src/pages/IntegrationsModulePage/index.ts b/frontend/src/pages/IntegrationsModulePage/index.ts
new file mode 100644
index 0000000000..690904079a
--- /dev/null
+++ b/frontend/src/pages/IntegrationsModulePage/index.ts
@@ -0,0 +1,3 @@
+import IntegrationsModulePage from './IntegrationsModulePage';
+
+export default IntegrationsModulePage;
diff --git a/frontend/src/pages/LogsExplorer/LogsExplorer.styles.scss b/frontend/src/pages/LogsExplorer/LogsExplorer.styles.scss
new file mode 100644
index 0000000000..95d53fe9a4
--- /dev/null
+++ b/frontend/src/pages/LogsExplorer/LogsExplorer.styles.scss
@@ -0,0 +1,11 @@
+.log-explorer-query-container {
+ display: flex;
+ flex-direction: column;
+ flex: 1;
+
+ .logs-explorer-views {
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+ }
+}
\ No newline at end of file
diff --git a/frontend/src/pages/LogsExplorer/index.tsx b/frontend/src/pages/LogsExplorer/index.tsx
index 8b91b955ea..0cc2c07b4d 100644
--- a/frontend/src/pages/LogsExplorer/index.tsx
+++ b/frontend/src/pages/LogsExplorer/index.tsx
@@ -1,8 +1,8 @@
-import { Col, Row } from 'antd';
+import './LogsExplorer.styles.scss';
+
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import LogExplorerQuerySection from 'container/LogExplorerQuerySection';
import LogsExplorerViews from 'container/LogsExplorerViews';
-// import LogsTopNav from 'container/LogsTopNav';
import LeftToolbarActions from 'container/QueryBuilder/components/ToolbarActions/LeftToolbarActions';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import Toolbar from 'container/Toolbar/Toolbar';
@@ -87,19 +87,19 @@ function LogsExplorer(): JSX.Element {
/>
-
-
+
);
diff --git a/frontend/src/pages/LogsExplorer/styles.ts b/frontend/src/pages/LogsExplorer/styles.ts
index 3e479cc001..54d553bc3c 100644
--- a/frontend/src/pages/LogsExplorer/styles.ts
+++ b/frontend/src/pages/LogsExplorer/styles.ts
@@ -3,6 +3,9 @@ import { themeColors } from 'constants/theme';
import styled from 'styled-components';
export const WrapperStyled = styled.div`
+ display: flex;
+ flex-direction: column;
+ flex: 1;
color: ${themeColors.lightWhite};
`;
diff --git a/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss b/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss
index 144264b532..9465594ccb 100644
--- a/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss
+++ b/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss
@@ -5,6 +5,14 @@
// margin: 0 -1rem;
// }
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+
+ .ant-tabs {
+ flex: 1;
+ }
+
.ant-tabs-nav {
padding: 0 16px;
margin-bottom: 0px;
@@ -14,6 +22,22 @@
}
}
+ .ant-tabs-content-holder {
+ display: flex;
+
+ .ant-tabs-content {
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+
+ .ant-tabs-tabpane {
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+ }
+ }
+ }
+
.tab-item {
display: flex;
justify-content: center;
diff --git a/frontend/src/pages/MetricsApplication/ApDex/ApDexApplication.tsx b/frontend/src/pages/MetricsApplication/ApDex/ApDexApplication.tsx
index 3f088a37a8..00b29c7809 100644
--- a/frontend/src/pages/MetricsApplication/ApDex/ApDexApplication.tsx
+++ b/frontend/src/pages/MetricsApplication/ApDex/ApDexApplication.tsx
@@ -10,7 +10,9 @@ import { Button } from '../styles';
import ApDexSettings from './ApDexSettings';
function ApDexApplication(): JSX.Element {
- const { servicename } = useParams();
+ const { servicename: encodedServiceName } = useParams();
+ const servicename = decodeURIComponent(encodedServiceName);
+
const {
data,
isLoading,
diff --git a/frontend/src/pages/MetricsApplication/index.tsx b/frontend/src/pages/MetricsApplication/index.tsx
index 94cbd5d99e..1b0229e5d1 100644
--- a/frontend/src/pages/MetricsApplication/index.tsx
+++ b/frontend/src/pages/MetricsApplication/index.tsx
@@ -13,7 +13,11 @@ import { MetricsApplicationTab, TAB_KEY_VS_LABEL } from './types';
import useMetricsApplicationTabKey from './useMetricsApplicationTabKey';
function MetricsApplication(): JSX.Element {
- const { servicename } = useParams<{ servicename: string }>();
+ const { servicename: encodedServiceName } = useParams<{
+ servicename: string;
+ }>();
+
+ const servicename = decodeURIComponent(encodedServiceName);
const activeKey = useMetricsApplicationTabKey();
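
The decode is needed because service names can contain characters that arrive percent-encoded in the route param (dots, slashes, spaces); decoding once at the page boundary lets the rest of the page work with the raw name. A small sketch with a hypothetical service name:

// e.g. a service registered as "checkout/v2 (us-east)"
const encodedServiceName = encodeURIComponent('checkout/v2 (us-east)');
// -> "checkout%2Fv2%20(us-east)", which is what useParams() hands back

const servicename = decodeURIComponent(encodedServiceName);
// -> "checkout/v2 (us-east)" again, usable for API calls and display
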
diff --git a/frontend/src/pages/SaveView/SaveView.styles.scss b/frontend/src/pages/SaveView/SaveView.styles.scss
index 461914c1d0..292a0b8d06 100644
--- a/frontend/src/pages/SaveView/SaveView.styles.scss
+++ b/frontend/src/pages/SaveView/SaveView.styles.scss
@@ -1,171 +1,170 @@
.save-view-container {
- margin-top: 70px;
- display: flex;
- justify-content: center;
- width: 100%;
+ margin-top: 70px;
+ display: flex;
+ justify-content: center;
+ width: 100%;
- .save-view-content {
- width: calc(100% - 30px);
- max-width: 736px;
+ .save-view-content {
+ width: calc(100% - 30px);
+ max-width: 736px;
-
- .title {
- color: var(--bg-vanilla-100);
- font-size: var(--font-size-lg);
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: 28px; /* 155.556% */
- letter-spacing: -0.09px;
- }
-
- .subtitle {
- color: var(---bg-vanilla-400);
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: 20px; /* 142.857% */
- letter-spacing: -0.07px;
- }
-
- .ant-input-affix-wrapper {
- margin-top: 16px;
- margin-bottom: 8px;
- }
+ .title {
+ color: var(--bg-vanilla-100);
+ font-size: var(--font-size-lg);
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: 28px; /* 155.556% */
+ letter-spacing: -0.09px;
+ }
- .ant-table-row {
- .ant-table-cell {
- padding: 0;
- border: none;
- background: var(--bg-ink-500);
-
- }
- .column-render {
- margin: 8px 0 !important;
- padding: 16px;
- border-radius: 6px;
- border: 1px solid var(--bg-slate-500);
- background: var(--bg-ink-400);
+ .subtitle {
+ color: var(--bg-vanilla-400);
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: 20px; /* 142.857% */
+ letter-spacing: -0.07px;
+ }
- .title-with-action {
- display: flex;
- justify-content: space-between;
- align-items: center;
+ .ant-input-affix-wrapper {
+ margin-top: 16px;
+ margin-bottom: 8px;
+ }
- .save-view-title {
- display: flex;
- align-items: center;
- gap: 6px;
- .dot {
- min-height: 6px;
- min-width: 6px;
- border-radius: 50%;
- }
- .ant-typography {
- color: var(--bg-vanilla-400);
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-medium);
- line-height: 20px;
- letter-spacing: -0.07px;
- }
- }
+ .ant-table-row {
+ .ant-table-cell {
+ padding: 0;
+ border: none;
+ background: var(--bg-ink-500);
+ }
+ .column-render {
+ margin: 8px 0 !important;
+ padding: 16px;
+ border-radius: 6px;
+ border: 1px solid var(--bg-slate-500);
+ background: var(--bg-ink-400);
- .action-btn {
- display: flex;
- align-items: center;
- gap: 20px;
- cursor: pointer;
- }
+ .title-with-action {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
- }
- .view-details {
- margin-top: 8px;
- display: flex;
- align-items: center;
+ .save-view-title {
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ .dot {
+ min-height: 6px;
+ min-width: 6px;
+ border-radius: 50%;
+ }
+ .ant-typography {
+ color: var(--bg-vanilla-400);
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-medium);
+ line-height: 20px;
+ letter-spacing: -0.07px;
+ }
+ }
- .view-tag {
- width: 14px;
- height: 14px;
- border-radius: 50px;
- background: var(--bg-slate-300);
- display: flex;
- justify-content: center;
- align-items: center;
+ .action-btn {
+ display: flex;
+ align-items: center;
+ gap: 20px;
+ cursor: pointer;
- .tag-text {
- color: var(--bg-vanilla-400);
- leading-trim: both;
- text-edge: cap;
- font-size: 10px;
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: normal;
- letter-spacing: -0.05px;
- }
- }
+ .hidden {
+ display: none;
+ }
+ }
+ }
+ .view-details {
+ margin-top: 8px;
+ display: flex;
+ align-items: center;
- .view-created-by {
- margin-left: 8px;
- }
+ .view-tag {
+ width: 14px;
+ height: 14px;
+ border-radius: 50px;
+ background: var(--bg-slate-300);
+ display: flex;
+ justify-content: center;
+ align-items: center;
- .view-created-at {
- margin-left: 24px;
- display: flex;
- align-items: center;
- .ant-typography {
- margin-left: 6px;
- color: var(--bg-vanilla-400);
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: 18px; /* 128.571% */
- letter-spacing: -0.07px;
- }
- }
- }
- }
- }
+ .tag-text {
+ color: var(--bg-vanilla-400);
+ leading-trim: both;
+ text-edge: cap;
+ font-size: 10px;
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: normal;
+ letter-spacing: -0.05px;
+ }
+ }
- .ant-pagination-item {
+ .view-created-by {
+ margin-left: 8px;
+ }
- display: flex;
- justify-content: center;
- align-items: center;
+ .view-created-at {
+ margin-left: 24px;
+ display: flex;
+ align-items: center;
+ .ant-typography {
+ margin-left: 6px;
+ color: var(--bg-vanilla-400);
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: 18px; /* 128.571% */
+ letter-spacing: -0.07px;
+ }
+ }
+ }
+ }
+ }
- > a {
- color: var(--bg-vanilla-400);
- font-variant-numeric: lining-nums tabular-nums slashed-zero;
- font-feature-settings: 'dlig' on, 'salt' on, 'case' on, 'cpsp' on;
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: 20px; /* 142.857% */
- }
+ .ant-pagination-item {
+ display: flex;
+ justify-content: center;
+ align-items: center;
- }
+ > a {
+ color: var(--bg-vanilla-400);
+ font-variant-numeric: lining-nums tabular-nums slashed-zero;
+ font-feature-settings: 'dlig' on, 'salt' on, 'case' on, 'cpsp' on;
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: 20px; /* 142.857% */
+ }
+ }
- .ant-pagination-item-active {
- background-color: var(--bg-robin-500);
- > a {
- color: var(--bg-ink-500) !important;
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-medium);
- line-height: 20px;
- }
- }
- }
-}
+ .ant-pagination-item-active {
+ background-color: var(--bg-robin-500);
+ > a {
+ color: var(--bg-ink-500) !important;
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-medium);
+ line-height: 20px;
+ }
+ }
+ }
+}
.delete-view-modal {
- width: calc(100% - 30px) !important; /* Adjust the 20px as needed */
- max-width: 384px;
+ width: calc(100% - 30px) !important; /* Adjust the 20px as needed */
+ max-width: 384px;
.ant-modal-content {
padding: 0;
border-radius: 4px;
border: 1px solid var(--bg-slate-500);
background: var(--bg-ink-400);
- box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.20);
+ box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
.ant-modal-header {
padding: 16px;
@@ -177,11 +176,11 @@
.ant-typography {
color: var(--bg-vanilla-400);
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-normal);
- line-height: 20px;
- letter-spacing: -0.07px;
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-normal);
+ line-height: 20px;
+ letter-spacing: -0.07px;
}
.save-view-input {
@@ -211,7 +210,6 @@
}
}
}
-
}
.ant-modal-footer {
@@ -223,127 +221,126 @@
.cancel-btn {
display: flex;
align-items: center;
- border: none;
+ border: none;
border-radius: 2px;
- background: var(--bg-slate-500);
+ background: var(--bg-slate-500);
}
- .delete-btn {
- display: flex;
+ .delete-btn {
+ display: flex;
align-items: center;
- border: none;
- border-radius: 2px;
- background: var(--bg-cherry-500);
- margin-left: 12px;
- }
+ border: none;
+ border-radius: 2px;
+ background: var(--bg-cherry-500);
+ margin-left: 12px;
+ }
- .delete-btn:hover {
- color: var(--bg-vanilla-100);
- background: var(--bg-cherry-600);
- }
+ .delete-btn:hover {
+ color: var(--bg-vanilla-100);
+ background: var(--bg-cherry-600);
+ }
}
}
.title {
color: var(--bg-vanilla-100);
- font-size: var(--font-size-sm);
- font-style: normal;
- font-weight: var(--font-weight-medium);
- line-height: 20px; /* 142.857% */
+ font-size: var(--font-size-sm);
+ font-style: normal;
+ font-weight: var(--font-weight-medium);
+ line-height: 20px; /* 142.857% */
}
}
.lightMode {
- .save-view-container {
- .save-view-content {
+ .save-view-container {
+ .save-view-content {
+ .title {
+ color: var(--bg-ink-500);
+ }
- .title {
- color: var(--bg-ink-500);
- }
+ .ant-table-row {
+ .ant-table-cell {
+ background: var(--bg-vanilla-200);
+ }
- .ant-table-row {
- .ant-table-cell {
- background: var(--bg-vanilla-200);
- }
+ &:hover {
+ .ant-table-cell {
+ background: var(--bg-vanilla-200) !important;
+ }
+ }
- &:hover {
- .ant-table-cell {
- background: var(--bg-vanilla-200) !important;
- }
- }
+ .column-render {
+ border: 1px solid var(--bg-vanilla-200);
+ background: var(--bg-vanilla-100);
- .column-render {
- border: 1px solid var(--bg-vanilla-200);
- background: var(--bg-vanilla-100);
+ .title-with-action {
+ .save-view-title {
+ .ant-typography {
+ color: var(--bg-ink-500);
+ }
+ }
- .title-with-action {
- .save-view-title {
- .ant-typography {
- color: var(--bg-ink-500);
- }
- }
+ .action-btn {
+ .ant-typography {
+ color: var(--bg-ink-500);
+ }
+ }
+ }
- .action-btn {
- .ant-typography {
- color: var(--bg-ink-500);
- }
- }
- }
+ .view-details {
+ .view-tag {
+ background: var(--bg-vanilla-200);
+ .tag-text {
+ color: var(--bg-ink-500);
+ }
+ }
- .view-details {
- .view-tag {
- background: var(--bg-vanilla-200);
- .tag-text {
- color: var(--bg-ink-500);
- }
- }
+ .view-created-by {
+ color: var(--bg-ink-500);
+ }
- .view-created-by {
- color: var(--bg-ink-500);
- }
+ .view-created-at {
+ .ant-typography {
+ color: var(--bg-ink-500);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
- .view-created-at {
- .ant-typography {
- color: var(--bg-ink-500);
- }
- }
- }
- }
- }
- }
- }
+ .delete-view-modal {
+ .ant-modal-content {
+ border: 1px solid var(--bg-vanilla-200);
+ background: var(--bg-vanilla-100);
- .delete-view-modal {
- .ant-modal-content {
- border: 1px solid var(--bg-vanilla-200);
- background: var(--bg-vanilla-100);
+ .ant-modal-header {
+ background: var(--bg-vanilla-100);
- .ant-modal-header {
- background: var(--bg-vanilla-100);
+ .title {
+ color: var(--bg-ink-500);
+ }
+ }
- .title {
- color: var(--bg-ink-500);
- }
- }
+ .ant-modal-body {
+ .ant-typography {
+ color: var(--bg-ink-500);
+ }
- .ant-modal-body {
- .ant-typography {
- color: var(--bg-ink-500);
- }
+ .save-view-input {
+ .ant-input {
+ background: var(--bg-vanilla-200);
+ color: var(--bg-ink-500);
+ }
+ }
+ }
- .save-view-input {
- .ant-input {
- background: var(--bg-vanilla-200);
- color: var(--bg-ink-500);
- }
- }
- }
-
- .ant-modal-footer {
- .cancel-btn {
- background: var(--bg-vanilla-300);
- color: var(--bg-ink-400);
- }
- }
- }
- }
-}
\ No newline at end of file
+ .ant-modal-footer {
+ .cancel-btn {
+ background: var(--bg-vanilla-300);
+ color: var(--bg-ink-400);
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/pages/SaveView/index.tsx b/frontend/src/pages/SaveView/index.tsx
index afdf05686b..a0ec01a5fd 100644
--- a/frontend/src/pages/SaveView/index.tsx
+++ b/frontend/src/pages/SaveView/index.tsx
@@ -32,14 +32,20 @@ import {
} from 'lucide-react';
import { ChangeEvent, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
+import { useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
+import { AppState } from 'store/reducers';
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
import { ViewProps } from 'types/api/saveViews/types';
import { DataSource } from 'types/common/queryBuilder';
+import AppReducer from 'types/reducer/app';
+import { USER_ROLES } from 'types/roles';
import { ROUTES_VS_SOURCEPAGE, SOURCEPAGE_VS_ROUTES } from './constants';
import { deleteViewHandler } from './utils';
+const allowedRoles = [USER_ROLES.ADMIN, USER_ROLES.AUTHOR, USER_ROLES.EDITOR];
+
function SaveView(): JSX.Element {
const { pathname } = useLocation();
const sourcepage = ROUTES_VS_SOURCEPAGE[pathname];
@@ -61,6 +67,8 @@ function SaveView(): JSX.Element {
setIsDeleteModalOpen(false);
};
+ const { role } = useSelector<AppState, AppReducer>((state) => state.app);
+
const handleDeleteModelOpen = (uuid: string, name: string): void => {
setActiveViewKey(uuid);
setActiveViewName(name);
@@ -217,6 +225,9 @@ function SaveView(): JSX.Element {
// Combine time and date
const formattedDateAndTime = `${formattedTime} ⎯ ${formattedDate}`;
+
+ const isEditDeleteSupported = allowedRoles.includes(role as string);
+
return (
@@ -234,11 +245,13 @@ function SaveView(): JSX.Element {
handleEditModelOpen(view, bgColor)}
/>
handleRedirectQuery(view)} />
handleDeleteModelOpen(view.uuid, view.name)}
/>
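
The surrounding JSX is only partially visible in this diff, but the intent of the role gate is: edit and delete actions on a saved view are only offered to ADMIN, AUTHOR and EDITOR roles, and the `.hidden { display: none; }` rule added to SaveView.styles.scss hides those icons for everyone else. A hedged sketch (the class wiring is illustrative, not the exact markup):

import { USER_ROLES } from 'types/roles';

// `role` comes from the redux store (state.app.role), as read above.
declare const role: string;

const allowedRoles: string[] = [
	USER_ROLES.ADMIN,
	USER_ROLES.AUTHOR,
	USER_ROLES.EDITOR,
];
const isEditDeleteSupported = allowedRoles.includes(role);

// Toggled on the edit/delete icons; viewers keep only the redirect action.
const editDeleteClass = isEditDeleteSupported ? '' : 'hidden';
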
diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx
index 3d39aab2cb..99527fba98 100644
--- a/frontend/src/pages/TracesExplorer/index.tsx
+++ b/frontend/src/pages/TracesExplorer/index.tsx
@@ -5,7 +5,7 @@ import axios from 'axios';
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
-import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions';
+import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import ExportPanel from 'container/ExportPanel';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
@@ -208,12 +208,12 @@ function TracesExplorer(): JSX.Element {
onChange={handleExplorerTabChange}
/>
-
>
diff --git a/frontend/src/providers/Dashboard/Dashboard.tsx b/frontend/src/providers/Dashboard/Dashboard.tsx
index a7fa94c044..326a4aae83 100644
--- a/frontend/src/providers/Dashboard/Dashboard.tsx
+++ b/frontend/src/providers/Dashboard/Dashboard.tsx
@@ -52,6 +52,7 @@ const DashboardContext = createContext({
updatedTimeRef: {} as React.MutableRefObject,
toScrollWidgetId: '',
setToScrollWidgetId: () => {},
+ updateLocalStorageDashboardVariables: () => {},
});
interface Props {
@@ -96,9 +97,10 @@ export function DashboardProvider({
const [selectedDashboard, setSelectedDashboard] = useState();
- const { currentDashboard } = useDashboardVariablesFromLocalStorage(
- dashboardId,
- );
+ const {
+ currentDashboard,
+ updateLocalStorageDashboardVariables,
+ } = useDashboardVariablesFromLocalStorage(dashboardId);
const updatedTimeRef = useRef(null); // Using ref to store the updated time
const modalRef = useRef(null);
@@ -320,6 +322,7 @@ export function DashboardProvider({
setSelectedDashboard,
updatedTimeRef,
setToScrollWidgetId,
+ updateLocalStorageDashboardVariables,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[
@@ -330,6 +333,8 @@ export function DashboardProvider({
dashboardId,
layouts,
toScrollWidgetId,
+ updateLocalStorageDashboardVariables,
+ currentDashboard,
],
);
diff --git a/frontend/src/providers/Dashboard/types.ts b/frontend/src/providers/Dashboard/types.ts
index a8e249015e..1f171cb621 100644
--- a/frontend/src/providers/Dashboard/types.ts
+++ b/frontend/src/providers/Dashboard/types.ts
@@ -19,4 +19,15 @@ export interface IDashboardContext {
updatedTimeRef: React.MutableRefObject;
toScrollWidgetId: string;
setToScrollWidgetId: React.Dispatch>;
+ updateLocalStorageDashboardVariables: (
+ id: string,
+ selectedValue:
+ | string
+ | number
+ | boolean
+ | (string | number | boolean)[]
+ | null
+ | undefined,
+ allSelected: boolean,
+ ) => void;
}
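
updateLocalStorageDashboardVariables persists a variable selection keyed by the variable id, together with whether "ALL" is selected, so the choice survives reloads. A hedged call sketch using the signature above (variable ids and values are hypothetical):

import { IDashboardContext } from 'providers/Dashboard/types';

declare const updateLocalStorageDashboardVariables: IDashboardContext['updateLocalStorageDashboardVariables'];

// Single selection for a variable with id "env"
updateLocalStorageDashboardVariables('env', 'production', false);

// Multi-select with "ALL" toggled on
updateLocalStorageDashboardVariables('namespace', ['default', 'kube-system'], true);

// Clearing a selection
updateLocalStorageDashboardVariables('pod', null, false);
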
diff --git a/frontend/src/providers/QueryBuilder.tsx b/frontend/src/providers/QueryBuilder.tsx
index 80bc673a83..1fde9fc224 100644
--- a/frontend/src/providers/QueryBuilder.tsx
+++ b/frontend/src/providers/QueryBuilder.tsx
@@ -68,6 +68,7 @@ export const QueryBuilderContext = createContext({
removeQueryBuilderEntityByIndex: () => {},
removeQueryTypeItemByIndex: () => {},
addNewBuilderQuery: () => {},
+ cloneQuery: () => {},
addNewFormula: () => {},
addNewQueryItem: () => {},
redirectWithQueryBuilderData: () => {},
@@ -307,6 +308,23 @@ export function QueryBuilderProvider({
[initialDataSource],
);
+ const cloneNewBuilderQuery = useCallback(
+ (queries: IBuilderQuery[], query: IBuilderQuery): IBuilderQuery => {
+ const existNames = queries.map((item) => item.queryName);
+ const clonedQuery: IBuilderQuery = {
+ ...query,
+ queryName: createNewBuilderItemName({ existNames, sourceNames: alphabet }),
+ expression: createNewBuilderItemName({
+ existNames,
+ sourceNames: alphabet,
+ }),
+ };
+
+ return clonedQuery;
+ },
+ [],
+ );
+
const createNewBuilderFormula = useCallback((formulas: IBuilderFormula[]) => {
const existNames = formulas.map((item) => item.queryName);
@@ -373,6 +391,28 @@ export function QueryBuilderProvider({
});
}, [createNewBuilderQuery]);
+ const cloneQuery = useCallback(
+ (type: string, query: IBuilderQuery): void => {
+ setCurrentQuery((prevState) => {
+ if (prevState.builder.queryData.length >= MAX_QUERIES) return prevState;
+
+ const clonedQuery = cloneNewBuilderQuery(
+ prevState.builder.queryData,
+ query,
+ );
+
+ return {
+ ...prevState,
+ builder: {
+ ...prevState.builder,
+ queryData: [...prevState.builder.queryData, clonedQuery],
+ },
+ };
+ });
+ },
+ [cloneNewBuilderQuery],
+ );
+
const addNewFormula = useCallback(() => {
setCurrentQuery((prevState) => {
if (prevState.builder.queryFormulas.length >= MAX_FORMULAS) return prevState;
@@ -647,6 +687,7 @@ export function QueryBuilderProvider({
handleSetConfig,
removeQueryBuilderEntityByIndex,
removeQueryTypeItemByIndex,
+ cloneQuery,
addNewBuilderQuery,
addNewFormula,
addNewQueryItem,
@@ -671,6 +712,7 @@ export function QueryBuilderProvider({
handleSetConfig,
removeQueryBuilderEntityByIndex,
removeQueryTypeItemByIndex,
+ cloneQuery,
addNewBuilderQuery,
addNewFormula,
addNewQueryItem,
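
cloneQuery copies an existing builder query, relabels the copy with the next unused letter (both queryName and expression, via createNewBuilderItemName), and appends it to queryData, refusing to grow past MAX_QUERIES. A hedged consumer sketch; it assumes currentQuery is also exposed on the context, which the provider above manages, and the 'query' type tag is illustrative:

import { useContext } from 'react';

import { QueryBuilderContext } from 'providers/QueryBuilder';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

function useCloneFirstQuery(): () => void {
	const { currentQuery, cloneQuery } = useContext(QueryBuilderContext);

	return (): void => {
		const first: IBuilderQuery | undefined = currentQuery.builder.queryData[0];
		if (first) {
			// Appends a copy named with the next free letter (e.g. "B"),
			// unless MAX_QUERIES has already been reached.
			cloneQuery('query', first);
		}
	};
}
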
diff --git a/frontend/src/store/actions/metrics/getService.ts b/frontend/src/store/actions/metrics/getService.ts
index 8de8f3c134..8afdd9bff9 100644
--- a/frontend/src/store/actions/metrics/getService.ts
+++ b/frontend/src/store/actions/metrics/getService.ts
@@ -17,13 +17,6 @@ export const GetService = (
try {
const { globalTime } = getState();
- if (
- props.maxTime !== globalTime.maxTime &&
- props.minTime !== globalTime.minTime
- ) {
- return;
- }
-
const { maxTime, minTime } = GetMinMax(globalTime.selectedTime, [
globalTime.minTime / 1000000,
globalTime.maxTime / 1000000,
diff --git a/frontend/src/store/reducers/app.ts b/frontend/src/store/reducers/app.ts
index 9e0db3cd6f..4db3965cad 100644
--- a/frontend/src/store/reducers/app.ts
+++ b/frontend/src/store/reducers/app.ts
@@ -224,7 +224,6 @@ const appReducer = (
}
case UPDATE_USER_FLAG: {
- console.log('herei n update user flag');
return {
...state,
userFlags: { ...state.userFlags, ...action.payload.flags },
diff --git a/frontend/src/types/api/alerts/def.ts b/frontend/src/types/api/alerts/def.ts
index 42d599948d..af3a4bc912 100644
--- a/frontend/src/types/api/alerts/def.ts
+++ b/frontend/src/types/api/alerts/def.ts
@@ -22,6 +22,7 @@ export interface AlertDef {
disabled?: boolean;
preferredChannels?: string[];
broadcastToAll?: boolean;
+ version?: string;
}
export interface RuleCondition {
@@ -31,6 +32,8 @@ export interface RuleCondition {
matchType?: string;
targetUnit?: string;
selectedQueryName?: string;
+ alertOnAbsent?: boolean | undefined;
+ absentFor?: number | undefined;
}
export interface Labels {
diff --git a/frontend/src/types/api/channels/createEmail.ts b/frontend/src/types/api/channels/createEmail.ts
new file mode 100644
index 0000000000..652e848773
--- /dev/null
+++ b/frontend/src/types/api/channels/createEmail.ts
@@ -0,0 +1,8 @@
+import { EmailChannel } from 'container/CreateAlertChannels/config';
+
+export type Props = EmailChannel;
+
+export interface PayloadProps {
+ data: string;
+ status: string;
+}
diff --git a/frontend/src/types/api/channels/editEmail.ts b/frontend/src/types/api/channels/editEmail.ts
new file mode 100644
index 0000000000..3dfe404e28
--- /dev/null
+++ b/frontend/src/types/api/channels/editEmail.ts
@@ -0,0 +1,10 @@
+import { EmailChannel } from 'container/CreateAlertChannels/config';
+
+export interface Props extends EmailChannel {
+ id: string;
+}
+
+export interface PayloadProps {
+ data: string;
+ status: string;
+}
diff --git a/frontend/src/types/api/dashboard/create.ts b/frontend/src/types/api/dashboard/create.ts
index 9b0e26457d..b553ecd17b 100644
--- a/frontend/src/types/api/dashboard/create.ts
+++ b/frontend/src/types/api/dashboard/create.ts
@@ -4,6 +4,7 @@ export type Props =
| {
title: Dashboard['data']['title'];
uploadedGrafana: boolean;
+ version?: string;
}
| { DashboardData: DashboardData; uploadedGrafana: boolean };
diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts
index 2111d3d57b..e616ee28ea 100644
--- a/frontend/src/types/api/dashboard/getAll.ts
+++ b/frontend/src/types/api/dashboard/getAll.ts
@@ -62,6 +62,7 @@ export interface DashboardData {
title: string;
layout?: Layout[];
variables: Record;
+ version?: string;
}
export interface IBaseWidget {
diff --git a/frontend/src/types/api/integrations/types.ts b/frontend/src/types/api/integrations/types.ts
new file mode 100644
index 0000000000..ae18b73caf
--- /dev/null
+++ b/frontend/src/types/api/integrations/types.ts
@@ -0,0 +1,105 @@
+interface IntegrationsProps {
+ author: {
+ email: string;
+ homepage: string;
+ name: string;
+ };
+ description: string;
+ id: string;
+ icon: string;
+ is_installed: boolean;
+ title: string;
+}
+
+export interface AllIntegrationsProps {
+ status: string;
+ data: {
+ integrations: IntegrationsProps[];
+ };
+}
+
+export interface IntegrationDetailedProps {
+ description: string;
+ id: string;
+ installation: {
+ installed_at: string;
+ } | null;
+ title: string;
+ author: {
+ email: string;
+ homepage: string;
+ name: string;
+ };
+ icon: string;
+ connection_status: {
+ logs: {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ } | null;
+ metrics: {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ } | null;
+ };
+ categories: string[];
+ assets: {
+ logs: {
+ pipelines: [];
+ };
+ dashboards: [];
+ alerts: [];
+ };
+ overview: string;
+ configuration: [
+ {
+ title: string;
+ instructions: string;
+ },
+ ];
+ data_collected: {
+ logs: string[];
+ metrics: string[];
+ };
+}
+export interface GetIntegrationProps {
+ data: IntegrationDetailedProps;
+}
+
+export interface IntegrationStatusProps {
+ connection_status: {
+ logs: {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ } | null;
+ metrics: {
+ last_received_ts_ms: number;
+ last_received_from: string;
+ } | null;
+ };
+}
+
+export interface GetIntegrationStatusProps {
+ data: IntegrationStatusProps;
+}
+
+export interface GetIntegrationPayloadProps {
+ integrationId: string;
+ enabled?: boolean;
+}
+
+export interface InstallIntegrationKeyProps {
+ integration_id: string;
+ config: any;
+}
+
+export interface InstalledIntegrationsSuccessResponse {
+ data: IntegrationsProps;
+}
+
+export interface UninstallIntegrationProps {
+ integration_id: string;
+}
+
+export interface UninstallIntegrationSuccessResponse {
+ data: any;
+}
diff --git a/frontend/src/types/api/queryBuilder/queryBuilderData.ts b/frontend/src/types/api/queryBuilder/queryBuilderData.ts
index ff8c143bf2..6a54254617 100644
--- a/frontend/src/types/api/queryBuilder/queryBuilderData.ts
+++ b/frontend/src/types/api/queryBuilder/queryBuilderData.ts
@@ -47,12 +47,21 @@ export type OrderByPayload = {
order: string;
};
+export interface QueryFunctionProps {
+ name: string;
+ args: string[];
+}
+
// Type for query builder
export type IBuilderQuery = {
queryName: string;
dataSource: DataSource;
aggregateOperator: string;
aggregateAttribute: BaseAutocompleteData;
+ timeAggregation: string;
+ spaceAggregation?: string;
+ temporality?: string;
+ functions: QueryFunctionProps[];
filters: TagFilter;
groupBy: BaseAutocompleteData[];
expression: string;
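
With these fields a metrics builder query carries a two-stage aggregation (timeAggregation per series over each step, then spaceAggregation across the grouped series), plus an optional temporality and a list of post-aggregation functions. A hedged example of the shape; the operator values are illustrative:

import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

// Partial<IBuilderQuery>: only the fields relevant to the new metrics flow.
const metricsQuery: Partial<IBuilderQuery> = {
	queryName: 'A',
	aggregateOperator: 'rate',
	timeAggregation: 'rate', // per-series aggregation over each step
	spaceAggregation: 'sum', // aggregation across series after group-by
	temporality: 'Cumulative', // matches the Temporality enum added below
	functions: [], // post-aggregation functions, see QueryFunctionsTypes below
};
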
diff --git a/frontend/src/types/api/widgets/getQuery.ts b/frontend/src/types/api/widgets/getQuery.ts
index 0b36af1541..5f455698dd 100644
--- a/frontend/src/types/api/widgets/getQuery.ts
+++ b/frontend/src/types/api/widgets/getQuery.ts
@@ -14,6 +14,8 @@ export interface QueryData {
queryName: string;
legend?: string;
values: [number, string][];
+ quantity?: number[];
+ unit?: string;
}
export interface SeriesItem {
@@ -28,6 +30,9 @@ export interface QueryDataV3 {
queryName: string;
legend?: string;
series: SeriesItem[] | null;
+ quantity?: number;
+ unitPrice?: number;
+ unit?: string;
}
export interface Props {
diff --git a/frontend/src/types/common/operations.types.ts b/frontend/src/types/common/operations.types.ts
index 58fd4533b9..9421509e9f 100644
--- a/frontend/src/types/common/operations.types.ts
+++ b/frontend/src/types/common/operations.types.ts
@@ -4,6 +4,7 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
import {
IBuilderFormula,
IBuilderQuery,
+ QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -13,6 +14,7 @@ type UseQueryOperationsParams = Pick &
Pick & {
formula?: IBuilderFormula;
isListViewPanel?: boolean;
+ entityVersion: string;
};
export type HandleChangeQueryData = <
@@ -37,12 +39,15 @@ export type UseQueryOperations = (
isTracePanelType: boolean;
isMetricsDataSource: boolean;
operators: SelectOption[];
+ spaceAggregationOptions: SelectOption[];
listOfAdditionalFilters: string[];
handleChangeOperator: (value: string) => void;
+ handleSpaceAggregationChange: (value: string) => void;
handleChangeAggregatorAttribute: (value: BaseAutocompleteData) => void;
handleChangeDataSource: (newSource: DataSource) => void;
handleDeleteQuery: () => void;
handleChangeQueryData: HandleChangeQueryData;
handleChangeFormulaData: HandleChangeFormulaData;
+ handleQueryFunctionsUpdates: (functions: QueryFunctionProps[]) => void;
listOfAdditionalFormulaFilters: string[];
};
diff --git a/frontend/src/types/common/queryBuilder.ts b/frontend/src/types/common/queryBuilder.ts
index e8dad50083..02ea8beebb 100644
--- a/frontend/src/types/common/queryBuilder.ts
+++ b/frontend/src/types/common/queryBuilder.ts
@@ -61,7 +61,14 @@ export enum BoolOperators {
COUNT_DISTINCT = 'count_distinct',
}
+export enum Temporality {
+ Unspecified = 'Unspecified',
+ Delta = 'Delta',
+ Cumulative = 'Cumulative',
+}
+
export enum MetricAggregateOperator {
+ EMPTY = '', // used as time aggregator for histograms
NOOP = 'noop',
COUNT = 'count',
COUNT_DISTINCT = 'count_distinct',
@@ -92,6 +99,8 @@ export enum MetricAggregateOperator {
HIST_QUANTILE_90 = 'hist_quantile_90',
HIST_QUANTILE_95 = 'hist_quantile_95',
HIST_QUANTILE_99 = 'hist_quantile_99',
+ INCREASE = 'increase',
+ LATEST = 'latest',
}
export enum TracesAggregatorOperator {
@@ -142,6 +151,24 @@ export enum LogsAggregatorOperator {
RATE_MAX = 'rate_max',
}
+export enum QueryFunctionsTypes {
+ CUTOFF_MIN = 'cutOffMin',
+ CUTOFF_MAX = 'cutOffMax',
+ CLAMP_MIN = 'clampMin',
+ CLAMP_MAX = 'clampMax',
+ ABSOLUTE = 'absolute',
+ LOG_2 = 'log2',
+ LOG_10 = 'log10',
+ CUMULATIVE_SUM = 'cumSum',
+ EWMA_3 = 'ewma3',
+ EWMA_5 = 'ewma5',
+ EWMA_7 = 'ewma7',
+ MEDIAN_3 = 'median3',
+ MEDIAN_5 = 'median5',
+ MEDIAN_7 = 'median7',
+ TIME_SHIFT = 'timeShift',
+}
+
export type PanelTypeKeys =
| 'TIME_SERIES'
| 'VALUE'
@@ -184,6 +211,7 @@ export type QueryBuilderContextType = {
) => void;
addNewBuilderQuery: () => void;
addNewFormula: () => void;
+ cloneQuery: (type: string, query: IBuilderQuery) => void;
addNewQueryItem: (type: EQueryType.PROM | EQueryType.CLICKHOUSE) => void;
redirectWithQueryBuilderData: (
query: Query,
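
Each QueryFunctionsTypes entry becomes the name of a QueryFunctionProps on a query, with args carrying its parameters: a threshold for the clamp/cutoff variants, nothing for the smoothing and log variants, and a shift for timeShift. A hedged sketch; the argument semantics are assumptions, not documented here:

import { QueryFunctionProps } from 'types/api/queryBuilder/queryBuilderData';
import { QueryFunctionsTypes } from 'types/common/queryBuilder';

const functions: QueryFunctionProps[] = [
	{ name: QueryFunctionsTypes.CUTOFF_MIN, args: ['0'] }, // drop points below 0
	{ name: QueryFunctionsTypes.EWMA_5, args: [] }, // 5-point exponential smoothing
	{ name: QueryFunctionsTypes.TIME_SHIFT, args: ['3600'] }, // hypothetical: shift by one hour
];
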
diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts
index 265ffe02a5..b18be180cd 100644
--- a/frontend/src/utils/permission/index.ts
+++ b/frontend/src/utils/permission/index.ts
@@ -82,6 +82,10 @@ export const routePermission: Record = {
LOGS_PIPELINES: ['ADMIN', 'EDITOR', 'VIEWER'],
TRACE_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
GET_STARTED: ['ADMIN', 'EDITOR', 'VIEWER'],
+ GET_STARTED_APPLICATION_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
+ GET_STARTED_INFRASTRUCTURE_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
+ GET_STARTED_LOGS_MANAGEMENT: ['ADMIN', 'EDITOR', 'VIEWER'],
+ GET_STARTED_AWS_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
WORKSPACE_LOCKED: ['ADMIN', 'EDITOR', 'VIEWER'],
BILLING: ['ADMIN', 'EDITOR', 'VIEWER'],
SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'],
@@ -92,4 +96,7 @@ export const routePermission: Record = {
LOGS_BASE: [],
OLD_LOGS_EXPLORER: [],
SHORTCUTS: ['ADMIN', 'EDITOR', 'VIEWER'],
+ INTEGRATIONS_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
+ INTEGRATIONS_INSTALLED: ['ADMIN', 'EDITOR', 'VIEWER'],
+ INTEGRATIONS_MARKETPLACE: ['ADMIN', 'EDITOR', 'VIEWER'],
};
diff --git a/frontend/tests/dashboards/utils.ts b/frontend/tests/dashboards/utils.ts
index b69aedf905..4db3769c8e 100644
--- a/frontend/tests/dashboards/utils.ts
+++ b/frontend/tests/dashboards/utils.ts
@@ -1,4 +1,5 @@
import { Page } from '@playwright/test';
+
import { JsonApplicationType } from '../fixtures/constant';
// API endpoints
@@ -41,6 +42,99 @@ export const timeSeriesGraphName = 'Time1';
let widgetsId: string;
+export const insertWidgetIdInResponse = (widgetID: string): any => ({
+ status: 'success',
+ data: {
+ id: 219,
+ uuid: 'd697fddb-a771-4bb4-aa38-810f000ed96a',
+ created_at: '2023-11-17T20:44:03.167646604Z',
+ created_by: 'vikrant@signoz.io',
+ updated_at: '2023-11-17T20:51:23.058536475Z',
+ updated_by: 'vikrant@signoz.io',
+ data: {
+ description: 'Playwright Dashboard T',
+ layout: [
+ {
+ h: 3,
+ i: '9fbcf0db-1572-4572-bf6b-0a84dd10ed85',
+ w: 6,
+ x: 0,
+ y: 0,
+ },
+ ],
+ version: 'v3',
+ name: '',
+ tags: [],
+ title: 'Playwright Dashboard',
+ variables: {},
+ widgets: [
+ {
+ description: '',
+ id: widgetID,
+ isStacked: false,
+ nullZeroValues: '',
+ opacity: '',
+ panelTypes: 'graph',
+ query: {
+ builder: {
+ queryData: [
+ {
+ aggregateAttribute: {
+ dataType: '',
+ id: '------',
+ isColumn: false,
+ isJSON: false,
+ key: '',
+ type: '',
+ },
+ aggregateOperator: 'count',
+ dataSource: 'metrics',
+ disabled: false,
+ expression: 'A',
+ filters: {
+ items: [],
+ op: 'AND',
+ },
+ groupBy: [],
+ having: [],
+ legend: '',
+ limit: null,
+ orderBy: [],
+ queryName: 'A',
+ reduceTo: 'avg',
+ stepInterval: 60,
+ },
+ ],
+ queryFormulas: [],
+ },
+ clickhouse_sql: [
+ {
+ disabled: false,
+ legend: '',
+ name: 'A',
+ query: '',
+ },
+ ],
+ id: '6b4011e4-bcea-497d-81a9-0ee7816b679d',
+ promql: [
+ {
+ disabled: false,
+ legend: '',
+ name: 'A',
+ query: '',
+ },
+ ],
+ queryType: 'builder',
+ },
+ timePreferance: 'GLOBAL_TIME',
+ title: '',
+ },
+ ],
+ },
+ isLocked: 0,
+ },
+});
+
// mock API calls
export const dashboardsListAndCreate = async (
page: Page,
@@ -76,7 +170,8 @@ export const getTimeSeriesQueryData = async (
page: Page,
response: any,
): Promise => {
- await page.route(`**/${queryRangeApiEndpoint}`, (route) =>
+ // eslint-disable-next-line sonarjs/no-identical-functions
+ await page.route(`**/${queryRangeApiEndpoint}`, (route): any =>
route.fulfill({
status: 200,
contentType: JsonApplicationType,
@@ -84,97 +179,3 @@ export const getTimeSeriesQueryData = async (
}),
);
};
-
-export const insertWidgetIdInResponse = (widgetID: string) => {
- return {
- status: 'success',
- data: {
- id: 219,
- uuid: 'd697fddb-a771-4bb4-aa38-810f000ed96a',
- created_at: '2023-11-17T20:44:03.167646604Z',
- created_by: 'vikrant@signoz.io',
- updated_at: '2023-11-17T20:51:23.058536475Z',
- updated_by: 'vikrant@signoz.io',
- data: {
- description: 'Playwright Dashboard T',
- layout: [
- {
- h: 3,
- i: '9fbcf0db-1572-4572-bf6b-0a84dd10ed85',
- w: 6,
- x: 0,
- y: 0,
- },
- ],
- name: '',
- tags: [],
- title: 'Playwright Dashboard',
- variables: {},
- widgets: [
- {
- description: '',
- id: widgetID,
- isStacked: false,
- nullZeroValues: '',
- opacity: '',
- panelTypes: 'graph',
- query: {
- builder: {
- queryData: [
- {
- aggregateAttribute: {
- dataType: '',
- id: '------',
- isColumn: false,
- isJSON: false,
- key: '',
- type: '',
- },
- aggregateOperator: 'count',
- dataSource: 'metrics',
- disabled: false,
- expression: 'A',
- filters: {
- items: [],
- op: 'AND',
- },
- groupBy: [],
- having: [],
- legend: '',
- limit: null,
- orderBy: [],
- queryName: 'A',
- reduceTo: 'sum',
- stepInterval: 60,
- },
- ],
- queryFormulas: [],
- },
- clickhouse_sql: [
- {
- disabled: false,
- legend: '',
- name: 'A',
- query: '',
- },
- ],
- id: '6b4011e4-bcea-497d-81a9-0ee7816b679d',
- promql: [
- {
- disabled: false,
- legend: '',
- name: 'A',
- query: '',
- },
- ],
- queryType: 'builder',
- },
- timePreferance: 'GLOBAL_TIME',
- title: '',
- },
- ],
- },
- isLocked: 0,
- },
- };
-};
diff --git a/frontend/tests/fixtures/api/dashboard/dashboardGetCallWithTimeSeriesWidget200.json b/frontend/tests/fixtures/api/dashboard/dashboardGetCallWithTimeSeriesWidget200.json
index e65361d6e7..1c8f1fe8ea 100644
--- a/frontend/tests/fixtures/api/dashboard/dashboardGetCallWithTimeSeriesWidget200.json
+++ b/frontend/tests/fixtures/api/dashboard/dashboardGetCallWithTimeSeriesWidget200.json
@@ -56,7 +56,7 @@
"limit": null,
"orderBy": [],
"queryName": "A",
- "reduceTo": "sum",
+ "reduceTo": "avg",
"stepInterval": 60
}
],
diff --git a/frontend/tests/fixtures/api/dashboard/putDashboardWithTimeSeries200.json b/frontend/tests/fixtures/api/dashboard/putDashboardWithTimeSeries200.json
index 7dab6c646b..bbfe51f9c9 100644
--- a/frontend/tests/fixtures/api/dashboard/putDashboardWithTimeSeries200.json
+++ b/frontend/tests/fixtures/api/dashboard/putDashboardWithTimeSeries200.json
@@ -56,7 +56,7 @@
"limit": null,
"orderBy": [],
"queryName": "A",
- "reduceTo": "sum",
+ "reduceTo": "avg",
"stepInterval": 60
}
],
diff --git a/frontend/tests/fixtures/api/traces/traceExplorerViews200.json b/frontend/tests/fixtures/api/traces/traceExplorerViews200.json
index fa929a1061..801a56425e 100644
--- a/frontend/tests/fixtures/api/traces/traceExplorerViews200.json
+++ b/frontend/tests/fixtures/api/traces/traceExplorerViews200.json
@@ -40,7 +40,7 @@
"order": "desc"
}
],
- "reduceTo": "sum"
+ "reduceTo": "avg"
}
},
"panelType": "table",
diff --git a/frontend/webpack.config.js b/frontend/webpack.config.js
index 281067ad47..65883594bb 100644
--- a/frontend/webpack.config.js
+++ b/frontend/webpack.config.js
@@ -67,7 +67,9 @@ const config = {
devtool: 'source-map',
entry: resolve(__dirname, './src/index.tsx'),
devServer: {
- historyApiFallback: true,
+ historyApiFallback: {
+ disableDotRule: true,
+ },
open: true,
hot: true,
liveReload: true,
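
The dot rule matters here: with the default historyApiFallback, webpack-dev-server refuses to rewrite request paths that contain a dot (it assumes they are file requests), so SPA routes embedding values like URL-encoded service names would 404 in development instead of serving index.html. Equivalent excerpt as a standalone sketch:

// devServer excerpt (sketch): let unknown paths fall back to the SPA
// entry point even when they contain a dot.
const devServer = {
	historyApiFallback: {
		disableDotRule: true,
	},
};
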
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index 6474b180c1..df275f43e4 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -4198,6 +4198,13 @@
dependencies:
"@types/unist" "^2"
+"@types/hast@^3.0.0":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa"
+ integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==
+ dependencies:
+ "@types/unist" "*"
+
"@types/history@^4.7.11":
version "4.7.11"
resolved "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz"
@@ -4297,6 +4304,13 @@
dependencies:
"@types/unist" "^2"
+"@types/mdast@^4.0.0":
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-4.0.3.tgz#1e011ff013566e919a4232d1701ad30d70cab333"
+ integrity sha512-LsjtqsyF+d2/yFOYaN22dHZI1Cpwkrj+g06G8+qtUKlhovPW89YhqSnfKtMbkgmEtYpH2gydRNULd6y8mciAFg==
+ dependencies:
+ "@types/unist" "*"
+
"@types/mdx@^2.0.0":
version "2.0.7"
resolved "https://registry.yarnpkg.com/@types/mdx/-/mdx-2.0.7.tgz#c7482e995673e01b83f8e96df83b3843ea76401f"
@@ -4577,6 +4591,11 @@
resolved "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.3.tgz"
integrity sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g==
+"@types/unist@*", "@types/unist@^3.0.0":
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.2.tgz#6dd61e43ef60b34086287f83683a5c1b2dc53d20"
+ integrity sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==
+
"@types/unist@^2", "@types/unist@^2.0.0":
version "2.0.8"
resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.8.tgz#bb197b9639aa1a04cf464a617fe800cccd92ad5c"
@@ -4799,6 +4818,11 @@
resolved "https://registry.npmjs.org/@ungap/custom-elements/-/custom-elements-1.2.0.tgz"
integrity sha512-zdSuu79stAwVUtzkQU9B5jhGh2LavtkeX4kxd2jtMJmZt7QqRJ1KJW5bukt/vUOaUs3z674GHd+nqYm0bu0Gyg==
+"@ungap/structured-clone@^1.0.0":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406"
+ integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==
+
"@volar/language-core@1.11.1", "@volar/language-core@~1.11.1":
version "1.11.1"
resolved "https://registry.yarnpkg.com/@volar/language-core/-/language-core-1.11.1.tgz#ecdf12ea8dc35fb8549e517991abcbf449a5ad4f"
@@ -7620,6 +7644,13 @@ detect-node@^2.0.4, detect-node@^2.1.0:
resolved "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz"
integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==
+devlop@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018"
+ integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==
+ dependencies:
+ dequal "^2.0.0"
+
diff-sequences@^27.5.1:
version "27.5.1"
resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz"
@@ -9376,6 +9407,20 @@ hast-util-from-parse5@^7.0.0:
vfile-location "^4.0.0"
web-namespaces "^2.0.0"
+hast-util-from-parse5@^8.0.0:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz#654a5676a41211e14ee80d1b1758c399a0327651"
+ integrity sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/unist" "^3.0.0"
+ devlop "^1.0.0"
+ hastscript "^8.0.0"
+ property-information "^6.0.0"
+ vfile "^6.0.0"
+ vfile-location "^5.0.0"
+ web-namespaces "^2.0.0"
+
hast-util-has-property@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/hast-util-has-property/-/hast-util-has-property-2.0.1.tgz#8ec99c3e8f02626304ee438cdb9f0528b017e083"
@@ -9408,6 +9453,13 @@ hast-util-parse-selector@^3.0.0:
dependencies:
"@types/hast" "^2.0.0"
+hast-util-parse-selector@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz#352879fa86e25616036037dd8931fb5f34cb4a27"
+ integrity sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==
+ dependencies:
+ "@types/hast" "^3.0.0"
+
hast-util-raw@^7.0.0, hast-util-raw@^7.2.0:
version "7.2.3"
resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-7.2.3.tgz#dcb5b22a22073436dbdc4aa09660a644f4991d99"
@@ -9425,6 +9477,25 @@ hast-util-raw@^7.0.0, hast-util-raw@^7.2.0:
web-namespaces "^2.0.0"
zwitch "^2.0.0"
+hast-util-raw@^9.0.0:
+ version "9.0.2"
+ resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-9.0.2.tgz#39b4a4886bd9f0a5dd42e86d02c966c2c152884c"
+ integrity sha512-PldBy71wO9Uq1kyaMch9AHIghtQvIwxBUkv823pKmkTM3oV1JxtsTNYdevMxvUHqcnOAuO65JKU2+0NOxc2ksA==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/unist" "^3.0.0"
+ "@ungap/structured-clone" "^1.0.0"
+ hast-util-from-parse5 "^8.0.0"
+ hast-util-to-parse5 "^8.0.0"
+ html-void-elements "^3.0.0"
+ mdast-util-to-hast "^13.0.0"
+ parse5 "^7.0.0"
+ unist-util-position "^5.0.0"
+ unist-util-visit "^5.0.0"
+ vfile "^6.0.0"
+ web-namespaces "^2.0.0"
+ zwitch "^2.0.0"
+
hast-util-select@^5.0.5, hast-util-select@~5.0.1:
version "5.0.5"
resolved "https://registry.yarnpkg.com/hast-util-select/-/hast-util-select-5.0.5.tgz#be9ccb71d2278681ca024727f12abd4f93b3e9bc"
@@ -9496,6 +9567,19 @@ hast-util-to-parse5@^7.0.0:
web-namespaces "^2.0.0"
zwitch "^2.0.0"
+hast-util-to-parse5@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz#477cd42d278d4f036bc2ea58586130f6f39ee6ed"
+ integrity sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ devlop "^1.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+ web-namespaces "^2.0.0"
+ zwitch "^2.0.0"
+
hast-util-to-string@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/hast-util-to-string/-/hast-util-to-string-2.0.0.tgz#b008b0a4ea472bf34dd390b7eea1018726ae152a"
@@ -9530,6 +9614,17 @@ hastscript@^7.0.0:
property-information "^6.0.0"
space-separated-tokens "^2.0.0"
+hastscript@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-8.0.0.tgz#4ef795ec8dee867101b9f23cc830d4baf4fd781a"
+ integrity sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ hast-util-parse-selector "^4.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+
he@^1.2.0:
version "1.2.0"
resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz"
@@ -9643,6 +9738,11 @@ html-void-elements@^2.0.0:
resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-2.0.1.tgz#29459b8b05c200b6c5ee98743c41b979d577549f"
integrity sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==
+html-void-elements@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7"
+ integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==
+
html-webpack-plugin@5.5.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50"
@@ -11880,6 +11980,21 @@ mdast-util-to-hast@^12.1.0:
unist-util-position "^4.0.0"
unist-util-visit "^4.0.0"
+mdast-util-to-hast@^13.0.0:
+ version "13.1.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.1.0.tgz#1ae54d903150a10fe04d59f03b2b95fd210b2124"
+ integrity sha512-/e2l/6+OdGp/FB+ctrJ9Avz71AN/GRH3oi/3KAx/kMnoUsD6q0woXlDT8lLEeViVKE7oZxE7RXzvO3T8kF2/sA==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ "@ungap/structured-clone" "^1.0.0"
+ devlop "^1.0.0"
+ micromark-util-sanitize-uri "^2.0.0"
+ trim-lines "^3.0.0"
+ unist-util-position "^5.0.0"
+ unist-util-visit "^5.0.0"
+ vfile "^6.0.0"
+
mdast-util-to-markdown@^1.0.0, mdast-util-to-markdown@^1.3.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz#c13343cb3fc98621911d33b5cd42e7d0731171c6"
@@ -12216,6 +12331,14 @@ micromark-util-character@^1.0.0:
micromark-util-symbol "^1.0.0"
micromark-util-types "^1.0.0"
+micromark-util-character@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.0.tgz#31320ace16b4644316f6bf057531689c71e2aee1"
+ integrity sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==
+ dependencies:
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
micromark-util-chunked@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz#37a24d33333c8c69a74ba12a14651fd9ea8a368b"
@@ -12262,6 +12385,11 @@ micromark-util-encode@^1.0.0:
resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz#92e4f565fd4ccb19e0dcae1afab9a173bbeb19a5"
integrity sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==
+micromark-util-encode@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz#0921ac7953dc3f1fd281e3d1932decfdb9382ab1"
+ integrity sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==
+
micromark-util-events-to-acorn@^1.0.0:
version "1.2.3"
resolved "https://registry.yarnpkg.com/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-1.2.3.tgz#a4ab157f57a380e646670e49ddee97a72b58b557"
@@ -12304,6 +12432,15 @@ micromark-util-sanitize-uri@^1.0.0, micromark-util-sanitize-uri@^1.1.0:
micromark-util-encode "^1.0.0"
micromark-util-symbol "^1.0.0"
+micromark-util-sanitize-uri@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz#ec8fbf0258e9e6d8f13d9e4770f9be64342673de"
+ integrity sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-encode "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+
micromark-util-subtokenize@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz#941c74f93a93eaf687b9054aeb94642b0e92edb1"
@@ -12319,11 +12456,21 @@ micromark-util-symbol@^1.0.0:
resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz#813cd17837bdb912d069a12ebe3a44b6f7063142"
integrity sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==
+micromark-util-symbol@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz#12225c8f95edf8b17254e47080ce0862d5db8044"
+ integrity sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==
+
micromark-util-types@^1.0.0, micromark-util-types@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-1.1.0.tgz#e6676a8cae0bb86a2171c498167971886cb7e283"
integrity sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==
+micromark-util-types@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.0.tgz#63b4b7ffeb35d3ecf50d1ca20e68fc7caa36d95e"
+ integrity sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==
+
micromark@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/micromark/-/micromark-3.2.0.tgz#1af9fef3f995ea1ea4ac9c7e2f19c48fd5c006e9"
@@ -13265,6 +13412,13 @@ parse5@6.0.1, parse5@^6.0.0, parse5@^6.0.1:
resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b"
integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==
+parse5@^7.0.0:
+ version "7.1.2"
+ resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32"
+ integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==
+ dependencies:
+ entities "^4.4.0"
+
parseurl@~1.3.2, parseurl@~1.3.3:
version "1.3.3"
resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz"
@@ -15013,6 +15167,15 @@ rehype-prism-plus@~1.6.1:
unist-util-filter "^4.0.0"
unist-util-visit "^4.0.0"
+rehype-raw@7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-7.0.0.tgz#59d7348fd5dbef3807bbaa1d443efd2dd85ecee4"
+ integrity sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ hast-util-raw "^9.0.0"
+ vfile "^6.0.0"
+
rehype-raw@^6.1.1:
version "6.1.1"
resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-6.1.1.tgz#81bbef3793bd7abacc6bf8335879d1b6c868c9d4"
@@ -16844,6 +17007,13 @@ unist-util-is@^5.0.0:
dependencies:
"@types/unist" "^2.0.0"
+unist-util-is@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424"
+ integrity sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
unist-util-position-from-estree@^1.0.0, unist-util-position-from-estree@^1.1.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/unist-util-position-from-estree/-/unist-util-position-from-estree-1.1.2.tgz#8ac2480027229de76512079e377afbcabcfcce22"
@@ -16858,6 +17028,13 @@ unist-util-position@^4.0.0:
dependencies:
"@types/unist" "^2.0.0"
+unist-util-position@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4"
+ integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
unist-util-remove-position@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-4.0.2.tgz#a89be6ea72e23b1a402350832b02a91f6a9afe51"
@@ -16873,6 +17050,13 @@ unist-util-stringify-position@^3.0.0:
dependencies:
"@types/unist" "^2.0.0"
+unist-util-stringify-position@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2"
+ integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
unist-util-visit-parents@^5.0.0, unist-util-visit-parents@^5.1.1:
version "5.1.3"
resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz#b4520811b0ca34285633785045df7a8d6776cfeb"
@@ -16881,6 +17065,14 @@ unist-util-visit-parents@^5.0.0, unist-util-visit-parents@^5.1.1:
"@types/unist" "^2.0.0"
unist-util-is "^5.0.0"
+unist-util-visit-parents@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815"
+ integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-is "^6.0.0"
+
unist-util-visit@^4.0.0, unist-util-visit@^4.1.0, unist-util-visit@^4.1.2, unist-util-visit@~4.1.0:
version "4.1.2"
resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-4.1.2.tgz#125a42d1eb876283715a3cb5cceaa531828c72e2"
@@ -16890,6 +17082,15 @@ unist-util-visit@^4.0.0, unist-util-visit@^4.1.0, unist-util-visit@^4.1.2, unist
unist-util-is "^5.0.0"
unist-util-visit-parents "^5.1.1"
+unist-util-visit@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6"
+ integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-is "^6.0.0"
+ unist-util-visit-parents "^6.0.0"
+
universalify@^0.1.0:
version "0.1.2"
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
@@ -17126,6 +17327,14 @@ vfile-location@^4.0.0:
"@types/unist" "^2.0.0"
vfile "^5.0.0"
+vfile-location@^5.0.0:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.2.tgz#220d9ca1ab6f8b2504a4db398f7ebc149f9cb464"
+ integrity sha512-NXPYyxyBSH7zB5U6+3uDdd6Nybz6o6/od9rk8bp9H8GR3L+cm/fC0uUTbqBmUTnMCUDslAGBOIKNfvvb+gGlDg==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ vfile "^6.0.0"
+
vfile-message@^3.0.0:
version "3.1.4"
resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-3.1.4.tgz#15a50816ae7d7c2d1fa87090a7f9f96612b59dea"
@@ -17134,6 +17343,14 @@ vfile-message@^3.0.0:
"@types/unist" "^2.0.0"
unist-util-stringify-position "^3.0.0"
+vfile-message@^4.0.0:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181"
+ integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-stringify-position "^4.0.0"
+
vfile@^5.0.0:
version "5.3.7"
resolved "https://registry.yarnpkg.com/vfile/-/vfile-5.3.7.tgz#de0677e6683e3380fafc46544cfe603118826ab7"
@@ -17144,6 +17361,15 @@ vfile@^5.0.0:
unist-util-stringify-position "^3.0.0"
vfile-message "^3.0.0"
+vfile@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.1.tgz#1e8327f41eac91947d4fe9d237a2dd9209762536"
+ integrity sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-stringify-position "^4.0.0"
+ vfile-message "^4.0.0"
+
vite-plugin-dts@^3.6.4:
version "3.7.0"
resolved "https://registry.yarnpkg.com/vite-plugin-dts/-/vite-plugin-dts-3.7.0.tgz#654ee7c38c0cdd4589b9bc198a264f34172bd870"
diff --git a/go.mod b/go.mod
index 0ca9965546..213be786b1 100644
--- a/go.mod
+++ b/go.mod
@@ -3,9 +3,10 @@ module go.signoz.io/signoz
go 1.21.3
require (
- github.com/ClickHouse/clickhouse-go/v2 v2.15.0
+ github.com/ClickHouse/clickhouse-go/v2 v2.20.0
+ github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
- github.com/SigNoz/signoz-otel-collector v0.88.14
+ github.com/SigNoz/signoz-otel-collector v0.88.15
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3
@@ -48,7 +49,7 @@ require (
github.com/smartystreets/assertions v1.13.1
github.com/smartystreets/goconvey v1.8.1
github.com/soheilhy/cmux v0.1.5
- github.com/srikanthccv/ClickHouse-go-mock v0.4.0
+ github.com/srikanthccv/ClickHouse-go-mock v0.7.0
github.com/stretchr/testify v1.8.4
go.opentelemetry.io/collector/component v0.88.0
go.opentelemetry.io/collector/confmap v0.88.0
@@ -61,13 +62,13 @@ require (
go.opentelemetry.io/collector/processor v0.88.0
go.opentelemetry.io/collector/receiver v0.88.0
go.opentelemetry.io/collector/service v0.88.0
- go.opentelemetry.io/otel v1.22.0
- go.opentelemetry.io/otel/sdk v1.22.0
+ go.opentelemetry.io/otel v1.24.0
+ go.opentelemetry.io/otel/sdk v1.23.1
go.uber.org/multierr v1.11.0
- go.uber.org/zap v1.26.0
- golang.org/x/crypto v0.17.0
+ go.uber.org/zap v1.27.0
+ golang.org/x/crypto v0.19.0
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1
- golang.org/x/net v0.19.0
+ golang.org/x/net v0.21.0
golang.org/x/oauth2 v0.13.0
google.golang.org/grpc v1.59.0
google.golang.org/protobuf v1.31.0
@@ -83,10 +84,9 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 // indirect
- github.com/ClickHouse/ch-go v0.58.2 // indirect
- github.com/DATA-DOG/go-sqlmock v1.5.0 // indirect
+ github.com/ClickHouse/ch-go v0.61.3 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
- github.com/andybalholm/brotli v1.0.6 // indirect
+ github.com/andybalholm/brotli v1.1.0 // indirect
github.com/aws/aws-sdk-go v1.45.26 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -122,7 +122,7 @@ require (
github.com/josharian/intern v1.0.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
- github.com/klauspost/compress v1.17.5 // indirect
+ github.com/klauspost/compress v1.17.7 // indirect
github.com/klauspost/cpuid v1.2.3 // indirect
github.com/knadh/koanf/v2 v2.0.1 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
@@ -141,7 +141,7 @@ require (
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.88.0 // indirect
- github.com/paulmach/orb v0.10.0 // indirect
+ github.com/paulmach/orb v0.11.1 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
@@ -154,7 +154,7 @@ require (
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
- github.com/shirou/gopsutil/v3 v3.23.9 // indirect
+ github.com/shirou/gopsutil/v3 v3.23.12 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
@@ -183,14 +183,14 @@ require (
go.opentelemetry.io/otel/exporters/prometheus v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 // indirect
- go.opentelemetry.io/otel/metric v1.22.0 // indirect
+ go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.19.0 // indirect
- go.opentelemetry.io/otel/trace v1.22.0 // indirect
+ go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.opentelemetry.io/proto/otlp v1.0.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
- go.uber.org/goleak v1.2.1 // indirect
+ go.uber.org/goleak v1.3.0 // indirect
golang.org/x/sync v0.6.0 // indirect
- golang.org/x/sys v0.16.0 // indirect
+ golang.org/x/sys v0.17.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/time v0.3.0 // indirect
gonum.org/v1/gonum v0.14.0 // indirect
@@ -203,4 +203,4 @@ require (
k8s.io/utils v0.0.0-20230711102312-30195339c3c7 // indirect
)
-replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.78
+replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.9.79-0.1
diff --git a/go.sum b/go.sum
index 8ba2afd692..625e71e882 100644
--- a/go.sum
+++ b/go.sum
@@ -84,22 +84,22 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 h1:OBhqkivkhkM
github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/ClickHouse/ch-go v0.58.2 h1:jSm2szHbT9MCAB1rJ3WuCJqmGLi5UTjlNu+f530UTS0=
-github.com/ClickHouse/ch-go v0.58.2/go.mod h1:Ap/0bEmiLa14gYjCiRkYGbXvbe8vwdrfTYWhsuQ99aw=
-github.com/ClickHouse/clickhouse-go/v2 v2.15.0 h1:G0hTKyO8fXXR1bGnZ0DY3vTG01xYfOGW76zgjg5tmC4=
-github.com/ClickHouse/clickhouse-go/v2 v2.15.0/go.mod h1:kXt1SRq0PIRa6aKZD7TnFnY9PQKmc2b13sHtOYcK6cQ=
-github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
-github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
+github.com/ClickHouse/ch-go v0.61.3 h1:MmBwUhXrAOBZK7n/sWBzq6FdIQ01cuF2SaaO8KlDRzI=
+github.com/ClickHouse/ch-go v0.61.3/go.mod h1:1PqXjMz/7S1ZUaKvwPA3i35W2bz2mAMFeCi6DIXgGwQ=
+github.com/ClickHouse/clickhouse-go/v2 v2.20.0 h1:bvlLQ31XJfl7MxIqAq2l1G6JhHYzqEXdvfpMeU6bkKc=
+github.com/ClickHouse/clickhouse-go/v2 v2.20.0/go.mod h1:VQfyA+tCwCRw2G7ogfY8V0fq/r0yJWzy8UDrjiP/Lbs=
+github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
+github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
-github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
-github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
-github.com/SigNoz/signoz-otel-collector v0.88.14 h1:/40pH8au6M8PhUhdCXd4c+7nJ9h0VgoDaV9ERKbUtf4=
-github.com/SigNoz/signoz-otel-collector v0.88.14/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo=
+github.com/SigNoz/prometheus v1.9.79-0.1 h1:RjsOw7oXVKx7IDA+/sRXW2x5pnw60/tT9MMuEz3+8DU=
+github.com/SigNoz/prometheus v1.9.79-0.1/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
+github.com/SigNoz/signoz-otel-collector v0.88.15 h1:JUi9wzlj7WonPiXD4fak7yv/JMgd39sYFBGKTJIvP2Q=
+github.com/SigNoz/signoz-otel-collector v0.88.15/go.mod h1:Dst94AfUCw8+w2R32FvOwTpjzL//ZaY3tIPGpyJ4iqw=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
@@ -113,8 +113,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
-github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
-github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
+github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
+github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
@@ -189,14 +189,14 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/r
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/digitalocean/godo v1.99.0 h1:gUHO7n9bDaZFWvbzOum4bXE0/09ZuYA9yA8idQHX57E=
github.com/digitalocean/godo v1.99.0/go.mod h1:SsS2oXo2rznfM/nORlZ/6JaUJZFhmKTib1YhopUc8NA=
+github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
+github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
-github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
-github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
-github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM=
-github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
-github.com/docker/go-connections v0.4.1-0.20210727194412-58542c764a11 h1:IPrmumsT9t5BS7XcPhgsCTlkWbYg80SEXUzDpReaU6Y=
-github.com/docker/go-connections v0.4.1-0.20210727194412-58542c764a11/go.mod h1:a6bNUGTbQBsY6VRHTr4h/rkOXjl244DyRD0tx3fgq4Q=
+github.com/docker/docker v25.0.3+incompatible h1:D5fy/lYmY7bvZa0XTZ5/UJPljor41F+vdyJG5luQLfQ=
+github.com/docker/docker v25.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
+github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
@@ -517,9 +517,10 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
-github.com/klauspost/compress v1.17.5 h1:d4vBd+7CHydUqpFBgUEKkSdtSugf9YFmSkvUYPquI5E=
-github.com/klauspost/compress v1.17.5/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
+github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
+github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/cpuid v1.2.3 h1:CCtW0xUnWGVINKvE/WWOYKdsPV6mawAtvQuSl8guwQs=
github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
@@ -663,8 +664,8 @@ github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FI
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
-github.com/paulmach/orb v0.10.0 h1:guVYVqzxHE/CQ1KpfGO077TR0ATHSNjp4s6XGLn3W9s=
-github.com/paulmach/orb v0.10.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
+github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU=
+github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
@@ -757,8 +758,8 @@ github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N+
github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc=
github.com/sethvargo/go-password v0.2.0 h1:BTDl4CC/gjf/axHMaDQtw507ogrXLci6XRiLc7i/UHI=
github.com/sethvargo/go-password v0.2.0/go.mod h1:Ym4Mr9JXLBycr02MFuVQ/0JHidNetSgbzutTr3zsYXE=
-github.com/shirou/gopsutil/v3 v3.23.9 h1:ZI5bWVeu2ep4/DIxB4U9okeYJ7zp/QLTO4auRb/ty/E=
-github.com/shirou/gopsutil/v3 v3.23.9/go.mod h1:x/NWSb71eMcjFIO0vhyGW5nZ7oSIgVjrCnADckb85GA=
+github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
+github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
@@ -791,8 +792,8 @@ github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/srikanthccv/ClickHouse-go-mock v0.4.0 h1:tLk7qoDLg7Z5YD5mOmNqjRDbsm6ehJVXOFvSnG+gQAg=
-github.com/srikanthccv/ClickHouse-go-mock v0.4.0/go.mod h1:kRG9cuhS527AMXqKYgsii/CP28L/22fyJcOBExmLpEw=
+github.com/srikanthccv/ClickHouse-go-mock v0.7.0 h1:XhRMX2663xkDGq3DYavw8m75O94s9u76hOIjo9QBl8c=
+github.com/srikanthccv/ClickHouse-go-mock v0.7.0/go.mod h1:IJZ/eL1h4cOy/Jo3PzNKXSPmqRus15BC2MbduYPpA/g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@@ -890,8 +891,8 @@ go.opentelemetry.io/contrib/propagators/b3 v1.20.0 h1:Yty9Vs4F3D6/liF1o6FNt0PvN8
go.opentelemetry.io/contrib/propagators/b3 v1.20.0/go.mod h1:On4VgbkqYL18kbJlWsa18+cMNe6rYpBnPi1ARI/BrsU=
go.opentelemetry.io/contrib/zpages v0.45.0 h1:jIwHHGoWzJoZdbIUtWdErjL85Gni6BignnAFqDtMRL4=
go.opentelemetry.io/contrib/zpages v0.45.0/go.mod h1:4mIdA5hqH6hEx9sZgV50qKfQO8aIYolUZboHmz+G7vw=
-go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y=
-go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI=
+go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
+go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
go.opentelemetry.io/otel/bridge/opencensus v0.42.0 h1:QvC+bcZkWMphWPiVqRQygMj6M0/3TOuJEO+erRA7kI8=
go.opentelemetry.io/otel/bridge/opencensus v0.42.0/go.mod h1:XJojP7g5DqYdiyArix/H9i1XzPPlIUc9dGLKtF9copI=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
@@ -912,14 +913,14 @@ go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0 h1:4jJuoeOo9W6hZn
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0/go.mod h1:/MtYTE1SfC2QIcE0bDot6fIX+h+WvXjgTqgn9P0LNPE=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 h1:Nw7Dv4lwvGrI68+wULbcq7su9K2cebeCUrDjVrUJHxM=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0/go.mod h1:1MsF6Y7gTqosgoZvHlzcaaM8DIMNZgJh87ykokoNH7Y=
-go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg=
-go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY=
-go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw=
-go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc=
+go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
+go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
+go.opentelemetry.io/otel/sdk v1.23.1 h1:O7JmZw0h76if63LQdsBMKQDWNb5oEcOThG9IrxscV+E=
+go.opentelemetry.io/otel/sdk v1.23.1/go.mod h1:LzdEVR5am1uKOOwfBWFef2DCi1nu3SA8XQxx2IerWFk=
go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k=
go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY=
-go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0=
-go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo=
+go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
+go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
@@ -927,14 +928,14 @@ go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
-go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
-go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
+go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
+go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
-go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo=
-go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so=
+go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
+go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -945,8 +946,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
-golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
+golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1043,8 +1044,8 @@ golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
-golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
+golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
+golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -1177,14 +1178,14 @@ golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
-golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
+golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
-golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
+golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
+golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
diff --git a/pkg/query-service/README.md b/pkg/query-service/README.md
index 25b8b65eef..83925eece7 100644
--- a/pkg/query-service/README.md
+++ b/pkg/query-service/README.md
@@ -13,7 +13,7 @@ https://github.com/SigNoz/signoz/blob/main/CONTRIBUTING.md#to-run-clickhouse-set
- Change the alertmanager section in `signoz/deploy/docker/clickhouse-setup/docker-compose.yaml` as follows:
```console
alertmanager:
- image: signoz/alertmanager:0.23.4
+ image: signoz/alertmanager:0.23.5
volumes:
- ./data/alertmanager:/data
expose:
diff --git a/pkg/query-service/agentConf/manager.go b/pkg/query-service/agentConf/manager.go
index 0e77383f7e..0fdab4e990 100644
--- a/pkg/query-service/agentConf/manager.go
+++ b/pkg/query-service/agentConf/manager.go
@@ -111,10 +111,6 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (
return nil, "", errors.Wrap(apiErr.ToError(), "failed to get latest agent config version")
}
- if latestConfig == nil {
- continue
- }
-
updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(
recommendation, latestConfig,
)
@@ -124,13 +120,24 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (
))
}
recommendation = updatedConf
- configId := fmt.Sprintf("%s:%d", featureType, latestConfig.Version)
+
+ // It is possible for a feature to recommend collector config
+ // before any user-created config versions exist.
+ //
+ // For example, log pipeline config for installed integrations will
+ // have to be recommended even if the user hasn't created any pipelines yet.
+ configVersion := -1
+ if latestConfig != nil {
+ configVersion = latestConfig.Version
+ }
+ configId := fmt.Sprintf("%s:%d", featureType, configVersion)
+
settingVersionsUsed = append(settingVersionsUsed, configId)
m.updateDeployStatus(
context.Background(),
featureType,
- latestConfig.Version,
+ configVersion,
string(DeployInitiated),
"Deployment has started",
configId,
@@ -209,6 +216,10 @@ func StartNewVersion(
return cfg, nil
}
+func NotifyConfigUpdate(ctx context.Context) {
+ m.notifyConfigUpdateSubscribers()
+}
+
func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiError {
configVersion, err := GetConfigVersion(ctx, typ, version)
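
For context on the `configVersion` fallback introduced above, a minimal, self-contained Go sketch follows. It is not part of the diff; the feature-type string `log_pipelines` is only illustrative. It shows how the sentinel version of `-1` keeps the config ID well-formed when no user-created versions exist yet:

```go
package main

import "fmt"

// configVersionOrDefault mirrors the fallback above: when no user-created
// config version exists yet, a sentinel version of -1 is used so a valid
// config ID can still be produced for deploy-status tracking.
func configVersionOrDefault(latestVersion *int) int {
	if latestVersion == nil {
		return -1
	}
	return *latestVersion
}

func main() {
	// No versions yet, e.g. log pipelines recommended for a fresh install.
	fmt.Printf("%s:%d\n", "log_pipelines", configVersionOrDefault(nil)) // log_pipelines:-1

	// A user-created version exists.
	v := 3
	fmt.Printf("%s:%d\n", "log_pipelines", configVersionOrDefault(&v)) // log_pipelines:3
}
```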
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index 1834aa0ff0..df41572155 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -47,6 +47,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/logs"
"go.signoz.io/signoz/pkg/query-service/app/services"
"go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
@@ -142,6 +143,17 @@ func NewReader(
os.Exit(1)
}
+ return NewReaderFromClickhouseConnection(db, options, localDB, configFile, featureFlag, cluster)
+}
+
+func NewReaderFromClickhouseConnection(
+ db driver.Conn,
+ options *Options,
+ localDB *sqlx.DB,
+ configFile string,
+ featureFlag interfaces.FeatureLookup,
+ cluster string,
+) *ClickHouseReader {
alertManager, err := am.New("")
if err != nil {
zap.S().Errorf("msg: failed to initialize alert manager: ", "/t error:", err)
@@ -149,8 +161,10 @@ func NewReader(
os.Exit(1)
}
+ wrap := clickhouseConnWrapper{conn: db}
+
return &ClickHouseReader{
- db: db,
+ db: wrap,
localDB: localDB,
TraceDB: options.primary.TraceDB,
alertManager: alertManager,
@@ -744,33 +758,47 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro
return &services, nil
}
-func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig) (*map[string][]string, *model.ApiError) {
+func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time) (*map[string][]string, *map[string][]string, *model.ApiError) {
+ start = start.In(time.UTC)
+
+ // The `top_level_operations` that have `time` >= start
operations := map[string][]string{}
- query := fmt.Sprintf(`SELECT DISTINCT name, serviceName FROM %s.%s`, r.TraceDB, r.topLevelOperationsTable)
+ // All top level operations for a service
+ allOperations := map[string][]string{}
+ query := fmt.Sprintf(`SELECT DISTINCT name, serviceName, time FROM %s.%s`, r.TraceDB, r.topLevelOperationsTable)
rows, err := r.db.Query(ctx, query)
if err != nil {
zap.S().Error("Error in processing sql query: ", err)
- return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query")}
+ return nil, nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
defer rows.Close()
for rows.Next() {
var name, serviceName string
- if err := rows.Scan(&name, &serviceName); err != nil {
- return nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("Error in reading data")}
+ var t time.Time
+ if err := rows.Scan(&name, &serviceName, &t); err != nil {
+ return nil, nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error in reading data")}
}
if _, ok := operations[serviceName]; !ok {
operations[serviceName] = []string{}
}
+ if _, ok := allOperations[serviceName]; !ok {
+ allOperations[serviceName] = []string{}
+ }
if skipConfig.ShouldSkip(serviceName, name) {
continue
}
- operations[serviceName] = append(operations[serviceName], name)
+ allOperations[serviceName] = append(allOperations[serviceName], name)
+ // We can't use the `end` because the `top_level_operations` table has the most recent instances of the operations
+ // We can only use the `start` time to filter the operations
+ if t.After(start) {
+ operations[serviceName] = append(operations[serviceName], name)
+ }
}
- return &operations, nil
+ return &operations, &allOperations, nil
}
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError) {
@@ -779,7 +807,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
}
- topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig)
+ topLevelOps, allTopLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End)
if apiErr != nil {
return nil, apiErr
}
@@ -798,6 +826,22 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
defer func() { <-sem }()
var serviceItem model.ServiceItem
var numErrors uint64
+
+ // Even if the number of operations within the time range is low while the total
+ // number of top level operations is high, we still want to warn the user so they
+ // know about the issue with the instrumentation
+ serviceItem.DataWarning = model.DataWarning{
+ TopLevelOps: (*allTopLevelOps)[svc],
+ }
+
+ // default max_query_size = 262144
+ // Let's assume the average size of the item in `ops` is 50 bytes
+ // We can have 262144/50 = 5242 items in the `ops` array
+ // Although we could make it as big as ~5k, we cap the number of items
+ // in the `ops` array at 1500
+
+ ops = ops[:int(math.Min(1500, float64(len(ops))))]
+
query := fmt.Sprintf(
`SELECT
quantile(0.99)(durationNano) as p99,
@@ -846,6 +890,10 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
return
}
subQuery, argsSubQuery, errStatus = buildQueryWithTagParams(ctx, tags)
+ if errStatus != nil {
+ zap.S().Error("Error building query with tag params: ", err)
+ return
+ }
query += subQuery
args = append(args, argsSubQuery...)
err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
@@ -872,7 +920,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError) {
- topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig)
+ topLevelOps, _, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End)
if apiErr != nil {
return nil, apiErr
}
@@ -1564,7 +1612,7 @@ func buildQueryWithTagParams(ctx context.Context, tags []model.TagQuery) (string
case model.NotExistsOperator:
subQuery, argsSubQuery = addExistsOperator(item, tagMapType, true)
default:
- return "", nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Tag Operator %s not supported", item.GetOperator())}
+ return "", nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("filter operator %s not supported", item.GetOperator())}
}
query += subQuery
args = append(args, argsSubQuery...)
@@ -3468,14 +3516,37 @@ func (r *ClickHouseReader) GetDashboardsInfo(ctx context.Context) (*model.Dashbo
zap.S().Debug("Error in processing sql query: ", err)
return &dashboardsInfo, err
}
+ totalDashboardsWithPanelAndName := 0
for _, dashboard := range dashboardsData {
+ if isDashboardWithPanelAndName(dashboard.Data) {
+ totalDashboardsWithPanelAndName = totalDashboardsWithPanelAndName + 1
+ }
dashboardsInfo = countPanelsInDashboard(dashboard.Data)
}
dashboardsInfo.TotalDashboards = len(dashboardsData)
-
+ dashboardsInfo.TotalDashboardsWithPanelAndName = totalDashboardsWithPanelAndName
return &dashboardsInfo, nil
}
+func isDashboardWithPanelAndName(data map[string]interface{}) bool {
+ isDashboardName := false
+ isDashboardWithPanelAndName := false
+ if data != nil && data["title"] != nil && data["widgets"] != nil {
+ title, ok := data["title"].(string)
+ if ok && title != "Sample Title" {
+ isDashboardName = true
+ }
+ widgets, ok := data["widgets"].(interface{})
+ if ok && isDashboardName {
+ data, ok := widgets.([]interface{})
+ if ok && len(data) > 0 {
+ isDashboardWithPanelAndName = true
+ }
+ }
+ }
+
+ return isDashboardWithPanelAndName
+}
func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
var logsPanelCount, tracesPanelCount, metricsPanelCount int
// totalPanels := 0
@@ -3974,7 +4045,7 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req
var rows driver.Rows
var response v3.AggregateAttributeResponse
- query = fmt.Sprintf("SELECT DISTINCT metric_name, type from %s.%s WHERE metric_name ILIKE $1", signozMetricDBName, signozTSTableNameV41Day)
+ query = fmt.Sprintf("SELECT metric_name, type, is_monotonic, temporality FROM %s.%s WHERE metric_name ILIKE $1 GROUP BY metric_name, type, is_monotonic, temporality", signozMetricDBName, signozTSTableNameV41Day)
if req.Limit != 0 {
query = query + fmt.Sprintf(" LIMIT %d;", req.Limit)
}
@@ -3986,11 +4057,18 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req
}
defer rows.Close()
- var metricName, typ string
+ seen := make(map[string]struct{})
+
+ var metricName, typ, temporality string
+ var isMonotonic bool
for rows.Next() {
- if err := rows.Scan(&metricName, &typ); err != nil {
+ if err := rows.Scan(&metricName, &typ, &isMonotonic, &temporality); err != nil {
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
}
+ // Non-monotonic cumulative sums are treated as gauges
+ if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
+ typ = "Gauge"
+ }
// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
key := v3.AttributeKey{
Key: metricName,
@@ -3998,6 +4076,11 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req
Type: v3.AttributeKeyType(typ),
IsColumn: true,
}
+ // remove duplicates
+ if _, ok := seen[metricName+typ]; ok {
+ continue
+ }
+ seen[metricName+typ] = struct{}{}
response.AttributeKeys = append(response.AttributeKeys, key)
}
@@ -4012,11 +4095,11 @@ func (r *ClickHouseReader) GetMetricAttributeKeys(ctx context.Context, req *v3.F
var response v3.FilterAttributeKeyResponse
// skips the internal attributes i.e attributes starting with __
- query = fmt.Sprintf("SELECT DISTINCT arrayJoin(tagKeys) as distinctTagKey from (SELECT DISTINCT(JSONExtractKeys(labels)) tagKeys from %s.%s WHERE metric_name=$1) WHERE distinctTagKey ILIKE $2 AND distinctTagKey NOT LIKE '\\_\\_%%'", signozMetricDBName, signozTSTableName)
+ query = fmt.Sprintf("SELECT arrayJoin(tagKeys) AS distinctTagKey FROM (SELECT JSONExtractKeys(labels) AS tagKeys FROM %s.%s WHERE metric_name=$1 AND unix_milli >= $2 GROUP BY tagKeys) WHERE distinctTagKey ILIKE $3 AND distinctTagKey NOT LIKE '\\_\\_%%' GROUP BY distinctTagKey", signozMetricDBName, signozTSTableNameV41Day)
if req.Limit != 0 {
query = query + fmt.Sprintf(" LIMIT %d;", req.Limit)
}
- rows, err = r.db.Query(ctx, query, req.AggregateAttribute, fmt.Sprintf("%%%s%%", req.SearchText))
+ rows, err = r.db.Query(ctx, query, req.AggregateAttribute, common.PastDayRoundOff(), fmt.Sprintf("%%%s%%", req.SearchText))
if err != nil {
zap.S().Error(err)
return nil, fmt.Errorf("error while executing query: %s", err.Error())
@@ -4047,11 +4130,11 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
var rows driver.Rows
var attributeValues v3.FilterAttributeValueResponse
- query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, $1)) from %s.%s WHERE metric_name=$2 AND JSONExtractString(labels, $3) ILIKE $4", signozMetricDBName, signozTSTableName)
+ query = fmt.Sprintf("SELECT JSONExtractString(labels, $1) AS tagValue FROM %s.%s WHERE metric_name=$2 AND JSONExtractString(labels, $3) ILIKE $4 AND unix_milli >= $5 GROUP BY tagValue", signozMetricDBName, signozTSTableNameV41Day)
if req.Limit != 0 {
query = query + fmt.Sprintf(" LIMIT %d;", req.Limit)
}
- rows, err = r.db.Query(ctx, query, req.FilterAttributeKey, req.AggregateAttribute, req.FilterAttributeKey, fmt.Sprintf("%%%s%%", req.SearchText))
+ rows, err = r.db.Query(ctx, query, req.FilterAttributeKey, req.AggregateAttribute, req.FilterAttributeKey, fmt.Sprintf("%%%s%%", req.SearchText), common.PastDayRoundOff())
if err != nil {
zap.S().Error(err)
@@ -4598,10 +4681,26 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
return seriesList, nil
}
+func logComment(ctx context.Context) string {
+ // Get the key-value pairs from context for log comment
+ kv := ctx.Value("log_comment")
+ if kv == nil {
+ return ""
+ }
+
+ logCommentKVs, ok := kv.(map[string]string)
+ if !ok {
+ return ""
+ }
+
+ x, _ := json.Marshal(logCommentKVs)
+ return string(x)
+}
+
// GetTimeSeriesResultV3 runs the query and returns list of time series
func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query string) ([]*v3.Series, error) {
- defer utils.Elapsed("GetTimeSeriesResultV3", query)()
+ defer utils.Elapsed("GetTimeSeriesResultV3", query, fmt.Sprintf("logComment: %s", logComment(ctx)))()
rows, err := r.db.Query(ctx, query)
@@ -4626,7 +4725,7 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri
// GetListResultV3 runs the query and returns list of rows
func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([]*v3.Row, error) {
- defer utils.Elapsed("GetListResultV3", query)()
+ defer utils.Elapsed("GetListResultV3", query, fmt.Sprintf("logComment: %s", logComment(ctx)))()
rows, err := r.db.Query(ctx, query)
diff --git a/pkg/query-service/app/clickhouseReader/wrapper.go b/pkg/query-service/app/clickhouseReader/wrapper.go
new file mode 100644
index 0000000000..6c75fa4b20
--- /dev/null
+++ b/pkg/query-service/app/clickhouseReader/wrapper.go
@@ -0,0 +1,82 @@
+package clickhouseReader
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/ClickHouse/clickhouse-go/v2"
+ "github.com/ClickHouse/clickhouse-go/v2/lib/driver"
+)
+
+type clickhouseConnWrapper struct {
+ conn clickhouse.Conn
+}
+
+func (c clickhouseConnWrapper) Close() error {
+ return c.conn.Close()
+}
+
+func (c clickhouseConnWrapper) Ping(ctx context.Context) error {
+ return c.conn.Ping(ctx)
+}
+
+func (c clickhouseConnWrapper) Stats() driver.Stats {
+ return c.conn.Stats()
+}
+
+func (c clickhouseConnWrapper) logComment(ctx context.Context) context.Context {
+ // Get the key-value pairs from context for log comment
+ kv := ctx.Value("log_comment")
+ if kv == nil {
+ return ctx
+ }
+
+ logCommentKVs, ok := kv.(map[string]string)
+ if !ok {
+ return ctx
+ }
+
+ logComment := ""
+ for k, v := range logCommentKVs {
+ logComment += fmt.Sprintf("%s=%s, ", k, v)
+ }
+ logComment = strings.TrimSuffix(logComment, ", ")
+
+ ctx = clickhouse.Context(ctx, clickhouse.WithSettings(clickhouse.Settings{
+ "log_comment": logComment,
+ }))
+ return ctx
+}
+
+func (c clickhouseConnWrapper) Query(ctx context.Context, query string, args ...interface{}) (driver.Rows, error) {
+ return c.conn.Query(c.logComment(ctx), query, args...)
+}
+
+func (c clickhouseConnWrapper) QueryRow(ctx context.Context, query string, args ...interface{}) driver.Row {
+ return c.conn.QueryRow(c.logComment(ctx), query, args...)
+}
+
+func (c clickhouseConnWrapper) Select(ctx context.Context, dest interface{}, query string, args ...interface{}) error {
+ return c.conn.Select(c.logComment(ctx), dest, query, args...)
+}
+
+func (c clickhouseConnWrapper) Exec(ctx context.Context, query string, args ...interface{}) error {
+ return c.conn.Exec(c.logComment(ctx), query, args...)
+}
+
+func (c clickhouseConnWrapper) AsyncInsert(ctx context.Context, query string, wait bool, args ...interface{}) error {
+ return c.conn.AsyncInsert(c.logComment(ctx), query, wait, args...)
+}
+
+func (c clickhouseConnWrapper) PrepareBatch(ctx context.Context, query string, opts ...driver.PrepareBatchOption) (driver.Batch, error) {
+ return c.conn.PrepareBatch(c.logComment(ctx), query, opts...)
+}
+
+func (c clickhouseConnWrapper) ServerVersion() (*driver.ServerVersion, error) {
+ return c.conn.ServerVersion()
+}
+
+func (c clickhouseConnWrapper) Contributors() []string {
+ return c.conn.Contributors()
+}
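
The wrapper above only forwards whatever the request pipeline has already stored under the `log_comment` context key; it does not create those key-value pairs itself. A minimal sketch of a caller attaching such a map before issuing a query (the `source` and `dashboard` keys are illustrative, not taken from this diff):

```go
package main

import (
	"context"
	"fmt"
)

func main() {
	// Attach key-value pairs under the "log_comment" context key so that
	// clickhouseConnWrapper can forward them as the ClickHouse log_comment
	// setting, and logComment() can include them in slow-query logging.
	ctx := context.WithValue(context.Background(), "log_comment", map[string]string{
		"source":    "api",      // illustrative key
		"dashboard": "overview", // illustrative key
	})

	// The wrapper formats the map as "source=api, dashboard=overview"
	// before calling conn.Query(ctx, ...).
	kv, _ := ctx.Value("log_comment").(map[string]string)
	fmt.Println(kv)
}
```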
diff --git a/pkg/query-service/app/formula.go b/pkg/query-service/app/formula.go
index 657a7bcad9..619ae15cb3 100644
--- a/pkg/query-service/app/formula.go
+++ b/pkg/query-service/app/formula.go
@@ -87,23 +87,6 @@ func joinAndCalculate(results []*v3.Result, uniqueLabelSet map[string]string, ex
}
}
- vars := expression.Vars()
- var doesNotHaveAllVars bool
- for _, v := range vars {
- if _, ok := seriesMap[v]; !ok {
- doesNotHaveAllVars = true
- break
- }
- }
-
- // There is no series that matches the label set from all queries
- // TODO: Does the lack of a series from one query mean that the result should be nil?
- // Or should we interpret the series as having a value of 0 at all timestamps?
- // The current behaviour with ClickHouse is to show no data
- if doesNotHaveAllVars {
- return nil, nil
- }
-
resultSeries := &v3.Series{
Labels: uniqueLabelSet,
}
@@ -120,6 +103,13 @@ func joinAndCalculate(results []*v3.Result, uniqueLabelSet map[string]string, ex
for queryName, series := range seriesMap {
values[queryName] = series[timestamp]
}
+
+ // If the value is not present in the values map, set it to 0
+ for _, v := range expression.Vars() {
+ if _, ok := values[v]; !ok {
+ values[v] = 0
+ }
+ }
newValue, err := expression.Evaluate(values)
if err != nil {
return nil, err
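
With the early return removed, a variable whose series has no point for a given label set or timestamp is now evaluated as 0, which is why the updated tests below expect `math.Inf(0)` for `A/B` whenever `B` contributes nothing. A minimal sketch of that evaluation using govaluate:

```go
package main

import (
	"fmt"

	"github.com/Knetic/govaluate"
)

func main() {
	expr, err := govaluate.NewEvaluableExpression("A / B")
	if err != nil {
		panic(err)
	}

	// B has no matching series, so joinAndCalculate substitutes 0 for it.
	values := map[string]interface{}{"A": 10.0, "B": 0.0}

	result, err := expr.Evaluate(values)
	if err != nil {
		panic(err)
	}
	fmt.Println(result) // +Inf — the value the updated tests expect
}
```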
diff --git a/pkg/query-service/app/formula_test.go b/pkg/query-service/app/formula_test.go
index 17a073ae32..365d794836 100644
--- a/pkg/query-service/app/formula_test.go
+++ b/pkg/query-service/app/formula_test.go
@@ -235,7 +235,39 @@ func TestProcessResults(t *testing.T) {
},
},
want: &v3.Result{
- Series: []*v3.Series{},
+ Series: []*v3.Series{
+ {
+ Labels: map[string]string{
+ "service_name": "frontend",
+ "operation": "GET /api",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 10,
+ },
+ {
+ Timestamp: 2,
+ Value: 20,
+ },
+ },
+ },
+ {
+ Labels: map[string]string{
+ "service_name": "redis",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 30,
+ },
+ {
+ Timestamp: 3,
+ Value: 40,
+ },
+ },
+ },
+ },
},
},
}
@@ -350,6 +382,21 @@ func TestProcessResultsErrorRate(t *testing.T) {
},
want: &v3.Result{
Series: []*v3.Series{
+ {
+ Labels: map[string]string{
+ "service_name": "frontend",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 0,
+ },
+ {
+ Timestamp: 2,
+ Value: 0,
+ },
+ },
+ },
{
Labels: map[string]string{
"service_name": "redis",
@@ -365,6 +412,21 @@ func TestProcessResultsErrorRate(t *testing.T) {
},
},
},
+ {
+ Labels: map[string]string{
+ "service_name": "route",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 0,
+ },
+ {
+ Timestamp: 2,
+ Value: 0,
+ },
+ },
+ },
},
},
},
@@ -906,132 +968,118 @@ func TestFormula(t *testing.T) {
},
},
},
- want: &v3.Result{},
- },
- {
- name: "Group keys on both sides are overlapping but do not match exactly",
- expression: "A/B",
- results: []*v3.Result{
- {
- QueryName: "A",
- Series: []*v3.Series{
- {
- Labels: map[string]string{
- "host_name": "ip-10-420-69-1",
- "state": "running",
- },
- Points: []v3.Point{
- {
- Timestamp: 1,
- Value: 10,
- },
- {
- Timestamp: 2,
- Value: 20,
- },
- {
- Timestamp: 4,
- Value: 40,
- },
- {
- Timestamp: 5,
- Value: 50,
- },
- {
- Timestamp: 7,
- Value: 70,
- },
- },
+ want: &v3.Result{
+ Series: []*v3.Series{
+ {
+ Labels: map[string]string{
+ "host_name": "ip-10-420-69-1",
+ "state": "running",
},
- {
- Labels: map[string]string{
- "host_name": "ip-10-420-69-2",
- "state": "idle",
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: math.Inf(0),
},
- Points: []v3.Point{
- {
- Timestamp: 1,
- Value: 12,
- },
- {
- Timestamp: 2,
- Value: 45,
- },
- {
- Timestamp: 3,
- Value: 30,
- },
- {
- Timestamp: 4,
- Value: 40,
- },
- {
- Timestamp: 5,
- Value: 50,
- },
+ {
+ Timestamp: 2,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 4,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 5,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 7,
+ Value: math.Inf(0),
},
},
},
- },
- {
- QueryName: "B",
- Series: []*v3.Series{
- {
- Labels: map[string]string{
- "os.type": "linux",
- "state": "running",
+ {
+ Labels: map[string]string{
+ "host_name": "ip-10-420-69-2",
+ "state": "idle",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: math.Inf(0),
},
- Points: []v3.Point{
- {
- Timestamp: 1,
- Value: 22,
- },
- {
- Timestamp: 2,
- Value: 65,
- },
- {
- Timestamp: 3,
- Value: 30,
- },
- {
- Timestamp: 4,
- Value: 40,
- },
- {
- Timestamp: 5,
- Value: 50,
- },
+ {
+ Timestamp: 2,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 3,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 4,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 5,
+ Value: math.Inf(0),
},
},
- {
- Labels: map[string]string{
- "os.type": "windows",
- "state": "busy",
+ },
+ {
+ Labels: map[string]string{
+ "host_name": "ip-10-420-69-1",
+ "state": "not_running_chalamet",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 0,
},
- Points: []v3.Point{
- {
- Timestamp: 1,
- Value: 22,
- },
- {
- Timestamp: 2,
- Value: 65,
- },
- {
- Timestamp: 4,
- Value: 40,
- },
- {
- Timestamp: 5,
- Value: 50,
- },
+ {
+ Timestamp: 2,
+ Value: 0,
+ },
+ {
+ Timestamp: 3,
+ Value: 0,
+ },
+ {
+ Timestamp: 4,
+ Value: 0,
+ },
+ {
+ Timestamp: 5,
+ Value: 0,
+ },
+ },
+ },
+ {
+ Labels: map[string]string{
+ "host_name": "ip-10-420-69-2",
+ "state": "busy",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 0,
+ },
+ {
+ Timestamp: 2,
+ Value: 0,
+ },
+ {
+ Timestamp: 4,
+ Value: 0,
+ },
+ {
+ Timestamp: 5,
+ Value: 0,
},
},
},
},
},
- want: &v3.Result{},
},
{
name: "Group keys on the left side are a superset of the right side",
@@ -1193,6 +1241,59 @@ func TestFormula(t *testing.T) {
},
},
},
+ {
+ Labels: map[string]string{
+ "host_name": "ip-10-420-69-2",
+ "state": "idle",
+ "os.type": "linux",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 2,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 3,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 4,
+ Value: math.Inf(0),
+ },
+ {
+ Timestamp: 5,
+ Value: math.Inf(0),
+ },
+ },
+ },
+ {
+ Labels: map[string]string{
+ "state": "busy",
+ "os.type": "linux",
+ },
+ Points: []v3.Point{
+ {
+ Timestamp: 1,
+ Value: 0,
+ },
+ {
+ Timestamp: 2,
+ Value: 0,
+ },
+ {
+ Timestamp: 4,
+ Value: 0,
+ },
+ {
+ Timestamp: 5,
+ Value: 0,
+ },
+ },
+ },
},
},
},
@@ -1454,18 +1555,22 @@ func TestFormula(t *testing.T) {
expression, err := govaluate.NewEvaluableExpression(tt.expression)
if err != nil {
t.Errorf("Error parsing expression: %v", err)
+ return
}
got, err := processResults(tt.results, expression)
if err != nil {
t.Errorf("Error processing results: %v", err)
+ return
}
if len(got.Series) != len(tt.want.Series) {
t.Errorf("processResults(): number of series - got = %v, want %v", len(got.Series), len(tt.want.Series))
+ return
}
for i := range got.Series {
if len(got.Series[i].Points) != len(tt.want.Series[i].Points) {
t.Errorf("processResults(): number of points - got = %v, want %v", len(got.Series[i].Points), len(tt.want.Series[i].Points))
+ return
}
for j := range got.Series[i].Points {
if got.Series[i].Points[j].Value != tt.want.Series[i].Points[j].Value {
diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go
index 784fdb940d..e8200635f7 100644
--- a/pkg/query-service/app/http_handler.go
+++ b/pkg/query-service/app/http_handler.go
@@ -8,6 +8,7 @@ import (
"fmt"
"io"
"net/http"
+ "regexp"
"strconv"
"strings"
"sync"
@@ -23,6 +24,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/agentConf"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
"go.signoz.io/signoz/pkg/query-service/app/explorer"
+ "go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logs"
logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
"go.signoz.io/signoz/pkg/query-service/app/metrics"
@@ -94,6 +96,8 @@ type APIHandler struct {
maxOpenConns int
dialTimeout time.Duration
+ IntegrationsController *integrations.Controller
+
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
// SetupCompleted indicates if SigNoz is ready for general use.
@@ -125,8 +129,12 @@ type APIHandlerOpts struct {
// feature flags querier
FeatureFlags interfaces.FeatureLookup
+ // Integrations
+ IntegrationsController *integrations.Controller
+
// Log parsing pipelines
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
+
// cache
Cache cache.Cache
@@ -174,6 +182,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
alertManager: alertManager,
ruleManager: opts.RuleManager,
featureFlags: opts.FeatureFlags,
+ IntegrationsController: opts.IntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
querier: querier,
querierV2: querierv2,
@@ -858,11 +867,15 @@ func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) {
func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
allDashboards, err := dashboards.GetDashboards(r.Context())
-
if err != nil {
RespondError(w, err, nil)
return
}
+
+ ic := aH.IntegrationsController
+ installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
+ if err != nil {
+ RespondError(w, err, nil)
+ return
+ }
+ allDashboards = append(allDashboards, installedIntegrationDashboards...)
+
tagsFromReq, ok := r.URL.Query()["tags"]
if !ok || len(tagsFromReq) == 0 || tagsFromReq[0] == "" {
aH.Respond(w, allDashboards)
@@ -1031,8 +1044,19 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
dashboard, apiError := dashboards.GetDashboard(r.Context(), uuid)
if apiError != nil {
- RespondError(w, apiError, nil)
- return
+ if apiError.Type() != model.ErrorNotFound {
+ RespondError(w, apiError, nil)
+ return
+ }
+
+ dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
+ r.Context(), uuid,
+ )
+ if apiError != nil {
+ RespondError(w, apiError, nil)
+ return
+ }
+
}
aH.Respond(w, dashboard)
@@ -1532,7 +1556,9 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request)
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
- result, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig)
+ var start, end time.Time
+
+ result, _, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig, start, end)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
@@ -2392,6 +2418,200 @@ func (aH *APIHandler) WriteJSON(w http.ResponseWriter, r *http.Request, response
w.Write(resp)
}
+// Integrations
+func (ah *APIHandler) RegisterIntegrationRoutes(router *mux.Router, am *AuthMiddleware) {
+ subRouter := router.PathPrefix("/api/v1/integrations").Subrouter()
+
+ subRouter.HandleFunc(
+ "/install", am.ViewAccess(ah.InstallIntegration),
+ ).Methods(http.MethodPost)
+
+ subRouter.HandleFunc(
+ "/uninstall", am.ViewAccess(ah.UninstallIntegration),
+ ).Methods(http.MethodPost)
+
+ // Used for polling for status in v0
+ subRouter.HandleFunc(
+ "/{integrationId}/connection_status", am.ViewAccess(ah.GetIntegrationConnectionStatus),
+ ).Methods(http.MethodGet)
+
+ subRouter.HandleFunc(
+ "/{integrationId}", am.ViewAccess(ah.GetIntegration),
+ ).Methods(http.MethodGet)
+
+ subRouter.HandleFunc(
+ "", am.ViewAccess(ah.ListIntegrations),
+ ).Methods(http.MethodGet)
+}
+
+func (ah *APIHandler) ListIntegrations(
+ w http.ResponseWriter, r *http.Request,
+) {
+ params := map[string]string{}
+ for k, values := range r.URL.Query() {
+ params[k] = values[0]
+ }
+
+ resp, apiErr := ah.IntegrationsController.ListIntegrations(
+ r.Context(), params,
+ )
+ if apiErr != nil {
+ RespondError(w, apiErr, "Failed to fetch integrations")
+ return
+ }
+ ah.Respond(w, resp)
+}
+
+func (ah *APIHandler) GetIntegration(
+ w http.ResponseWriter, r *http.Request,
+) {
+ integrationId := mux.Vars(r)["integrationId"]
+ integration, apiErr := ah.IntegrationsController.GetIntegration(
+ r.Context(), integrationId,
+ )
+ if apiErr != nil {
+ RespondError(w, apiErr, "Failed to fetch integration details")
+ return
+ }
+
+ ah.Respond(w, integration)
+}
+
+func (ah *APIHandler) GetIntegrationConnectionStatus(
+ w http.ResponseWriter, r *http.Request,
+) {
+ integrationId := mux.Vars(r)["integrationId"]
+ connectionTests, apiErr := ah.IntegrationsController.GetIntegrationConnectionTests(
+ r.Context(), integrationId,
+ )
+ if apiErr != nil {
+ RespondError(w, apiErr, "Failed to fetch integration connection tests")
+ return
+ }
+
+ lookbackSecondsStr := r.URL.Query().Get("lookback_seconds")
+ lookbackSeconds, err := strconv.ParseInt(lookbackSecondsStr, 10, 64)
+ if err != nil {
+ lookbackSeconds = 15 * 60
+ }
+
+ connectionStatus, apiErr := ah.calculateConnectionStatus(
+ r.Context(), connectionTests, lookbackSeconds,
+ )
+ if apiErr != nil {
+ RespondError(w, apiErr, "Failed to calculate integration connection status")
+ return
+ }
+
+ ah.Respond(w, connectionStatus)
+}
+
+func (ah *APIHandler) calculateConnectionStatus(
+ ctx context.Context,
+ connectionTests *integrations.IntegrationConnectionTests,
+ lookbackSeconds int64,
+) (*integrations.IntegrationConnectionStatus, *model.ApiError) {
+ result := &integrations.IntegrationConnectionStatus{}
+
+ if connectionTests.Logs != nil {
+ qrParams := &v3.QueryRangeParamsV3{
+ Start: time.Now().UnixMilli() - (lookbackSeconds * 1000),
+ End: time.Now().UnixMilli(),
+ CompositeQuery: &v3.CompositeQuery{
+ PanelType: v3.PanelTypeList,
+ QueryType: v3.QueryTypeBuilder,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ PageSize: 1,
+ Filters: connectionTests.Logs,
+ QueryName: "A",
+ DataSource: v3.DataSourceLogs,
+ Expression: "A",
+ AggregateOperator: v3.AggregateOperatorNoOp,
+ },
+ },
+ },
+ }
+ queryRes, err, _ := ah.querier.QueryRange(
+ ctx, qrParams, map[string]v3.AttributeKey{},
+ )
+ if err != nil {
+ return nil, model.InternalError(fmt.Errorf(
+ "could not query for integration connection status: %w", err,
+ ))
+ }
+ if len(queryRes) > 0 && queryRes[0].List != nil && len(queryRes[0].List) > 0 {
+ lastLog := queryRes[0].List[0]
+
+ resourceSummaryParts := []string{}
+ lastLogResourceAttribs := lastLog.Data["resources_string"]
+ if lastLogResourceAttribs != nil {
+ resourceAttribs, ok := lastLogResourceAttribs.(*map[string]string)
+ if !ok {
+ return nil, model.InternalError(fmt.Errorf(
+ "could not cast log resource attribs",
+ ))
+ }
+ for k, v := range *resourceAttribs {
+ resourceSummaryParts = append(resourceSummaryParts, fmt.Sprintf(
+ "%s=%s", k, v,
+ ))
+ }
+ }
+ lastLogResourceSummary := strings.Join(resourceSummaryParts, ", ")
+
+ result.Logs = &integrations.SignalConnectionStatus{
+ LastReceivedTsMillis: lastLog.Timestamp.UnixMilli(),
+ LastReceivedFrom: lastLogResourceSummary,
+ }
+ }
+ }
+
+ return result, nil
+}
+
+func (ah *APIHandler) InstallIntegration(
+ w http.ResponseWriter, r *http.Request,
+) {
+ req := integrations.InstallIntegrationRequest{}
+
+ err := json.NewDecoder(r.Body).Decode(&req)
+ if err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ integration, apiErr := ah.IntegrationsController.Install(
+ r.Context(), &req,
+ )
+ if apiErr != nil {
+ RespondError(w, apiErr, nil)
+ return
+ }
+
+ ah.Respond(w, integration)
+}
+
+func (ah *APIHandler) UninstallIntegration(
+ w http.ResponseWriter, r *http.Request,
+) {
+ req := integrations.UninstallIntegrationRequest{}
+
+ err := json.NewDecoder(r.Body).Decode(&req)
+ if err != nil {
+ RespondError(w, model.BadRequest(err), nil)
+ return
+ }
+
+ apiErr := ah.IntegrationsController.Uninstall(r.Context(), &req)
+ if apiErr != nil {
+ RespondError(w, apiErr, nil)
+ return
+ }
+
+ ah.Respond(w, map[string]interface{}{})
+}
+
// logs
func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *AuthMiddleware) {
subRouter := router.PathPrefix("/api/v1/logs").Subrouter()
@@ -2585,16 +2805,17 @@ func (ah *APIHandler) listLogsPipelines(ctx context.Context) (
*logparsingpipeline.PipelinesResponse, *model.ApiError,
) {
// get lateset agent config
+ latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, logPipelines)
- if err != nil {
- if err.Type() != model.ErrorNotFound {
- return nil, model.WrapApiError(err, "failed to get latest agent config version")
- } else {
- return nil, nil
- }
+ if err != nil && err.Type() != model.ErrorNotFound {
+ return nil, model.WrapApiError(err, "failed to get latest agent config version")
}
- payload, err := ah.LogsParsingPipelineController.GetPipelinesByVersion(ctx, lastestConfig.Version)
+ if lastestConfig != nil {
+ latestVersion = lastestConfig.Version
+ }
+
+ payload, err := ah.LogsParsingPipelineController.GetPipelinesByVersion(ctx, latestVersion)
if err != nil {
return nil, model.WrapApiError(err, "failed to get pipelines")
}
@@ -3129,12 +3350,19 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
applyMetricLimit(result, queryRangeParams)
+ sendQueryResultEvents(r, result, queryRangeParams)
+ // only adding applyFunctions instead of postProcess since expressions
+ // are executed in clickhouse directly and we wanted to add support for timeshift
+ if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+ applyFunctions(result, queryRangeParams)
+ }
+
resp := v3.QueryRangeResponse{
Result: result,
}
// This checks if the time for context to complete has exceeded.
- // it adds flag to notify the user of incomplete respone
+ // it adds flag to notify the user of incomplete response
select {
case <-ctx.Done():
resp.ContextTimeout = true
@@ -3146,6 +3374,50 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
aH.Respond(w, resp)
}
+func sendQueryResultEvents(r *http.Request, result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) {
+ referrer := r.Header.Get("Referer")
+
+ dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
+ if err != nil {
+ zap.S().Errorf("error while matching the referrer: %v", err)
+ }
+ alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
+ if err != nil {
+ zap.S().Errorf("error while matching the referrer: %v", err)
+ }
+
+ if alertMatched || dashboardMatched {
+
+ if len(result) > 0 && (len(result[0].Series) > 0 || len(result[0].List) > 0) {
+
+ userEmail, err := auth.GetEmailFromJwt(r.Context())
+ if err == nil {
+ signozLogsUsed, signozMetricsUsed, signozTracesUsed := telemetry.GetInstance().CheckSigNozSignals(queryRangeParams)
+ if signozLogsUsed || signozMetricsUsed || signozTracesUsed {
+ if dashboardMatched {
+ telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY, map[string]interface{}{
+ "queryType": queryRangeParams.CompositeQuery.QueryType,
+ "panelType": queryRangeParams.CompositeQuery.PanelType,
+ "tracesUsed": signozTracesUsed,
+ "logsUsed": signozLogsUsed,
+ "metricsUsed": signozMetricsUsed,
+ }, userEmail)
+ }
+ if alertMatched {
+ telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY, map[string]interface{}{
+ "queryType": queryRangeParams.CompositeQuery.QueryType,
+ "panelType": queryRangeParams.CompositeQuery.PanelType,
+ "tracesUsed": signozTracesUsed,
+ "logsUsed": signozLogsUsed,
+ "metricsUsed": signozMetricsUsed,
+ }, userEmail)
+ }
+ }
+ }
+ }
+ }
+}
+
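
A quick way to see which referrers produce these telemetry events is to run the same patterns against sample URLs (the hostnames below are illustrative):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	dashboardRe := regexp.MustCompile(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`)
	alertRe := regexp.MustCompile(`/alerts/(new|edit)(?:\?.*)?$`)

	for _, referrer := range []string{
		"https://signoz.example.com/dashboard/abc-123/edit?widgetId=42",
		"https://signoz.example.com/alerts/new",
		"https://signoz.example.com/services",
	} {
		fmt.Printf("%-60s dashboard=%-5v alert=%v\n",
			referrer, dashboardRe.MatchString(referrer), alertRe.MatchString(referrer))
	}
}
```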
func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
@@ -3304,7 +3576,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
RespondError(w, apiErrObj, errQuriesByName)
return
}
-
+ sendQueryResultEvents(r, result, queryRangeParams)
resp := v3.QueryRangeResponse{
Result: result,
}
@@ -3403,7 +3675,7 @@ func applyFunctions(results []*v3.Result, queryRangeParams *v3.QueryRangeParamsV
for idx, result := range results {
builderQueries := queryRangeParams.CompositeQuery.BuilderQueries
- if builderQueries != nil && (builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics) {
+ if builderQueries != nil {
functions := builderQueries[result.QueryName].Functions
for _, function := range functions {
diff --git a/pkg/query-service/app/integrations/builtin.go b/pkg/query-service/app/integrations/builtin.go
new file mode 100644
index 0000000000..a612e45ed3
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin.go
@@ -0,0 +1,225 @@
+package integrations
+
+import (
+ "context"
+ "embed"
+ "strings"
+
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "io/fs"
+ "path"
+
+ koanfJson "github.com/knadh/koanf/parsers/json"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "golang.org/x/exp/maps"
+ "golang.org/x/exp/slices"
+)
+
+type BuiltInIntegrations struct{}
+
+var builtInIntegrations map[string]IntegrationDetails
+
+func (bi *BuiltInIntegrations) list(ctx context.Context) (
+ []IntegrationDetails, *model.ApiError,
+) {
+ integrations := maps.Values(builtInIntegrations)
+ slices.SortFunc(integrations, func(i1, i2 IntegrationDetails) bool {
+ return i1.Id < i2.Id
+ })
+ return integrations, nil
+}
+
+func (bi *BuiltInIntegrations) get(
+ ctx context.Context, integrationIds []string,
+) (
+ map[string]IntegrationDetails, *model.ApiError,
+) {
+ result := map[string]IntegrationDetails{}
+ for _, iid := range integrationIds {
+ i, exists := builtInIntegrations[iid]
+ if exists {
+ result[iid] = i
+ }
+ }
+ return result, nil
+}
+
+//go:embed builtin_integrations/*
+var integrationFiles embed.FS
+
+func init() {
+ err := readBuiltIns()
+ if err != nil {
+ panic(fmt.Errorf("couldn't read builtin integrations: %w", err))
+ }
+}
+
+func readBuiltIns() error {
+ rootDirName := "builtin_integrations"
+ builtinDirs, err := fs.ReadDir(integrationFiles, rootDirName)
+ if err != nil {
+ return fmt.Errorf("couldn't list integrations dirs: %w", err)
+ }
+
+ builtInIntegrations = map[string]IntegrationDetails{}
+ for _, d := range builtinDirs {
+ if !d.IsDir() {
+ continue
+ }
+
+ integrationDir := path.Join(rootDirName, d.Name())
+ i, err := readBuiltInIntegration(integrationDir)
+ if err != nil {
+ return fmt.Errorf("couldn't parse integration %s from files: %w", d.Name(), err)
+ }
+
+ _, exists := builtInIntegrations[i.Id]
+ if exists {
+ return fmt.Errorf(
+ "duplicate integration for id %s at %s", i.Id, d.Name(),
+ )
+ }
+ builtInIntegrations[i.Id] = *i
+ }
+ return nil
+}
+
+func readBuiltInIntegration(dirpath string) (
+ *IntegrationDetails, error,
+) {
+ integrationJsonPath := path.Join(dirpath, "integration.json")
+
+ serializedSpec, err := integrationFiles.ReadFile(integrationJsonPath)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't find integration.json in %s: %w", dirpath, err)
+ }
+
+ integrationSpec, err := koanfJson.Parser().Unmarshal(serializedSpec)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "couldn't parse integration json from %s: %w", integrationJsonPath, err,
+ )
+ }
+
+ hydrated, err := hydrateFileUris(integrationSpec, dirpath)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "couldn't hydrate files referenced in integration %s: %w", integrationJsonPath, err,
+ )
+ }
+
+ hydratedSpec := hydrated.(map[string]interface{})
+ hydratedSpecJson, err := koanfJson.Parser().Marshal(hydratedSpec)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "couldn't serialize hydrated integration spec back to JSON %s: %w", integrationJsonPath, err,
+ )
+ }
+
+ var integration IntegrationDetails
+ err = json.Unmarshal(hydratedSpecJson, &integration)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "couldn't parse hydrated JSON spec read from %s: %w",
+ integrationJsonPath, err,
+ )
+ }
+
+ err = validateIntegration(integration)
+ if err != nil {
+ return nil, fmt.Errorf("invalid integration spec %s: %w", integration.Id, err)
+ }
+
+ integration.Id = "builtin-" + integration.Id
+
+ return &integration, nil
+}
+
+func validateIntegration(i IntegrationDetails) error {
+ // Validate dashboard data
+ seenDashboardIds := map[string]interface{}{}
+ for _, dd := range i.Assets.Dashboards {
+ did, exists := dd["id"]
+ if !exists {
+ return fmt.Errorf("id is required. not specified in dashboard titled %v", dd["title"])
+ }
+ dashboardId, ok := did.(string)
+ if !ok {
+ return fmt.Errorf("id must be string in dashboard titled %v", dd["title"])
+ }
+ if _, seen := seenDashboardIds[dashboardId]; seen {
+ return fmt.Errorf("multiple dashboards found with id %s", dashboardId)
+ }
+ seenDashboardIds[dashboardId] = nil
+ }
+
+ // TODO(Raj): Validate all parts of plugged in integrations
+
+ return nil
+}
+
+func hydrateFileUris(spec interface{}, basedir string) (interface{}, error) {
+ if specMap, ok := spec.(map[string]interface{}); ok {
+ result := map[string]interface{}{}
+ for k, v := range specMap {
+ hydrated, err := hydrateFileUris(v, basedir)
+ if err != nil {
+ return nil, err
+ }
+ result[k] = hydrated
+ }
+ return result, nil
+
+ } else if specSlice, ok := spec.([]interface{}); ok {
+ result := []interface{}{}
+ for _, v := range specSlice {
+ hydrated, err := hydrateFileUris(v, basedir)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, hydrated)
+ }
+ return result, nil
+
+ } else if maybeFileUri, ok := spec.(string); ok {
+ return readFileIfUri(maybeFileUri, basedir)
+ }
+
+ return spec, nil
+
+}
+
+func readFileIfUri(maybeFileUri string, basedir string) (interface{}, error) {
+ fileUriPrefix := "file://"
+ if !strings.HasPrefix(maybeFileUri, fileUriPrefix) {
+ return maybeFileUri, nil
+ }
+
+ relativePath := maybeFileUri[len(fileUriPrefix):]
+ fullPath := path.Join(basedir, relativePath)
+
+ fileContents, err := integrationFiles.ReadFile(fullPath)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't read referenced file: %w", err)
+ }
+ if strings.HasSuffix(maybeFileUri, ".md") {
+ return string(fileContents), nil
+
+ } else if strings.HasSuffix(maybeFileUri, ".json") {
+ parsed, err := koanfJson.Parser().Unmarshal(fileContents)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't parse referenced JSON file: %w", err)
+ }
+ return parsed, nil
+
+ } else if strings.HasSuffix(maybeFileUri, ".svg") {
+ base64Svg := base64.StdEncoding.EncodeToString(fileContents)
+ dataUri := fmt.Sprintf("data:image/svg+xml;base64,%s", base64Svg)
+ return dataUri, nil
+
+ }
+
+ return nil, fmt.Errorf("unsupported file type %s", maybeFileUri)
+}
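
`readFileIfUri` inlines `file://` references from `integration.json`: markdown files become plain strings, JSON files are parsed into objects, and SVG icons are embedded as base64 data URIs. A minimal sketch of the SVG case (the markup is a placeholder, not the real icon):

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Placeholder SVG bytes standing in for the embedded icon file.
	svg := []byte(`<svg xmlns="http://www.w3.org/2000/svg"></svg>`)

	// Same data-URI format string used by readFileIfUri for .svg files.
	dataUri := fmt.Sprintf("data:image/svg+xml;base64,%s",
		base64.StdEncoding.EncodeToString(svg))
	fmt.Println(dataUri)
}
```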
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/dashboards/overview.json b/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/dashboards/overview.json
new file mode 100644
index 0000000000..5b993cb2ca
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/dashboards/overview.json
@@ -0,0 +1,797 @@
+{
+ "id": "mongo-overview",
+ "description": "This dashboard provides a high-level overview of your MongoDB. It includes read/write performance, most-used replicas, collection metrics etc...",
+ "layout": [
+ {
+ "h": 3,
+ "i": "0c3d2b15-89be-4d62-a821-b26d93332ed3",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 3
+ },
+ {
+ "h": 3,
+ "i": "14504a3c-4a05-4d22-bab3-e22e94f51380",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 6
+ },
+ {
+ "h": 3,
+ "i": "dcfb3829-c3f2-44bb-907d-8dc8a6dc4aab",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 3
+ },
+ {
+ "h": 3,
+ "i": "bfc9e80b-02bf-4122-b3da-3dd943d35012",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "4c07a7d2-893a-46c2-bcdb-a19b6efeac3a",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "a5a64eec-1034-4aa6-8cb1-05673c4426c6",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 6
+ },
+ {
+ "h": 3,
+ "i": "503af589-ef4d-4fe3-8934-c8f7eb480d9a",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 9
+ }
+ ],
+ "name": "",
+ "tags": [
+ "mongo",
+ "database"
+ ],
+ "title": "Mongo overview",
+ "variables": {
+ "a2c21714-a814-4d31-9b56-7367c3208801": {
+ "allSelected": true,
+ "customValue": "",
+ "description": "List of hosts sending mongo metrics",
+ "id": "a2c21714-a814-4d31-9b56-7367c3208801",
+ "modificationUUID": "448e675a-4531-45b1-b434-a9ee809470d6",
+ "multiSelect": true,
+ "name": "host_name",
+ "order": 0,
+ "queryValue": "SELECT JSONExtractString(labels, 'host_name') AS host_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'mongodb_memory_usage'\nGROUP BY host_name",
+ "selectedValue": [
+ "Srikanths-MacBook-Pro.local"
+ ],
+ "showALLOption": true,
+ "sort": "ASC",
+ "textboxValue": "",
+ "type": "QUERY"
+ }
+ },
+ "widgets": [
+ {
+ "description": "Total number of operations",
+ "fillSpans": false,
+ "id": "4c07a7d2-893a-46c2-bcdb-a19b6efeac3a",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_operation_count--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_operation_count",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "a468a30b",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{operation}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "7da5d899-8b06-4139-9a89-47baf9551ff8",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Operations count",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "The total time spent performing operations.",
+ "fillSpans": false,
+ "id": "bfc9e80b-02bf-4122-b3da-3dd943d35012",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_operation_time--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_operation_time",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "31be3166",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{operation}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "2ca35957-894a-46ae-a2a6-95d7e400d8e1",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Total operations time",
+ "yAxisUnit": "ms"
+ },
+ {
+ "description": "The number of cache operations",
+ "fillSpans": false,
+ "id": "dcfb3829-c3f2-44bb-907d-8dc8a6dc4aab",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_cache_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_cache_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "01b45814",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "type--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "type",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{type}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "bb439198-dcf5-4767-b0d0-ab5785159b8d",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Cache operations",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "14504a3c-4a05-4d22-bab3-e22e94f51380",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_operation_latency_time--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_operation_latency_time",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "max",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "2e165319",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "read"
+ },
+ {
+ "id": "888e920b",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Latency",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "4a9cafe8-778b-476c-b825-c04e165bf285",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Read latency",
+ "yAxisUnit": "µs"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "a5a64eec-1034-4aa6-8cb1-05673c4426c6",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_operation_latency_time--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_operation_latency_time",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "max",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "53b37ca7",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "9862c46c",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "write"
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Latency",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "446827eb-a4f2-4ff3-966b-fb65288c983b",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Write latency",
+ "yAxisUnit": "µs"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "503af589-ef4d-4fe3-8934-c8f7eb480d9a",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_operation_latency_time--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_operation_latency_time",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "max",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "c33ad4b6",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "c70ecfd0",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "command"
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Latency",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "7b7b977d-0921-4552-8cfe-d82dfde63ef4",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Command latency",
+ "yAxisUnit": "µs"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "0c3d2b15-89be-4d62-a821-b26d93332ed3",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_network_io_receive--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_network_io_receive",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "5c9d7fe3",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Bytes received :: {{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "mongodb_network_io_transmit--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "mongodb_network_io_transmit",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "B",
+ "filters": {
+ "items": [
+ {
+ "id": "96520885",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Bytes transmitted :: {{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "B",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "41eea5bc-f9cf-45c2-92fb-ef226d6b540b",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Network IO",
+ "yAxisUnit": "bytes"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json
new file mode 100644
index 0000000000..e75db3ec5d
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/assets/pipelines/log-parser.json
@@ -0,0 +1,33 @@
+{
+ "id": "parse-default-mongo-access-log",
+ "name": "Parse default mongo access log",
+ "alias": "parse-default-mongo-access-log",
+ "description": "Parse standard mongo access log",
+ "enabled": true,
+ "filter": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "mongo"
+ }
+ ]
+ },
+ "config": [
+ {
+ "type": "grok_parser",
+ "id": "parse-body-grok",
+ "enabled": true,
+ "orderId": 1,
+ "name": "Parse Body",
+ "parse_to": "attributes",
+ "pattern": "%{GREEDYDATA}",
+ "parse_from": "body"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md
new file mode 100644
index 0000000000..35238c9e9a
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/configure-otel-collector.md
@@ -0,0 +1,74 @@
+### Configure otel collector
+
+#### Save collector config file
+
+Save the following collector config in a file named `mongo-collector-config.yaml`
+
+```yaml
+receivers:
+ mongodb:
+ # - For standalone MongoDB deployments this is the hostname and port of the mongod instance
+ # - For replica sets specify the hostnames and ports of the mongod instances that are in the replica set configuration. If the replica_set field is specified, nodes will be autodiscovered.
+ # - For a sharded MongoDB deployment, please specify a list of the mongos hosts.
+ hosts:
+ - endpoint: 127.0.0.1:27017
+ # If authentication is required, a user with clusterMonitor permissions can be provided here
+ username: monitoring
+ # If authentication is required, the password can be provided here.
+ password: ${env:MONGODB_PASSWORD}
+ collection_interval: 60s
+ # If TLS is enabled, the following fields can be used to configure the connection
+ tls:
+ insecure: true
+ insecure_skip_verify: true
+ # ca_file: /etc/ssl/certs/ca-certificates.crt
+ # cert_file: /etc/ssl/certs/mongodb.crt
+ # key_file: /etc/ssl/certs/mongodb.key
+ metrics:
+ mongodb.lock.acquire.count:
+ enabled: true
+ mongodb.lock.acquire.time:
+ enabled: true
+ mongodb.lock.acquire.wait_count:
+ enabled: true
+ mongodb.lock.deadlock.count:
+ enabled: true
+ mongodb.operation.latency.time:
+ enabled: true
+
+processors:
+ # enriches the data with additional host information
+ # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor
+ resourcedetection/system:
+ # add additional detectors if needed
+ detectors: ["system"]
+ system:
+ hostname_sources: ["os"]
+
+exporters:
+ # export to local collector
+ otlp/local:
+ endpoint: "localhost:4317"
+ tls:
+ insecure: true
+ # export to SigNoz cloud
+ otlp/signoz:
+ endpoint: "ingest.{region}.signoz.cloud:443"
+ tls:
+ insecure: false
+ headers:
+ "signoz-access-token": ""
+
+service:
+ pipelines:
+ metrics/mongodb:
+ receivers: [mongodb]
+ # note: remove this processor if the collector host is not running on the same host as the mongo instance
+ processors: [resourcedetection/system]
+ exporters: [otlp/local]
+
+```
+
+#### Use collector config file
+
+Run your collector with the added flag `--config mongo-collector-config.yaml`
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md
new file mode 100644
index 0000000000..5a844d6988
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/config/prerequisites.md
@@ -0,0 +1,22 @@
+### Prepare mongo for monitoring
+
+- Have a running mongodb instance
+- Have the monitoring user created
+- Have the monitoring user granted the necessary permissions
+
+MongoDB recommends setting up a least privilege user (LPU) with the `clusterMonitor` role in order to collect monitoring data.
+
+Run the following command to create a user with the necessary permissions.
+
+```bash
+use admin
+db.createUser(
+ {
+ user: "monitoring",
+ pwd: "",
+ roles: ["clusterMonitor"]
+ }
+);
+```
+
+Replace `` with a strong password and set it as the env var `MONGODB_PASSWORD`.
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/icon.svg b/pkg/query-service/app/integrations/builtin_integrations/mongo/icon.svg
new file mode 100644
index 0000000000..4ffedc6339
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/icon.svg
@@ -0,0 +1,28 @@
new file mode 100644
index 0000000000..c5d0fcefcd
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/integration.json
@@ -0,0 +1,88 @@
+{
+ "id": "mongo",
+ "title": "Mongo",
+ "description": "Monitor mongo using logs and metrics.",
+ "author": {
+ "name": "SigNoz",
+ "email": "integrations@signoz.io",
+ "homepage": "https://signoz.io"
+ },
+ "icon": "file://icon.svg",
+ "categories": [
+ "Database"
+ ],
+ "overview": "file://overview.md",
+ "configuration": [
+ {
+ "title": "Prerequisites",
+ "instructions": "file://config/prerequisites.md"
+ },
+ {
+ "title": "Configure Otel Collector",
+ "instructions": "file://config/configure-otel-collector.md"
+ }
+ ],
+ "assets": {
+ "logs": {
+ "pipelines": [
+ "file://assets/pipelines/log-parser.json"
+ ]
+ },
+ "dashboards": [
+ "file://assets/dashboards/overview.json"
+ ],
+ "alerts": []
+ },
+ "connection_tests": {
+ "logs": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "mongo"
+ }
+ ]
+ }
+ },
+ "data_collected": {
+ "logs": [
+ {
+ "name": "Request Method",
+ "path": "attributes[\"http.request.method\"]",
+ "type": "string",
+ "description": "HTTP method"
+ },
+ {
+ "name": "Request Path",
+ "path": "attributes[\"url.path\"]",
+ "type": "string",
+ "description": "path requested"
+ },
+ {
+ "name": "Response Status Code",
+ "path": "attributes[\"http.response.status_code\"]",
+ "type": "int",
+ "description": "HTTP response code"
+ }
+ ],
+ "metrics": [
+ {
+ "name": "http.server.request.duration",
+ "type": "Histogram",
+ "unit": "s",
+ "description": "Duration of HTTP server requests"
+ },
+ {
+ "name": "http.server.active_requests",
+ "type": "UpDownCounter",
+ "unit": "{ request }",
+ "description": "Number of active HTTP server requests"
+ }
+ ]
+ }
+}
diff --git a/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md b/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md
new file mode 100644
index 0000000000..c7a84541ad
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/mongo/overview.md
@@ -0,0 +1,3 @@
+### Monitor MongoDB with SigNoz
+
+Collect key MongoDB metrics and parse your MongoDB logs
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json
new file mode 100644
index 0000000000..e9521e45ff
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/assets/pipelines/log-parser.json
@@ -0,0 +1,62 @@
+{
+ "id": "parse-default-nginx-access-log",
+ "name": "Parse default nginx access log",
+ "alias": "parse-default-nginx-access-log",
+ "description": "Parse standard nginx access log",
+ "enabled": true,
+ "filter": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "nginx"
+ }
+ ]
+ },
+ "config": [
+ {
+ "type": "grok_parser",
+ "id": "parse-body-grok",
+ "enabled": true,
+ "orderId": 1,
+ "name": "Parse Body",
+ "parse_to": "attributes",
+ "pattern": "%{IP:client.address} - %{USERNAME:enduser.id} \\[%{HTTPDATE:time.local}\\] \"((%{WORD:http.method} %{DATA:http.path}(\\?%{DATA:http.query})? %{WORD:network.protocol.name}/%{NOTSPACE:network.protocol.version})|%{DATA})\" %{INT:http.response.status_code:int} %{INT:http.request.body.bytes:int} \"%{NOTSPACE:http.referer}\" \"%{DATA:http.user.agent}\" %{INT:http.request.bytes:int} %{NUMBER:http.request.time:float} \\[%{DATA:proxy.upstream.name}?\\] \\[%{DATA:proxy.alternative.upstream.name}?\\] ((%{IP:network.peer.address}:%{INT:network.peer.port:int})|%{DATA})? (%{INT:http.response.bytes:int}|-)? (%{NUMBER:http.response.time:float}|-)? (%{NUMBER:network.peer.status.code:int}|-)? %{NOTSPACE:request.id}",
+ "parse_from": "body"
+ },
+ {
+ "type": "severity_parser",
+ "id": "parse-sev",
+ "enabled": true,
+ "orderId": 2,
+ "name": "Set Severity",
+ "parse_from": "attributes[\"http.response.status_code\"]",
+ "mapping": {
+ "debug": [
+ "1xx"
+ ],
+ "error": [
+ "4xx"
+ ],
+ "fatal": [
+ "5xx"
+ ],
+ "info": [
+ "2xx"
+ ],
+ "trace": [
+ "trace"
+ ],
+ "warn": [
+ "3xx"
+ ]
+ },
+ "overwrite_text": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md
new file mode 100644
index 0000000000..f5c22e16cb
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/configure-otel-collector.md
@@ -0,0 +1 @@
+### Configure otel collector
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md
new file mode 100644
index 0000000000..2677d09b6b
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/config/prepare-nginx.md
@@ -0,0 +1 @@
+### Prepare nginx for observability
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/icon.svg b/pkg/query-service/app/integrations/builtin_integrations/nginx/icon.svg
new file mode 100644
index 0000000000..5687615020
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/icon.svg
@@ -0,0 +1,8 @@
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json
new file mode 100644
index 0000000000..558f9780d0
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/integration.json
@@ -0,0 +1,87 @@
+{
+ "id": "nginx",
+ "title": "Nginx",
+ "description": "Monitor nginx using logs and metrics.",
+ "author": {
+ "name": "SigNoz",
+ "email": "integrations@signoz.io",
+ "homepage": "https://signoz.io"
+ },
+ "icon": "file://icon.svg",
+ "categories": [
+ "Ingress",
+ "HTTP"
+ ],
+ "overview": "file://overview.md",
+ "configuration": [
+ {
+ "title": "Prepare Nginx",
+ "instructions": "file://config/prepare-nginx.md"
+ },
+ {
+ "title": "Configure Otel Collector",
+ "instructions": "file://config/configure-otel-collector.md"
+ }
+ ],
+ "assets": {
+ "logs": {
+ "pipelines": [
+ "file://assets/pipelines/log-parser.json"
+ ]
+ },
+ "dashboards": null,
+ "alerts": null
+ },
+ "connection_tests": {
+ "logs": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "nginx"
+ }
+ ]
+ }
+ },
+ "data_collected": {
+ "logs": [
+ {
+ "name": "Request Method",
+ "path": "attributes[\"http.request.method\"]",
+ "type": "string",
+ "description": "HTTP method"
+ },
+ {
+ "name": "Request Path",
+ "path": "attributes[\"url.path\"]",
+ "type": "string",
+ "description": "path requested"
+ },
+ {
+ "name": "Response Status Code",
+ "path": "attributes[\"http.response.status_code\"]",
+ "type": "int",
+ "description": "HTTP response code"
+ }
+ ],
+ "metrics": [
+ {
+ "name": "http.server.request.duration",
+ "type": "Histogram",
+ "unit": "s",
+ "description": "Duration of HTTP server requests"
+ },
+ {
+ "name": "http.server.active_requests",
+ "type": "UpDownCounter",
+ "unit": "{ request }",
+ "description": "Number of active HTTP server requests"
+ }
+ ]
+ }
+}
diff --git a/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md b/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md
new file mode 100644
index 0000000000..dac6354fc0
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/nginx/overview.md
@@ -0,0 +1,3 @@
+### Monitor Nginx with SigNoz
+
+Parse your Nginx logs and collect key metrics.
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/dashboards/overview.json b/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/dashboards/overview.json
new file mode 100644
index 0000000000..944e06b03f
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/dashboards/overview.json
@@ -0,0 +1,1869 @@
+{
+ "id": "postgres-overview",
+ "description": "This dashboard provides a high-level overview of your PostgreSQL databases. It includes replication, locks, and throughput etc...",
+ "layout": [
+ {
+ "h": 3,
+ "i": "9552123d-6265-48a7-8624-3f4a3fc3c9c0",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 18
+ },
+ {
+ "h": 3,
+ "i": "d7838815-4f5b-4454-86fd-f658b201f3a9",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 15
+ },
+ {
+ "h": 3,
+ "i": "f9a6f683-7455-4643-acc8-467cc5ea52cf",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 15
+ },
+ {
+ "h": 3,
+ "i": "8638a199-20a0-4255-b0a2-3b1ba06c485b",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 3,
+ "y": 12
+ },
+ {
+ "h": 3,
+ "i": "e9341e70-ccb3-47fc-af95-56ba8942c4f2",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 9
+ },
+ {
+ "h": 3,
+ "i": "6b700035-e3c2-4c48-99fa-ebfd6202eed3",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 9
+ },
+ {
+ "h": 3,
+ "i": "bada7864-1d23-4d49-a868-c6b8a93c738f",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 3,
+ "y": 6
+ },
+ {
+ "h": 3,
+ "i": "191d09a6-40b0-4de8-a5b0-aa4254454b99",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "fa941c00-ce19-49cc-baf2-c38598767dee",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "114fcf80-e1de-4716-b1aa-0e0738dba10e",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 3
+ },
+ {
+ "h": 3,
+ "i": "667428ef-9b9a-4e91-bd1e-938e0dc1ff32",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 3
+ }
+ ],
+ "name": "",
+ "tags": [
+ "postgres",
+ "database"
+ ],
+ "title": "Postgres overview",
+ "variables": {
+ "4250ef7b-8f42-4a24-902a-a764d070b92d": {
+ "allSelected": true,
+ "customValue": "",
+ "description": "List of hosts sending Postgres metrics",
+ "id": "4250ef7b-8f42-4a24-902a-a764d070b92d",
+ "key": "4250ef7b-8f42-4a24-902a-a764d070b92d",
+ "modificationUUID": "4427b655-c8d2-40ce-84ed-7cb058bd3041",
+ "multiSelect": true,
+ "name": "host_name",
+ "order": 0,
+ "queryValue": "SELECT JSONExtractString(labels, 'host_name') AS host_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'postgresql_operations'\nGROUP BY host_name",
+ "selectedValue": [
+ "Srikanths-MacBook-Pro.local"
+ ],
+ "showALLOption": true,
+ "sort": "ASC",
+ "textboxValue": "",
+ "type": "QUERY"
+ },
+ "8ecaee70-640f-46fd-83d9-a4fd18bc66e6": {
+ "customValue": "",
+ "description": "List of tables",
+ "id": "8ecaee70-640f-46fd-83d9-a4fd18bc66e6",
+ "modificationUUID": "a51321cd-47a2-470a-8df4-372e5bb36f2c",
+ "multiSelect": true,
+ "name": "table_name",
+ "order": 0,
+ "queryValue": "SELECT JSONExtractString(labels, 'postgresql_table_name') AS table_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'postgresql_operations' AND JSONExtractString(labels, 'postgresql_database_name') IN {{.db_name}}\nGROUP BY table_name",
+ "showALLOption": true,
+ "sort": "ASC",
+ "textboxValue": "",
+ "type": "QUERY",
+ "selectedValue": [
+ "public.activations",
+ "public.licenses",
+ "public.plans",
+ "public.subscription_items",
+ "public.subscriptions",
+ "public.trials",
+ "public.usage"
+ ],
+ "allSelected": true
+ },
+ "c66d1581-e5e1-440d-8ff6-ebcf078ab6dd": {
+ "allSelected": true,
+ "customValue": "",
+ "description": "List of databases",
+ "id": "c66d1581-e5e1-440d-8ff6-ebcf078ab6dd",
+ "key": "c66d1581-e5e1-440d-8ff6-ebcf078ab6dd",
+ "modificationUUID": "564a3f43-98f8-4189-b5e4-dcb518d73852",
+ "multiSelect": true,
+ "name": "db_name",
+ "order": 0,
+ "queryValue": "SELECT JSONExtractString(labels, 'postgresql_database_name') AS db_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'postgresql_operations'\nGROUP BY db_name",
+ "selectedValue": [
+ "postgres"
+ ],
+ "showALLOption": true,
+ "sort": "DISABLED",
+ "textboxValue": "",
+ "type": "QUERY"
+ }
+ },
+ "widgets": [
+ {
+ "description": "The average number of db insert operations.",
+ "fillSpans": false,
+ "id": "191d09a6-40b0-4de8-a5b0-aa4254454b99",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "c1dff946",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "ins"
+ },
+ {
+ "id": "0cd6dc8f",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "2e60e171",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "bf48ac4c-bc0c-41a0-87f4-6f8ae7888d1f",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Inserts",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "The average number of db update operations.",
+ "fillSpans": false,
+ "id": "fa941c00-ce19-49cc-baf2-c38598767dee",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "98463ec9",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "upd"
+ },
+ {
+ "id": "47db4e8e",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "64020332",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "34a6ac3a-b7f6-4b5f-a084-a44378033d82",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Updates",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "The average number of db delete operations.",
+ "fillSpans": false,
+ "id": "114fcf80-e1de-4716-b1aa-0e0738dba10e",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "62738de4",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "del"
+ },
+ {
+ "id": "d18471e2",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "9d153899",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "734393d1-76ed-4f4f-bef8-0a91d27ebec4",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Deleted",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "The average number of db heap-only update operations.",
+ "fillSpans": false,
+ "id": "667428ef-9b9a-4e91-bd1e-938e0dc1ff32",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "a91e35c4",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "hot_upd"
+ },
+ {
+ "id": "2b419378",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "7b4a29a2",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "f43c2d19-4abc-4f5e-881b-db7add4a870a",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Heap updates",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "bada7864-1d23-4d49-a868-c6b8a93c738f",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "table",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "d6aeccf7",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "ins"
+ },
+ {
+ "id": "4004a127",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "ee4e9344",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Inserted",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "B",
+ "filters": {
+ "items": [
+ {
+ "id": "a12cceed",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "upd"
+ },
+ {
+ "id": "11735104",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "2d542482",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Updated",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "B",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_operations--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_operations",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "C",
+ "filters": {
+ "items": [
+ {
+ "id": "1bca3e46",
+ "key": {
+ "dataType": "string",
+ "id": "operation--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "operation",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "del"
+ },
+ {
+ "id": "3631755d",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "44ffc874",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Deleted",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "C",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "5056105b-1c30-4d27-8187-64457f2a1ec6",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Operation by database",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "The number of database locks.",
+ "fillSpans": false,
+ "id": "6b700035-e3c2-4c48-99fa-ebfd6202eed3",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_database_locks--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_database_locks",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "527a3124",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "mode--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "mode",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{mode}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "877b0df3-9ae3-455e-ad27-bc3aa40b3f4c",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Locks by lock mode",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "e9341e70-ccb3-47fc-af95-56ba8942c4f2",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_deadlocks--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_deadlocks",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "ff14f172",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "efb83717",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "5056105b-1c30-4d27-8187-64457f2a1ec6",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Deadlocks count",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "8638a199-20a0-4255-b0a2-3b1ba06c485b",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_backends--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_backends",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "ed335b00",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "20d2a4c5",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_database_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "205b99a0-2f1c-4bd2-9ba0-cc2da6ef247a",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Connections per db",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "f9a6f683-7455-4643-acc8-467cc5ea52cf",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_rows--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_rows",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "70786905",
+ "key": {
+ "dataType": "string",
+ "id": "state--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "state",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "dead"
+ },
+ {
+ "id": "810e39a9",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "3e5ef839",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "9e913563",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Dead rows",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "cc7452c8-118b-4676-959e-7062bafc41ee",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Dead rows",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "d7838815-4f5b-4454-86fd-f658b201f3a9",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_index_scans--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_index_scans",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "da04d826",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "590332a7",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "171b9516",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_index_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_index_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "{{postgresql_index_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "2c6b630b-8bd9-4001-815b-f2b1f439a9dd",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Index scans by index",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "9552123d-6265-48a7-8624-3f4a3fc3c9c0",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "table",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_rows--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_rows",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "83f9cab9",
+ "key": {
+ "dataType": "string",
+ "id": "state--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "state",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "dead"
+ },
+ {
+ "id": "2a0284c2",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "c2aaf758",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "a603fda9",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Dead rows",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_rows--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_rows",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "B",
+ "filters": {
+ "items": [
+ {
+ "id": "82f1f0f5",
+ "key": {
+ "dataType": "string",
+ "id": "state--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "state",
+ "type": "tag"
+ },
+ "op": "=",
+ "value": "live"
+ },
+ {
+ "id": "14de7a06",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "0a88a27a",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "4417218d",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Live rows",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "B",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_index_scans--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_index_scans",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "C",
+ "filters": {
+ "items": [
+ {
+ "id": "22795c15",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "d7e7c193",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "d3ae1dbe",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Index scans",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "C",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "postgresql_table_size--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "postgresql_table_size",
+ "type": "Sum"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "D",
+ "filters": {
+ "items": [
+ {
+ "id": "48c436ab",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ },
+ {
+ "id": "cc617789",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_database_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_database_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.db_name}}"
+ ]
+ },
+ {
+ "id": "b4029d50",
+ "key": {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.table_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "postgresql_table_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "postgresql_table_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Table size",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "D",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "26a9dcbf-4fc7-4ddd-b786-2078def1f462",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Table stats",
+ "yAxisUnit": "none"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/pipelines/log-parser.json
new file mode 100644
index 0000000000..776565861c
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/assets/pipelines/log-parser.json
@@ -0,0 +1,33 @@
+{
+ "id": "parse-default-postgres-access-log",
+ "name": "Parse default postgres access log",
+ "alias": "parse-default-postgres-access-log",
+ "description": "Parse standard postgres access log",
+ "enabled": true,
+ "filter": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "postgres"
+ }
+ ]
+ },
+ "config": [
+ {
+ "type": "grok_parser",
+ "id": "parse-body-grok",
+ "enabled": true,
+ "orderId": 1,
+ "name": "Parse Body",
+ "parse_to": "attributes",
+ "pattern": "%{GREEDYDATA}",
+ "parse_from": "body"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md
new file mode 100644
index 0000000000..d0dcf896c1
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/configure-otel-collector.md
@@ -0,0 +1,72 @@
+### Configure otel collector
+
+#### Save collector config file
+
+Save the following collector config in a file named `postgres-collector-config.yaml`
+
+```yaml
+receivers:
+ postgresql:
+ # The endpoint of the postgresql server. Whether using TCP or Unix sockets, this value should be host:port. If transport is set to unix, the endpoint will internally be translated from host:port to /host.s.PGSQL.port
+ endpoint: "localhost:5432"
+ # The frequency at which to collect metrics from the Postgres instance.
+ collection_interval: 60s
+ # The username used to access the postgres instance
+ username: monitoring
+ # The password used to access the postgres instance
+ password: ${env:POSTGRESQL_PASSWORD}
+ # The list of databases for which the receiver will attempt to collect statistics. If an empty list is provided, the receiver will attempt to collect statistics for all non-template databases
+ databases: []
+ # List of databases which will be excluded when collecting statistics.
+ exclude_databases: []
+ # # Defines the network to use for connecting to the server. Valid Values are `tcp` or `unix`
+ # transport: tcp
+ tls:
+ # set to false if SSL is enabled on the server
+ insecure: true
+ # ca_file: /etc/ssl/certs/ca-certificates.crt
+ # cert_file: /etc/ssl/certs/postgres.crt
+ # key_file: /etc/ssl/certs/postgres.key
+ metrics:
+ postgresql.database.locks:
+ enabled: true
+ postgresql.deadlocks:
+ enabled: true
+ postgresql.sequential_scans:
+ enabled: true
+
+processors:
+ # enriches the data with additional host information
+ # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor
+ resourcedetection/system:
+ # add additional detectors if needed
+ detectors: ["system"]
+ system:
+ hostname_sources: ["os"]
+
+exporters:
+ # export to local collector
+ otlp/local:
+ endpoint: "localhost:4317"
+ tls:
+ insecure: true
+ # export to SigNoz cloud
+ otlp/signoz:
+ endpoint: "ingest.{region}.signoz.cloud:443"
+ tls:
+ insecure: false
+ headers:
+ "signoz-access-token": ""
+
+service:
+ pipelines:
+ metrics/postgresql:
+ receivers: [postgresql]
+ # note: remove this processor if the collector host is not running on the same host as the postgres instance
+ processors: [resourcedetection/system]
+ exporters: [otlp/local]
+```
+
+#### Use collector config file
+
+Run your collector with the additional flag `--config postgres-collector-config.yaml`.
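+
+For example, if you run the OpenTelemetry Collector Contrib distribution as a standalone binary, the command could look like the following (the binary name and path are assumptions; adjust them for your installation):
+
+```bash
+# start the collector with the saved Postgres config
+./otelcol-contrib --config postgres-collector-config.yaml
+```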
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md
new file mode 100644
index 0000000000..519509e4e2
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/config/prerequisites.md
@@ -0,0 +1,26 @@
+### Prepare postgres for monitoring
+
+- Have a running PostgreSQL instance
+- Have the monitoring user created
+- Have the monitoring user granted the necessary permissions
+
+This receiver supports PostgreSQL versions 9.6 and above.
+
+For PostgreSQL versions 10+, run:
+
+```sql
+create user monitoring with password '';
+grant pg_monitor to monitoring;
+grant SELECT ON pg_stat_database to monitoring;
+```
+
+For PostgreSQL versions >= 9.6 and <10, run:
+
+```sql
+create user monitoring with password '';
+grant SELECT ON pg_stat_database to monitoring;
+```
+
+Set the following environment variables:
+
+- POSTGRESQL_PASSWORD
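+
+For example, when starting the collector from a shell, the password can be exported beforehand (the value shown is a placeholder):
+
+```bash
+# password of the monitoring user created above,
+# referenced as ${env:POSTGRESQL_PASSWORD} in the collector config
+export POSTGRESQL_PASSWORD="<monitoring-user-password>"
+```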
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/icon.svg b/pkg/query-service/app/integrations/builtin_integrations/postgres/icon.svg
new file mode 100644
index 0000000000..32bcf493ef
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/icon.svg
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json
new file mode 100644
index 0000000000..9b3da798bc
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/integration.json
@@ -0,0 +1,88 @@
+{
+ "id": "postgres",
+ "title": "PostgreSQL",
+ "description": "Monitor postgres using logs and metrics.",
+ "author": {
+ "name": "SigNoz",
+ "email": "integrations@signoz.io",
+ "homepage": "https://signoz.io"
+ },
+ "icon": "file://icon.svg",
+ "categories": [
+ "Database"
+ ],
+ "overview": "file://overview.md",
+ "configuration": [
+ {
+ "title": "Prerequisites",
+ "instructions": "file://config/prerequisites.md"
+ },
+ {
+ "title": "Configure Otel Collector",
+ "instructions": "file://config/configure-otel-collector.md"
+ }
+ ],
+ "assets": {
+ "logs": {
+ "pipelines": [
+ "file://assets/pipelines/log-parser.json"
+ ]
+ },
+ "dashboards": [
+ "file://assets/dashboards/overview.json"
+ ],
+ "alerts": []
+ },
+ "connection_tests": {
+ "logs": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "postgres"
+ }
+ ]
+ }
+ },
+ "data_collected": {
+ "logs": [
+ {
+ "name": "Request Method",
+ "path": "attributes[\"http.request.method\"]",
+ "type": "string",
+ "description": "HTTP method"
+ },
+ {
+ "name": "Request Path",
+ "path": "attributes[\"url.path\"]",
+ "type": "string",
+ "description": "path requested"
+ },
+ {
+ "name": "Response Status Code",
+ "path": "attributes[\"http.response.status_code\"]",
+ "type": "int",
+ "description": "HTTP response code"
+ }
+ ],
+ "metrics": [
+ {
+ "name": "http.server.request.duration",
+ "type": "Histogram",
+ "unit": "s",
+ "description": "Duration of HTTP server requests"
+ },
+ {
+ "name": "http.server.active_requests",
+ "type": "UpDownCounter",
+ "unit": "{ request }",
+ "description": "Number of active HTTP server requests"
+ }
+ ]
+ }
+}
diff --git a/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md b/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md
new file mode 100644
index 0000000000..4af57e6b20
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/postgres/overview.md
@@ -0,0 +1,3 @@
+### Monitor Postgres with SigNoz
+
+Parse your Postgres logs and collect key metrics.
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/assets/dashboards/overview.json b/pkg/query-service/app/integrations/builtin_integrations/redis/assets/dashboards/overview.json
new file mode 100644
index 0000000000..3fd2c255ce
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/assets/dashboards/overview.json
@@ -0,0 +1,924 @@
+{
+ "id": "redis-overview",
+ "description": "This dashboard shows the Redis instance overview. It includes latency, hit/miss rate, connections, and memory information.\n",
+ "layout": [
+ {
+ "h": 3,
+ "i": "d4c164bc-8fc2-4dbc-aadd-8d17479ca649",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 9
+ },
+ {
+ "h": 3,
+ "i": "2fbaef0d-3cdb-4ce3-aa3c-9bbbb41786d9",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 3,
+ "y": 6
+ },
+ {
+ "h": 3,
+ "i": "f5ee1511-0d2b-4404-9ce0-e991837decc2",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 3
+ },
+ {
+ "h": 3,
+ "i": "b19c7058-b806-4ea2-974a-ca555b168991",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 3
+ },
+ {
+ "h": 3,
+ "i": "bf0deeeb-e926-4234-944c-82bacd96af47",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "a77227c7-16f5-4353-952e-b183c715a61c",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "h": 3,
+ "i": "9698cee2-b1f3-4c0b-8c9f-3da4f0e05f17",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 9
+ },
+ {
+ "h": 3,
+ "i": "64a5f303-d7db-44ff-9a0e-948e5c653320",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 0,
+ "y": 12
+ },
+ {
+ "h": 3,
+ "i": "3e80a918-69af-4c9a-bc57-a94e1d41b05c",
+ "moved": false,
+ "static": false,
+ "w": 6,
+ "x": 6,
+ "y": 12
+ }
+ ],
+ "name": "",
+ "tags": [
+ "redis",
+ "database"
+ ],
+ "title": "Redis overview",
+ "variables": {
+ "94f19b3c-ad9f-4b47-a9b2-f312c09fa965": {
+ "allSelected": true,
+ "customValue": "",
+ "description": "List of hosts sending Redis metrics",
+ "id": "94f19b3c-ad9f-4b47-a9b2-f312c09fa965",
+ "key": "94f19b3c-ad9f-4b47-a9b2-f312c09fa965",
+ "modificationUUID": "4c5b0c03-9cbc-425b-8d8e-7152e5c39ba8",
+ "multiSelect": true,
+ "name": "host_name",
+ "order": 0,
+ "queryValue": "SELECT JSONExtractString(labels, 'host_name') AS host_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'redis_cpu_time'\nGROUP BY host_name",
+ "selectedValue": [
+ "Srikanths-MacBook-Pro.local"
+ ],
+ "showALLOption": true,
+ "sort": "ASC",
+ "textboxValue": "",
+ "type": "QUERY"
+ }
+ },
+ "widgets": [
+ {
+ "description": "Rate successful lookup of keys in the main dictionary",
+ "fillSpans": false,
+ "id": "a77227c7-16f5-4353-952e-b183c715a61c",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_keyspace_hits--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_keyspace_hits",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "e99669ea",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Hit/s across all hosts",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "42c9c117-bfaf-49f7-b528-aad099392295",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Hits/s",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "Number of clients pending on a blocking call",
+ "fillSpans": false,
+ "id": "bf0deeeb-e926-4234-944c-82bacd96af47",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_clients_blocked--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_clients_blocked",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "97247f25",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Blocked clients across all hosts",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "b77a9e11-fb98-4a95-88a8-c3ad25c14369",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Clients blocked",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "b19c7058-b806-4ea2-974a-ca555b168991",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "",
+ "id": "redis_db_keys------false",
+ "isColumn": false,
+ "key": "redis_db_keys",
+ "type": ""
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "b77a9e11-fb98-4a95-88a8-c3ad25c14369",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Keyspace Keys",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "Number of changes since the last dump",
+ "fillSpans": false,
+ "id": "f5ee1511-0d2b-4404-9ce0-e991837decc2",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_rdb_changes_since_last_save--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_rdb_changes_since_last_save",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "d4aef346",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "Number of unsaved changes",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "32cedddf-606d-4de1-8c1d-4b7049e6430c",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Unsaved changes",
+ "yAxisUnit": "none"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "2fbaef0d-3cdb-4ce3-aa3c-9bbbb41786d9",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_commands--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_commands",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "458dc402",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [],
+ "having": [],
+ "legend": "ops/s",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "c70de4dd-a68a-42df-a249-6610c296709c",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Command/s",
+ "yAxisUnit": "ops"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "d4c164bc-8fc2-4dbc-aadd-8d17479ca649",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_memory_used--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_memory_used",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "394a537e",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Used::{{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ },
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_maxmemory--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_maxmemory",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "max",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "B",
+ "filters": {
+ "items": [
+ {
+ "id": "0c0754da",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Max::{{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "B",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "2f47df76-f09e-4152-8623-971f0fe66bfe",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Memory usage",
+ "yAxisUnit": "bytes"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "9698cee2-b1f3-4c0b-8c9f-3da4f0e05f17",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_memory_rss--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_memory_rss",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "sum",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "4dc9ae49",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Rss::{{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "fddd043c-1385-481c-9f4c-381f261e1dd9",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "RSS Memory",
+ "yAxisUnit": "bytes"
+ },
+ {
+ "description": "",
+ "fillSpans": false,
+ "id": "64a5f303-d7db-44ff-9a0e-948e5c653320",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_memory_fragmentation_ratio--float64--Gauge--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_memory_fragmentation_ratio",
+ "type": "Gauge"
+ },
+ "aggregateOperator": "avg",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "79dc25f3",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Rss::{{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "3e802b07-0249-4d79-a5c7-6580ab535ad0",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Fragmentation ratio",
+ "yAxisUnit": "short"
+ },
+ {
+ "description": "Number of evicted keys due to maxmemory limit",
+ "fillSpans": false,
+ "id": "3e80a918-69af-4c9a-bc57-a94e1d41b05c",
+ "isStacked": false,
+ "nullZeroValues": "zero",
+ "opacity": "1",
+ "panelTypes": "graph",
+ "query": {
+ "builder": {
+ "queryData": [
+ {
+ "aggregateAttribute": {
+ "dataType": "float64",
+ "id": "redis_keys_evicted--float64--Sum--true",
+ "isColumn": true,
+ "isJSON": false,
+ "key": "redis_keys_evicted",
+ "type": "Sum"
+ },
+ "aggregateOperator": "sum_rate",
+ "dataSource": "metrics",
+ "disabled": false,
+ "expression": "A",
+ "filters": {
+ "items": [
+ {
+ "id": "53d189ac",
+ "key": {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ },
+ "op": "in",
+ "value": [
+ "{{.host_name}}"
+ ]
+ }
+ ],
+ "op": "AND"
+ },
+ "groupBy": [
+ {
+ "dataType": "string",
+ "id": "host_name--string--tag--false",
+ "isColumn": false,
+ "isJSON": false,
+ "key": "host_name",
+ "type": "tag"
+ }
+ ],
+ "having": [],
+ "legend": "Rss::{{host_name}}",
+ "limit": null,
+ "orderBy": [],
+ "queryName": "A",
+ "reduceTo": "sum",
+ "stepInterval": 60
+ }
+ ],
+ "queryFormulas": []
+ },
+ "clickhouse_sql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "id": "15d1d9d7-eb10-464b-aa7b-33ff211996f7",
+ "promql": [
+ {
+ "disabled": false,
+ "legend": "",
+ "name": "A",
+ "query": ""
+ }
+ ],
+ "queryType": "builder"
+ },
+ "softMax": null,
+ "softMin": null,
+ "thresholds": [],
+ "timePreferance": "GLOBAL_TIME",
+ "title": "Eviction rate",
+ "yAxisUnit": "short"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json b/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json
new file mode 100644
index 0000000000..d06760e0b8
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/assets/pipelines/log-parser.json
@@ -0,0 +1,33 @@
+{
+ "id": "parse-default-redis-access-log",
+ "name": "Parse default redis access log",
+ "alias": "parse-default-redis-access-log",
+ "description": "Parse standard redis access log",
+ "enabled": true,
+ "filter": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "redis"
+ }
+ ]
+ },
+ "config": [
+ {
+ "type": "grok_parser",
+ "id": "parse-body-grok",
+ "enabled": true,
+ "orderId": 1,
+ "name": "Parse Body",
+ "parse_to": "attributes",
+ "pattern": "%{GREEDYDATA}",
+ "parse_from": "body"
+ }
+ ]
+}
\ No newline at end of file
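A minimal sketch (not part of the diff) of how a pipeline asset like the one above can be decoded into the `logparsingpipeline.PostablePipeline` shape used elsewhere in this change. It assumes the asset's JSON keys ("name", "alias", "filter", "config", ...) line up with that struct's JSON tags, which this hunk does not show.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"

	"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
)

func main() {
	// Read the built-in integration's pipeline asset from disk.
	raw, err := os.ReadFile("assets/pipelines/log-parser.json")
	if err != nil {
		panic(err)
	}

	// Assumption: the asset's keys map onto PostablePipeline's JSON tags.
	var p logparsingpipeline.PostablePipeline
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	fmt.Printf("pipeline %q (alias %q): %d operator(s), enabled=%v\n",
		p.Name, p.Alias, len(p.Config), p.Enabled)
}
```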
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/config/configure-otel-collector.md b/pkg/query-service/app/integrations/builtin_integrations/redis/config/configure-otel-collector.md
new file mode 100644
index 0000000000..8dd52dd07e
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/config/configure-otel-collector.md
@@ -0,0 +1,63 @@
+### Configure OTel Collector
+
+#### Save collector config file
+
+Save the following collector config in a file named `redis-collector-config.yaml`
+
+```yaml
+receivers:
+ redis:
+ # The hostname and port of the Redis instance, separated by a colon.
+ endpoint: "localhost:6379"
+ # The frequency at which to collect metrics from the Redis instance.
+ collection_interval: 60s
+ # # The password used to access the Redis instance; must match the password specified in the requirepass server configuration option.
+ # password: ${env:REDIS_PASSWORD}
+ # # Defines the network to use for connecting to the server. Valid values are `tcp` or `unix`
+ # transport: tcp
+ # tls:
+ # insecure: false
+ # ca_file: /etc/ssl/certs/ca-certificates.crt
+ # cert_file: /etc/ssl/certs/redis.crt
+ # key_file: /etc/ssl/certs/redis.key
+ metrics:
+ redis.maxmemory:
+ enabled: true
+ redis.cmd.latency:
+ enabled: true
+
+processors:
+ # enriches the data with additional host information
+ # see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor
+ resourcedetection/system:
+ # add additional detectors if needed
+ detectors: ["system"]
+ system:
+ hostname_sources: ["os"]
+
+exporters:
+ # export to local collector
+ otlp/local:
+ endpoint: "localhost:4317"
+ tls:
+ insecure: true
+ # export to SigNoz cloud
+ otlp/signoz:
+ endpoint: "ingest.{region}.signoz.cloud:443"
+ tls:
+ insecure: false
+ headers:
+ "signoz-access-token": ""
+
+service:
+ pipelines:
+ metrics/redis:
+ receivers: [redis]
+ # note: remove this processor if the collector is not running on the same host as the redis instance
+ processors: [resourcedetection/system]
+ exporters: [otlp/local]
+```
+
+#### Use collector config file
+
+Run your collector with the added flag `--config redis-collector-config.yaml`
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/config/prerequisites.md b/pkg/query-service/app/integrations/builtin_integrations/redis/config/prerequisites.md
new file mode 100644
index 0000000000..4e98933b69
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/config/prerequisites.md
@@ -0,0 +1,5 @@
+### Prepare Redis for monitoring
+
+- Have a running Redis instance
+- Have the monitoring user created
+- Have the monitoring user granted the necessary permissions
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/icon.svg b/pkg/query-service/app/integrations/builtin_integrations/redis/icon.svg
new file mode 100644
index 0000000000..63e5dfd2e4
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/icon.svg
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json
new file mode 100644
index 0000000000..862a98b306
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/integration.json
@@ -0,0 +1,88 @@
+{
+ "id": "redis",
+ "title": "Redis",
+ "description": "Monitor redis using logs and metrics.",
+ "author": {
+ "name": "SigNoz",
+ "email": "integrations@signoz.io",
+ "homepage": "https://signoz.io"
+ },
+ "icon": "file://icon.svg",
+ "categories": [
+ "Database"
+ ],
+ "overview": "file://overview.md",
+ "configuration": [
+ {
+ "title": "Prerequisites",
+ "instructions": "file://config/prerequisites.md"
+ },
+ {
+ "title": "Configure Otel Collector",
+ "instructions": "file://config/configure-otel-collector.md"
+ }
+ ],
+ "assets": {
+ "logs": {
+ "pipelines": [
+ "file://assets/pipelines/log-parser.json"
+ ]
+ },
+ "dashboards": [
+ "file://assets/dashboards/overview.json"
+ ],
+ "alerts": []
+ },
+ "connection_tests": {
+ "logs": {
+ "op": "AND",
+ "items": [
+ {
+ "key": {
+ "type": "tag",
+ "key": "source",
+ "dataType": "string"
+ },
+ "op": "=",
+ "value": "redis"
+ }
+ ]
+ }
+ },
+ "data_collected": {
+ "logs": [
+ {
+ "name": "Request Method",
+ "path": "attributes[\"http.request.method\"]",
+ "type": "string",
+ "description": "HTTP method"
+ },
+ {
+ "name": "Request Path",
+ "path": "attributes[\"url.path\"]",
+ "type": "string",
+ "description": "path requested"
+ },
+ {
+ "name": "Response Status Code",
+ "path": "attributes[\"http.response.status_code\"]",
+ "type": "int",
+ "description": "HTTP response code"
+ }
+ ],
+ "metrics": [
+ {
+ "name": "http.server.request.duration",
+ "type": "Histogram",
+ "unit": "s",
+ "description": "Duration of HTTP server requests"
+ },
+ {
+ "name": "http.server.active_requests",
+ "type": "UpDownCounter",
+ "unit": "{ request }",
+ "description": "Number of active HTTP server requests"
+ }
+ ]
+ }
+}
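The manifest above points at its supporting files through `file://` URIs (icon, overview, config instructions, dashboards, pipelines). Below is a small sketch of how such references could be inlined when a built-in integration is loaded; `readFileUri` is a hypothetical helper, not the repo's actual loader, but the test that follows expects the resolved `Overview` to no longer carry the `file://` prefix.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// readFileUri is a hypothetical helper: it inlines the contents of a
// "file://" reference relative to the integration's directory and returns
// any other value untouched.
func readFileUri(integrationDir string, value string) (string, error) {
	path, isFileUri := strings.CutPrefix(value, "file://")
	if !isFileUri {
		return value, nil
	}
	content, err := os.ReadFile(integrationDir + "/" + path)
	if err != nil {
		return "", fmt.Errorf("couldn't read %s: %w", path, err)
	}
	return string(content), nil
}

func main() {
	overview, err := readFileUri("builtin_integrations/redis", "file://overview.md")
	if err != nil {
		panic(err)
	}
	// Mirrors the expectation in builtin_test.go: resolved fields no longer
	// start with the "file://" prefix.
	fmt.Println(strings.HasPrefix(overview, "file://")) // false
}
```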
diff --git a/pkg/query-service/app/integrations/builtin_integrations/redis/overview.md b/pkg/query-service/app/integrations/builtin_integrations/redis/overview.md
new file mode 100644
index 0000000000..60ce2337b6
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_integrations/redis/overview.md
@@ -0,0 +1,3 @@
+### Monitor Redis with SigNoz
+
+Parse your Redis logs and collect key metrics.
diff --git a/pkg/query-service/app/integrations/builtin_test.go b/pkg/query-service/app/integrations/builtin_test.go
new file mode 100644
index 0000000000..cb72d5dcba
--- /dev/null
+++ b/pkg/query-service/app/integrations/builtin_test.go
@@ -0,0 +1,32 @@
+package integrations
+
+import (
+ "context"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestBuiltinIntegrations(t *testing.T) {
+ require := require.New(t)
+
+ repo := BuiltInIntegrations{}
+
+ builtins, apiErr := repo.list(context.Background())
+ require.Nil(apiErr)
+ require.Greater(
+ len(builtins), 0,
+ "some built in integrations are expected to be bundled.",
+ )
+
+ nginxIntegrationId := "builtin-nginx"
+ res, apiErr := repo.get(context.Background(), []string{
+ nginxIntegrationId,
+ })
+ require.Nil(apiErr)
+
+ nginxIntegration, exists := res[nginxIntegrationId]
+ require.True(exists)
+ require.False(strings.HasPrefix(nginxIntegration.Overview, "file://"))
+}
diff --git a/pkg/query-service/app/integrations/controller.go b/pkg/query-service/app/integrations/controller.go
new file mode 100644
index 0000000000..a45ab3fb04
--- /dev/null
+++ b/pkg/query-service/app/integrations/controller.go
@@ -0,0 +1,129 @@
+package integrations
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/jmoiron/sqlx"
+ "go.signoz.io/signoz/pkg/query-service/agentConf"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
+ "go.signoz.io/signoz/pkg/query-service/model"
+)
+
+type Controller struct {
+ mgr *Manager
+}
+
+func NewController(db *sqlx.DB) (
+ *Controller, error,
+) {
+ mgr, err := NewManager(db)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
+ }
+
+ return &Controller{
+ mgr: mgr,
+ }, nil
+}
+
+type IntegrationsListResponse struct {
+ Integrations []IntegrationsListItem `json:"integrations"`
+
+ // Pagination details to come later
+}
+
+func (c *Controller) ListIntegrations(
+ ctx context.Context, params map[string]string,
+) (
+ *IntegrationsListResponse, *model.ApiError,
+) {
+ var filters *IntegrationsFilter
+ if isInstalledFilter, exists := params["is_installed"]; exists {
+ isInstalled := isInstalledFilter != "false"
+ filters = &IntegrationsFilter{
+ IsInstalled: &isInstalled,
+ }
+ }
+
+ integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ return &IntegrationsListResponse{
+ Integrations: integrations,
+ }, nil
+}
+
+func (c *Controller) GetIntegration(
+ ctx context.Context, integrationId string,
+) (*Integration, *model.ApiError) {
+ return c.mgr.GetIntegration(ctx, integrationId)
+}
+
+func (c *Controller) GetIntegrationConnectionTests(
+ ctx context.Context, integrationId string,
+) (*IntegrationConnectionTests, *model.ApiError) {
+ return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
+}
+
+type InstallIntegrationRequest struct {
+ IntegrationId string `json:"integration_id"`
+ Config map[string]interface{} `json:"config"`
+}
+
+func (c *Controller) Install(
+ ctx context.Context, req *InstallIntegrationRequest,
+) (*IntegrationsListItem, *model.ApiError) {
+ res, apiErr := c.mgr.InstallIntegration(
+ ctx, req.IntegrationId, req.Config,
+ )
+ if apiErr != nil {
+ return nil, apiErr
+ }
+ agentConf.NotifyConfigUpdate(ctx)
+ return res, nil
+}
+
+type UninstallIntegrationRequest struct {
+ IntegrationId string `json:"integration_id"`
+}
+
+func (c *Controller) Uninstall(
+ ctx context.Context, req *UninstallIntegrationRequest,
+) *model.ApiError {
+ if len(req.IntegrationId) < 1 {
+ return model.BadRequest(fmt.Errorf(
+ "integration_id is required.",
+ ))
+ }
+
+ apiErr := c.mgr.UninstallIntegration(
+ ctx, req.IntegrationId,
+ )
+ if apiErr != nil {
+ return apiErr
+ }
+ agentConf.NotifyConfigUpdate(ctx)
+ return nil
+}
+
+func (c *Controller) GetPipelinesForInstalledIntegrations(
+ ctx context.Context,
+) ([]logparsingpipeline.Pipeline, *model.ApiError) {
+ return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
+}
+
+func (c *Controller) GetDashboardsForInstalledIntegrations(
+ ctx context.Context,
+) ([]dashboards.Dashboard, *model.ApiError) {
+ return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
+}
+
+func (c *Controller) GetInstalledIntegrationDashboardById(
+ ctx context.Context, dashboardUuid string,
+) (*dashboards.Dashboard, *model.ApiError) {
+ return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
+}
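The controller exposes plain methods and leaves transport concerns to the API layer, which is not part of this hunk. A hedged sketch of how an HTTP handler might decode an `InstallIntegrationRequest` and call `Install`; the handler name and error mapping here are illustrative only.

```go
package api

import (
	"encoding/json"
	"net/http"

	"go.signoz.io/signoz/pkg/query-service/app/integrations"
)

// InstallIntegrationHandler is an illustrative wrapper; the real routes live
// in the query-service HTTP API, which this diff does not show.
func InstallIntegrationHandler(c *integrations.Controller) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var req integrations.InstallIntegrationRequest
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			http.Error(w, "invalid request body", http.StatusBadRequest)
			return
		}

		installed, apiErr := c.Install(r.Context(), &req)
		if apiErr != nil {
			// Install returns *model.ApiError; collapse it to a generic 500 here.
			http.Error(w, "failed to install integration", http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(installed)
	}
}
```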
diff --git a/pkg/query-service/app/integrations/manager.go b/pkg/query-service/app/integrations/manager.go
index 3caf352172..110d370c1b 100644
--- a/pkg/query-service/app/integrations/manager.go
+++ b/pkg/query-service/app/integrations/manager.go
@@ -4,53 +4,110 @@ import (
"context"
"fmt"
"slices"
+ "strings"
"time"
+ "github.com/google/uuid"
+ "github.com/jmoiron/sqlx"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/model"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/utils"
)
type IntegrationAuthor struct {
- Name string
- Email string
- HomePage string
+ Name string `json:"name"`
+ Email string `json:"email"`
+ HomePage string `json:"homepage"`
}
type IntegrationSummary struct {
- Id string
- Title string
- Description string // A short description
+ Id string `json:"id"`
+ Title string `json:"title"`
+ Description string `json:"description"` // A short description
- Author IntegrationAuthor
+ Author IntegrationAuthor `json:"author"`
+
+ Icon string `json:"icon"`
}
type IntegrationAssets struct {
- // Each integration is expected to specify all log transformations
- // in a single pipeline with a source based filter
- LogPipeline *logparsingpipeline.PostablePipeline
+ Logs LogsAssets `json:"logs"`
+ Dashboards []dashboards.Data `json:"dashboards"`
- // TBD: Dashboards, alerts, saved views, facets (indexed attribs)...
+ Alerts []rules.PostableRule `json:"alerts"`
+}
+
+type LogsAssets struct {
+ Pipelines []logparsingpipeline.PostablePipeline `json:"pipelines"`
+}
+
+type IntegrationConfigStep struct {
+ Title string `json:"title"`
+ Instructions string `json:"instructions"`
+}
+
+type DataCollectedForIntegration struct {
+ Logs []CollectedLogAttribute `json:"logs"`
+ Metrics []CollectedMetric `json:"metrics"`
+}
+
+type CollectedLogAttribute struct {
+ Name string `json:"name"`
+ Path string `json:"path"`
+ Type string `json:"type"`
+}
+
+type CollectedMetric struct {
+ Name string `json:"name"`
+ Type string `json:"type"`
+ Unit string `json:"unit"`
+}
+
+type SignalConnectionStatus struct {
+ LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
+ LastReceivedFrom string `json:"last_received_from"` // resource identifier
+}
+
+type IntegrationConnectionStatus struct {
+ Logs *SignalConnectionStatus `json:"logs"`
+ Metrics *SignalConnectionStatus `json:"metrics"`
+}
+
+type IntegrationConnectionTests struct {
+ Logs *v3.FilterSet `json:"logs"`
+
+ // TODO(Raj): Add connection tests for other signals.
}
type IntegrationDetails struct {
IntegrationSummary
- IntegrationAssets
+
+ Categories []string `json:"categories"`
+ Overview string `json:"overview"` // markdown
+ Configuration []IntegrationConfigStep `json:"configuration"`
+ DataCollected DataCollectedForIntegration `json:"data_collected"`
+ Assets IntegrationAssets `json:"assets"`
+
+ ConnectionTests *IntegrationConnectionTests `json:"connection_tests"`
}
type IntegrationsListItem struct {
IntegrationSummary
- IsInstalled bool
+ IsInstalled bool `json:"is_installed"`
}
type InstalledIntegration struct {
- IntegrationId string `db:"integration_id"`
- Config InstalledIntegrationConfig `db:"config_json"`
- InstalledAt time.Time `db:"installed_at"`
+ IntegrationId string `json:"integration_id" db:"integration_id"`
+ Config InstalledIntegrationConfig `json:"config_json" db:"config_json"`
+ InstalledAt time.Time `json:"installed_at" db:"installed_at"`
}
type InstalledIntegrationConfig map[string]interface{}
type Integration struct {
IntegrationDetails
- Installation *InstalledIntegration
+ Installation *InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -58,6 +115,20 @@ type Manager struct {
installedIntegrationsRepo InstalledIntegrationsRepo
}
+func NewManager(db *sqlx.DB) (*Manager, error) {
+ iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "could not init sqlite DB for installed integrations: %w", err,
+ )
+ }
+
+ return &Manager{
+ availableIntegrationsRepo: &BuiltInIntegrations{},
+ installedIntegrationsRepo: iiRepo,
+ }, nil
+}
+
type IntegrationsFilter struct {
IsInstalled *bool
}
@@ -132,6 +203,19 @@ func (m *Manager) GetIntegration(
}, nil
}
+func (m *Manager) GetIntegrationConnectionTests(
+ ctx context.Context,
+ integrationId string,
+) (*IntegrationConnectionTests, *model.ApiError) {
+ integrationDetails, apiErr := m.getIntegrationDetails(
+ ctx, integrationId,
+ )
+ if apiErr != nil {
+ return nil, apiErr
+ }
+ return integrationDetails.ConnectionTests, nil
+}
+
func (m *Manager) InstallIntegration(
ctx context.Context,
integrationId string,
@@ -164,11 +248,131 @@ func (m *Manager) UninstallIntegration(
return m.installedIntegrationsRepo.delete(ctx, integrationId)
}
+func (m *Manager) GetPipelinesForInstalledIntegrations(
+ ctx context.Context,
+) ([]logparsingpipeline.Pipeline, *model.ApiError) {
+ installedIntegrations, apiErr := m.getDetailsForInstalledIntegrations(ctx)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ pipelines := []logparsingpipeline.Pipeline{}
+ for _, ii := range installedIntegrations {
+ for _, p := range ii.Assets.Logs.Pipelines {
+ pp := logparsingpipeline.Pipeline{
+ // Alias is used for identifying integration pipelines. Id can't be used for this
+ // since versioning while saving pipelines requires a new id for each version
+ // to avoid altering history when pipelines are edited/reordered etc
+ Alias: AliasForIntegrationPipeline(ii.Id, p.Alias),
+ Id: uuid.NewString(),
+ OrderId: p.OrderId,
+ Enabled: p.Enabled,
+ Name: p.Name,
+ Description: &p.Description,
+ Filter: p.Filter,
+ Config: p.Config,
+ }
+ pipelines = append(pipelines, pp)
+ }
+ }
+
+ return pipelines, nil
+}
+
+func (m *Manager) dashboardUuid(integrationId string, dashboardId string) string {
+ return strings.Join([]string{"integration", integrationId, dashboardId}, "--")
+}
+
+func (m *Manager) parseDashboardUuid(dashboardUuid string) (
+ integrationId string, dashboardId string, err *model.ApiError,
+) {
+ parts := strings.SplitN(dashboardUuid, "--", 3)
+ if len(parts) != 3 || parts[0] != "integration" {
+ return "", "", model.BadRequest(fmt.Errorf(
+ "invalid installed integration dashboard id",
+ ))
+ }
+
+ return parts[1], parts[2], nil
+}
+
+func (m *Manager) GetInstalledIntegrationDashboardById(
+ ctx context.Context,
+ dashboardUuid string,
+) (*dashboards.Dashboard, *model.ApiError) {
+ integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ integration, apiErr := m.GetIntegration(ctx, integrationId)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ if integration.Installation == nil {
+ return nil, model.BadRequest(fmt.Errorf(
+ "integration with id %s is not installed", integrationId,
+ ))
+ }
+
+ for _, dd := range integration.IntegrationDetails.Assets.Dashboards {
+ if dId, exists := dd["id"]; exists {
+ if id, ok := dId.(string); ok && id == dashboardId {
+ isLocked := 1
+ return &dashboards.Dashboard{
+ Uuid: m.dashboardUuid(integrationId, dashboardId),
+ Locked: &isLocked,
+ Data: dd,
+ }, nil
+ }
+ }
+ }
+
+ return nil, model.NotFoundError(fmt.Errorf(
+ "integration dashboard with id %s not found", dashboardUuid,
+ ))
+}
+
+func (m *Manager) GetDashboardsForInstalledIntegrations(
+ ctx context.Context,
+) ([]dashboards.Dashboard, *model.ApiError) {
+ installedIntegrations, apiErr := m.getDetailsForInstalledIntegrations(ctx)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ result := []dashboards.Dashboard{}
+
+ for _, ii := range installedIntegrations {
+ for _, dd := range ii.Assets.Dashboards {
+ if dId, exists := dd["id"]; exists {
+ if dashboardId, ok := dId.(string); ok {
+ isLocked := 1
+ result = append(result, dashboards.Dashboard{
+ Uuid: m.dashboardUuid(ii.IntegrationSummary.Id, dashboardId),
+ Locked: &isLocked,
+ Data: dd,
+ })
+ }
+ }
+ }
+ }
+
+ return result, nil
+}
+
// Helpers.
func (m *Manager) getIntegrationDetails(
ctx context.Context,
integrationId string,
) (*IntegrationDetails, *model.ApiError) {
+ if len(strings.TrimSpace(integrationId)) < 1 {
+ return nil, model.BadRequest(fmt.Errorf(
+ "integrationId is required",
+ ))
+ }
+
ais, apiErr := m.availableIntegrationsRepo.get(
ctx, []string{integrationId},
)
@@ -206,3 +410,19 @@ func (m *Manager) getInstalledIntegration(
}
return &installation, nil
}
+
+func (m *Manager) getDetailsForInstalledIntegrations(
+ ctx context.Context,
+) (
+ map[string]IntegrationDetails, *model.ApiError,
+) {
+ installations, apiErr := m.installedIntegrationsRepo.list(ctx)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
+ return i.IntegrationId
+ })
+ return m.availableIntegrationsRepo.get(ctx, installedIds)
+}
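Installed-integration dashboards get synthetic UUIDs of the form `integration--<integrationId>--<dashboardId>`. A standalone sketch of the round trip, reproducing the join/split logic from `dashboardUuid` and `parseDashboardUuid` above.

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors Manager.dashboardUuid / parseDashboardUuid from the diff:
// the synthetic uuid is "integration--<integrationId>--<dashboardId>".
func dashboardUuid(integrationId, dashboardId string) string {
	return strings.Join([]string{"integration", integrationId, dashboardId}, "--")
}

func parseDashboardUuid(uuid string) (integrationId, dashboardId string, err error) {
	parts := strings.SplitN(uuid, "--", 3)
	if len(parts) != 3 || parts[0] != "integration" {
		return "", "", fmt.Errorf("invalid installed integration dashboard id")
	}
	return parts[1], parts[2], nil
}

func main() {
	uuid := dashboardUuid("redis", "2f47df76-f09e-4152-8623-971f0fe66bfe")
	fmt.Println(uuid) // integration--redis--2f47df76-f09e-4152-8623-971f0fe66bfe

	integrationId, dashboardId, err := parseDashboardUuid(uuid)
	fmt.Println(integrationId, dashboardId, err) // redis 2f47df76-... <nil>
}
```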
diff --git a/pkg/query-service/app/integrations/pipeline_utils.go b/pkg/query-service/app/integrations/pipeline_utils.go
new file mode 100644
index 0000000000..49ab5dd82a
--- /dev/null
+++ b/pkg/query-service/app/integrations/pipeline_utils.go
@@ -0,0 +1,33 @@
+package integrations
+
+import (
+ "strings"
+
+ "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+)
+
+const IntegrationPipelineIdSeparator string = "--"
+
+func AliasForIntegrationPipeline(
+ integrationId string, pipelineName string,
+) string {
+ return strings.Join(
+ []string{constants.IntegrationPipelineIdPrefix, integrationId, pipelineName},
+ IntegrationPipelineIdSeparator,
+ )
+}
+
+// Returns ptr to integration_id string if `p` is a pipeline for an installed integration.
+// Returns nil otherwise.
+func IntegrationIdForPipeline(p logparsingpipeline.Pipeline) *string {
+ if strings.HasPrefix(p.Alias, constants.IntegrationPipelineIdPrefix) {
+ parts := strings.Split(p.Alias, IntegrationPipelineIdSeparator)
+ if len(parts) < 2 {
+ return nil
+ }
+ integrationId := parts[1]
+ return &integrationId
+ }
+ return nil
+}
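Pipelines belonging to installed integrations are identified by an alias prefix rather than their id. A sketch of the alias round trip; the actual prefix value lives in the constants package and is not shown in this diff, so `"signoz-integration"` below is only a stand-in.

```go
package main

import (
	"fmt"
	"strings"
)

// Assumption: the real prefix is constants.IntegrationPipelineIdPrefix, whose
// value is not visible in this hunk; "signoz-integration" is illustrative.
const integrationPipelineIdPrefix = "signoz-integration"
const integrationPipelineIdSeparator = "--"

func aliasForIntegrationPipeline(integrationId, pipelineName string) string {
	return strings.Join(
		[]string{integrationPipelineIdPrefix, integrationId, pipelineName},
		integrationPipelineIdSeparator,
	)
}

func integrationIdForAlias(alias string) *string {
	if !strings.HasPrefix(alias, integrationPipelineIdPrefix) {
		return nil
	}
	parts := strings.Split(alias, integrationPipelineIdSeparator)
	if len(parts) < 2 {
		return nil
	}
	return &parts[1]
}

func main() {
	alias := aliasForIntegrationPipeline("redis", "parse-default-redis-access-log")
	fmt.Println(alias)

	if id := integrationIdForAlias(alias); id != nil {
		fmt.Println("integration id:", *id) // redis
	}
}
```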
diff --git a/pkg/query-service/app/integrations/sqlite_repo.go b/pkg/query-service/app/integrations/sqlite_repo.go
index 94e9c4d51d..2c3e9fc699 100644
--- a/pkg/query-service/app/integrations/sqlite_repo.go
+++ b/pkg/query-service/app/integrations/sqlite_repo.go
@@ -62,6 +62,7 @@ func (r *InstalledIntegrationsSqliteRepo) list(
config_json,
installed_at
from integrations_installed
+ order by installed_at
`,
)
if err != nil {
diff --git a/pkg/query-service/app/integrations/test_utils.go b/pkg/query-service/app/integrations/test_utils.go
index 6dcb9ec355..1ff964b3e6 100644
--- a/pkg/query-service/app/integrations/test_utils.go
+++ b/pkg/query-service/app/integrations/test_utils.go
@@ -2,37 +2,19 @@ package integrations
import (
"context"
- "os"
"slices"
"testing"
- "github.com/jmoiron/sqlx"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/rules"
+ "go.signoz.io/signoz/pkg/query-service/utils"
)
-func NewTestSqliteDB(t *testing.T) (
- db *sqlx.DB, dbFilePath string,
-) {
- testDBFile, err := os.CreateTemp("", "test-signoz-db-*")
- if err != nil {
- t.Fatalf("could not create temp file for test db: %v", err)
- }
- testDBFilePath := testDBFile.Name()
- t.Cleanup(func() { os.Remove(testDBFilePath) })
- testDBFile.Close()
-
- testDB, err := sqlx.Open("sqlite3", testDBFilePath)
- if err != nil {
- t.Fatalf("could not open test db sqlite file: %v", err)
- }
-
- return testDB, testDBFilePath
-}
-
func NewTestIntegrationsManager(t *testing.T) *Manager {
- testDB, _ := NewTestSqliteDB(t)
+ testDB := utils.NewQueryServiceDBForTests(t)
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB)
if err != nil {
@@ -61,35 +43,70 @@ func (t *TestAvailableIntegrationsRepo) list(
Email: "integrations@signoz.io",
HomePage: "https://signoz.io",
},
+ Icon: `data:image/svg+xml;utf8, ... `,
},
- IntegrationAssets: IntegrationAssets{
- LogPipeline: &logparsingpipeline.PostablePipeline{
- Name: "pipeline1",
- Alias: "pipeline1",
- Enabled: true,
- Filter: &v3.FilterSet{
- Operator: "AND",
- Items: []v3.FilterItem{
- {
- Key: v3.AttributeKey{
- Key: "method",
- DataType: v3.AttributeKeyDataTypeString,
- Type: v3.AttributeKeyTypeTag,
+ Categories: []string{"testcat1", "testcat2"},
+ Overview: "test integration overview",
+ Configuration: []IntegrationConfigStep{
+ {
+ Title: "Step 1",
+ Instructions: "Set source attrib on your signals",
+ },
+ },
+ DataCollected: DataCollectedForIntegration{
+ Logs: []CollectedLogAttribute{},
+ Metrics: []CollectedMetric{},
+ },
+ Assets: IntegrationAssets{
+ Logs: LogsAssets{
+ Pipelines: []logparsingpipeline.PostablePipeline{
+ {
+ Name: "pipeline1",
+ Alias: "pipeline1",
+ Enabled: true,
+ Filter: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "source",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ Operator: "=",
+ Value: "nginx",
+ },
+ },
+ },
+ Config: []logparsingpipeline.PipelineOperator{
+ {
+ OrderId: 1,
+ ID: "add",
+ Type: "add",
+ Field: "attributes.test",
+ Value: "val",
+ Enabled: true,
+ Name: "test add",
},
- Operator: "=",
- Value: "GET",
},
},
},
- Config: []logparsingpipeline.PipelineOperator{
+ },
+ Dashboards: []dashboards.Data{},
+ Alerts: []rules.PostableRule{},
+ },
+ ConnectionTests: &IntegrationConnectionTests{
+ Logs: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
{
- OrderId: 1,
- ID: "add",
- Type: "add",
- Field: "attributes.test",
- Value: "val",
- Enabled: true,
- Name: "test add",
+ Key: v3.AttributeKey{
+ Key: "source",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ Operator: "=",
+ Value: "nginx",
},
},
},
@@ -104,35 +121,70 @@ func (t *TestAvailableIntegrationsRepo) list(
Email: "integrations@signoz.io",
HomePage: "https://signoz.io",
},
+ Icon: `data:image/svg+xml;utf8, ... `,
},
- IntegrationAssets: IntegrationAssets{
- LogPipeline: &logparsingpipeline.PostablePipeline{
- Name: "pipeline2",
- Alias: "pipeline2",
- Enabled: true,
- Filter: &v3.FilterSet{
- Operator: "AND",
- Items: []v3.FilterItem{
- {
- Key: v3.AttributeKey{
- Key: "method",
- DataType: v3.AttributeKeyDataTypeString,
- Type: v3.AttributeKeyTypeTag,
+ Categories: []string{"testcat1", "testcat2"},
+ Overview: "test integration overview",
+ Configuration: []IntegrationConfigStep{
+ {
+ Title: "Step 1",
+ Instructions: "Set source attrib on your signals",
+ },
+ },
+ DataCollected: DataCollectedForIntegration{
+ Logs: []CollectedLogAttribute{},
+ Metrics: []CollectedMetric{},
+ },
+ Assets: IntegrationAssets{
+ Logs: LogsAssets{
+ Pipelines: []logparsingpipeline.PostablePipeline{
+ {
+ Name: "pipeline2",
+ Alias: "pipeline2",
+ Enabled: true,
+ Filter: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "source",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ Operator: "=",
+ Value: "redis",
+ },
+ },
+ },
+ Config: []logparsingpipeline.PipelineOperator{
+ {
+ OrderId: 1,
+ ID: "add",
+ Type: "add",
+ Field: "attributes.test",
+ Value: "val",
+ Enabled: true,
+ Name: "test add",
},
- Operator: "=",
- Value: "GET",
},
},
},
- Config: []logparsingpipeline.PipelineOperator{
+ },
+ Dashboards: []dashboards.Data{},
+ Alerts: []rules.PostableRule{},
+ },
+ ConnectionTests: &IntegrationConnectionTests{
+ Logs: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
{
- OrderId: 1,
- ID: "add",
- Type: "add",
- Field: "attributes.test",
- Value: "val",
- Enabled: true,
- Name: "test add",
+ Key: v3.AttributeKey{
+ Key: "source",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ Operator: "=",
+ Value: "nginx",
},
},
},
diff --git a/pkg/query-service/app/limit.go b/pkg/query-service/app/limit.go
index 7b6d728dd0..ab68ae5ac4 100644
--- a/pkg/query-service/app/limit.go
+++ b/pkg/query-service/app/limit.go
@@ -20,7 +20,7 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam
limit := builderQueries[result.QueryName].Limit
orderByList := builderQueries[result.QueryName].OrderBy
- if limit > 0 {
+ {
if len(orderByList) == 0 {
// If no orderBy is specified, sort by value in descending order
orderByList = []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}
diff --git a/pkg/query-service/app/logparsingpipeline/controller.go b/pkg/query-service/app/logparsingpipeline/controller.go
index eed3befec5..9527fe9e8d 100644
--- a/pkg/query-service/app/logparsingpipeline/controller.go
+++ b/pkg/query-service/app/logparsingpipeline/controller.go
@@ -4,25 +4,38 @@ import (
"context"
"encoding/json"
"fmt"
+ "slices"
+ "strings"
+ "github.com/google/uuid"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"go.signoz.io/signoz/pkg/query-service/agentConf"
"go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/model"
- "go.uber.org/multierr"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
// Controller takes care of deployment cycle of log parsing pipelines.
type LogParsingPipelineController struct {
Repo
+
+ GetIntegrationPipelines func(context.Context) ([]Pipeline, *model.ApiError)
}
-func NewLogParsingPipelinesController(db *sqlx.DB, engine string) (*LogParsingPipelineController, error) {
+func NewLogParsingPipelinesController(
+ db *sqlx.DB,
+ engine string,
+ getIntegrationPipelines func(context.Context) ([]Pipeline, *model.ApiError),
+) (*LogParsingPipelineController, error) {
repo := NewRepo(db)
err := repo.InitDB(engine)
- return &LogParsingPipelineController{Repo: repo}, err
+ return &LogParsingPipelineController{
+ Repo: repo,
+ GetIntegrationPipelines: getIntegrationPipelines,
+ }, err
}
// PipelinesResponse is used to prepare http response for pipelines config related requests
@@ -47,29 +60,22 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
var pipelines []Pipeline
// scan through postable pipelines, to select the existing pipelines or insert missing ones
- for _, r := range postable {
+ for idx, r := range postable {
// note: we process only new and changed pipelines here, deleted pipelines are not expected
// from client. if user deletes a pipelines, the client should not send that pipelines in the update.
// in effect, the new config version will not have that pipelines.
- if r.Id == "" {
- // looks like a new or changed pipeline, store it first
- inserted, err := ic.insertPipeline(ctx, &r)
- if err != nil {
- zap.S().Errorf("failed to insert edited pipeline %s", err.Error())
- return nil, model.WrapApiError(err, "failed to insert edited pipeline")
- } else {
- pipelines = append(pipelines, *inserted)
- }
- } else {
- selected, err := ic.GetPipeline(ctx, r.Id)
- if err != nil {
- zap.S().Errorf("failed to find edited pipeline %s", err.Error())
- return nil, model.WrapApiError(err, "failed to find edited pipeline")
- }
- pipelines = append(pipelines, *selected)
+ // For versioning, pipelines get stored with unique ids each time they are saved.
+ // This ensures updating a pipeline doesn't alter historical versions that referenced
+ // the same pipeline id.
+ r.Id = uuid.NewString()
+ r.OrderId = idx + 1
+ pipeline, apiErr := ic.insertPipeline(ctx, &r)
+ if apiErr != nil {
+ return nil, model.WrapApiError(apiErr, "failed to insert pipeline")
}
+ pipelines = append(pipelines, *pipeline)
}
@@ -85,34 +91,85 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
return nil, err
}
- history, _ := agentConf.GetConfigHistory(ctx, agentConf.ElementTypeLogPipelines, 10)
- insertedCfg, _ := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, cfg.Version)
+ return ic.GetPipelinesByVersion(ctx, cfg.Version)
+}
- response := &PipelinesResponse{
- ConfigVersion: insertedCfg,
- Pipelines: pipelines,
- History: history,
+// Returns the effective list of pipelines, including user-created
+// pipelines and pipelines for installed integrations.
+func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
+ ctx context.Context, version int,
+) ([]Pipeline, *model.ApiError) {
+ result := []Pipeline{}
+
+ if version >= 0 {
+ savedPipelines, errors := ic.getPipelinesByVersion(ctx, version)
+ if errors != nil {
+ zap.S().Errorf("failed to get pipelines for version %d, %v", version, errors)
+ return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version"))
+ }
+ result = savedPipelines
}
- if err != nil {
- return response, model.WrapApiError(err, "failed to apply pipelines")
+ integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx)
+ if apiErr != nil {
+ return nil, model.WrapApiError(
+ apiErr, "could not get pipelines for installed integrations",
+ )
}
- return response, nil
+
+ // Filter out any integration pipelines included in pipelines saved by user
+ // if the corresponding integration is no longer installed.
+ ipAliases := utils.MapSlice(integrationPipelines, func(p Pipeline) string {
+ return p.Alias
+ })
+ result = utils.FilterSlice(result, func(p Pipeline) bool {
+ if !strings.HasPrefix(p.Alias, constants.IntegrationPipelineIdPrefix) {
+ return true
+ }
+ return slices.Contains(ipAliases, p.Alias)
+ })
+
+ // Add installed integration pipelines to the list of pipelines saved by user.
+ // Users are allowed to enable/disable and reorder integration pipelines while
+ // saving the pipeline list.
+ for _, ip := range integrationPipelines {
+ userPipelineIdx := slices.IndexFunc(result, func(p Pipeline) bool {
+ return p.Alias == ip.Alias
+ })
+ if userPipelineIdx >= 0 {
+ ip.Enabled = result[userPipelineIdx].Enabled
+ result[userPipelineIdx] = ip
+ } else {
+ // installed integration pipelines get added to the end of the list by default.
+ result = append(result, ip)
+ }
+ }
+
+ for idx := range result {
+ result[idx].OrderId = idx + 1
+ }
+
+ return result, nil
}
// GetPipelinesByVersion responds with version info and associated pipelines
func (ic *LogParsingPipelineController) GetPipelinesByVersion(
ctx context.Context, version int,
) (*PipelinesResponse, *model.ApiError) {
- pipelines, errors := ic.getPipelinesByVersion(ctx, version)
+ pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, version)
if errors != nil {
zap.S().Errorf("failed to get pipelines for version %d, %w", version, errors)
return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version"))
}
- configVersion, err := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, version)
- if err != nil {
- zap.S().Errorf("failed to get config for version %d, %s", version, err.Error())
- return nil, model.WrapApiError(err, "failed to get config for given version")
+
+ var configVersion *agentConf.ConfigVersion
+ if version >= 0 {
+ cv, err := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, version)
+ if err != nil {
+ zap.S().Errorf("failed to get config for version %d, %s", version, err.Error())
+ return nil, model.WrapApiError(err, "failed to get config for given version")
+ }
+ configVersion = cv
}
return &PipelinesResponse{
@@ -163,26 +220,29 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
serializedSettingsUsed string,
apiErr *model.ApiError,
) {
+ pipelinesVersion := -1
+ if configVersion != nil {
+ pipelinesVersion = configVersion.Version
+ }
- pipelines, errs := pc.getPipelinesByVersion(
- context.Background(), configVersion.Version,
+ pipelinesResp, apiErr := pc.GetPipelinesByVersion(
+ context.Background(), pipelinesVersion,
)
- if len(errs) > 0 {
- return nil, "", model.InternalError(multierr.Combine(errs...))
+ if apiErr != nil {
+ return nil, "", apiErr
}
updatedConf, apiErr := GenerateCollectorConfigWithPipelines(
- currentConfYaml, pipelines,
+ currentConfYaml, pipelinesResp.Pipelines,
)
if apiErr != nil {
return nil, "", model.WrapApiError(apiErr, "could not marshal yaml for updated conf")
}
- rawPipelineData, err := json.Marshal(pipelines)
+ rawPipelineData, err := json.Marshal(pipelinesResp.Pipelines)
if err != nil {
return nil, "", model.BadRequest(errors.Wrap(err, "could not serialize pipelines to JSON"))
}
return updatedConf, string(rawPipelineData), nil
-
}
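The merge in `getEffectivePipelinesByVersion` drops saved entries whose integration has since been uninstalled, lets the user's saved list control an integration pipeline's enabled state and position, appends newly installed integration pipelines at the end, and renumbers order ids. A standalone sketch of that merge over a simplified pipeline type (the prefix constant is again a stand-in).

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// simplified stand-in for logparsingpipeline.Pipeline
type pipeline struct {
	Alias   string
	Enabled bool
	OrderId int
}

const integrationPrefix = "signoz-integration" // stand-in for the real constant

// mergeEffective mirrors the merge performed in getEffectivePipelinesByVersion.
func mergeEffective(saved, integration []pipeline) []pipeline {
	ipAliases := make([]string, 0, len(integration))
	for _, ip := range integration {
		ipAliases = append(ipAliases, ip.Alias)
	}

	// Drop saved integration pipelines whose integration is no longer installed.
	result := []pipeline{}
	for _, p := range saved {
		if !strings.HasPrefix(p.Alias, integrationPrefix) || slices.Contains(ipAliases, p.Alias) {
			result = append(result, p)
		}
	}

	// Installed integration pipelines take their enabled flag and position from
	// the user's saved list when present; otherwise they go to the end.
	for _, ip := range integration {
		idx := slices.IndexFunc(result, func(p pipeline) bool { return p.Alias == ip.Alias })
		if idx >= 0 {
			ip.Enabled = result[idx].Enabled
			result[idx] = ip
		} else {
			result = append(result, ip)
		}
	}

	// Renumber order ids so the effective list is contiguous.
	for i := range result {
		result[i].OrderId = i + 1
	}
	return result
}

func main() {
	saved := []pipeline{
		{Alias: "user-pipeline", Enabled: true},
		{Alias: integrationPrefix + "--redis--parse-default-redis-access-log", Enabled: false},
		{Alias: integrationPrefix + "--nginx--old-pipeline", Enabled: true}, // nginx uninstalled
	}
	integration := []pipeline{
		{Alias: integrationPrefix + "--redis--parse-default-redis-access-log", Enabled: true},
	}
	for _, p := range mergeEffective(saved, integration) {
		fmt.Printf("%d %s enabled=%v\n", p.OrderId, p.Alias, p.Enabled)
	}
}
```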
diff --git a/pkg/query-service/app/logs/v3/enrich_query.go b/pkg/query-service/app/logs/v3/enrich_query.go
index fe8ed8b9ed..c8a5a797b2 100644
--- a/pkg/query-service/app/logs/v3/enrich_query.go
+++ b/pkg/query-service/app/logs/v3/enrich_query.go
@@ -103,6 +103,7 @@ func enrichLogsQuery(query *v3.BuilderQuery, fields map[string]v3.AttributeKey)
for i := 0; i < len(query.Filters.Items); i++ {
query.Filters.Items[i] = jsonFilterEnrich(query.Filters.Items[i])
if query.Filters.Items[i].Key.IsJSON {
+ query.Filters.Items[i] = jsonReplaceField(query.Filters.Items[i], fields)
continue
}
query.Filters.Items[i].Key = enrichFieldWithMetadata(query.Filters.Items[i].Key, fields)
@@ -181,6 +182,19 @@ func jsonFilterEnrich(filter v3.FilterItem) v3.FilterItem {
return filter
}
+func jsonReplaceField(filter v3.FilterItem, fields map[string]v3.AttributeKey) v3.FilterItem {
+ key, found := strings.CutPrefix(filter.Key.Key, "body.")
+ if !found {
+ return filter
+ }
+
+ if field, ok := fields[key]; ok && field.DataType == filter.Key.DataType {
+ filter.Key = field
+ }
+
+ return filter
+}
+
func parseStrValue(valueStr string, operator v3.FilterOperator) (string, interface{}) {
valueType := "string"
diff --git a/pkg/query-service/app/logs/v3/enrich_query_test.go b/pkg/query-service/app/logs/v3/enrich_query_test.go
index 8b831f56ef..4903139610 100644
--- a/pkg/query-service/app/logs/v3/enrich_query_test.go
+++ b/pkg/query-service/app/logs/v3/enrich_query_test.go
@@ -456,6 +456,128 @@ func TestJsonEnrich(t *testing.T) {
}
}
+func TestJsonReplaceField(t *testing.T) {
+ fields := map[string]v3.AttributeKey{
+ "method.name": {
+ Key: "method.name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ "status": {
+ Key: "status",
+ DataType: v3.AttributeKeyDataTypeInt64,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ "data.error": {
+ Key: "data.error",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ IsColumn: true,
+ },
+ }
+ var TestJsonReplaceFieldData = []struct {
+ Name string
+ Filter v3.FilterItem
+ Result v3.FilterItem
+ }{
+ {
+ Name: "key in nested json",
+ Filter: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "body.method.name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeUnspecified,
+ },
+ Operator: "has",
+ Value: "index_service",
+ },
+ Result: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "method.name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ IsJSON: false,
+ },
+ Operator: "has",
+ Value: "index_service",
+ },
+ },
+ {
+ Name: "key at top level",
+ Filter: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "body.status",
+ DataType: v3.AttributeKeyDataTypeInt64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ Result: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "status",
+ DataType: v3.AttributeKeyDataTypeInt64,
+ Type: v3.AttributeKeyTypeTag,
+ IsJSON: false,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ },
+ {
+ Name: "key not present",
+ Filter: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "body.status.code",
+ DataType: v3.AttributeKeyDataTypeInt64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ Result: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "body.status.code",
+ DataType: v3.AttributeKeyDataTypeInt64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsJSON: false,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ },
+ {
+ Name: "key materialized",
+ Filter: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "body.data.error",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeUnspecified,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ Result: v3.FilterItem{
+ Key: v3.AttributeKey{
+ Key: "data.error",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ IsJSON: false,
+ IsColumn: true,
+ },
+ Operator: "=",
+ Value: 10,
+ },
+ },
+ }
+ for _, tt := range TestJsonReplaceFieldData {
+ Convey(tt.Name, t, func() {
+ res := jsonReplaceField(tt.Filter, fields)
+ So(res, ShouldResemble, tt.Result)
+ })
+ }
+}
+
var testParseStrValueData = []struct {
Name string
Operator v3.FilterOperator
diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go
index ae9ee9b69a..c58c98c93f 100644
--- a/pkg/query-service/app/metrics/v4/query_builder.go
+++ b/pkg/query-service/app/metrics/v4/query_builder.go
@@ -21,14 +21,10 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
start, end = common.AdjustedMetricTimeRange(start, end, mq.StepInterval, mq.TimeAggregation)
- if mq.ShiftBy != 0 {
- start = start - mq.ShiftBy*1000
- end = end - mq.ShiftBy*1000
- }
-
var quantile float64
- if v3.IsPercentileOperator(mq.SpaceAggregation) {
+ if v3.IsPercentileOperator(mq.SpaceAggregation) &&
+ mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) {
quantile = v3.GetPercentileFromOperator(mq.SpaceAggregation)
// If quantile is set, we need to group by le
// and set the space aggregation to sum
@@ -81,7 +77,8 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P
groupBy := helpers.GroupByAttributeKeyTags(groupByWithoutLe...)
orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, groupByWithoutLe)
- if quantile != 0 {
+ // fixed-bucket histogram quantiles are calculated with UDF
+ if quantile != 0 && mq.AggregateAttribute.Type != v3.AttributeKeyType(v3.MetricTypeExponentialHistogram) {
query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, quantile, query, groupBy, orderBy)
}
diff --git a/pkg/query-service/app/opamp/config_provider_test.go b/pkg/query-service/app/opamp/config_provider_test.go
index 6718ff1581..1a6efe122a 100644
--- a/pkg/query-service/app/opamp/config_provider_test.go
+++ b/pkg/query-service/app/opamp/config_provider_test.go
@@ -2,7 +2,6 @@ package opamp
import (
"fmt"
- "os"
"testing"
"github.com/knadh/koanf"
@@ -13,6 +12,7 @@ import (
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/app/opamp/model"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"golang.org/x/exp/maps"
)
@@ -165,16 +165,8 @@ type testbed struct {
}
func newTestbed(t *testing.T) *testbed {
- // Init opamp model.
- testDBFile, err := os.CreateTemp("", "test-signoz-db-*")
- if err != nil {
- t.Fatalf("could not create temp file for test db: %v", err)
- }
- testDBFilePath := testDBFile.Name()
- t.Cleanup(func() { os.Remove(testDBFilePath) })
- testDBFile.Close()
-
- _, err = model.InitDB(testDBFilePath)
+ testDB := utils.NewQueryServiceDBForTests(t)
+ _, err := model.InitDB(testDB)
if err != nil {
t.Fatalf("could not init opamp model: %v", err)
}
diff --git a/pkg/query-service/app/opamp/model/agents.go b/pkg/query-service/app/opamp/model/agents.go
index 50a554b957..2e2118e216 100644
--- a/pkg/query-service/app/opamp/model/agents.go
+++ b/pkg/query-service/app/opamp/model/agents.go
@@ -29,14 +29,9 @@ func (a *Agents) Count() int {
return len(a.connections)
}
-// InitDB initializes the database and creates the agents table.
-func InitDB(dataSourceName string) (*sqlx.DB, error) {
- var err error
-
- db, err = sqlx.Open("sqlite3", dataSourceName)
- if err != nil {
- return nil, err
- }
+// InitDB initializes the agents table in the given DB, creating the schema if needed
+func InitDB(qsDB *sqlx.DB) (*sqlx.DB, error) {
+ db = qsDB
tableSchema := `CREATE TABLE IF NOT EXISTS agents (
agent_id TEXT PRIMARY KEY UNIQUE,
@@ -46,7 +41,7 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
effective_config TEXT NOT NULL
);`
- _, err = db.Exec(tableSchema)
+ _, err := db.Exec(tableSchema)
if err != nil {
return nil, fmt.Errorf("Error in creating agents table: %s", err.Error())
}
diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go
index ad2a9fd8de..9a9f388ab5 100644
--- a/pkg/query-service/app/parser.go
+++ b/pkg/query-service/app/parser.go
@@ -829,8 +829,10 @@ func parseAggregateAttributeRequest(r *http.Request) (*v3.AggregateAttributeRequ
limit = 50
}
- if err := aggregateOperator.Validate(); err != nil {
- return nil, err
+ if dataSource != v3.DataSourceMetrics {
+ if err := aggregateOperator.Validate(); err != nil {
+ return nil, err
+ }
}
if err := dataSource.Validate(); err != nil {
@@ -861,8 +863,10 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
return nil, err
}
- if err := aggregateOperator.Validate(); err != nil {
- return nil, err
+ if dataSource != v3.DataSourceMetrics {
+ if err := aggregateOperator.Validate(); err != nil {
+ return nil, err
+ }
}
req = v3.FilterAttributeKeyRequest{
@@ -894,8 +898,10 @@ func parseFilterAttributeValueRequest(r *http.Request) (*v3.FilterAttributeValue
return nil, err
}
- if err := aggregateOperator.Validate(); err != nil {
- return nil, err
+ if dataSource != v3.DataSourceMetrics {
+ if err := aggregateOperator.Validate(); err != nil {
+ return nil, err
+ }
}
req = v3.FilterAttributeValueRequest{
@@ -1019,6 +1025,25 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
}
}
+ var timeShiftBy int64
+ if len(query.Functions) > 0 {
+ for idx := range query.Functions {
+ function := &query.Functions[idx]
+ if function.Name == v3.FunctionNameTimeShift {
+ // move the function to the beginning of the list
+ // so any other function can use the shifted time
+ var fns []v3.Function
+ fns = append(fns, *function)
+ fns = append(fns, query.Functions[:idx]...)
+ fns = append(fns, query.Functions[idx+1:]...)
+ query.Functions = fns
+ timeShiftBy = int64(function.Args[0].(float64))
+ break
+ }
+ }
+ }
+ query.ShiftBy = timeShiftBy
+
if query.Filters == nil || len(query.Filters.Items) == 0 {
continue
}
@@ -1045,25 +1070,6 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
}
}
}
-
- var timeShiftBy int64
- if len(query.Functions) > 0 {
- for idx := range query.Functions {
- function := &query.Functions[idx]
- if function.Name == v3.FunctionNameTimeShift {
- // move the function to the beginning of the list
- // so any other function can use the shifted time
- var fns []v3.Function
- fns = append(fns, *function)
- fns = append(fns, query.Functions[:idx]...)
- fns = append(fns, query.Functions[idx+1:]...)
- query.Functions = fns
- timeShiftBy = int64(function.Args[0].(float64))
- break
- }
- }
- }
- query.ShiftBy = timeShiftBy
}
}
queryRangeParams.Variables = formattedVars
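The parser now hoists any timeShift function to the front of the query's function list (so later functions operate on already-shifted data) and records the shift on the query. A standalone sketch of that reorder step; the function name string here stands in for the `v3.FunctionNameTimeShift` constant, whose value is not shown in this hunk.

```go
package main

import "fmt"

type function struct {
	Name string
	Args []interface{}
}

// moveTimeShiftToFront mirrors the reorder in ParseQueryRangeParams: the
// timeShift function is hoisted to index 0 and its first argument is recorded
// as the shift (in seconds).
func moveTimeShiftToFront(fns []function) ([]function, int64) {
	var shiftBy int64
	for idx := range fns {
		if fns[idx].Name == "timeShift" { // stand-in for v3.FunctionNameTimeShift
			shifted := []function{fns[idx]}
			shifted = append(shifted, fns[:idx]...)
			shifted = append(shifted, fns[idx+1:]...)
			shiftBy = int64(fns[idx].Args[0].(float64))
			return shifted, shiftBy
		}
	}
	return fns, shiftBy
}

func main() {
	fns := []function{
		{Name: "someOtherFunction", Args: []interface{}{float64(10)}},
		{Name: "timeShift", Args: []interface{}{float64(86400)}},
	}
	reordered, shiftBy := moveTimeShiftToFront(fns)
	fmt.Println(reordered[0].Name, shiftBy) // timeShift 86400
}
```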
diff --git a/pkg/query-service/app/parser_test.go b/pkg/query-service/app/parser_test.go
index 8b172027a4..5b9e776486 100644
--- a/pkg/query-service/app/parser_test.go
+++ b/pkg/query-service/app/parser_test.go
@@ -492,23 +492,23 @@ func TestParseQueryRangeParamsCompositeQuery(t *testing.T) {
expectErr: true,
errMsg: "data source is invalid",
},
- {
- desc: "invalid aggregate operator for builder query",
- compositeQuery: v3.CompositeQuery{
- PanelType: v3.PanelTypeGraph,
- QueryType: v3.QueryTypeBuilder,
- BuilderQueries: map[string]*v3.BuilderQuery{
- "A": {
- QueryName: "A",
- DataSource: "metrics",
- AggregateOperator: "invalid",
- Expression: "A",
- },
- },
- },
- expectErr: true,
- errMsg: "aggregate operator is invalid",
- },
+ // {
+ // desc: "invalid aggregate operator for builder query",
+ // compositeQuery: v3.CompositeQuery{
+ // PanelType: v3.PanelTypeGraph,
+ // QueryType: v3.QueryTypeBuilder,
+ // BuilderQueries: map[string]*v3.BuilderQuery{
+ // "A": {
+ // QueryName: "A",
+ // DataSource: "metrics",
+ // AggregateOperator: "invalid",
+ // Expression: "A",
+ // },
+ // },
+ // },
+ // expectErr: true,
+ // errMsg: "aggregate operator is invalid",
+ // },
{
desc: "invalid aggregate attribute for builder query",
compositeQuery: v3.CompositeQuery{
diff --git a/pkg/query-service/app/querier/helper.go b/pkg/query-service/app/querier/helper.go
index addd9744e3..47f65fe007 100644
--- a/pkg/query-service/app/querier/helper.go
+++ b/pkg/query-service/app/querier/helper.go
@@ -90,11 +90,18 @@ func (q *querier) runBuilderQuery(
preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
}
+ start := params.Start
+ end := params.End
+ if builderQuery.ShiftBy != 0 {
+ start = start - builderQuery.ShiftBy*1000
+ end = end - builderQuery.ShiftBy*1000
+ }
+
if builderQuery.DataSource == v3.DataSourceLogs {
var query string
var err error
if _, ok := cacheKeys[queryName]; !ok {
- query, err = prepareLogsQuery(ctx, params.Start, params.End, builderQuery, params, preferRPM)
+ query, err = prepareLogsQuery(ctx, start, end, builderQuery, params, preferRPM)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
return
@@ -114,7 +121,7 @@ func (q *querier) runBuilderQuery(
cachedData = data
}
}
- misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ misses := q.findMissingTimeRanges(start, end, params.Step, cachedData)
missedSeries := make([]*v3.Series, 0)
cachedSeries := make([]*v3.Series, 0)
for _, miss := range misses {
@@ -152,7 +159,7 @@ func (q *querier) runBuilderQuery(
}
// response doesn't need everything
- filterCachedPoints(mergedSeries, params.Start, params.End)
+ filterCachedPoints(mergedSeries, start, end)
ch <- channelResult{
Err: nil,
@@ -181,8 +188,8 @@ func (q *querier) runBuilderQuery(
// for ts query with group by and limit form two queries
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -193,8 +200,8 @@ func (q *querier) runBuilderQuery(
return
}
placeholderQuery, err := tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -207,8 +214,8 @@ func (q *querier) runBuilderQuery(
query = fmt.Sprintf(placeholderQuery, limitQuery)
} else {
query, err = tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -229,7 +236,7 @@ func (q *querier) runBuilderQuery(
// We are only caching the graph panel queries. A non-existant cache key means that the query is not cached.
// If the query is not cached, we execute the query and return the result without caching it.
if _, ok := cacheKeys[queryName]; !ok {
- query, err := metricsV3.PrepareMetricQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
+ query, err := metricsV3.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
return
@@ -249,7 +256,7 @@ func (q *querier) runBuilderQuery(
cachedData = data
}
}
- misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ misses := q.findMissingTimeRanges(start, end, params.Step, cachedData)
missedSeries := make([]*v3.Series, 0)
cachedSeries := make([]*v3.Series, 0)
for _, miss := range misses {
@@ -298,7 +305,7 @@ func (q *querier) runBuilderQuery(
}
// response doesn't need everything
- filterCachedPoints(mergedSeries, params.Start, params.End)
+ filterCachedPoints(mergedSeries, start, end)
ch <- channelResult{
Err: nil,
Name: queryName,
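`runBuilderQuery` now shifts the queried window back by `ShiftBy` seconds (params are epoch milliseconds) and feeds the shifted bounds into the cache-miss lookup as well, so cached shifted series are reused. A tiny sketch of the window math, using the same numbers as `TestQueryRangeTimeShift` below.

```go
package main

import "fmt"

// shiftWindow mirrors the adjustment in runBuilderQuery: start/end are epoch
// milliseconds, ShiftBy is seconds, so the window moves back by shiftBy*1000.
func shiftWindow(startMs, endMs, shiftBySeconds int64) (int64, int64) {
	if shiftBySeconds != 0 {
		startMs -= shiftBySeconds * 1000
		endMs -= shiftBySeconds * 1000
	}
	return startMs, endMs
}

func main() {
	// A 2h window shifted back by 24h.
	start, end := shiftWindow(1675115596722, 1675115596722+120*60*1000, 86400)
	fmt.Println(start, end)
}
```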
diff --git a/pkg/query-service/app/querier/querier_test.go b/pkg/query-service/app/querier/querier_test.go
index 605d2f5180..37514b6f23 100644
--- a/pkg/query-service/app/querier/querier_test.go
+++ b/pkg/query-service/app/querier/querier_test.go
@@ -701,3 +701,253 @@ func TestQueryRangeValueType(t *testing.T) {
}
}
}
+
+// test timeshift
+func TestQueryRangeTimeShift(t *testing.T) {
+ params := []*v3.QueryRangeParamsV3{
+ {
+ Start: 1675115596722, //31, 3:23
+ End: 1675115596722 + 120*60*1000, //31, 5:23
+ Step: 5 * time.Minute.Milliseconds(),
+ CompositeQuery: &v3.CompositeQuery{
+ QueryType: v3.QueryTypeBuilder,
+ PanelType: v3.PanelTypeGraph,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceLogs,
+ AggregateAttribute: v3.AttributeKey{},
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ AggregateOperator: v3.AggregateOperatorCount,
+ Expression: "A",
+ ShiftBy: 86400,
+ },
+ },
+ },
+ },
+ }
+ opts := QuerierOptions{
+ Reader: nil,
+ FluxInterval: 5 * time.Minute,
+ KeyGenerator: queryBuilder.NewKeyGenerator(),
+ TestingMode: true,
+ }
+ q := NewQuerier(opts)
+ // logs queries are generated in ns
+ expectedTimeRangeInQueryString := fmt.Sprintf("timestamp >= %d AND timestamp <= %d", (1675115596722-86400*1000)*1000000, ((1675115596722+120*60*1000)-86400*1000)*1000000)
+
+ for i, param := range params {
+ _, err, errByName := q.QueryRange(context.Background(), param, nil)
+ if err != nil {
+ t.Errorf("expected no error, got %s", err)
+ }
+ if len(errByName) > 0 {
+ t.Errorf("expected no error, got %v", errByName)
+ }
+ if !strings.Contains(q.QueriesExecuted()[i], expectedTimeRangeInQueryString) {
+ t.Errorf("expected query to contain %s, got %s", expectedTimeRangeInQueryString, q.QueriesExecuted()[i])
+ }
+ }
+}
+
+// timeshift works with caching
+func TestQueryRangeTimeShiftWithCache(t *testing.T) {
+ params := []*v3.QueryRangeParamsV3{
+ {
+ Start: 1675115596722 + 60*60*1000 - 86400*1000, //30, 4:23
+ End: 1675115596722 + 120*60*1000 - 86400*1000, //30, 5:23
+ Step: 5 * time.Minute.Milliseconds(),
+ CompositeQuery: &v3.CompositeQuery{
+ QueryType: v3.QueryTypeBuilder,
+ PanelType: v3.PanelTypeGraph,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceLogs,
+ AggregateAttribute: v3.AttributeKey{},
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ AggregateOperator: v3.AggregateOperatorCount,
+ Expression: "A",
+ GroupBy: []v3.AttributeKey{
+ {Key: "service_name", IsColumn: false},
+ {Key: "method", IsColumn: false},
+ },
+ },
+ },
+ },
+ },
+ {
+ Start: 1675115596722, //31, 3:23
+ End: 1675115596722 + 120*60*1000, //31, 5:23
+ Step: 5 * time.Minute.Milliseconds(),
+ CompositeQuery: &v3.CompositeQuery{
+ QueryType: v3.QueryTypeBuilder,
+ PanelType: v3.PanelTypeGraph,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceLogs,
+ AggregateAttribute: v3.AttributeKey{},
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ AggregateOperator: v3.AggregateOperatorCount,
+ Expression: "A",
+ ShiftBy: 86400,
+ GroupBy: []v3.AttributeKey{
+ {Key: "service_name", IsColumn: false},
+ {Key: "method", IsColumn: false},
+ },
+ },
+ },
+ },
+ },
+ }
+ cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
+ opts := QuerierOptions{
+ Cache: cache,
+ Reader: nil,
+ FluxInterval: 5 * time.Minute,
+ KeyGenerator: queryBuilder.NewKeyGenerator(),
+ TestingMode: true,
+ ReturnedSeries: []*v3.Series{
+ {
+ Labels: map[string]string{},
+ Points: []v3.Point{
+ {Timestamp: 1675115596722 + 60*60*1000 - 86400*1000, Value: 1},
+ {Timestamp: 1675115596722 + 120*60*1000 - 86400*1000 + 60*60*1000, Value: 2},
+ },
+ },
+ },
+ }
+ q := NewQuerier(opts)
+
+ // logs queries are generated in ns
+ expectedTimeRangeInQueryString := []string{
+ fmt.Sprintf("timestamp >= %d AND timestamp <= %d", (1675115596722+60*60*1000-86400*1000)*1000000, (1675115596722+120*60*1000-86400*1000)*1000000),
+ fmt.Sprintf("timestamp >= %d AND timestamp <= %d", (1675115596722-86400*1000)*1000000, ((1675115596722+60*60*1000)-86400*1000-1)*1000000),
+ }
+
+ for i, param := range params {
+ _, err, errByName := q.QueryRange(context.Background(), param, nil)
+ if err != nil {
+ t.Errorf("expected no error, got %s", err)
+ }
+ if len(errByName) > 0 {
+ t.Errorf("expected no error, got %v", errByName)
+ }
+ if !strings.Contains(q.QueriesExecuted()[i], expectedTimeRangeInQueryString[i]) {
+ t.Errorf("expected query to contain %s, got %s", expectedTimeRangeInQueryString[i], q.QueriesExecuted()[i])
+ }
+ }
+}
+
+// timeshift with limit queries
+func TestQueryRangeTimeShiftWithLimitAndCache(t *testing.T) {
+ params := []*v3.QueryRangeParamsV3{
+ {
+ Start: 1675115596722 + 60*60*1000 - 86400*1000, //30, 4:23
+ End: 1675115596722 + 120*60*1000 - 86400*1000, //30, 5:23
+ Step: 5 * time.Minute.Milliseconds(),
+ CompositeQuery: &v3.CompositeQuery{
+ QueryType: v3.QueryTypeBuilder,
+ PanelType: v3.PanelTypeGraph,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceLogs,
+ AggregateAttribute: v3.AttributeKey{},
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ AggregateOperator: v3.AggregateOperatorCount,
+ Expression: "A",
+ GroupBy: []v3.AttributeKey{
+ {Key: "service_name", IsColumn: false},
+ {Key: "method", IsColumn: false},
+ },
+ Limit: 5,
+ },
+ },
+ },
+ },
+ {
+ Start: 1675115596722, //31, 3:23
+ End: 1675115596722 + 120*60*1000, //31, 5:23
+ Step: 5 * time.Minute.Milliseconds(),
+ CompositeQuery: &v3.CompositeQuery{
+ QueryType: v3.QueryTypeBuilder,
+ PanelType: v3.PanelTypeGraph,
+ BuilderQueries: map[string]*v3.BuilderQuery{
+ "A": {
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceLogs,
+ AggregateAttribute: v3.AttributeKey{},
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ AggregateOperator: v3.AggregateOperatorCount,
+ Expression: "A",
+ ShiftBy: 86400,
+ GroupBy: []v3.AttributeKey{
+ {Key: "service_name", IsColumn: false},
+ {Key: "method", IsColumn: false},
+ },
+ Limit: 5,
+ },
+ },
+ },
+ },
+ }
+ cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
+ opts := QuerierOptions{
+ Cache: cache,
+ Reader: nil,
+ FluxInterval: 5 * time.Minute,
+ KeyGenerator: queryBuilder.NewKeyGenerator(),
+ TestingMode: true,
+ ReturnedSeries: []*v3.Series{
+ {
+ Labels: map[string]string{},
+ Points: []v3.Point{
+ {Timestamp: 1675115596722 + 60*60*1000 - 86400*1000, Value: 1},
+ {Timestamp: 1675115596722 + 120*60*1000 - 86400*1000 + 60*60*1000, Value: 2},
+ },
+ },
+ },
+ }
+ q := NewQuerier(opts)
+
+ // logs queries are generated in ns
+ expectedTimeRangeInQueryString := []string{
+ fmt.Sprintf("timestamp >= %d AND timestamp <= %d", (1675115596722+60*60*1000-86400*1000)*1000000, (1675115596722+120*60*1000-86400*1000)*1000000),
+ fmt.Sprintf("timestamp >= %d AND timestamp <= %d", (1675115596722-86400*1000)*1000000, ((1675115596722+60*60*1000)-86400*1000-1)*1000000),
+ }
+
+ for i, param := range params {
+ _, err, errByName := q.QueryRange(context.Background(), param, nil)
+ if err != nil {
+ t.Errorf("expected no error, got %s", err)
+ }
+ if len(errByName) > 0 {
+ t.Errorf("expected no error, got %v", errByName)
+ }
+ if !strings.Contains(q.QueriesExecuted()[i], expectedTimeRangeInQueryString[i]) {
+ t.Errorf("expected query to contain %s, got %s", expectedTimeRangeInQueryString[i], q.QueriesExecuted()[i])
+ }
+ }
+}
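Note: the time-shift tests above derive their expected WHERE clauses from the same arithmetic the querier applies. ShiftBy is given in seconds, the request Start/End are epoch milliseconds, and the generated logs query compares nanosecond timestamps. A minimal standalone sketch of that conversion, using the values from the first test (variable names are illustrative, not part of this change):

    package main

    import "fmt"

    func main() {
        const shiftBySeconds = 86400    // BuilderQuery.ShiftBy in the test
        startMs := int64(1675115596722) // request start in ms
        endMs := startMs + 120*60*1000  // request end in ms

        // shift back by ShiftBy (seconds -> ms), then convert ms -> ns for the logs table
        shiftedStartNs := (startMs - shiftBySeconds*1000) * 1000000
        shiftedEndNs := (endMs - shiftBySeconds*1000) * 1000000

        fmt.Printf("timestamp >= %d AND timestamp <= %d\n", shiftedStartNs, shiftedEndNs)
    }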
diff --git a/pkg/query-service/app/querier/v2/helper.go b/pkg/query-service/app/querier/v2/helper.go
index 61ab056251..24738806d3 100644
--- a/pkg/query-service/app/querier/v2/helper.go
+++ b/pkg/query-service/app/querier/v2/helper.go
@@ -36,6 +36,14 @@ func (q *querier) runBuilderQuery(
preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
}
+ // making a local clone since we should not update the global params if there is a shift by
+ start := params.Start
+ end := params.End
+ if builderQuery.ShiftBy != 0 {
+ start = start - builderQuery.ShiftBy*1000
+ end = end - builderQuery.ShiftBy*1000
+ }
+
// TODO: handle other data sources
if builderQuery.DataSource == v3.DataSourceLogs {
var query string
@@ -43,8 +51,8 @@ func (q *querier) runBuilderQuery(
// for ts query with limit replace it as it is already formed
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := logsV3.PrepareLogsQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
@@ -55,8 +63,8 @@ func (q *querier) runBuilderQuery(
return
}
placeholderQuery, err := logsV3.PrepareLogsQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
@@ -69,8 +77,8 @@ func (q *querier) runBuilderQuery(
query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
} else {
query, err = logsV3.PrepareLogsQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.QueryType,
params.CompositeQuery.PanelType,
builderQuery,
@@ -98,8 +106,8 @@ func (q *querier) runBuilderQuery(
// for ts query with group by and limit form two queries
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -110,8 +118,8 @@ func (q *querier) runBuilderQuery(
return
}
placeholderQuery, err := tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -124,8 +132,8 @@ func (q *querier) runBuilderQuery(
query = fmt.Sprintf(placeholderQuery, limitQuery)
} else {
query, err = tracesV3.PrepareTracesQuery(
- params.Start,
- params.End,
+ start,
+ end,
params.CompositeQuery.PanelType,
builderQuery,
keys,
@@ -146,7 +154,7 @@ func (q *querier) runBuilderQuery(
// We are only caching the graph panel queries. A non-existent cache key means that the query is not cached.
// If the query is not cached, we execute the query and return the result without caching it.
if _, ok := cacheKeys[queryName]; !ok {
- query, err := metricsV4.PrepareMetricQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
+ query, err := metricsV4.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
return
@@ -166,7 +174,7 @@ func (q *querier) runBuilderQuery(
cachedData = data
}
}
- misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ misses := q.findMissingTimeRanges(start, end, params.Step, cachedData)
missedSeries := make([]*v3.Series, 0)
cachedSeries := make([]*v3.Series, 0)
for _, miss := range misses {
diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go
index 988acfb458..647edd191b 100644
--- a/pkg/query-service/app/queryBuilder/query_builder.go
+++ b/pkg/query-service/app/queryBuilder/query_builder.go
@@ -183,6 +183,13 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in
PreferRPMFeatureEnabled := err == nil
// Build queries for each builder query
for queryName, query := range compositeQuery.BuilderQueries {
+ // making a local clone since we should not update the global params if there is a shift by
+ start := params.Start
+ end := params.End
+ if query.ShiftBy != 0 {
+ start = start - query.ShiftBy*1000
+ end = end - query.ShiftBy*1000
+ }
if query.Expression == queryName {
switch query.DataSource {
case v3.DataSourceTraces:
@@ -192,12 +199,12 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in
}
// for ts query with group by and limit form two queries
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
- limitQuery, err := qb.options.BuildTraceQuery(params.Start, params.End, compositeQuery.PanelType, query,
+ limitQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType, query,
keys, tracesV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
if err != nil {
return nil, err
}
- placeholderQuery, err := qb.options.BuildTraceQuery(params.Start, params.End, compositeQuery.PanelType,
+ placeholderQuery, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
query, keys, tracesV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
if err != nil {
return nil, err
@@ -205,7 +212,7 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in
query := fmt.Sprintf(placeholderQuery, limitQuery)
queries[queryName] = query
} else {
- queryString, err := qb.options.BuildTraceQuery(params.Start, params.End, compositeQuery.PanelType,
+ queryString, err := qb.options.BuildTraceQuery(start, end, compositeQuery.PanelType,
query, keys, tracesV3.Options{PreferRPM: PreferRPMFeatureEnabled, GraphLimitQtype: ""})
if err != nil {
return nil, err
@@ -215,25 +222,25 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in
case v3.DataSourceLogs:
// for ts query with limit replace it as it is already formed
if compositeQuery.PanelType == v3.PanelTypeGraph && query.Limit > 0 && len(query.GroupBy) > 0 {
- limitQuery, err := qb.options.BuildLogQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
+ limitQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
if err != nil {
return nil, err
}
- placeholderQuery, err := qb.options.BuildLogQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
+ placeholderQuery, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: PreferRPMFeatureEnabled})
if err != nil {
return nil, err
}
query := fmt.Sprintf(placeholderQuery, limitQuery)
queries[queryName] = query
} else {
- queryString, err := qb.options.BuildLogQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{PreferRPM: PreferRPMFeatureEnabled, GraphLimitQtype: ""})
+ queryString, err := qb.options.BuildLogQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, logsV3.Options{PreferRPM: PreferRPMFeatureEnabled, GraphLimitQtype: ""})
if err != nil {
return nil, err
}
queries[queryName] = queryString
}
case v3.DataSourceMetrics:
- queryString, err := qb.options.BuildMetricQuery(params.Start, params.End, compositeQuery.QueryType, compositeQuery.PanelType, query, metricsV3.Options{PreferRPM: PreferRPMFeatureEnabled})
+ queryString, err := qb.options.BuildMetricQuery(start, end, compositeQuery.QueryType, compositeQuery.PanelType, query, metricsV3.Options{PreferRPM: PreferRPMFeatureEnabled})
if err != nil {
return nil, err
}
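The same local-clone pattern appears in both runBuilderQuery (querier v2) and PrepareQueries above: Start/End are epoch milliseconds, ShiftBy is seconds, and only the per-query copies are shifted so the shared params remain untouched for the other queries in the composite query. A minimal sketch of the adjustment under those assumptions (the helper name is illustrative; the diff inlines this logic):

    // shiftedRange returns the per-query start/end in ms, moved back by shiftBySeconds when it is non-zero.
    func shiftedRange(startMs, endMs, shiftBySeconds int64) (int64, int64) {
        if shiftBySeconds != 0 {
            startMs -= shiftBySeconds * 1000
            endMs -= shiftBySeconds * 1000
        }
        return startMs, endMs
    }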
diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go
index eb50a775ce..e9c80c2507 100644
--- a/pkg/query-service/app/server.go
+++ b/pkg/query-service/app/server.go
@@ -9,7 +9,9 @@ import (
"net"
"net/http"
_ "net/http/pprof" // http profiler
+ "net/url"
"os"
+ "strings"
"time"
"github.com/gorilla/handlers"
@@ -21,6 +23,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/agentConf"
"go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/app/opamp"
opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
@@ -155,8 +158,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
if err != nil {
return nil, err
}
- // ingestion pipelines manager
- logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite")
+
+ integrationsController, err := integrations.NewController(localDB)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't create integrations controller: %w", err)
+ }
+
+ logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
+ localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations,
+ )
if err != nil {
return nil, err
}
@@ -173,6 +183,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
AppDao: dao.DB(),
RuleManager: rm,
FeatureFlags: fm,
+ IntegrationsController: integrationsController,
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
@@ -204,7 +215,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
s.privateHTTP = privateServer
- _, err = opAmpModel.InitDB(constants.RELATIONAL_DATASOURCE_PATH)
+ _, err = opAmpModel.InitDB(localDB)
if err != nil {
return nil, err
}
@@ -257,6 +268,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
r := NewRouter()
+ r.Use(LogCommentEnricher)
r.Use(setTimeoutMiddleware)
r.Use(s.analyticsMiddleware)
r.Use(loggingMiddleware)
@@ -266,6 +278,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
api.RegisterRoutes(r, am)
api.RegisterMetricsRoutes(r, am)
api.RegisterLogsRoutes(r, am)
+ api.RegisterIntegrationRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterQueryRangeV4Routes(r, am)
@@ -295,6 +308,65 @@ func loggingMiddleware(next http.Handler) http.Handler {
})
}
+func LogCommentEnricher(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ referrer := r.Header.Get("Referer")
+
+ var path, dashboardID, alertID, page, client, viewName, tab string
+
+ if referrer != "" {
+ referrerURL, _ := url.Parse(referrer)
+ client = "browser"
+ path = referrerURL.Path
+
+ if strings.Contains(path, "/dashboard") {
+ // Split the path into segments
+ pathSegments := strings.Split(referrerURL.Path, "/")
+ // The dashboard ID should be the segment after "/dashboard/"
+ // Loop through pathSegments to find "dashboard" and then take the next segment as the ID
+ for i, segment := range pathSegments {
+ if segment == "dashboard" && i < len(pathSegments)-1 {
+ // Use the next segment, which should be the dashboard ID
+ dashboardID = pathSegments[i+1]
+ }
+ }
+ page = "dashboards"
+ } else if strings.Contains(path, "/alerts") {
+ urlParams := referrerURL.Query()
+ alertID = urlParams.Get("ruleId")
+ page = "alerts"
+ } else if strings.Contains(path, "logs") && strings.Contains(path, "explorer") {
+ page = "logs-explorer"
+ viewName = referrerURL.Query().Get("viewName")
+ } else if strings.Contains(path, "/trace") || strings.Contains(path, "traces-explorer") {
+ page = "traces-explorer"
+ viewName = referrerURL.Query().Get("viewName")
+ } else if strings.Contains(path, "/services") {
+ page = "services"
+ tab = referrerURL.Query().Get("tab")
+ if tab == "" {
+ tab = "OVER_METRICS"
+ }
+ }
+ } else {
+ client = "api"
+ }
+
+ kvs := map[string]string{
+ "path": path,
+ "dashboardID": dashboardID,
+ "alertID": alertID,
+ "source": page,
+ "client": client,
+ "viewName": viewName,
+ "servicesTab": tab,
+ }
+
+ r = r.WithContext(context.WithValue(r.Context(), "log_comment", kvs))
+ next.ServeHTTP(w, r)
+ })
+}
+
// loggingMiddlewarePrivate is used for logging private api calls
// from internal services like alert manager
func loggingMiddlewarePrivate(next http.Handler) http.Handler {
@@ -361,7 +433,7 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
data["queryType"] = postData.CompositeQuery.QueryType
data["panelType"] = postData.CompositeQuery.PanelType
- signozLogsUsed, signozMetricsUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
+ signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData)
}
}
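For context on the new middleware: LogCommentEnricher stores a map[string]string on the request context under the "log_comment" key so that code executing queries further down the stack can pick it up; the consumer is outside this hunk. A minimal sketch of reading the value back (the helper name is illustrative and not part of this diff):

    // logCommentFrom returns the key/value pairs set by LogCommentEnricher, or nil when absent.
    func logCommentFrom(ctx context.Context) map[string]string {
        kvs, _ := ctx.Value("log_comment").(map[string]string)
        return kvs
    }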
diff --git a/pkg/query-service/common/metrics.go b/pkg/query-service/common/metrics.go
index 8596ba9d7c..c6b8075991 100644
--- a/pkg/query-service/common/metrics.go
+++ b/pkg/query-service/common/metrics.go
@@ -2,6 +2,7 @@ package common
import (
"math"
+ "time"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
@@ -17,3 +18,8 @@ func AdjustedMetricTimeRange(start, end, step int64, aggregaOperator v3.TimeAggr
end = end - (end % (adjustStep * 1000))
return start, end
}
+
+func PastDayRoundOff() int64 {
+ now := time.Now().UnixMilli()
+ return int64(math.Floor(float64(now)/float64(time.Hour.Milliseconds()*24))) * time.Hour.Milliseconds() * 24
+}
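PastDayRoundOff floors the current wall-clock time to the most recent whole UTC day, expressed in milliseconds. An equivalent modulo formulation, shown only to make the rounding explicit (not part of the diff):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        day := time.Hour.Milliseconds() * 24 // 86,400,000 ms
        nowMs := time.Now().UnixMilli()
        rounded := nowMs - nowMs%day // same result as the floor-based PastDayRoundOff above
        fmt.Println(rounded)
    }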
diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go
index e7a482d02f..6181a66ea8 100644
--- a/pkg/query-service/constants/constants.go
+++ b/pkg/query-service/constants/constants.go
@@ -213,6 +213,7 @@ const (
SIGNOZ_TIMESERIES_v4_LOCAL_TABLENAME = "time_series_v4"
SIGNOZ_TIMESERIES_v4_6HRS_LOCAL_TABLENAME = "time_series_v4_6hrs"
SIGNOZ_TIMESERIES_v4_1DAY_LOCAL_TABLENAME = "time_series_v4_1day"
+ SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME = "distributed_time_series_v4_1day"
)
var TimeoutExcludedRoutes = map[string]bool{
@@ -223,7 +224,8 @@ var TimeoutExcludedRoutes = map[string]bool{
// alert related constants
const (
// AlertHelpPage is used in case default alert repo url is not set
- AlertHelpPage = "https://signoz.io/docs/userguide/alerts-management/#generator-url"
+ AlertHelpPage = "https://signoz.io/docs/userguide/alerts-management/#generator-url"
+ AlertTimeFormat = "2006-01-02 15:04:05"
)
func GetOrDefaultEnv(key string, fallback string) string {
@@ -307,6 +309,8 @@ var ReservedColumnTargetAliases = map[string]struct{}{
// logsPPLPfx is a short constant for logsPipelinePrefix
const LogsPPLPfx = "logstransform/pipeline_"
+const IntegrationPipelineIdPrefix = "integration"
+
// The datatype present here doesn't represent the actual datatype of column in the logs table.
var StaticFieldsLogsV3 = map[string]v3.AttributeKey{
diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go
index 9d0d65c39c..1ca1fd9958 100644
--- a/pkg/query-service/interfaces/interface.go
+++ b/pkg/query-service/interfaces/interface.go
@@ -23,7 +23,7 @@ type Reader interface {
GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError)
- GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig) (*map[string][]string, *model.ApiError)
+ GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time) (*map[string][]string, *map[string][]string, *model.ApiError)
GetServices(ctx context.Context, query *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError)
GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError)
GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
diff --git a/pkg/query-service/model/featureSet.go b/pkg/query-service/model/featureSet.go
index 26cd70b908..2d0f4a55be 100644
--- a/pkg/query-service/model/featureSet.go
+++ b/pkg/query-service/model/featureSet.go
@@ -21,6 +21,7 @@ const AlertChannelWebhook = "ALERT_CHANNEL_WEBHOOK"
const AlertChannelPagerduty = "ALERT_CHANNEL_PAGERDUTY"
const AlertChannelMsTeams = "ALERT_CHANNEL_MSTEAMS"
const AlertChannelOpsgenie = "ALERT_CHANNEL_OPSGENIE"
+const AlertChannelEmail = "ALERT_CHANNEL_EMAIL"
var BasicPlan = FeatureSet{
Feature{
@@ -100,6 +101,13 @@ var BasicPlan = FeatureSet{
UsageLimit: -1,
Route: "",
},
+ Feature{
+ Name: AlertChannelEmail,
+ Active: true,
+ Usage: 0,
+ UsageLimit: -1,
+ Route: "",
+ },
Feature{
Name: AlertChannelMsTeams,
Active: false,
diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go
index ae99473720..05da7f5ab7 100644
--- a/pkg/query-service/model/response.go
+++ b/pkg/query-service/model/response.go
@@ -171,16 +171,21 @@ type AlertingRuleResponse struct {
// Value float64 `json:"value"`
}
+type DataWarning struct {
+ TopLevelOps []string `json:"topLevelOps"`
+}
+
type ServiceItem struct {
- ServiceName string `json:"serviceName" ch:"serviceName"`
- Percentile99 float64 `json:"p99" ch:"p99"`
- AvgDuration float64 `json:"avgDuration" ch:"avgDuration"`
- NumCalls uint64 `json:"numCalls" ch:"numCalls"`
- CallRate float64 `json:"callRate" ch:"callRate"`
- NumErrors uint64 `json:"numErrors" ch:"numErrors"`
- ErrorRate float64 `json:"errorRate" ch:"errorRate"`
- Num4XX uint64 `json:"num4XX" ch:"num4xx"`
- FourXXRate float64 `json:"fourXXRate" ch:"fourXXRate"`
+ ServiceName string `json:"serviceName" ch:"serviceName"`
+ Percentile99 float64 `json:"p99" ch:"p99"`
+ AvgDuration float64 `json:"avgDuration" ch:"avgDuration"`
+ NumCalls uint64 `json:"numCalls" ch:"numCalls"`
+ CallRate float64 `json:"callRate" ch:"callRate"`
+ NumErrors uint64 `json:"numErrors" ch:"numErrors"`
+ ErrorRate float64 `json:"errorRate" ch:"errorRate"`
+ Num4XX uint64 `json:"num4XX" ch:"num4xx"`
+ FourXXRate float64 `json:"fourXXRate" ch:"fourXXRate"`
+ DataWarning DataWarning `json:"dataWarning"`
}
type ServiceErrorItem struct {
Time time.Time `json:"time" ch:"time"`
@@ -624,10 +629,11 @@ type AlertsInfo struct {
}
type DashboardsInfo struct {
- TotalDashboards int `json:"totalDashboards"`
- LogsBasedPanels int `json:"logsBasedPanels"`
- MetricBasedPanels int `json:"metricBasedPanels"`
- TracesBasedPanels int `json:"tracesBasedPanels"`
+ TotalDashboards int `json:"totalDashboards"`
+ TotalDashboardsWithPanelAndName int `json:"totalDashboardsWithPanelAndName"` // dashboards that have at least one panel and a name other than the sample title
+ LogsBasedPanels int `json:"logsBasedPanels"`
+ MetricBasedPanels int `json:"metricBasedPanels"`
+ TracesBasedPanels int `json:"tracesBasedPanels"`
}
type TagTelemetryData struct {
diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go
index c01660d6e7..43e7a940ab 100644
--- a/pkg/query-service/model/v3/v3.go
+++ b/pkg/query-service/model/v3/v3.go
@@ -509,11 +509,11 @@ const (
SpaceAggregationMin SpaceAggregation = "min"
SpaceAggregationMax SpaceAggregation = "max"
SpaceAggregationCount SpaceAggregation = "count"
- SpaceAggregationPercentile50 SpaceAggregation = "percentile_50"
- SpaceAggregationPercentile75 SpaceAggregation = "percentile_75"
- SpaceAggregationPercentile90 SpaceAggregation = "percentile_90"
- SpaceAggregationPercentile95 SpaceAggregation = "percentile_95"
- SpaceAggregationPercentile99 SpaceAggregation = "percentile_99"
+ SpaceAggregationPercentile50 SpaceAggregation = "p50"
+ SpaceAggregationPercentile75 SpaceAggregation = "p75"
+ SpaceAggregationPercentile90 SpaceAggregation = "p90"
+ SpaceAggregationPercentile95 SpaceAggregation = "p95"
+ SpaceAggregationPercentile99 SpaceAggregation = "p99"
)
func (s SpaceAggregation) Validate() error {
@@ -654,19 +654,22 @@ func (b *BuilderQuery) Validate() error {
}
if b.DataSource == DataSourceMetrics {
// if AggregateOperator is specified, then the request is using v3 payload
- if b.AggregateOperator != "" {
- if err := b.AggregateOperator.Validate(); err != nil {
- return fmt.Errorf("aggregate operator is invalid: %w", err)
- }
- } else {
- if err := b.TimeAggregation.Validate(); err != nil {
- return fmt.Errorf("time aggregation is invalid: %w", err)
- }
+ // if b.AggregateOperator != "" && b.SpaceAggregation == SpaceAggregationUnspecified {
+ // if err := b.AggregateOperator.Validate(); err != nil {
+ // return fmt.Errorf("aggregate operator is invalid: %w", err)
+ // }
+ // } else {
+ // // the time aggregation is not needed for percentile operators
+ // if !IsPercentileOperator(b.SpaceAggregation) {
+ // if err := b.TimeAggregation.Validate(); err != nil {
+ // return fmt.Errorf("time aggregation is invalid: %w", err)
+ // }
+ // }
- if err := b.SpaceAggregation.Validate(); err != nil {
- return fmt.Errorf("space aggregation is invalid: %w", err)
- }
- }
+ // if err := b.SpaceAggregation.Validate(); err != nil {
+ // return fmt.Errorf("space aggregation is invalid: %w", err)
+ // }
+ // }
} else {
if err := b.AggregateOperator.Validate(); err != nil {
return fmt.Errorf("aggregate operator is invalid: %w", err)
@@ -689,7 +692,7 @@ func (b *BuilderQuery) Validate() error {
}
}
- if b.DataSource == DataSourceMetrics && len(b.GroupBy) > 0 {
+ if b.DataSource == DataSourceMetrics && len(b.GroupBy) > 0 && b.SpaceAggregation == SpaceAggregationUnspecified {
if b.AggregateOperator == AggregateOperatorNoOp || b.AggregateOperator == AggregateOperatorRate {
return fmt.Errorf("group by requires aggregate operator other than noop or rate")
}
@@ -723,13 +726,30 @@ func (b *BuilderQuery) Validate() error {
if len(function.Args) == 0 {
return fmt.Errorf("timeShiftBy param missing in query")
}
+ _, ok := function.Args[0].(float64)
+ if !ok {
+ // if string, attempt to convert to float
+ timeShiftBy, err := strconv.ParseFloat(function.Args[0].(string), 64)
+ if err != nil {
+ return fmt.Errorf("timeShiftBy param should be a number")
+ }
+ function.Args[0] = timeShiftBy
+ }
} else if function.Name == FunctionNameEWMA3 ||
function.Name == FunctionNameEWMA5 ||
function.Name == FunctionNameEWMA7 {
if len(function.Args) == 0 {
return fmt.Errorf("alpha param missing in query")
}
- alpha := function.Args[0].(float64)
+ alpha, ok := function.Args[0].(float64)
+ if !ok {
+ // if string, attempt to convert to float
+ alpha, err := strconv.ParseFloat(function.Args[0].(string), 64)
+ if err != nil {
+ return fmt.Errorf("alpha param should be a float")
+ }
+ function.Args[0] = alpha
+ }
if alpha < 0 || alpha > 1 {
return fmt.Errorf("alpha param should be between 0 and 1")
}
@@ -740,6 +760,15 @@ func (b *BuilderQuery) Validate() error {
if len(function.Args) == 0 {
return fmt.Errorf("threshold param missing in query")
}
+ _, ok := function.Args[0].(float64)
+ if !ok {
+ // if string, attempt to convert to float
+ threshold, err := strconv.ParseFloat(function.Args[0].(string), 64)
+ if err != nil {
+ return fmt.Errorf("threshold param should be a float")
+ }
+ function.Args[0] = threshold
+ }
}
}
}
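The Validate changes above let the timeShiftBy, EWMA alpha, and cutoff threshold function arguments arrive either as a float64 or as a numeric string, normalising strings with strconv.ParseFloat and writing the parsed value back into Args[0]. A slightly more defensive sketch of that coercion pattern (the helper name is illustrative; the diff inlines the logic per function):

    // coerceToFloat accepts a float64 or a numeric string and returns the parsed value.
    func coerceToFloat(arg interface{}) (float64, error) {
        switch v := arg.(type) {
        case float64:
            return v, nil
        case string:
            return strconv.ParseFloat(v, 64)
        default:
            return 0, fmt.Errorf("expected a number, got %T", arg)
        }
    }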
diff --git a/pkg/query-service/rules/alerting.go b/pkg/query-service/rules/alerting.go
index 623d5dea21..b2ee0b53d0 100644
--- a/pkg/query-service/rules/alerting.go
+++ b/pkg/query-service/rules/alerting.go
@@ -142,9 +142,11 @@ type RuleCondition struct {
CompositeQuery *v3.CompositeQuery `json:"compositeQuery,omitempty" yaml:"compositeQuery,omitempty"`
CompareOp CompareOp `yaml:"op,omitempty" json:"op,omitempty"`
Target *float64 `yaml:"target,omitempty" json:"target,omitempty"`
- MatchType `json:"matchType,omitempty"`
- TargetUnit string `json:"targetUnit,omitempty"`
- SelectedQuery string `json:"selectedQueryName,omitempty"`
+ AlertOnAbsent bool `yaml:"alertOnAbsent,omitempty" json:"alertOnAbsent,omitempty"`
+ AbsentFor time.Duration `yaml:"absentFor,omitempty" json:"absentFor,omitempty"`
+ MatchType MatchType `json:"matchType,omitempty"`
+ TargetUnit string `json:"targetUnit,omitempty"`
+ SelectedQuery string `json:"selectedQueryName,omitempty"`
}
func (rc *RuleCondition) IsValid() bool {
diff --git a/pkg/query-service/rules/apiParams.go b/pkg/query-service/rules/apiParams.go
index 6000ec280f..0ccf885b3d 100644
--- a/pkg/query-service/rules/apiParams.go
+++ b/pkg/query-service/rules/apiParams.go
@@ -50,6 +50,8 @@ type PostableRule struct {
PreferredChannels []string `json:"preferredChannels,omitempty"`
+ Version string `json:"version,omitempty"`
+
// legacy
Expr string `yaml:"expr,omitempty" json:"expr,omitempty"`
OldYaml string `json:"yaml,omitempty"`
diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go
index d87d1820e0..530bb30d14 100644
--- a/pkg/query-service/rules/manager.go
+++ b/pkg/query-service/rules/manager.go
@@ -866,7 +866,6 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
if parsedRule.RuleType == RuleTypeThreshold {
// add special labels for test alerts
- parsedRule.Labels[labels.AlertAdditionalInfoLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
parsedRule.Labels[labels.RuleSourceLabel] = ""
parsedRule.Labels[labels.AlertRuleIdLabel] = ""
diff --git a/pkg/query-service/rules/promRule.go b/pkg/query-service/rules/promRule.go
index 5607366e6b..1a4a89e3d2 100644
--- a/pkg/query-service/rules/promRule.go
+++ b/pkg/query-service/rules/promRule.go
@@ -182,26 +182,6 @@ func (r *PromRule) Annotations() qslabels.BaseLabels {
return r.annotations
}
-func (r *PromRule) sample(alert *Alert, ts time.Time) pql.Sample {
- lb := plabels.NewBuilder(r.labels)
-
- alertLabels := alert.Labels.(plabels.Labels)
- for _, l := range alertLabels {
- lb.Set(l.Name, l.Value)
- }
-
- lb.Set(qslabels.MetricNameLabel, alertMetricName)
- lb.Set(qslabels.AlertNameLabel, r.name)
- lb.Set(qslabels.AlertStateLabel, alert.State.String())
-
- s := pql.Sample{
- Metric: lb.Labels(),
- T: timestamp.FromTime(ts),
- F: 1,
- }
- return s
-}
-
// GetEvaluationDuration returns the time in seconds it took to evaluate the alerting rule.
func (r *PromRule) GetEvaluationDuration() time.Duration {
r.mtx.Lock()
@@ -388,6 +368,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
if !shouldAlert {
continue
}
+ zap.S().Debugf("rule: %s, alerting for series: %v", r.Name(), series)
thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit)
threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit)
@@ -454,6 +435,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (
}
}
+ zap.S().Debugf("For rule: %s, found %d alerts", r.Name(), len(alerts))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
diff --git a/pkg/query-service/rules/resultTypes.go b/pkg/query-service/rules/resultTypes.go
index 78474526bd..5f39208b01 100644
--- a/pkg/query-service/rules/resultTypes.go
+++ b/pkg/query-service/rules/resultTypes.go
@@ -20,6 +20,8 @@ type Sample struct {
// Label keys as-is from the result query.
// The original labels are used to prepare the related{logs, traces} link in alert notification
MetricOrig labels.Labels
+
+ IsMissing bool
}
func (s Sample) String() string {
diff --git a/pkg/query-service/rules/ruleTask.go b/pkg/query-service/rules/ruleTask.go
index 46d3a0917a..b2f6f09921 100644
--- a/pkg/query-service/rules/ruleTask.go
+++ b/pkg/query-service/rules/ruleTask.go
@@ -318,6 +318,13 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {
rule.SetEvaluationTimestamp(t)
}(time.Now())
+ kvs := map[string]string{
+ "alertID": rule.ID(),
+ "source": "alerts",
+ "client": "query-service",
+ }
+ ctx = context.WithValue(ctx, "log_comment", kvs)
+
_, err := rule.Eval(ctx, ts, g.opts.Queriers)
if err != nil {
rule.SetHealth(HealthBad)
diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go
index 9687038a40..0fdb3745ca 100644
--- a/pkg/query-service/rules/thresholdRule.go
+++ b/pkg/query-service/rules/thresholdRule.go
@@ -18,6 +18,7 @@ import (
"go.uber.org/zap"
"github.com/ClickHouse/clickhouse-go/v2"
+ "github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"go.signoz.io/signoz/pkg/query-service/converter"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
@@ -31,6 +32,7 @@ import (
logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/formatter"
@@ -59,10 +61,19 @@ type ThresholdRule struct {
// map of active alerts
active map[uint64]*Alert
- queryBuilder *queryBuilder.QueryBuilder
+ queryBuilder *queryBuilder.QueryBuilder
+ version string
+ queryBuilderV4 *queryBuilder.QueryBuilder
+ // temporalityMap is a map of metric name to temporality
+ // to avoid fetching temporality for the same metric multiple times
+ // querying the v4 table on the low-cardinality temporality column
+ // should be fast, but we can still avoid the query if we have the data in memory
+ temporalityMap map[string]map[v3.Temporality]bool
opts ThresholdRuleOpts
- typ string
+
+ lastTimestampWithDatapoints time.Time
+ typ string
}
type ThresholdRuleOpts struct {
@@ -102,6 +113,8 @@ func NewThresholdRule(
active: map[uint64]*Alert{},
opts: opts,
typ: p.AlertType,
+ version: p.Version,
+ temporalityMap: make(map[string]map[v3.Temporality]bool),
}
if int64(t.evalWindow) == 0 {
@@ -115,6 +128,13 @@ func NewThresholdRule(
}
t.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts, featureFlags)
+ builderOptsV4 := queryBuilder.QueryBuilderOptions{
+ BuildMetricQuery: metricsV4.PrepareMetricQuery,
+ BuildTraceQuery: tracesV3.PrepareTracesQuery,
+ BuildLogQuery: logsv3.PrepareLogsQuery,
+ }
+ t.queryBuilderV4 = queryBuilder.NewQueryBuilder(builderOptsV4, featureFlags)
+
zap.S().Info("msg:", "creating new alerting rule", "\t name:", t.name, "\t condition:", t.ruleCondition.String(), "\t generatorURL:", t.GeneratorURL())
return &t, nil
@@ -274,6 +294,84 @@ func (r *ThresholdRule) ActiveAlerts() []*Alert {
return res
}
+func (r *ThresholdRule) FetchTemporality(ctx context.Context, metricNames []string, ch driver.Conn) (map[string]map[v3.Temporality]bool, error) {
+
+ metricNameToTemporality := make(map[string]map[v3.Temporality]bool)
+
+ query := fmt.Sprintf(`SELECT DISTINCT metric_name, temporality FROM %s.%s WHERE metric_name IN $1`, constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME)
+
+ rows, err := ch.Query(ctx, query, metricNames)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+
+ for rows.Next() {
+ var metricName, temporality string
+ err := rows.Scan(&metricName, &temporality)
+ if err != nil {
+ return nil, err
+ }
+ if _, ok := metricNameToTemporality[metricName]; !ok {
+ metricNameToTemporality[metricName] = make(map[v3.Temporality]bool)
+ }
+ metricNameToTemporality[metricName][v3.Temporality(temporality)] = true
+ }
+ return metricNameToTemporality, nil
+}
+
+// populateTemporality is the v4 counterpart of addTemporality
+func (r *ThresholdRule) populateTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3, ch driver.Conn) error {
+
+ missingTemporality := make([]string, 0)
+ metricNameToTemporality := make(map[string]map[v3.Temporality]bool)
+ if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 {
+ for _, query := range qp.CompositeQuery.BuilderQueries {
+ // if there is no temporality specified in the query but we have it in the map
+ // then use the value from the map
+ if query.Temporality == "" && r.temporalityMap[query.AggregateAttribute.Key] != nil {
+ // We prefer delta if it is available
+ if r.temporalityMap[query.AggregateAttribute.Key][v3.Delta] {
+ query.Temporality = v3.Delta
+ } else if r.temporalityMap[query.AggregateAttribute.Key][v3.Cumulative] {
+ query.Temporality = v3.Cumulative
+ } else {
+ query.Temporality = v3.Unspecified
+ }
+ }
+ // we don't have temporality for this metric
+ if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" {
+ missingTemporality = append(missingTemporality, query.AggregateAttribute.Key)
+ }
+ if _, ok := metricNameToTemporality[query.AggregateAttribute.Key]; !ok {
+ metricNameToTemporality[query.AggregateAttribute.Key] = make(map[v3.Temporality]bool)
+ }
+ }
+ }
+
+ nameToTemporality, err := r.FetchTemporality(ctx, missingTemporality, ch)
+ if err != nil {
+ return err
+ }
+
+ if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 {
+ for name := range qp.CompositeQuery.BuilderQueries {
+ query := qp.CompositeQuery.BuilderQueries[name]
+ if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" {
+ if nameToTemporality[query.AggregateAttribute.Key][v3.Delta] {
+ query.Temporality = v3.Delta
+ } else if nameToTemporality[query.AggregateAttribute.Key][v3.Cumulative] {
+ query.Temporality = v3.Cumulative
+ } else {
+ query.Temporality = v3.Unspecified
+ }
+ r.temporalityMap[query.AggregateAttribute.Key] = nameToTemporality[query.AggregateAttribute.Key]
+ }
+ }
+ }
+ return nil
+}
+
// ForEachActiveAlert runs the given function on each alert.
// This should be used when you want to use the actual alerts from the ThresholdRule
// and not on its copy.
@@ -435,6 +533,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
if err := rows.Scan(vars...); err != nil {
return nil, err
}
+ r.lastTimestampWithDatapoints = time.Now()
sample := Sample{}
// Why do we maintain two labels sets? Alertmanager requires
@@ -459,8 +558,8 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
if colName == "ts" || colName == "interval" {
sample.Point.T = timval.Unix()
} else {
- lbls.Set(colName, timval.Format("2006-01-02 15:04:05"))
- lblsOrig.Set(columnNames[i], timval.Format("2006-01-02 15:04:05"))
+ lbls.Set(colName, timval.Format(constants.AlertTimeFormat))
+ lblsOrig.Set(columnNames[i], timval.Format(constants.AlertTimeFormat))
}
case *float64:
@@ -613,6 +712,20 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
zap.S().Debugf("ruleid:", r.ID(), "\t resultmap(potential alerts):", len(resultMap))
+ // if data has been missing for the AbsentFor duration then we should send an alert
+ if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(r.Condition().AbsentFor).Before(time.Now()) {
+ zap.S().Debugf("ruleid:", r.ID(), "\t msg: no data found for rule condition")
+ lbls := labels.NewBuilder(labels.Labels{})
+ if !r.lastTimestampWithDatapoints.IsZero() {
+ lbls.Set("lastSeen", r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat))
+ }
+ result = append(result, Sample{
+ Metric: lbls.Labels(),
+ IsMissing: true,
+ })
+ return result, nil
+ }
+
for _, sample := range resultMap {
// check alert rule condition before dumping results, if sendUnmatchedResults
// is set then add results irrespective of condition
@@ -626,7 +739,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
return result, nil
}
-func (r *ThresholdRule) prepareBuilderQueries(ts time.Time) (map[string]string, error) {
+func (r *ThresholdRule) prepareBuilderQueries(ts time.Time, ch driver.Conn) (map[string]string, error) {
params := r.prepareQueryRange(ts)
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
// check if any enrichment is required for logs if yes then enrich them
@@ -638,7 +751,17 @@ func (r *ThresholdRule) prepareBuilderQueries(ts time.Time) (map[string]string,
}
- runQueries, err := r.queryBuilder.PrepareQueries(params)
+ var runQueries map[string]string
+ var err error
+
+ if r.version == "v4" {
+ if ch != nil {
+ r.populateTemporality(context.Background(), params, ch)
+ }
+ runQueries, err = r.queryBuilderV4.PrepareQueries(params)
+ } else {
+ runQueries, err = r.queryBuilder.PrepareQueries(params)
+ }
return runQueries, err
}
@@ -717,10 +840,11 @@ func (r *ThresholdRule) prepareLinksToLogs(ts time.Time, lbls labels.Labels) str
return ""
}
+ q := r.prepareQueryRange(ts)
// Logs list view expects time in milliseconds
tr := timeRange{
- Start: ts.Add(-time.Duration(r.evalWindow)).UnixMilli(),
- End: ts.UnixMilli(),
+ Start: q.Start,
+ End: q.End,
PageSize: 100,
}
@@ -780,10 +904,11 @@ func (r *ThresholdRule) prepareLinksToTraces(ts time.Time, lbls labels.Labels) s
return ""
}
+ q := r.prepareQueryRange(ts)
// Traces list view expects time in nanoseconds
tr := timeRange{
- Start: ts.Add(-time.Duration(r.evalWindow)).UnixNano(),
- End: ts.UnixNano(),
+ Start: q.Start * time.Second.Microseconds(),
+ End: q.End * time.Second.Microseconds(),
PageSize: 100,
}
@@ -896,7 +1021,7 @@ func (r *ThresholdRule) GetSelectedQuery() string {
var err error
if r.ruleCondition.QueryType() == v3.QueryTypeBuilder {
- queries, err = r.prepareBuilderQueries(time.Now())
+ queries, err = r.prepareBuilderQueries(time.Now(), nil)
if err != nil {
zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare metric queries", zap.Error(err))
return ""
@@ -950,7 +1075,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c
// fetch the target query based on query type
if r.ruleCondition.QueryType() == v3.QueryTypeBuilder {
- queries, err = r.prepareBuilderQueries(ts)
+ queries, err = r.prepareBuilderQueries(ts, ch)
if err != nil {
zap.S().Errorf("ruleid:", r.ID(), "\t msg: failed to prepare metric queries", zap.Error(err))
@@ -1069,6 +1194,11 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
annotations := make(labels.Labels, 0, len(r.annotations))
for _, a := range r.annotations {
+ if smpl.IsMissing {
+ if a.Name == labels.AlertDescriptionLabel || a.Name == labels.AlertSummaryLabel {
+ a.Value = labels.AlertMissingData
+ }
+ }
annotations = append(annotations, labels.Label{Name: normalizeLabelName(a.Name), Value: expand(a.Value)})
}
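The no-data handling added to thresholdRule.go keeps lastTimestampWithDatapoints updated on every scanned row and, when AlertOnAbsent is set, emits a synthetic Sample with IsMissing once no datapoint has been seen for the AbsentFor duration. A condensed sketch of the trigger condition using the same field semantics (types simplified, name illustrative):

    // shouldFireAbsentAlert reports whether an "absent data" sample should be emitted.
    func shouldFireAbsentAlert(alertOnAbsent bool, lastSeen time.Time, absentFor time.Duration) bool {
        return alertOnAbsent && lastSeen.Add(absentFor).Before(time.Now())
    }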
diff --git a/pkg/query-service/rules/thresholdRule_test.go b/pkg/query-service/rules/thresholdRule_test.go
index 2b39084bec..fde35364bc 100644
--- a/pkg/query-service/rules/thresholdRule_test.go
+++ b/pkg/query-service/rules/thresholdRule_test.go
@@ -376,7 +376,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
ts := time.UnixMilli(1705469040000)
link := rule.prepareLinksToLogs(ts, labels.Labels{})
- assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468740000%2C%22end%22%3A1705469040000%2C%22pageSize%22%3A100%7D&startTime=1705468740000&endTime=1705469040000")
+ assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468620000%2C%22end%22%3A1705468920000%2C%22pageSize%22%3A100%7D&startTime=1705468620000&endTime=1705468920000")
}
func TestPrepareLinksToTraces(t *testing.T) {
@@ -418,5 +418,5 @@ func TestPrepareLinksToTraces(t *testing.T) {
ts := time.UnixMilli(1705469040000)
link := rule.prepareLinksToTraces(ts, labels.Labels{})
- assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468740000000000%2C%22end%22%3A1705469040000000000%2C%22pageSize%22%3A100%7D&startTime=1705468740000000000&endTime=1705469040000000000")
+ assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468620000000000%2C%22end%22%3A1705468920000000000%2C%22pageSize%22%3A100%7D&startTime=1705468620000000000&endTime=1705468920000000000")
}
diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go
index ea93d75a0a..ff2ed9aa1a 100644
--- a/pkg/query-service/telemetry/telemetry.go
+++ b/pkg/query-service/telemetry/telemetry.go
@@ -22,41 +22,45 @@ import (
)
const (
- TELEMETRY_EVENT_PATH = "API Call"
- TELEMETRY_EVENT_USER = "User"
- TELEMETRY_EVENT_INPRODUCT_FEEDBACK = "InProduct Feedback Submitted"
- TELEMETRY_EVENT_NUMBER_OF_SERVICES = "Number of Services"
- TELEMETRY_EVENT_NUMBER_OF_SERVICES_PH = "Number of Services V2"
- TELEMETRY_EVENT_HEART_BEAT = "Heart Beat"
- TELEMETRY_EVENT_ORG_SETTINGS = "Org Settings"
- DEFAULT_SAMPLING = 0.1
- TELEMETRY_LICENSE_CHECK_FAILED = "License Check Failed"
- TELEMETRY_LICENSE_UPDATED = "License Updated"
- TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
- TELEMETRY_EVENT_ENVIRONMENT = "Environment"
- TELEMETRY_EVENT_LANGUAGE = "Language"
- TELEMETRY_EVENT_SERVICE = "ServiceName"
- TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters"
- TELEMETRY_EVENT_DISTRIBUTED = "Distributed"
- TELEMETRY_EVENT_QUERY_RANGE_V3 = "Query Range V3 Metadata"
- TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info"
- TELEMETRY_EVENT_ACTIVE_USER = "Active User"
- TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2"
- TELEMETRY_EVENT_USER_INVITATION_SENT = "User Invitation Sent"
- TELEMETRY_EVENT_USER_INVITATION_ACCEPTED = "User Invitation Accepted"
- DEFAULT_CLOUD_EMAIL = "admin@signoz.cloud"
+ TELEMETRY_EVENT_PATH = "API Call"
+ TELEMETRY_EVENT_USER = "User"
+ TELEMETRY_EVENT_INPRODUCT_FEEDBACK = "InProduct Feedback Submitted"
+ TELEMETRY_EVENT_NUMBER_OF_SERVICES = "Number of Services"
+ TELEMETRY_EVENT_NUMBER_OF_SERVICES_PH = "Number of Services V2"
+ TELEMETRY_EVENT_HEART_BEAT = "Heart Beat"
+ TELEMETRY_EVENT_ORG_SETTINGS = "Org Settings"
+ DEFAULT_SAMPLING = 0.1
+ TELEMETRY_LICENSE_CHECK_FAILED = "License Check Failed"
+ TELEMETRY_LICENSE_UPDATED = "License Updated"
+ TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
+ TELEMETRY_EVENT_ENVIRONMENT = "Environment"
+ TELEMETRY_EVENT_LANGUAGE = "Language"
+ TELEMETRY_EVENT_SERVICE = "ServiceName"
+ TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters"
+ TELEMETRY_EVENT_DISTRIBUTED = "Distributed"
+ TELEMETRY_EVENT_QUERY_RANGE_V3 = "Query Range V3 Metadata"
+ TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info"
+ TELEMETRY_EVENT_ACTIVE_USER = "Active User"
+ TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2"
+ TELEMETRY_EVENT_USER_INVITATION_SENT = "User Invitation Sent"
+ TELEMETRY_EVENT_USER_INVITATION_ACCEPTED = "User Invitation Accepted"
+ TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY = "Successful Dashboard Panel Query"
+ TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY = "Successful Alert Query"
+ DEFAULT_CLOUD_EMAIL = "admin@signoz.cloud"
)
var SAAS_EVENTS_LIST = map[string]struct{}{
- TELEMETRY_EVENT_NUMBER_OF_SERVICES: {},
- TELEMETRY_EVENT_ACTIVE_USER: {},
- TELEMETRY_EVENT_HEART_BEAT: {},
- TELEMETRY_EVENT_LANGUAGE: {},
- TELEMETRY_EVENT_SERVICE: {},
- TELEMETRY_EVENT_ENVIRONMENT: {},
- TELEMETRY_EVENT_USER_INVITATION_SENT: {},
- TELEMETRY_EVENT_USER_INVITATION_ACCEPTED: {},
- TELEMETRY_EVENT_DASHBOARDS_ALERTS: {},
+ TELEMETRY_EVENT_NUMBER_OF_SERVICES: {},
+ TELEMETRY_EVENT_ACTIVE_USER: {},
+ TELEMETRY_EVENT_HEART_BEAT: {},
+ TELEMETRY_EVENT_LANGUAGE: {},
+ TELEMETRY_EVENT_SERVICE: {},
+ TELEMETRY_EVENT_ENVIRONMENT: {},
+ TELEMETRY_EVENT_USER_INVITATION_SENT: {},
+ TELEMETRY_EVENT_USER_INVITATION_ACCEPTED: {},
+ TELEMETRY_EVENT_DASHBOARDS_ALERTS: {},
+ TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY: {},
+ TELEMETRY_EVENT_SUCCESSFUL_ALERT_QUERY: {},
}
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
@@ -93,9 +97,10 @@ func (a *Telemetry) IsSampled() bool {
}
-func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3) (bool, bool) {
+func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3) (bool, bool, bool) {
signozLogsUsed := false
signozMetricsUsed := false
+ signozTracesUsed := false
if postData.CompositeQuery.QueryType == v3.QueryTypeBuilder {
for _, query := range postData.CompositeQuery.BuilderQueries {
@@ -105,6 +110,8 @@ func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3)
!strings.Contains(query.AggregateAttribute.Key, "signoz_") &&
len(query.AggregateAttribute.Key) > 0 {
signozMetricsUsed = true
+ } else if query.DataSource == v3.DataSourceTraces && len(query.Filters.Items) > 0 {
+ signozTracesUsed = true
}
}
} else if postData.CompositeQuery.QueryType == v3.QueryTypePromQL {
@@ -118,9 +125,15 @@ func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3)
if strings.Contains(query.Query, "signoz_metrics") && len(query.Query) > 0 {
signozMetricsUsed = true
}
+ if strings.Contains(query.Query, "signoz_logs") && len(query.Query) > 0 {
+ signozLogsUsed = true
+ }
+ if strings.Contains(query.Query, "signoz_traces") && len(query.Query) > 0 {
+ signozTracesUsed = true
+ }
}
}
- return signozLogsUsed, signozMetricsUsed
+ return signozLogsUsed, signozMetricsUsed, signozTracesUsed
}
func (telemetry *Telemetry) AddActiveTracesUser() {
@@ -275,14 +288,15 @@ func createTelemetry() {
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
if err == nil {
dashboardsAlertsData := map[string]interface{}{
- "totalDashboards": dashboardsInfo.TotalDashboards,
- "logsBasedPanels": dashboardsInfo.LogsBasedPanels,
- "metricBasedPanels": dashboardsInfo.MetricBasedPanels,
- "tracesBasedPanels": dashboardsInfo.TracesBasedPanels,
- "totalAlerts": alertsInfo.TotalAlerts,
- "logsBasedAlerts": alertsInfo.LogsBasedAlerts,
- "metricBasedAlerts": alertsInfo.MetricBasedAlerts,
- "tracesBasedAlerts": alertsInfo.TracesBasedAlerts,
+ "totalDashboards": dashboardsInfo.TotalDashboards,
+ "totalDashboardsWithPanelAndName": dashboardsInfo.TotalDashboardsWithPanelAndName,
+ "logsBasedPanels": dashboardsInfo.LogsBasedPanels,
+ "metricBasedPanels": dashboardsInfo.MetricBasedPanels,
+ "tracesBasedPanels": dashboardsInfo.TracesBasedPanels,
+ "totalAlerts": alertsInfo.TotalAlerts,
+ "logsBasedAlerts": alertsInfo.LogsBasedAlerts,
+ "metricBasedAlerts": alertsInfo.MetricBasedAlerts,
+ "tracesBasedAlerts": alertsInfo.TracesBasedAlerts,
}
// send event only if there are dashboards or alerts
if dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 {
diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go
index 4c260596e5..9ef47171a4 100644
--- a/pkg/query-service/tests/integration/logparsingpipeline_test.go
+++ b/pkg/query-service/tests/integration/logparsingpipeline_test.go
@@ -1,14 +1,11 @@
package tests
import (
- "bytes"
- "context"
"encoding/json"
"fmt"
"io"
- "net/http"
"net/http/httptest"
- "os"
+ "runtime/debug"
"strings"
"testing"
@@ -18,10 +15,10 @@ import (
"github.com/knadh/koanf/parsers/yaml"
"github.com/open-telemetry/opamp-go/protobufs"
"github.com/pkg/errors"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/agentConf"
"go.signoz.io/signoz/pkg/query-service/app"
+ "go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/app/opamp"
opampModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
@@ -31,20 +28,21 @@ import (
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/queryBuilderToExpr"
+ "go.signoz.io/signoz/pkg/query-service/utils"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)
func TestLogPipelinesLifecycle(t *testing.T) {
- testbed := NewLogPipelinesTestBed(t)
- assert := assert.New(t)
+ testbed := NewLogPipelinesTestBed(t, nil)
+ require := require.New(t)
getPipelinesResp := testbed.GetPipelinesFromQS()
- assert.Equal(
+ require.Equal(
0, len(getPipelinesResp.Pipelines),
"There should be no pipelines at the start",
)
- assert.Equal(
+ require.Equal(
0, len(getPipelinesResp.History),
"There should be no pipelines config history at the start",
)
@@ -118,11 +116,11 @@ func TestLogPipelinesLifecycle(t *testing.T) {
)
// Deployment status should be pending.
- assert.Equal(
+ require.Equal(
1, len(getPipelinesResp.History),
"pipelines config history should not be empty after 1st configuration",
)
- assert.Equal(
+ require.Equal(
agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
"pipelines deployment should be in progress after 1st configuration",
)
@@ -134,7 +132,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
assertPipelinesResponseMatchesPostedPipelines(
t, postablePipelines, getPipelinesResp,
)
- assert.Equal(
+ require.Equal(
agentConf.Deployed,
getPipelinesResp.History[0].DeployStatus,
"pipeline deployment should be complete after acknowledgment from opamp client",
@@ -149,12 +147,13 @@ func TestLogPipelinesLifecycle(t *testing.T) {
testbed.assertPipelinesSentToOpampClient(updatePipelinesResp.Pipelines)
testbed.assertNewAgentGetsPipelinesOnConnection(updatePipelinesResp.Pipelines)
- assert.Equal(
- 2, len(updatePipelinesResp.History),
+ getPipelinesResp = testbed.GetPipelinesFromQS()
+ require.Equal(
+ 2, len(getPipelinesResp.History),
"there should be 2 history entries after posting pipelines config for the 2nd time",
)
- assert.Equal(
- agentConf.DeployInitiated, updatePipelinesResp.History[0].DeployStatus,
+ require.Equal(
+ agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
"deployment should be in progress for latest pipeline config",
)
@@ -165,7 +164,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
assertPipelinesResponseMatchesPostedPipelines(
t, postablePipelines, getPipelinesResp,
)
- assert.Equal(
+ require.Equal(
agentConf.Deployed,
getPipelinesResp.History[0].DeployStatus,
"deployment for latest pipeline config should be complete after acknowledgment from opamp client",
@@ -174,7 +173,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
func TestLogPipelinesHistory(t *testing.T) {
require := require.New(t)
- testbed := NewLogPipelinesTestBed(t)
+ testbed := NewLogPipelinesTestBed(t, nil)
// Only the latest config version can be "IN_PROGRESS",
// other incomplete deployments should have status "UNKNOWN"
@@ -356,7 +355,7 @@ func TestLogPipelinesValidation(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.Name, func(t *testing.T) {
- testbed := NewLogPipelinesTestBed(t)
+ testbed := NewLogPipelinesTestBed(t, nil)
testbed.PostPipelinesToQSExpectingStatusCode(
logparsingpipeline.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{tc.Pipeline},
@@ -369,7 +368,7 @@ func TestLogPipelinesValidation(t *testing.T) {
func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) {
require := require.New(t)
- testbed := NewTestbedWithoutOpamp(t)
+ testbed := NewTestbedWithoutOpamp(t, nil)
getPipelinesResp := testbed.GetPipelinesFromQS()
require.Equal(0, len(getPipelinesResp.Pipelines))
@@ -422,7 +421,6 @@ func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) {
// configuring log pipelines and provides test helpers.
type LogPipelinesTestBed struct {
t *testing.T
- testDBFilePath string
testUser *model.User
apiHandler *app.APIHandler
agentConfMgr *agentConf.Manager
@@ -430,25 +428,20 @@ type LogPipelinesTestBed struct {
opampClientConn *opamp.MockOpAmpConnection
}
-func NewTestbedWithoutOpamp(t *testing.T) *LogPipelinesTestBed {
- // Create a tmp file based sqlite db for testing.
- testDBFile, err := os.CreateTemp("", "test-signoz-db-*")
- if err != nil {
- t.Fatalf("could not create temp file for test db: %v", err)
+// testDB can be injected for sharing a DB across multiple integration testbeds.
+func NewTestbedWithoutOpamp(t *testing.T, testDB *sqlx.DB) *LogPipelinesTestBed {
+ if testDB == nil {
+ testDB = utils.NewQueryServiceDBForTests(t)
}
- testDBFilePath := testDBFile.Name()
- t.Cleanup(func() { os.Remove(testDBFilePath) })
- testDBFile.Close()
- // TODO(Raj): move away from singleton DB instances to avoid
- // issues when running tests in parallel.
- dao.InitDao("sqlite", testDBFilePath)
-
- testDB, err := sqlx.Open("sqlite3", testDBFilePath)
+ ic, err := integrations.NewController(testDB)
if err != nil {
- t.Fatalf("could not open test db sqlite file: %v", err)
+ t.Fatalf("could not create integrations controller: %v", err)
}
- controller, err := logparsingpipeline.NewLogParsingPipelinesController(testDB, "sqlite")
+
+ controller, err := logparsingpipeline.NewLogParsingPipelinesController(
+ testDB, "sqlite", ic.GetPipelinesForInstalledIntegrations,
+ )
if err != nil {
t.Fatalf("could not create a logparsingpipelines controller: %v", err)
}
@@ -467,7 +460,7 @@ func NewTestbedWithoutOpamp(t *testing.T) *LogPipelinesTestBed {
}
// Mock an available opamp agent
- testDB, err = opampModel.InitDB(testDBFilePath)
+ testDB, err = opampModel.InitDB(testDB)
require.Nil(t, err, "failed to init opamp model")
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
@@ -479,16 +472,15 @@ func NewTestbedWithoutOpamp(t *testing.T) *LogPipelinesTestBed {
require.Nil(t, err, "failed to init agentConf")
return &LogPipelinesTestBed{
- t: t,
- testDBFilePath: testDBFilePath,
- testUser: user,
- apiHandler: apiHandler,
- agentConfMgr: agentConfMgr,
+ t: t,
+ testUser: user,
+ apiHandler: apiHandler,
+ agentConfMgr: agentConfMgr,
}
}
-func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed {
- testbed := NewTestbedWithoutOpamp(t)
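+// testDB can be injected for sharing a DB across multiple integration testbeds.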
+func NewLogPipelinesTestBed(t *testing.T, testDB *sqlx.DB) *LogPipelinesTestBed {
+ testbed := NewTestbedWithoutOpamp(t, testDB)
opampServer := opamp.InitializeServer(nil, testbed.agentConfMgr)
err := opampServer.Start(opamp.GetAvailableLocalAddress())
@@ -590,8 +582,8 @@ func (tb *LogPipelinesTestBed) GetPipelinesFromQS() *logparsingpipeline.Pipeline
if response.StatusCode != 200 {
tb.t.Fatalf(
- "could not list log parsing pipelines. status: %d, body: %v",
- response.StatusCode, string(responseBody),
+ "could not list log parsing pipelines. status: %d, body: %v\n%s",
+ response.StatusCode, string(responseBody), string(debug.Stack()),
)
}
@@ -625,7 +617,7 @@ func assertPipelinesRecommendedInRemoteConfig(
pipelines []logparsingpipeline.Pipeline,
) {
collectorConfigFiles := msg.RemoteConfig.Config.ConfigMap
- assert.Equal(
+ require.Equal(
t, len(collectorConfigFiles), 1,
"otel config sent to client is expected to contain atleast 1 file",
)
@@ -653,7 +645,7 @@ func assertPipelinesRecommendedInRemoteConfig(
}
_, expectedLogProcessorNames, err := logparsingpipeline.PreparePipelineProcessor(pipelines)
- assert.Equal(
+ require.Equal(
t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames,
"config sent to opamp client doesn't contain expected log pipelines",
)
@@ -661,7 +653,7 @@ func assertPipelinesRecommendedInRemoteConfig(
collectorConfProcessors := collectorConfSentToClient["processors"].(map[string]interface{})
for _, procName := range expectedLogProcessorNames {
pipelineProcessorInConf, procExists := collectorConfProcessors[procName]
- assert.True(t, procExists, fmt.Sprintf(
+ require.True(t, procExists, fmt.Sprintf(
"%s processor not found in config sent to opamp client", procName,
))
@@ -747,16 +739,16 @@ func assertPipelinesResponseMatchesPostedPipelines(
postablePipelines logparsingpipeline.PostablePipelines,
pipelinesResp *logparsingpipeline.PipelinesResponse,
) {
- assert.Equal(
+ require.Equal(
t, len(postablePipelines.Pipelines), len(pipelinesResp.Pipelines),
"length mistmatch between posted pipelines and pipelines in response",
)
for i, pipeline := range pipelinesResp.Pipelines {
postable := postablePipelines.Pipelines[i]
- assert.Equal(t, postable.Name, pipeline.Name, "pipeline.Name mismatch")
- assert.Equal(t, postable.OrderId, pipeline.OrderId, "pipeline.OrderId mismatch")
- assert.Equal(t, postable.Enabled, pipeline.Enabled, "pipeline.Enabled mismatch")
- assert.Equal(t, postable.Config, pipeline.Config, "pipeline.Config mismatch")
+ require.Equal(t, postable.Name, pipeline.Name, "pipeline.Name mismatch")
+ require.Equal(t, postable.OrderId, pipeline.OrderId, "pipeline.OrderId mismatch")
+ require.Equal(t, postable.Enabled, pipeline.Enabled, "pipeline.Enabled mismatch")
+ require.Equal(t, postable.Config, pipeline.Config, "pipeline.Config mismatch")
}
}
@@ -792,60 +784,3 @@ func newInitialAgentConfigMap() *protobufs.AgentConfigMap {
},
}
}
-
-func createTestUser() (*model.User, *model.ApiError) {
- // Create a test user for auth
- ctx := context.Background()
- org, apiErr := dao.DB().CreateOrg(ctx, &model.Organization{
- Name: "test",
- })
- if apiErr != nil {
- return nil, apiErr
- }
-
- group, apiErr := dao.DB().CreateGroup(ctx, &model.Group{
- Name: "test",
- })
- if apiErr != nil {
- return nil, apiErr
- }
-
- return dao.DB().CreateUser(
- ctx,
- &model.User{
- Name: "test",
- Email: "test@test.com",
- Password: "test",
- OrgId: org.Id,
- GroupId: group.Id,
- },
- true,
- )
-}
-
-func NewAuthenticatedTestRequest(
- user *model.User,
- path string,
- postData interface{},
-) (*http.Request, error) {
- userJwt, err := auth.GenerateJWTForUser(user)
- if err != nil {
- return nil, err
- }
-
- var req *http.Request
-
- if postData != nil {
- var body bytes.Buffer
- err = json.NewEncoder(&body).Encode(postData)
- if err != nil {
- return nil, err
- }
- req = httptest.NewRequest(http.MethodPost, path, &body)
- } else {
- req = httptest.NewRequest(http.MethodPost, path, nil)
- }
-
- req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt)
- return req, nil
-}
diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go
new file mode 100644
index 0000000000..5294d06081
--- /dev/null
+++ b/pkg/query-service/tests/integration/signoz_integrations_test.go
@@ -0,0 +1,586 @@
+package tests
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "runtime/debug"
+ "slices"
+ "testing"
+
+ "github.com/jmoiron/sqlx"
+ mockhouse "github.com/srikanthccv/ClickHouse-go-mock"
+ "github.com/stretchr/testify/require"
+ "go.signoz.io/signoz/pkg/query-service/app"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/app/integrations"
+ "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ "go.signoz.io/signoz/pkg/query-service/featureManager"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+)
+
+// Higher-level tests for UI-facing APIs
+
+func TestSignozIntegrationLifeCycle(t *testing.T) {
+ require := require.New(t)
+ testbed := NewIntegrationsTestBed(t, nil)
+
+ installedResp := testbed.GetInstalledIntegrationsFromQS()
+ require.Equal(
+ len(installedResp.Integrations), 0,
+ "no integrations should be installed at the beginning",
+ )
+
+ availableResp := testbed.GetAvailableIntegrationsFromQS()
+ availableIntegrations := availableResp.Integrations
+ require.Greater(
+ len(availableIntegrations), 0,
+ "some integrations should come bundled with SigNoz",
+ )
+
+ // Should be able to install integration
+ require.False(availableIntegrations[0].IsInstalled)
+ testbed.RequestQSToInstallIntegration(
+ availableIntegrations[0].Id, map[string]interface{}{},
+ )
+
+ ii := testbed.GetIntegrationDetailsFromQS(availableIntegrations[0].Id)
+ require.Equal(ii.Id, availableIntegrations[0].Id)
+ require.NotNil(ii.Installation)
+
+ installedResp = testbed.GetInstalledIntegrationsFromQS()
+ installedIntegrations := installedResp.Integrations
+ require.Equal(len(installedIntegrations), 1)
+ require.Equal(installedIntegrations[0].Id, availableIntegrations[0].Id)
+
+ availableResp = testbed.GetAvailableIntegrationsFromQS()
+ availableIntegrations = availableResp.Integrations
+ require.Greater(len(availableIntegrations), 0)
+
+ // Integration connection status should get updated after signal data has been received.
+ testbed.mockLogQueryResponse([]model.SignozLog{})
+ connectionStatus := testbed.GetIntegrationConnectionStatus(ii.Id)
+ require.NotNil(connectionStatus)
+ require.Nil(connectionStatus.Logs)
+
+ testLog := makeTestSignozLog("test log body", map[string]interface{}{
+ "source": "nginx",
+ })
+ testbed.mockLogQueryResponse([]model.SignozLog{testLog})
+ connectionStatus = testbed.GetIntegrationConnectionStatus(ii.Id)
+ require.NotNil(connectionStatus)
+ require.NotNil(connectionStatus.Logs)
+ require.Equal(connectionStatus.Logs.LastReceivedTsMillis, int64(testLog.Timestamp/1000000))
+
+ // Should be able to uninstall integration
+ require.True(availableIntegrations[0].IsInstalled)
+ testbed.RequestQSToUninstallIntegration(
+ availableIntegrations[0].Id,
+ )
+
+ ii = testbed.GetIntegrationDetailsFromQS(availableIntegrations[0].Id)
+ require.Equal(ii.Id, availableIntegrations[0].Id)
+ require.Nil(ii.Installation)
+
+ installedResp = testbed.GetInstalledIntegrationsFromQS()
+ installedIntegrations = installedResp.Integrations
+ require.Equal(len(installedIntegrations), 0)
+
+ availableResp = testbed.GetAvailableIntegrationsFromQS()
+ availableIntegrations = availableResp.Integrations
+ require.Greater(len(availableIntegrations), 0)
+ require.False(availableIntegrations[0].IsInstalled)
+}
+
+func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
+ require := require.New(t)
+
+ testDB := utils.NewQueryServiceDBForTests(t)
+ integrationsTB := NewIntegrationsTestBed(t, testDB)
+ pipelinesTB := NewLogPipelinesTestBed(t, testDB)
+
+ availableIntegrationsResp := integrationsTB.GetAvailableIntegrationsFromQS()
+ availableIntegrations := availableIntegrationsResp.Integrations
+ require.Greater(
+ len(availableIntegrations), 0,
+ "some integrations should come bundled with SigNoz",
+ )
+
+ getPipelinesResp := pipelinesTB.GetPipelinesFromQS()
+ require.Equal(
+ 0, len(getPipelinesResp.Pipelines),
+ "There should be no pipelines at the start",
+ )
+
+ // Find an available integration that contains a log pipeline
+ var testAvailableIntegration *integrations.IntegrationsListItem
+ for _, ai := range availableIntegrations {
+ details := integrationsTB.GetIntegrationDetailsFromQS(ai.Id)
+ require.NotNil(details)
+ if len(details.Assets.Logs.Pipelines) > 0 {
+ testAvailableIntegration = &ai
+ break
+ }
+ }
+ require.NotNil(testAvailableIntegration)
+
+ // Installing an integration should add its pipelines to the pipelines list
+ require.False(testAvailableIntegration.IsInstalled)
+ integrationsTB.RequestQSToInstallIntegration(
+ testAvailableIntegration.Id, map[string]interface{}{},
+ )
+
+ testIntegration := integrationsTB.GetIntegrationDetailsFromQS(testAvailableIntegration.Id)
+ require.NotNil(testIntegration.Installation)
+ testIntegrationPipelines := testIntegration.Assets.Logs.Pipelines
+ require.Greater(
+ len(testIntegrationPipelines), 0,
+ "test integration expected to have a pipeline",
+ )
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(
+ len(testIntegrationPipelines), len(getPipelinesResp.Pipelines),
+ "Pipelines for installed integrations should appear in pipelines list",
+ )
+ lastPipeline := getPipelinesResp.Pipelines[len(getPipelinesResp.Pipelines)-1]
+ require.NotNil(integrations.IntegrationIdForPipeline(lastPipeline))
+ require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(lastPipeline))
+
+ pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
+ pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
+
+ // After saving a user-created pipeline, the pipelines response should include
+ // both user-created pipelines and pipelines for installed integrations.
+ postablePipelines := logparsingpipeline.PostablePipelines{
+ Pipelines: []logparsingpipeline.PostablePipeline{
+ {
+ OrderId: 1,
+ Name: "pipeline1",
+ Alias: "pipeline1",
+ Enabled: true,
+ Filter: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "method",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ Operator: "=",
+ Value: "GET",
+ },
+ },
+ },
+ Config: []logparsingpipeline.PipelineOperator{
+ {
+ OrderId: 1,
+ ID: "add",
+ Type: "add",
+ Field: "attributes.test",
+ Value: "val",
+ Enabled: true,
+ Name: "test add",
+ },
+ },
+ },
+ },
+ }
+
+ pipelinesTB.PostPipelinesToQS(postablePipelines)
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines))
+ pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
+ pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
+
+ // Reordering integration pipelines should be possible.
+ postable := postableFromPipelines(getPipelinesResp.Pipelines)
+ slices.Reverse(postable.Pipelines)
+ for i := range postable.Pipelines {
+ postable.Pipelines[i].OrderId = i + 1
+ }
+
+ pipelinesTB.PostPipelinesToQS(postable)
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ firstPipeline := getPipelinesResp.Pipelines[0]
+ require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline))
+ require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline))
+
+ pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
+ pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
+
+ // Enabling/disabling integration pipelines should be possible.
+ require.True(firstPipeline.Enabled)
+
+ postable.Pipelines[0].Enabled = false
+ pipelinesTB.PostPipelinesToQS(postable)
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines))
+
+ firstPipeline = getPipelinesResp.Pipelines[0]
+ require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline))
+ require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline))
+
+ require.False(firstPipeline.Enabled)
+
+ pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
+ pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
+
+ // It should not be possible to edit an integration's pipeline.
+ require.Greater(len(postable.Pipelines[0].Config), 0)
+ postable.Pipelines[0].Config = []logparsingpipeline.PipelineOperator{}
+ pipelinesTB.PostPipelinesToQS(postable)
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines))
+
+ firstPipeline = getPipelinesResp.Pipelines[0]
+ require.NotNil(integrations.IntegrationIdForPipeline(firstPipeline))
+ require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline))
+
+ require.False(firstPipeline.Enabled)
+ require.Greater(len(firstPipeline.Config), 0)
+
+ // It should not be possible to delete an integration's pipeline.
+ postable.Pipelines = []logparsingpipeline.PostablePipeline{postable.Pipelines[1]}
+ pipelinesTB.PostPipelinesToQS(postable)
+
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines))
+
+ lastPipeline = getPipelinesResp.Pipelines[1]
+ require.NotNil(integrations.IntegrationIdForPipeline(lastPipeline))
+ require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(lastPipeline))
+
+ // Uninstalling an integration should remove its pipelines
+ // from the pipelines list in the UI.
+ integrationsTB.RequestQSToUninstallIntegration(
+ testIntegration.Id,
+ )
+ getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
+ require.Equal(
+ 1, len(getPipelinesResp.Pipelines),
+ "Pipelines for uninstalled integrations should get removed from pipelines list",
+ )
+ pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
+ pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
+}
+
+func TestDashboardsForInstalledIntegrationDashboards(t *testing.T) {
+ require := require.New(t)
+
+ testDB := utils.NewQueryServiceDBForTests(t)
+ integrationsTB := NewIntegrationsTestBed(t, testDB)
+
+ availableIntegrationsResp := integrationsTB.GetAvailableIntegrationsFromQS()
+ availableIntegrations := availableIntegrationsResp.Integrations
+ require.Greater(
+ len(availableIntegrations), 0,
+ "some integrations should come bundled with SigNoz",
+ )
+
+ dashboards := integrationsTB.GetDashboardsFromQS()
+ require.Equal(
+ 0, len(dashboards),
+ "There should be no dashboards at the start",
+ )
+
+ // Find an available integration that contains dashboards
+ var testAvailableIntegration *integrations.IntegrationsListItem
+ for _, ai := range availableIntegrations {
+ details := integrationsTB.GetIntegrationDetailsFromQS(ai.Id)
+ require.NotNil(details)
+ if len(details.Assets.Dashboards) > 0 {
+ testAvailableIntegration = &ai
+ break
+ }
+ }
+ require.NotNil(testAvailableIntegration)
+
+ // Installing an integration should make its dashboards appear in the dashboard list
+ require.False(testAvailableIntegration.IsInstalled)
+ integrationsTB.RequestQSToInstallIntegration(
+ testAvailableIntegration.Id, map[string]interface{}{},
+ )
+
+ testIntegration := integrationsTB.GetIntegrationDetailsFromQS(testAvailableIntegration.Id)
+ require.NotNil(testIntegration.Installation)
+ testIntegrationDashboards := testIntegration.Assets.Dashboards
+ require.Greater(
+ len(testIntegrationDashboards), 0,
+ "test integration is expected to have dashboards",
+ )
+
+ dashboards = integrationsTB.GetDashboardsFromQS()
+ require.Equal(
+ len(testIntegrationDashboards), len(dashboards),
+ "dashboards for installed integrations should appear in dashboards list",
+ )
+
+ // Should be able to get an installed integration's dashboard by id
+ dd := integrationsTB.GetDashboardByIdFromQS(dashboards[0].Uuid)
+ require.Equal(*dd, dashboards[0])
+
+ // Integration dashboards should no longer appear in the dashboard list after uninstallation
+ integrationsTB.RequestQSToUninstallIntegration(
+ testIntegration.Id,
+ )
+ dashboards = integrationsTB.GetDashboardsFromQS()
+ require.Equal(
+ 0, len(dashboards),
+ "dashboards for uninstalled integrations should not appear in dashboards list",
+ )
+}
+
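+// IntegrationsTestBed provides helpers for exercising the integrations and
+// dashboards HTTP APIs of the query service against a mocked clickhouse connection.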
+type IntegrationsTestBed struct {
+ t *testing.T
+ testUser *model.User
+ qsHttpHandler http.Handler
+ mockClickhouse mockhouse.ClickConnMockCommon
+}
+
+func (tb *IntegrationsTestBed) GetAvailableIntegrationsFromQS() *integrations.IntegrationsListResponse {
+ result := tb.RequestQS("/api/v1/integrations", nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+ var integrationsResp integrations.IntegrationsListResponse
+ err = json.Unmarshal(dataJson, &integrationsResp)
+ if err != nil {
+ tb.t.Fatalf("could not unmarshal apiResponse.Data json into PipelinesResponse")
+ }
+
+ return &integrationsResp
+}
+
+func (tb *IntegrationsTestBed) GetInstalledIntegrationsFromQS() *integrations.IntegrationsListResponse {
+ result := tb.RequestQS("/api/v1/integrations?is_installed=true", nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+ var integrationsResp integrations.IntegrationsListResponse
+ err = json.Unmarshal(dataJson, &integrationsResp)
+ if err != nil {
+ tb.t.Fatalf(" could not unmarshal apiResponse.Data json into PipelinesResponse")
+ }
+
+ return &integrationsResp
+}
+
+func (tb *IntegrationsTestBed) GetIntegrationDetailsFromQS(
+ integrationId string,
+) *integrations.Integration {
+ result := tb.RequestQS(fmt.Sprintf(
+ "/api/v1/integrations/%s", integrationId,
+ ), nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+ var integrationResp integrations.Integration
+ err = json.Unmarshal(dataJson, &integrationResp)
+ if err != nil {
+ tb.t.Fatalf("could not unmarshal apiResponse.Data json")
+ }
+
+ return &integrationResp
+}
+
+func (tb *IntegrationsTestBed) GetIntegrationConnectionStatus(
+ integrationId string,
+) *integrations.IntegrationConnectionStatus {
+ result := tb.RequestQS(fmt.Sprintf(
+ "/api/v1/integrations/%s/connection_status", integrationId,
+ ), nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+ var connectionStatus integrations.IntegrationConnectionStatus
+ err = json.Unmarshal(dataJson, &connectionStatus)
+ if err != nil {
+ tb.t.Fatalf("could not unmarshal apiResponse.Data json")
+ }
+
+ return &connectionStatus
+}
+
+func (tb *IntegrationsTestBed) RequestQSToInstallIntegration(
+ integrationId string, config map[string]interface{},
+) {
+ request := integrations.InstallIntegrationRequest{
+ IntegrationId: integrationId,
+ Config: config,
+ }
+ tb.RequestQS("/api/v1/integrations/install", request)
+}
+
+func (tb *IntegrationsTestBed) RequestQSToUninstallIntegration(
+ integrationId string,
+) {
+ request := integrations.UninstallIntegrationRequest{
+ IntegrationId: integrationId,
+ }
+ tb.RequestQS("/api/v1/integrations/uninstall", request)
+}
+
+func (tb *IntegrationsTestBed) GetDashboardsFromQS() []dashboards.Dashboard {
+ result := tb.RequestQS("/api/v1/dashboards", nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+
+ dashboards := []dashboards.Dashboard{}
+ err = json.Unmarshal(dataJson, &dashboards)
+ if err != nil {
+ tb.t.Fatalf(" could not unmarshal apiResponse.Data json into dashboards")
+ }
+
+ return dashboards
+}
+
+func (tb *IntegrationsTestBed) GetDashboardByIdFromQS(dashboardUuid string) *dashboards.Dashboard {
+ result := tb.RequestQS(fmt.Sprintf("/api/v1/dashboards/%s", dashboardUuid), nil)
+
+ dataJson, err := json.Marshal(result.Data)
+ if err != nil {
+ tb.t.Fatalf("could not marshal apiResponse.Data: %v", err)
+ }
+
+ dashboard := dashboards.Dashboard{}
+ err = json.Unmarshal(dataJson, &dashboard)
+ if err != nil {
+ tb.t.Fatalf(" could not unmarshal apiResponse.Data json into dashboards")
+ }
+
+ return &dashboard
+}
+
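+// RequestQS sends an authenticated request to the query service router and
+// returns the parsed ApiResponse, failing the test on a non-200 status.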
+func (tb *IntegrationsTestBed) RequestQS(
+ path string,
+ postData interface{},
+) *app.ApiResponse {
+ req, err := NewAuthenticatedTestRequest(
+ tb.testUser, path, postData,
+ )
+ if err != nil {
+ tb.t.Fatalf("couldn't create authenticated test request: %v", err)
+ }
+
+ respWriter := httptest.NewRecorder()
+ tb.qsHttpHandler.ServeHTTP(respWriter, req)
+ response := respWriter.Result()
+ responseBody, err := io.ReadAll(response.Body)
+ if err != nil {
+ tb.t.Fatalf("couldn't read response body received from QS: %v", err)
+ }
+
+ if response.StatusCode != 200 {
+ tb.t.Fatalf(
+ "unexpected response status from query service for path %s. status: %d, body: %v\n%v",
+ path, response.StatusCode, string(responseBody), string(debug.Stack()),
+ )
+ }
+
+ var result app.ApiResponse
+ err = json.Unmarshal(responseBody, &result)
+ if err != nil {
+ tb.t.Fatalf(
+ "Could not unmarshal QS response into an ApiResponse.\nResponse body: %s",
+ string(responseBody),
+ )
+ }
+
+ return &result
+}
+
+func (tb *IntegrationsTestBed) mockLogQueryResponse(logsInResponse []model.SignozLog) {
+ addLogsQueryExpectation(tb.mockClickhouse, logsInResponse)
+}
+
+// testDB can be injected for sharing a DB across multiple integration testbeds.
+func NewIntegrationsTestBed(t *testing.T, testDB *sqlx.DB) *IntegrationsTestBed {
+ if testDB == nil {
+ testDB = utils.NewQueryServiceDBForTests(t)
+ }
+
+ controller, err := integrations.NewController(testDB)
+ if err != nil {
+ t.Fatalf("could not create integrations controller: %v", err)
+ }
+
+ fm := featureManager.StartManager()
+ reader, mockClickhouse := NewMockClickhouseReader(t, testDB, fm)
+
+ apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
+ Reader: reader,
+ AppDao: dao.DB(),
+ IntegrationsController: controller,
+ FeatureFlags: fm,
+ })
+ if err != nil {
+ t.Fatalf("could not create a new ApiHandler: %v", err)
+ }
+
+ router := app.NewRouter()
+ am := app.NewAuthMiddleware(auth.GetUserFromRequest)
+ apiHandler.RegisterRoutes(router, am)
+ apiHandler.RegisterIntegrationRoutes(router, am)
+
+ user, apiErr := createTestUser()
+ if apiErr != nil {
+ t.Fatalf("could not create a test user: %v", apiErr)
+ }
+
+ return &IntegrationsTestBed{
+ t: t,
+ testUser: user,
+ qsHttpHandler: router,
+ mockClickhouse: mockClickhouse,
+ }
+}
+
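+// postableFromPipelines converts pipelines returned by the API back into
+// postable form so tests can re-submit them (e.g. to reorder or disable them).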
+func postableFromPipelines(pipelines []logparsingpipeline.Pipeline) logparsingpipeline.PostablePipelines {
+ result := logparsingpipeline.PostablePipelines{}
+
+ for _, p := range pipelines {
+ postable := logparsingpipeline.PostablePipeline{
+ Id: p.Id,
+ OrderId: p.OrderId,
+ Name: p.Name,
+ Alias: p.Alias,
+ Enabled: p.Enabled,
+ Config: p.Config,
+ }
+
+ if p.Description != nil {
+ postable.Description = *p.Description
+ }
+
+ if p.Filter != nil {
+ postable.Filter = p.Filter
+ }
+
+ result.Pipelines = append(result.Pipelines, postable)
+ }
+
+ return result
+}
diff --git a/pkg/query-service/tests/integration/test_utils.go b/pkg/query-service/tests/integration/test_utils.go
new file mode 100644
index 0000000000..ac6e1db7c5
--- /dev/null
+++ b/pkg/query-service/tests/integration/test_utils.go
@@ -0,0 +1,200 @@
+package tests
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+ "time"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/google/uuid"
+ "github.com/jmoiron/sqlx"
+ "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
+ mockhouse "github.com/srikanthccv/ClickHouse-go-mock"
+ "github.com/stretchr/testify/require"
+ "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
+ "go.signoz.io/signoz/pkg/query-service/auth"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ "golang.org/x/exp/maps"
+)
+
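+// NewMockClickhouseReader returns a ClickHouseReader backed by a mocked
+// clickhouse connection so tests can stub query results.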
+func NewMockClickhouseReader(
+ t *testing.T, testDB *sqlx.DB, featureFlags interfaces.FeatureLookup,
+) (
+ *clickhouseReader.ClickHouseReader, mockhouse.ClickConnMockCommon,
+) {
+ require.NotNil(t, testDB)
+
+ mockDB, err := mockhouse.NewClickHouseWithQueryMatcher(nil, sqlmock.QueryMatcherRegexp)
+
+ require.Nil(t, err, "could not init mock clickhouse")
+ reader := clickhouseReader.NewReaderFromClickhouseConnection(
+ mockDB,
+ clickhouseReader.NewOptions("", 10, 10, 10*time.Second, ""),
+ testDB,
+ "",
+ featureFlags,
+ "",
+ )
+
+ return reader, mockDB
+}
+
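+// addLogsQueryExpectation stubs the next logs query on the mocked clickhouse
+// connection to return the given logs as rows of the logs table.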
+func addLogsQueryExpectation(
+ mockClickhouse mockhouse.ClickConnMockCommon,
+ logsToReturn []model.SignozLog,
+) {
+ cols := []mockhouse.ColumnType{}
+ cols = append(cols, mockhouse.ColumnType{Type: "UInt64", Name: "timestamp"})
+ cols = append(cols, mockhouse.ColumnType{Type: "UInt64", Name: "observed_timestamp"})
+ cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "id"})
+ cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "trace_id"})
+ cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "span_id"})
+ cols = append(cols, mockhouse.ColumnType{Type: "UInt32", Name: "trace_flags"})
+ cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "severity_text"})
+ cols = append(cols, mockhouse.ColumnType{Type: "UInt8", Name: "severity_number"})
+ cols = append(cols, mockhouse.ColumnType{Type: "String", Name: "body"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "resources_string_key"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "resources_string_value"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_string_key"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_string_value"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_int64_key"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(Int64)", Name: "attributes_int64_value"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_float64_key"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(Float64)", Name: "attributes_float64_value"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(String)", Name: "attributes_bool_key"})
+ cols = append(cols, mockhouse.ColumnType{Type: "Array(Bool)", Name: "attributes_bool_value"})
+
+ values := [][]any{}
+ for _, l := range logsToReturn {
+ rowValues := []any{}
+ rowValues = append(rowValues, l.Timestamp)
+ rowValues = append(rowValues, l.Timestamp)
+ rowValues = append(rowValues, l.ID)
+ rowValues = append(rowValues, l.TraceID)
+ rowValues = append(rowValues, l.SpanID)
+ rowValues = append(rowValues, l.TraceFlags)
+ rowValues = append(rowValues, l.SeverityText)
+ rowValues = append(rowValues, l.SeverityNumber)
+ rowValues = append(rowValues, l.Body)
+ rowValues = append(rowValues, maps.Keys(l.Resources_string))
+ rowValues = append(rowValues, maps.Values(l.Resources_string))
+ rowValues = append(rowValues, maps.Keys(l.Attributes_string))
+ rowValues = append(rowValues, maps.Values(l.Attributes_string))
+ rowValues = append(rowValues, maps.Keys(l.Attributes_int64))
+ rowValues = append(rowValues, maps.Values(l.Attributes_int64))
+ rowValues = append(rowValues, maps.Keys(l.Attributes_float64))
+ rowValues = append(rowValues, maps.Values(l.Attributes_float64))
+ rowValues = append(rowValues, maps.Keys(l.Attributes_bool))
+ rowValues = append(rowValues, maps.Values(l.Attributes_bool))
+ values = append(values, rowValues)
+ }
+
+ rows := mockhouse.NewRows(cols, values)
+ mockClickhouse.ExpectQuery(
+ "SELECT .*? from signoz_logs.distributed_logs.*",
+ ).WillReturnRows(rows)
+}
+
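+// makeTestSignozLog builds a SignozLog with the given body and attributes,
+// filling in timestamp, severity, span id and trace id with test defaults.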
+func makeTestSignozLog(
+ body string,
+ attributes map[string]interface{},
+) model.SignozLog {
+
+ testLog := model.SignozLog{
+ Timestamp: uint64(time.Now().UnixNano()),
+ Body: body,
+ Attributes_bool: map[string]bool{},
+ Attributes_string: map[string]string{},
+ Attributes_int64: map[string]int64{},
+ Attributes_float64: map[string]float64{},
+ Resources_string: map[string]string{},
+ SeverityText: entry.Info.String(),
+ SeverityNumber: uint8(entry.Info),
+ SpanID: uuid.New().String(),
+ TraceID: uuid.New().String(),
+ }
+
+ for k, v := range attributes {
+ switch v := v.(type) {
+ case bool:
+ testLog.Attributes_bool[k] = v
+ case string:
+ testLog.Attributes_string[k] = v
+ case int:
+ testLog.Attributes_int64[k] = int64(v)
+ case float64:
+ testLog.Attributes_float64[k] = v
+ default:
+ panic(fmt.Sprintf("found attribute value of unsupported type %T in test log", v))
+ }
+ }
+
+ return testLog
+}
+
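+// createTestUser creates a test org and an admin user that can be used for
+// authenticated requests against the query service.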
+func createTestUser() (*model.User, *model.ApiError) {
+ // Create a test user for auth
+ ctx := context.Background()
+ org, apiErr := dao.DB().CreateOrg(ctx, &model.Organization{
+ Name: "test",
+ })
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
+ if apiErr != nil {
+ return nil, apiErr
+ }
+
+ auth.InitAuthCache(ctx)
+
+ userId := uuid.NewString()
+ return dao.DB().CreateUser(
+ ctx,
+ &model.User{
+ Id: userId,
+ Name: "test",
+ Email: userId[:8] + "test@test.com",
+ Password: "test",
+ OrgId: org.Id,
+ GroupId: group.Id,
+ },
+ true,
+ )
+}
+
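+// NewAuthenticatedTestRequest builds an httptest request carrying a JWT for
+// the given user: a POST with a JSON body if postData is set, a GET otherwise.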
+func NewAuthenticatedTestRequest(
+ user *model.User,
+ path string,
+ postData interface{},
+) (*http.Request, error) {
+ userJwt, err := auth.GenerateJWTForUser(user)
+ if err != nil {
+ return nil, err
+ }
+
+ var req *http.Request
+
+ if postData != nil {
+ var body bytes.Buffer
+ err = json.NewEncoder(&body).Encode(postData)
+ if err != nil {
+ return nil, err
+ }
+ req = httptest.NewRequest(http.MethodPost, path, &body)
+ } else {
+ req = httptest.NewRequest(http.MethodGet, path, nil)
+ }
+
+ req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt)
+ return req, nil
+}
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index dcbd059364..679fb7f401 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -138,7 +138,7 @@ services:
# - ./data/clickhouse-3/:/var/lib/clickhouse/
alertmanager:
- image: signoz/alertmanager:0.23.4
+ image: signoz/alertmanager:0.23.5
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
@@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.14}
+ image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.15}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
- image: signoz/signoz-otel-collector:0.88.14
+ image: signoz/signoz-otel-collector:0.88.15
container_name: signoz-otel-collector
command:
[
diff --git a/pkg/query-service/utils/labels/labels.go b/pkg/query-service/utils/labels/labels.go
index ff4237a8f9..2e0041aafc 100644
--- a/pkg/query-service/utils/labels/labels.go
+++ b/pkg/query-service/utils/labels/labels.go
@@ -16,8 +16,6 @@ const sep = '\xff'
const (
MetricNameLabel = "__name__"
AlertNameLabel = "alertname"
- BucketLabel = "le"
- InstanceName = "instance"
// AlertStateLabel is the label name indicating the state of an alert.
AlertStateLabel = "alertstate"
@@ -25,9 +23,11 @@ const (
AlertRuleIdLabel = "ruleId"
RuleSourceLabel = "ruleSource"
- RuleThresholdLabel = "threshold"
- AlertAdditionalInfoLabel = "additionalInfo"
- AlertSummaryLabel = "summary"
+ RuleThresholdLabel = "threshold"
+ AlertSummaryLabel = "summary"
+ AlertDescriptionLabel = "description"
+
+ AlertMissingData = "Missing data"
)
// Label is a key/value pair of strings.
diff --git a/pkg/query-service/utils/slices.go b/pkg/query-service/utils/slices.go
new file mode 100644
index 0000000000..c196529a6a
--- /dev/null
+++ b/pkg/query-service/utils/slices.go
@@ -0,0 +1,29 @@
+package utils
+
+// MapSlice applies mapper to every element of slice ("map" as in map-reduce).
+func MapSlice[Slice ~[]Elem, Elem any, Output any](
+ slice Slice, mapper func(Elem) Output,
+) []Output {
+ result := []Output{}
+
+ for _, item := range slice {
+ mapped := mapper(item)
+ result = append(result, mapped)
+ }
+
+ return result
+}
+
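+// FilterSlice returns a new slice containing only the elements of slice for
+// which filterFn returns true.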
+func FilterSlice[Slice ~[]Elem, Elem any](
+ slice Slice, filterFn func(Elem) bool,
+) Slice {
+ result := Slice{}
+
+ for _, item := range slice {
+ if filterFn(item) {
+ result = append(result, item)
+ }
+ }
+
+ return result
+}
diff --git a/pkg/query-service/utils/testutils.go b/pkg/query-service/utils/testutils.go
new file mode 100644
index 0000000000..d8989d9323
--- /dev/null
+++ b/pkg/query-service/utils/testutils.go
@@ -0,0 +1,31 @@
+package utils
+
+import (
+ "os"
+ "testing"
+
+ "github.com/jmoiron/sqlx"
+ "go.signoz.io/signoz/pkg/query-service/app/dashboards"
+ "go.signoz.io/signoz/pkg/query-service/dao"
+)
+
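+// NewQueryServiceDBForTests creates a temp sqlite DB, points the dao and
+// dashboards packages at it, and returns the sqlx handle for use in tests.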
+func NewQueryServiceDBForTests(t *testing.T) *sqlx.DB {
+ testDBFile, err := os.CreateTemp("", "test-signoz-db-*")
+ if err != nil {
+ t.Fatalf("could not create temp file for test db: %v", err)
+ }
+ testDBFilePath := testDBFile.Name()
+ t.Cleanup(func() { os.Remove(testDBFilePath) })
+ testDBFile.Close()
+
+ testDB, err := sqlx.Open("sqlite3", testDBFilePath)
+ if err != nil {
+ t.Fatalf("could not open test db sqlite file: %v", err)
+ }
+
+ // TODO(Raj): This should not require passing in the DB file path
+ dao.InitDao("sqlite", testDBFilePath)
+ dashboards.InitDB(testDBFilePath)
+
+ return testDB
+}
diff --git a/pkg/query-service/utils/time.go b/pkg/query-service/utils/time.go
index ea644d7600..69b49e42ac 100644
--- a/pkg/query-service/utils/time.go
+++ b/pkg/query-service/utils/time.go
@@ -1,6 +1,7 @@
package utils
import (
+ "fmt"
"time"
"go.uber.org/zap"
@@ -8,7 +9,12 @@ import (
func Elapsed(funcName string, args ...interface{}) func() {
start := time.Now()
+ argsStr := ""
+ for _, v := range args {
+ argsStr += fmt.Sprintf("%v, ", v)
+ }
+ // Trim the trailing ", " only when args were passed, to avoid slicing out of range.
+ if len(argsStr) >= 2 {
+ argsStr = argsStr[:len(argsStr)-2]
+ }
return func() {
- zap.S().Infof("func %s took %v with args %v", funcName, time.Since(start), args)
+ zap.S().Infof("func %s took %v with args %v", funcName, time.Since(start), argsStr)
}
}