} onClick={handleNext}>
Get Started
diff --git a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
index 2a7e6621ea..ca168fbf88 100644
--- a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx
@@ -30,6 +30,9 @@ export default function ConnectionStatus(): JSX.Element {
const {
serviceName,
selectedDataSource,
+ selectedEnvironment,
+ activeStep,
+ selectedMethod,
selectedFramework,
} = useOnboardingContext();
const { queries } = useResourceAttribute();
@@ -40,7 +43,7 @@ export default function ConnectionStatus(): JSX.Element {
const { trackEvent } = useAnalytics();
- const [retryCount, setRetryCount] = useState(20); // Retry for 5 mins
+ const [retryCount, setRetryCount] = useState(20); // Retry for 3 mins 20s
const [loading, setLoading] = useState(true);
const [isReceivingData, setIsReceivingData] = useState(false);
const dispatch = useDispatch();
@@ -122,7 +125,12 @@ export default function ConnectionStatus(): JSX.Element {
if (data || isError) {
setRetryCount(retryCount - 1);
if (retryCount < 0) {
- trackEvent('❌ Onboarding: APM: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
serviceName,
status: 'Failed',
});
@@ -136,7 +144,12 @@ export default function ConnectionStatus(): JSX.Element {
setLoading(false);
setIsReceivingData(true);
- trackEvent('✅ Onboarding: APM: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
serviceName,
status: 'Successful',
});
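
Both branches above now emit a single event name and differ only in the payload, so downstream dashboards can segment on `status` and `module` instead of parsing emoji-prefixed names like '❌ Onboarding: APM: Connection Status'. A minimal sketch of the consolidated call (editor's illustration; the payload type is hypothetical — the PR inlines the object literals):

// Hypothetical helper summarizing the pattern used in both branches above.
interface OnboardingStatusPayload {
	dataSource?: string;
	framework?: string | null;
	environment?: string;
	selectedMethod?: string;
	module?: string;
	serviceName?: string;
	status: 'Successful' | 'Failed';
}

function reportConnectionStatus(
	trackEvent: (name: string, payload: OnboardingStatusPayload) => void,
	payload: OnboardingStatusPayload,
): void {
	trackEvent('Onboarding V2: Connection Status', payload);
}
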
diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
index 3090bf6564..a52113c572 100644
--- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx
@@ -11,7 +11,6 @@ import {
getSupportedFrameworks,
hasFrameworks,
} from 'container/OnboardingContainer/utils/dataSourceUtils';
-import useAnalytics from 'hooks/analytics/useAnalytics';
import { useEffect, useState } from 'react';
import { popupContainer } from 'utils/selectPopupContainer';
@@ -25,10 +24,7 @@ export interface DataSourceType {
export default function DataSource(): JSX.Element {
const [form] = Form.useForm();
- const { trackEvent } = useAnalytics();
-
const {
- activeStep,
serviceName,
selectedModule,
selectedDataSource,
@@ -56,39 +52,6 @@ export default function DataSource(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
- useEffect(() => {
- // on language select
- trackEvent('Onboarding: Data Source Selected', {
- dataSource: selectedDataSource,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedDataSource]);
-
- useEffect(() => {
- // on framework select
- trackEvent('Onboarding: Framework Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedFramework]);
-
useEffect(() => {
if (selectedModule && selectedDataSource) {
const frameworks = hasFrameworks({
@@ -125,8 +88,9 @@ export default function DataSource(): JSX.Element {
)}
key={dataSource.name}
onClick={(): void => {
- updateSelectedFramework('');
+ updateSelectedFramework(null);
updateSelectedDataSource(dataSource);
+ form.setFieldsValue({ selectFramework: null });
}}
>
@@ -152,6 +116,7 @@ export default function DataSource(): JSX.Element {
{
- // on language select
- trackEvent('Onboarding: Environment Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedEnvironment]);
-
return (
<>
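
The effects removed above fired an analytics event on every intermediate selection, so a user toggling between data sources or frameworks emitted one event per toggle. The replacement tracks once per step, on the Next click, in ModuleStepsContainer below. A sketch of that "track on commit" pattern (editor's illustration; `trackEvent` stands in for the useAnalytics hook):

declare function trackEvent(name: string, payload: Record<string, unknown>): void;

// Called from the Next-button handler rather than from an effect keyed on the
// selection, so only the user's final choice is recorded.
function onNextFromDataSourceStep(selection: {
	dataSource?: string;
	framework?: string | null;
	module?: string;
}): void {
	trackEvent('Onboarding V2: Data Source Selected', selection);
}
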
diff --git a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
index 954dadd677..994234eca8 100644
--- a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx
@@ -26,7 +26,11 @@ const enum ApplicationLogsType {
export default function LogsConnectionStatus(): JSX.Element {
const [loading, setLoading] = useState(true);
- const { selectedDataSource } = useOnboardingContext();
+ const {
+ selectedDataSource,
+ activeStep,
+ selectedEnvironment,
+ } = useOnboardingContext();
const { trackEvent } = useAnalytics();
const [isReceivingData, setIsReceivingData] = useState(false);
const [pollingInterval, setPollingInterval] = useState(15000); // initial Polling interval of 15 secs , Set to false after 5 mins
@@ -94,7 +98,10 @@ export default function LogsConnectionStatus(): JSX.Element {
setRetryCount(retryCount - 1);
if (retryCount < 0) {
- trackEvent('❌ Onboarding: Logs Management: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
status: 'Failed',
});
@@ -127,7 +134,10 @@ export default function LogsConnectionStatus(): JSX.Element {
setRetryCount(-1);
setPollingInterval(false);
- trackEvent('✅ Onboarding: Logs Management: Connection Status', {
+ trackEvent('Onboarding V2: Connection Status', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
status: 'Successful',
});
diff --git a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
index beec4d1065..d39e83ed53 100644
--- a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx
@@ -8,7 +8,6 @@ import {
useOnboardingContext,
} from 'container/OnboardingContainer/context/OnboardingContext';
import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer';
-import useAnalytics from 'hooks/analytics/useAnalytics';
import { useEffect, useState } from 'react';
export interface IngestionInfoProps {
@@ -28,8 +27,6 @@ export default function MarkdownStep(): JSX.Element {
selectedMethod,
} = useOnboardingContext();
- const { trackEvent } = useAnalytics();
-
const [markdownContent, setMarkdownContent] = useState('');
const { step } = activeStep;
@@ -86,26 +83,6 @@ export default function MarkdownStep(): JSX.Element {
REGION: ingestionData?.REGION || 'region',
};
- useEffect(() => {
- trackEvent(
- `Onboarding: ${activeStep?.module?.id}: ${selectedDataSource?.name}: ${activeStep?.step?.title}`,
- {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- },
- );
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [step]);
-
return (
diff --git a/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx b/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
index 79cba00260..adde17d7be 100644
--- a/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
+++ b/frontend/src/container/OnboardingContainer/Steps/SelectMethod/SelectMethod.tsx
@@ -3,45 +3,17 @@ import {
OnboardingMethods,
useOnboardingContext,
} from 'container/OnboardingContainer/context/OnboardingContext';
-import useAnalytics from 'hooks/analytics/useAnalytics';
-import { useEffect, useState } from 'react';
+import { useState } from 'react';
export default function SelectMethod(): JSX.Element {
- const {
- activeStep,
- selectedDataSource,
- selectedFramework,
- selectedEnvironment,
- selectedMethod,
- updateSelectedMethod,
- } = useOnboardingContext();
+ const { selectedMethod, updateSelectedMethod } = useOnboardingContext();
const [value, setValue] = useState(selectedMethod);
- const { trackEvent } = useAnalytics();
-
const onChange = (e: RadioChangeEvent): void => {
setValue(e.target.value);
updateSelectedMethod(e.target.value);
};
- useEffect(() => {
- // on language select
- trackEvent('Onboarding: Environment Selected', {
- dataSource: selectedDataSource,
- framework: selectedFramework,
- environment: selectedEnvironment,
- module: {
- name: activeStep?.module?.title,
- id: activeStep?.module?.id,
- },
- step: {
- name: activeStep?.step?.title,
- id: activeStep?.step?.id,
- },
- });
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [selectedMethod]);
-
return (
diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
index 3592d58798..02972209dd 100644
--- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
+++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.styles.scss
@@ -39,6 +39,36 @@
.steps-container {
width: 20%;
height: 100%;
+
+ .steps-container-header {
+ display: flex;
+ align-items: center;
+ padding: 16px 0;
+ margin-bottom: 24px;
+
+ .brand-logo {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ gap: 16px;
+ cursor: pointer;
+
+ img {
+ height: 24px;
+ width: 24px;
+ }
+
+ .brand-logo-name {
+ font-family: 'Work Sans', sans-serif;
+ font-size: 18px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 18px;
+
+ color: #fff;
+ }
+ }
+ }
}
.selected-step-content {
@@ -153,3 +183,18 @@
.error-container {
margin: 8px 0;
}
+
+.lightMode {
+ .steps-container {
+ width: 20%;
+ height: 100%;
+
+ .steps-container-header {
+ .brand-logo {
+ .brand-logo-name {
+ color: black;
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
index b1712ef263..7362e9a47d 100644
--- a/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
+++ b/frontend/src/container/OnboardingContainer/common/ModuleStepsContainer/ModuleStepsContainer.tsx
@@ -1,3 +1,6 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+/* eslint-disable react/jsx-no-comment-textnodes */
/* eslint-disable sonarjs/prefer-single-boolean-return */
import './ModuleStepsContainer.styles.scss';
@@ -65,6 +68,7 @@ export default function ModuleStepsContainer({
selectedDataSource,
selectedEnvironment,
selectedFramework,
+ selectedMethod,
updateActiveStep,
updateErrorDetails,
resetProgress,
@@ -75,6 +79,7 @@ export default function ModuleStepsContainer({
const [metaData, setMetaData] = useState(defaultMetaData);
const lastStepIndex = selectedModuleSteps.length - 1;
+ // eslint-disable-next-line sonarjs/cognitive-complexity
const isValidForm = (): boolean => {
const { id: selectedModuleID } = selectedModule;
const dataSourceStep = stepsMap.dataSource;
@@ -103,7 +108,10 @@ export default function ModuleStepsContainer({
dataSource: selectedDataSource,
});
- if (doesHaveFrameworks && selectedFramework === '') {
+ if (
+ doesHaveFrameworks &&
+ (selectedFramework === null || selectedFramework === '')
+ ) {
return false;
}
@@ -128,14 +136,19 @@ export default function ModuleStepsContainer({
};
const redirectToModules = (): void => {
- trackEvent('Onboarding Complete', {
+ trackEvent('Onboarding V2 Complete', {
module: selectedModule.id,
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ serviceName,
});
if (selectedModule.id === ModulesMap.APM) {
history.push(ROUTES.APPLICATION);
} else if (selectedModule.id === ModulesMap.LogsManagement) {
- history.push(ROUTES.LOGS);
+ history.push(ROUTES.LOGS_EXPLORER);
} else if (selectedModule.id === ModulesMap.InfrastructureMonitoring) {
history.push(ROUTES.APPLICATION);
}
@@ -159,6 +172,101 @@ export default function ModuleStepsContainer({
module: selectedModule,
step: selectedModuleSteps[current + 1],
});
+			// track analytics events when the user clicks Next on each step
+ switch (selectedModuleSteps[current].id) {
+ case stepsMap.dataSource:
+ trackEvent('Onboarding V2: Data Source Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.environmentDetails:
+ trackEvent('Onboarding V2: Environment Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.selectMethod:
+ trackEvent('Onboarding V2: Method Selected', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+
+ case stepsMap.setupOtelCollector:
+ trackEvent('Onboarding V2: Setup Otel Collector', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.instrumentApplication:
+ trackEvent('Onboarding V2: Instrument Application', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.cloneRepository:
+ trackEvent('Onboarding V2: Clone Repository', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.runApplication:
+ trackEvent('Onboarding V2: Run Application', {
+ dataSource: selectedDataSource?.id,
+ framework: selectedFramework,
+ environment: selectedEnvironment,
+ selectedMethod,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.addHttpDrain:
+ trackEvent('Onboarding V2: Add HTTP Drain', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.startContainer:
+ trackEvent('Onboarding V2: Start Container', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.setupLogDrains:
+ trackEvent('Onboarding V2: Setup Log Drains', {
+ dataSource: selectedDataSource?.id,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.configureReceiver:
+ trackEvent('Onboarding V2: Configure Receiver', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ case stepsMap.configureAws:
+ trackEvent('Onboarding V2: Configure AWS', {
+ dataSource: selectedDataSource?.id,
+ environment: selectedEnvironment,
+ module: activeStep?.module?.id,
+ });
+ break;
+ default:
+ break;
+ }
}
// set meta data
@@ -174,7 +282,7 @@ export default function ModuleStepsContainer({
},
{
name: 'Framework',
- value: selectedFramework,
+ value: selectedFramework || '',
},
{
name: 'Environment',
@@ -197,9 +305,21 @@ export default function ModuleStepsContainer({
}
};
+ const handleLogoClick = (): void => {
+ history.push('/');
+ };
+
return (
+				<div className="steps-container-header">
+					<div className="brand-logo" onClick={handleLogoClick}>
+						<img src="/Logos/signoz-brand-logo.svg" alt="SigNoz" />
+						<div className="brand-logo-name">SigNoz</div>
+					</div>
+				</div>
diff --git a/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx b/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
--- a/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
+++ b/frontend/src/container/OnboardingContainer/context/OnboardingContext.tsx
-	const [selectedFramework, setSelectedFramework] = useState<string>('');
+	const [selectedFramework, setSelectedFramework] = useState<string | null>(
+		null,
+	);
 	const [selectedMethod, setSelectedMethod] = useState<string>(
 		OnboardingMethods.QUICK_START,
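
The step-tracking switch in ModuleStepsContainer above pairs each step id with an event name and a per-step subset of properties. An equivalent table-driven sketch (editor's alternative, not the PR's code; unlike the switch, it sends the full property set for every step):

// Assumes the same `stepsMap`, `trackEvent`, and onboarding-context values the
// component already has in scope.
const STEP_EVENTS: Record<string, string> = {
	[stepsMap.dataSource]: 'Onboarding V2: Data Source Selected',
	[stepsMap.environmentDetails]: 'Onboarding V2: Environment Selected',
	[stepsMap.selectMethod]: 'Onboarding V2: Method Selected',
	[stepsMap.setupOtelCollector]: 'Onboarding V2: Setup Otel Collector',
	[stepsMap.instrumentApplication]: 'Onboarding V2: Instrument Application',
	[stepsMap.runApplication]: 'Onboarding V2: Run Application',
};

const eventName = STEP_EVENTS[selectedModuleSteps[current].id];
if (eventName) {
	trackEvent(eventName, {
		dataSource: selectedDataSource?.id,
		framework: selectedFramework,
		environment: selectedEnvironment,
		selectedMethod,
		module: activeStep?.module?.id,
	});
}
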
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
index 058c16761b..c4494569df 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
+++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx
@@ -506,8 +506,9 @@ function PipelineListsView({
pagination={false}
/>
- {showSaveButton && (
+ {isEditingActionMode && (
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
index e7aa0ecedc..700665b957 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
+++ b/frontend/src/container/PipelinePage/PipelineListsView/SaveConfigButton.tsx
@@ -4,6 +4,7 @@ import { useTranslation } from 'react-i18next';
import { SaveConfigWrapper } from './styles';
function SaveConfigButton({
+ showSaveButton,
onSaveConfigurationHandler,
onCancelConfigurationHandler,
}: SaveConfigButtonTypes): JSX.Element {
@@ -11,14 +12,16 @@ function SaveConfigButton({
	return (
		<SaveConfigWrapper>
-			<Button type="primary" onClick={onSaveConfigurationHandler}>
-				{t('save_configuration')}
-			</Button>
+			{showSaveButton && (
+				<Button type="primary" onClick={onSaveConfigurationHandler}>
+					{t('save_configuration')}
+				</Button>
+			)}
			<Button onClick={onCancelConfigurationHandler}>{t('cancel')}</Button>
@@ -26,6 +29,7 @@ function SaveConfigButton({
);
}
export interface SaveConfigButtonTypes {
+ showSaveButton: boolean;
onSaveConfigurationHandler: VoidFunction;
onCancelConfigurationHandler: VoidFunction;
}
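
Call-site sketch for the widened props (hedged; PipelineListsView above gates the whole block on `isEditingActionMode`, while `showSaveButton` now only hides the Save button, leaving Cancel available before any change is made):

{isEditingActionMode && (
	<SaveConfigButton
		showSaveButton={showSaveButton}
		onSaveConfigurationHandler={onSaveConfigurationHandler}
		onCancelConfigurationHandler={onCancelConfigurationHandler}
	/>
)}
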
diff --git a/frontend/src/container/PipelinePage/PipelineListsView/styles.ts b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
index 0b3ddbff3d..d96eb7cd93 100644
--- a/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
+++ b/frontend/src/container/PipelinePage/PipelineListsView/styles.ts
@@ -108,6 +108,7 @@ export const ModeAndConfigWrapper = styled.div`
export const SaveConfigWrapper = styled.div`
display: flex;
+ justify-content: flex-end;
gap: 0.938rem;
margin-top: 1.25rem;
`;
diff --git a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
index a5cf155b2f..dd39a5b1af 100644
--- a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
+++ b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx
@@ -1,4 +1,5 @@
import { Col, Input, Row } from 'antd';
+import { LEGEND } from 'constants/global';
// ** Components
import {
FilterLabel,
@@ -13,6 +14,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { ChangeEvent, useCallback, useMemo } from 'react';
import { IBuilderFormula } from 'types/api/queryBuilder/queryBuilderData';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
import { AdditionalFiltersToggler } from '../AdditionalFiltersToggler';
// ** Types
@@ -58,7 +60,7 @@ export function Formula({
const { name, value } = e.target;
const newFormula: IBuilderFormula = {
...formula,
- [name]: value,
+ [name]: name === LEGEND ? getFormatedLegend(value) : value,
};
handleSetFormulaData(index, newFormula);
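
Only the legend field goes through the formatter; every other formula field is stored verbatim. `getFormatedLegend`'s body is outside this diff, so the sketch below pins down just the gate, treating the formatter as an opaque string-to-string function:

import { LEGEND } from 'constants/global';
import { getFormatedLegend } from 'utils/getFormatedLegend';

// e.g. name === 'legend' is normalized; 'expression' and other fields are not.
function nextFieldValue(name: string, value: string): string {
	return name === LEGEND ? getFormatedLegend(value) : value;
}
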
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
index 7373690f2b..32bf8fbd40 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx
@@ -14,11 +14,11 @@ function OptionRenderer({
const optionType = getOptionType(label);
return (
-
+
{optionType ? (
- {value}
-
+
{value}
+
Type:
{optionType}
diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
index c9c2e0e221..fd6d5f209e 100644
--- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
+++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/style.ts
@@ -18,17 +18,18 @@ export const StyledCheckOutlined = styled(CheckOutlined)`
export const SelectOptionContainer = styled.div`
display: flex;
+ gap: 8px;
justify-content: space-between;
align-items: center;
+ overflow-x: auto;
`;
export const TagContainer = styled(Tag)`
&&& {
- border-radius: 0.25rem;
- padding: 0.063rem 0.5rem;
- font-weight: 600;
- font-size: 0.75rem;
- line-height: 1.25rem;
+ border-radius: 3px;
+ padding: 0.3rem 0.3rem;
+ font-weight: 400;
+ font-size: 0.6rem;
}
`;
diff --git a/frontend/src/container/ResetPassword/ResetPassword.test.tsx b/frontend/src/container/ResetPassword/ResetPassword.test.tsx
new file mode 100644
index 0000000000..b3345fedf3
--- /dev/null
+++ b/frontend/src/container/ResetPassword/ResetPassword.test.tsx
@@ -0,0 +1,72 @@
+import { fireEvent, render, screen, waitFor } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { act } from 'react-dom/test-utils';
+
+import ResetPassword from './index';
+
+jest.mock('api/user/resetPassword', () => ({
+ __esModule: true,
+ default: jest.fn(),
+}));
+
+jest.useFakeTimers();
+
+describe('ResetPassword Component', () => {
+ beforeEach(() => {
+ userEvent.setup();
+ jest.clearAllMocks();
+ });
+
+ it('renders ResetPassword component correctly', () => {
+		render(<ResetPassword version="" />);
+ expect(screen.getByText('Reset Your Password')).toBeInTheDocument();
+ expect(screen.getByLabelText('Password')).toBeInTheDocument();
+ // eslint-disable-next-line sonarjs/no-duplicate-string
+ expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument();
+ expect(
+ // eslint-disable-next-line sonarjs/no-duplicate-string
+ screen.getByRole('button', { name: 'Get Started' }),
+ ).toBeInTheDocument();
+ });
+
+ it('disables the "Get Started" button when password is invalid', async () => {
+		render(<ResetPassword version="" />);
+
+ const passwordInput = screen.getByLabelText('Password');
+ const confirmPasswordInput = screen.getByLabelText('Confirm Password');
+ const submitButton = screen.getByRole('button', { name: 'Get Started' });
+
+ act(() => {
+ // Set invalid password
+ fireEvent.change(passwordInput, { target: { value: 'password' } });
+ fireEvent.change(confirmPasswordInput, { target: { value: 'password' } });
+ });
+
+ await waitFor(() => {
+ // Expect the "Get Started" button to be disabled
+ expect(submitButton).toBeDisabled();
+ });
+ });
+
+ it('enables the "Get Started" button when password is valid', async () => {
+		render(<ResetPassword version="" />);
+
+ const passwordInput = screen.getByLabelText('Password');
+ const confirmPasswordInput = screen.getByLabelText('Confirm Password');
+ const submitButton = screen.getByRole('button', { name: 'Get Started' });
+
+ act(() => {
+ fireEvent.change(passwordInput, { target: { value: 'newPassword' } });
+ fireEvent.change(confirmPasswordInput, { target: { value: 'newPassword' } });
+ });
+
+ act(() => {
+ jest.advanceTimersByTime(500);
+ });
+
+ await waitFor(() => {
+ // Expect the "Get Started" button to be enabled
+ expect(submitButton).toBeEnabled();
+ });
+ });
+});
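
The `jest.advanceTimersByTime(500)` step exists because the component's validation is debounced (100 ms in index.tsx below); the button state only settles after the debounce window elapses. A minimal sketch of a `useDebouncedFn`-style hook, assuming a lodash-like debounce (the real hooks/useDebouncedFunction may differ):

import debounce from 'lodash-es/debounce';
import { useMemo } from 'react';

// Returns a render-stable debounced wrapper around `fn`.
function useDebouncedFn<T extends (...args: never[]) => void>(
	fn: T,
	waitMs: number,
): T {
	return useMemo(() => (debounce(fn, waitMs) as unknown) as T, [fn, waitMs]);
}
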
diff --git a/frontend/src/container/ResetPassword/index.tsx b/frontend/src/container/ResetPassword/index.tsx
index eac4b098cd..f4be5310e2 100644
--- a/frontend/src/container/ResetPassword/index.tsx
+++ b/frontend/src/container/ResetPassword/index.tsx
@@ -3,6 +3,7 @@ import resetPasswordApi from 'api/user/resetPassword';
import { Logout } from 'api/utils';
import WelcomeLeftContainer from 'components/WelcomeLeftContainer';
import ROUTES from 'constants/routes';
+import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { Label } from 'pages/SignUp/styles';
@@ -20,6 +21,8 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
	const [confirmPasswordError, setConfirmPasswordError] = useState<boolean>(
		false,
	);
+
+ const [isValidPassword, setIsValidPassword] = useState(false);
const [loading, setLoading] = useState(false);
const { t } = useTranslation(['common']);
const { search } = useLocation();
@@ -35,7 +38,7 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
}
}, [token]);
-	const handleSubmit: () => Promise<void> = async () => {
+	const handleFormSubmit: () => Promise<void> = async () => {
try {
setLoading(true);
const { password } = form.getFieldsValue();
@@ -72,38 +75,88 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
});
}
};
- const handleValuesChange: (changedValues: FormValues) => void = (
- changedValues,
- ) => {
- if ('confirmPassword' in changedValues) {
- const { confirmPassword } = changedValues;
- const isSamePassword = form.getFieldValue('password') === confirmPassword;
- setConfirmPasswordError(!isSamePassword);
+ const validatePassword = (): boolean => {
+ const { password, confirmPassword } = form.getFieldsValue();
+
+ if (
+ password &&
+ confirmPassword &&
+ password.trim() &&
+ confirmPassword.trim() &&
+ password.length > 0 &&
+ confirmPassword.length > 0
+ ) {
+ return password === confirmPassword;
+ }
+
+ return false;
+ };
+
+ const handleValuesChange = useDebouncedFn((): void => {
+ const { password, confirmPassword } = form.getFieldsValue();
+
+ if (!password || !confirmPassword) {
+ setIsValidPassword(false);
+ }
+
+ if (
+ password &&
+ confirmPassword &&
+ password.trim() &&
+ confirmPassword.trim()
+ ) {
+ const isValid = validatePassword();
+
+ setIsValidPassword(isValid);
+ setConfirmPasswordError(!isValid);
+ }
+ }, 100);
+
+ const handleSubmit = (): void => {
+ const isValid = validatePassword();
+ setIsValidPassword(isValid);
+
+ if (token) {
+ handleFormSubmit();
}
};
return (
-
+
Reset Your Password
- Password
-
-
-
+ Password
+
+
+
- Confirm Password
-
-
-
+ Confirm Password
+
+
+
{confirmPasswordError && (
- Passwords don’t match. Please try again
+ The passwords entered do not match. Please double-check and re-enter
+ your passwords.
)}
@@ -124,13 +178,7 @@ function ResetPassword({ version }: ResetPasswordProps): JSX.Element {
htmlType="submit"
data-attr="signup"
loading={loading}
- disabled={
- loading ||
- !form.getFieldValue('password') ||
- !form.getFieldValue('confirmPassword') ||
- confirmPasswordError ||
- token === null
- }
+ disabled={!isValidPassword || loading}
>
Get Started
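
After the `trim()` checks in `validatePassword` above, the `length > 0` comparisons are redundant; the predicate reduces to "both fields are non-blank and equal". A condensed equivalent (editor's sketch, not the PR's code):

function isPasswordPairValid(
	password?: string,
	confirmPassword?: string,
): boolean {
	return (
		Boolean(password?.trim()) &&
		Boolean(confirmPassword?.trim()) &&
		password === confirmPassword
	);
}
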
diff --git a/frontend/src/container/ResetPassword/styles.ts b/frontend/src/container/ResetPassword/styles.ts
index e59a453695..f71860382e 100644
--- a/frontend/src/container/ResetPassword/styles.ts
+++ b/frontend/src/container/ResetPassword/styles.ts
@@ -4,8 +4,12 @@ import styled from 'styled-components';
export const FormWrapper = styled(Card)`
display: flex;
justify-content: center;
- max-width: 432px;
+ width: 432px;
flex: 1;
+
+ .ant-card-body {
+ width: 100%;
+ }
`;
export const ButtonContainer = styled.div`
diff --git a/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
index 4257dc57ec..b272a39475 100644
--- a/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
+++ b/frontend/src/container/ServiceApplication/Columns/GetColumnSearchProps.tsx
@@ -26,7 +26,11 @@ export const getColumnSearchProps = (
const queryString = getQueryString(avialableParams, urlParams);
return (
-
+
{metrics}
);
diff --git a/frontend/src/container/ServiceApplication/styles.ts b/frontend/src/container/ServiceApplication/styles.ts
index 0bcca1738b..0bf860bb9a 100644
--- a/frontend/src/container/ServiceApplication/styles.ts
+++ b/frontend/src/container/ServiceApplication/styles.ts
@@ -1,5 +1,4 @@
import { Typography } from 'antd';
-import { themeColors } from 'constants/theme';
import styled from 'styled-components';
export const Container = styled.div`
@@ -9,7 +8,7 @@ export const Container = styled.div`
export const Name = styled(Typography)`
&&& {
font-weight: 600;
- color: ${themeColors.lightBlue};
+ color: #4e74f8;
cursor: pointer;
}
`;
diff --git a/frontend/src/container/ServiceTable/styles.ts b/frontend/src/container/ServiceTable/styles.ts
index 3050081a56..0bf860bb9a 100644
--- a/frontend/src/container/ServiceTable/styles.ts
+++ b/frontend/src/container/ServiceTable/styles.ts
@@ -8,7 +8,7 @@ export const Container = styled.div`
export const Name = styled(Typography)`
&&& {
font-weight: 600;
- color: #177ddc;
+ color: #4e74f8;
cursor: pointer;
}
`;
diff --git a/frontend/src/container/SideNav/NavItem/NavItem.styles.scss b/frontend/src/container/SideNav/NavItem/NavItem.styles.scss
new file mode 100644
index 0000000000..f182a1df6d
--- /dev/null
+++ b/frontend/src/container/SideNav/NavItem/NavItem.styles.scss
@@ -0,0 +1,112 @@
+.nav-item {
+ border-radius: 2px;
+
+ display: flex;
+ flex-direction: row;
+ align-items: center;
+
+ height: 36px;
+ margin-bottom: 4px;
+
+ &.active {
+ .nav-item-active-marker {
+ background: #3f5ecc;
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+
+ .nav-item-data {
+ color: white;
+ background: #121317;
+ }
+ }
+
+ &.active {
+ .nav-item-data {
+ color: white;
+ background: #121317;
+ // color: #3f5ecc;
+ }
+ }
+
+ .nav-item-active-marker {
+ margin: 8px 0;
+ width: 8px;
+ height: 24px;
+ background: transparent;
+ border-radius: 3px;
+ margin-left: -5px;
+ }
+
+ .nav-item-data {
+ flex-grow: 1;
+
+ max-width: calc(100% - 24px);
+
+ display: flex;
+ margin: 0px 8px;
+ padding: 4px 12px;
+ flex-direction: row;
+ align-items: center;
+ gap: 8px;
+ align-self: stretch;
+ color: #c0c1c3;
+
+ border-radius: 3px;
+ font-family: Inter;
+ font-size: 13px;
+ font-style: normal;
+ font-weight: 400;
+ line-height: 18px;
+
+ background: transparent;
+ border-left: 2px solid transparent;
+
+ transition: 0.2s all linear;
+
+ .nav-item-icon {
+ height: 16px;
+ }
+
+ .nav-item-label {
+ // width: 220px;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ }
+ }
+}
+
+.lightMode {
+ .nav-item {
+ &.active {
+ .nav-item-active-marker {
+ background: #3f5ecc;
+ }
+ }
+
+ &:hover {
+ cursor: pointer;
+
+ .nav-item-data {
+ color: #121317;
+
+ background: white;
+ }
+ }
+
+ &.active {
+ .nav-item-data {
+ color: #121317;
+ background: white;
+ // color: #4e74f8;
+ }
+ }
+
+ .nav-item-data {
+ color: #121317;
+ }
+ }
+}
diff --git a/frontend/src/container/SideNav/NavItem/NavItem.tsx b/frontend/src/container/SideNav/NavItem/NavItem.tsx
new file mode 100644
index 0000000000..301acc402e
--- /dev/null
+++ b/frontend/src/container/SideNav/NavItem/NavItem.tsx
@@ -0,0 +1,31 @@
+import './NavItem.styles.scss';
+
+import cx from 'classnames';
+
+import { SidebarItem } from '../sideNav.types';
+
+export default function NavItem({
+ isCollapsed,
+ item,
+ isActive,
+ onClick,
+}: {
+ isCollapsed: boolean;
+ item: SidebarItem;
+ isActive: boolean;
+ onClick: () => void;
+}): JSX.Element {
+ const { label, icon } = item;
+
+ return (
+ // eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
+		<div
+			className={cx('nav-item', isActive ? 'active' : '')}
+			onClick={onClick}
+		>
+			<div className="nav-item-active-marker" />
+
+			<div className="nav-item-data">
+				<div className="nav-item-icon">{icon}</div>
+				{!isCollapsed && <div className="nav-item-label">{label}</div>}
+			</div>
+		</div>
+ );
+}
diff --git a/frontend/src/container/SideNav/SideNav.styles.scss b/frontend/src/container/SideNav/SideNav.styles.scss
new file mode 100644
index 0000000000..379a7bba93
--- /dev/null
+++ b/frontend/src/container/SideNav/SideNav.styles.scss
@@ -0,0 +1,172 @@
+@import '@signozhq/design-tokens';
+
+.sideNav {
+ flex: 0 0 240px;
+ max-width: 240px;
+ min-width: 240px;
+ width: 240px;
+ border-right: 1px solid $bg-slate-400;
+ padding-bottom: 48px;
+ transition: all 0.3s, background 0s, border 0s;
+ position: relative;
+
+ .brand {
+ display: flex;
+ align-items: center;
+ gap: 12px;
+ padding: $padding-4;
+
+ .brand-logo {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+
+ cursor: pointer;
+
+ img {
+ height: $font-size-xl;
+ }
+
+ .brand-logo-name {
+ font-family: 'Work Sans', sans-serif;
+ font-size: 14px;
+ font-style: normal;
+ font-weight: 500;
+ line-height: 18px;
+
+ color: #fff;
+ }
+ }
+
+ .license {
+ &.tag {
+ box-sizing: border-box;
+ margin: 0;
+ padding: 0;
+ color: rgba(255, 255, 255, 0.85);
+ font-size: 8px;
+ font-weight: $font-weight-medium;
+ letter-spacing: 0.6px;
+ padding: 4px 8px;
+ text-transform: uppercase;
+ white-space: nowrap;
+ background: $bg-slate-400;
+ border: 1px solid $bg-slate-400;
+ border-radius: 20px;
+ opacity: 1;
+ transition: all 0.2s;
+ }
+ }
+ }
+
+ .get-started-nav-items {
+ display: flex;
+ margin: 4px 13px 4px 10px;
+
+ .get-started-btn {
+ display: flex;
+ align-items: center;
+ padding: 8px;
+ margin-left: 2px;
+ gap: 8px;
+
+ width: 100%;
+ height: 36px;
+
+ border: 1px solid $bg-slate-400;
+
+ border-radius: 2px;
+ box-shadow: none !important;
+ }
+ }
+
+ .secondary-nav-items {
+ border-top: 1px solid $bg-slate-400;
+ padding: 8px 0;
+ max-width: 100%;
+ position: fixed;
+ bottom: 0;
+ left: 0;
+ width: 240px;
+
+ transition: all 0.3s, background 0s, border 0s;
+
+ // position: relative;
+
+ .collapse-expand-handlers {
+ position: absolute;
+
+ top: -9px;
+ right: -9px;
+ cursor: pointer;
+
+ display: none;
+
+ transition: display 0.3s;
+
+ svg {
+ fill: $bg-vanilla-300;
+ color: $bg-slate-300;
+ }
+ }
+ }
+
+ &.collapsed {
+ flex: 0 0 64px;
+ max-width: 64px;
+ min-width: 64px;
+ width: 64px;
+
+ .secondary-nav-items {
+ width: 64px;
+ }
+
+ .brand {
+ justify-content: center;
+ }
+
+ .get-started-nav-items {
+ .get-started-btn {
+ justify-content: center;
+ }
+ }
+ }
+
+ &:hover {
+ .collapse-expand-handlers {
+ display: block;
+ }
+ }
+}
+
+.lightMode {
+ .sideNav {
+ background: $bg-vanilla-300;
+ border-right: 1px solid $bg-vanilla-400;
+
+ .get-started-nav-items {
+ .get-started-btn {
+ border: 1px solid $bg-vanilla-400;
+ }
+ }
+
+ .brand {
+ .brand-logo {
+ .brand-logo-name {
+ color: $bg-slate-400;
+ }
+ }
+ }
+
+ .secondary-nav-items {
+ border-top: 1px solid $bg-vanilla-400;
+
+ .collapse-expand-handlers {
+ svg {
+ color: $bg-slate-300;
+ fill: $bg-vanilla-300;
+ }
+ }
+ }
+ }
+}
diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx
index 911913aef9..3fd937a52f 100644
--- a/frontend/src/container/SideNav/SideNav.tsx
+++ b/frontend/src/container/SideNav/SideNav.tsx
@@ -1,12 +1,26 @@
-import { CheckCircleTwoTone, WarningOutlined } from '@ant-design/icons';
-import { MenuProps } from 'antd';
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
+import './SideNav.styles.scss';
+
+import { Button } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
+import cx from 'classnames';
import { IS_SIDEBAR_COLLAPSED } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import ROUTES from 'constants/routes';
-import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
+import { ToggleButton } from 'container/Header/styles';
+import useComponentPermission from 'hooks/useComponentPermission';
+import useThemeMode, { useIsDarkMode } from 'hooks/useDarkMode';
+import { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS } from 'hooks/useLicense';
import history from 'lib/history';
-import { LifeBuoy } from 'lucide-react';
+import {
+ AlertTriangle,
+ CheckSquare,
+ ChevronLeftCircle,
+ ChevronRightCircle,
+ RocketIcon,
+ UserCircle,
+} from 'lucide-react';
import {
useCallback,
useEffect,
@@ -17,44 +31,82 @@ import {
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
-import { sideBarCollapse } from 'store/actions/app';
+import { sideBarCollapse } from 'store/actions';
import { AppState } from 'store/reducers';
+import { License } from 'types/api/licenses/def';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { checkVersionState, isCloudUser, isEECloudUser } from 'utils/app';
-import { routeConfig, styles } from './config';
+import { routeConfig } from './config';
import { getQueryString } from './helper';
-import defaultMenuItems from './menuItems';
-import { MenuItem, SecondaryMenuItemKey } from './sideNav.types';
+import defaultMenuItems, {
+ helpSupportMenuItem,
+ inviteMemberMenuItem,
+ manageLicenseMenuItem,
+ slackSupportMenuItem,
+ trySignozCloudMenuItem,
+} from './menuItems';
+import NavItem from './NavItem/NavItem';
+import { SecondaryMenuItemKey } from './sideNav.types';
import { getActiveMenuKeyFromPath } from './sideNav.utils';
-import Slack from './Slack';
-import {
- MenuLabelContainer,
- RedDot,
- Sider,
- StyledPrimaryMenu,
- StyledSecondaryMenu,
- StyledText,
-} from './styles';
-function SideNav(): JSX.Element {
+function SideNav({
+ licenseData,
+ isFetching,
+}: {
+ licenseData: any;
+ isFetching: boolean;
+}): JSX.Element {
const dispatch = useDispatch();
const [menuItems, setMenuItems] = useState(defaultMenuItems);
const [collapsed, setCollapsed] = useState(
getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true',
);
+
+ const { pathname, search } = useLocation();
const {
+ user,
role,
+ featureResponse,
currentVersion,
latestVersion,
isCurrentVersionError,
- featureResponse,
	} = useSelector<AppState, AppReducer>((state) => state.app);
- const { data, isFetching } = useLicense();
+ const userSettingsMenuItem = {
+ key: ROUTES.MY_SETTINGS,
+ label: user?.name || 'User',
+		icon: <UserCircle size={16} />,
+ };
- let secondaryMenuItems: MenuItem[] = [];
+ const [userManagementMenuItems, setUserManagementMenuItems] = useState([
+ manageLicenseMenuItem,
+ ]);
+
+ const onClickSlackHandler = (): void => {
+ window.open('https://signoz.io/slack', '_blank');
+ };
+
+ const onClickVersionHandler = (): void => {
+ history.push(ROUTES.VERSION);
+ };
+
+ const isLatestVersion = checkVersionState(currentVersion, latestVersion);
+
+ const [inviteMembers] = useComponentPermission(['invite_members'], role);
+
+ useEffect(() => {
+ if (inviteMembers) {
+ const updatedUserManagementMenuItems = [
+ inviteMemberMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [inviteMembers]);
useEffect((): void => {
const isOnboardingEnabled =
@@ -78,10 +130,10 @@ function SideNav(): JSX.Element {
let items = [...menuItems];
const isOnBasicPlan =
- data?.payload?.licenses?.some(
- (license) =>
+ licenseData?.payload?.licenses?.some(
+ (license: License) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
- ) || data?.payload?.licenses === null;
+ ) || licenseData?.payload?.licenses === null;
if (role !== USER_ROLES.ADMIN || isOnBasicPlan) {
items = items.filter((item) => item.key !== ROUTES.BILLING);
@@ -90,9 +142,7 @@ function SideNav(): JSX.Element {
setMenuItems(items);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
- }, [data?.payload?.licenses, isFetching, role]);
-
- const { pathname, search } = useLocation();
+ }, [licenseData?.payload?.licenses, isFetching, role]);
const { t } = useTranslation('');
@@ -104,6 +154,26 @@ function SideNav(): JSX.Element {
dispatch(sideBarCollapse(collapsed));
}, [collapsed, dispatch]);
+ const isLicenseActive =
+ licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ===
+ LICENSE_PLAN_STATUS.VALID;
+
+ const isEnterprise = licenseData?.payload?.licenses?.some(
+ (license: License) =>
+ license.isCurrent && license.planKey === LICENSE_PLAN_KEY.ENTERPRISE_PLAN,
+ );
+
+ const onClickSignozCloud = (): void => {
+ window.open(
+ 'https://signoz.io/oss-to-cloud/?utm_source=product_navbar&utm_medium=frontend&utm_campaign=oss_users',
+ '_blank',
+ );
+ };
+
+ const onClickGetStarted = (): void => {
+ history.push(`/get-started`);
+ };
+
const onClickHandler = useCallback(
(key: string) => {
const params = new URLSearchParams(search);
@@ -118,80 +188,175 @@ function SideNav(): JSX.Element {
[pathname, search],
);
- const onClickMenuHandler: MenuProps['onClick'] = (e) => {
- onClickHandler(e.key);
- };
-
- const onClickSlackHandler = (): void => {
- window.open('https://signoz.io/slack', '_blank');
- };
-
- const onClickVersionHandler = (): void => {
- history.push(ROUTES.VERSION);
- };
-
- const isLatestVersion = checkVersionState(currentVersion, latestVersion);
-
- if (isCloudUser() || isEECloudUser()) {
- secondaryMenuItems = [
- {
- key: SecondaryMenuItemKey.Support,
- label: 'Support',
- icon: ,
- onClick: onClickMenuHandler,
- },
- ];
- } else {
- secondaryMenuItems = [
- {
- key: SecondaryMenuItemKey.Version,
- icon: !isLatestVersion ? (
-
- ) : (
-
- ),
- label: (
-
-
- {!isCurrentVersionError ? currentVersion : t('n_a')}
-
- {!isLatestVersion && }
-
- ),
- onClick: onClickVersionHandler,
- },
- {
- key: SecondaryMenuItemKey.Slack,
- icon: ,
- label: Support ,
- onClick: onClickSlackHandler,
- },
- ];
- }
-
const activeMenuKey = useMemo(() => getActiveMenuKeyFromPath(pathname), [
pathname,
]);
+ const isDarkMode = useIsDarkMode();
+ const { toggleTheme } = useThemeMode();
+
+ const isCloudUserVal = isCloudUser();
+
+ useEffect(() => {
+ if (isCloudUser() || isEECloudUser()) {
+ const updatedUserManagementMenuItems = [
+ helpSupportMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ } else if (currentVersion && latestVersion) {
+ const versionMenuItem = {
+ key: SecondaryMenuItemKey.Version,
+ label: !isCurrentVersionError ? currentVersion : t('n_a'),
+				icon: !isLatestVersion ? (
+					<AlertTriangle size={16} />
+				) : (
+					<CheckSquare size={16} />
+				),
+ onClick: onClickVersionHandler,
+ };
+
+ const updatedUserManagementMenuItems = [
+ versionMenuItem,
+ slackSupportMenuItem,
+ manageLicenseMenuItem,
+ ];
+
+ setUserManagementMenuItems(updatedUserManagementMenuItems);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [currentVersion, latestVersion]);
+
+ const handleUserManagentMenuItemClick = (key: string): void => {
+ switch (key) {
+ case SecondaryMenuItemKey.Slack:
+ onClickSlackHandler();
+ break;
+ case SecondaryMenuItemKey.Version:
+ onClickVersionHandler();
+ break;
+ default:
+ onClickHandler(key);
+ break;
+ }
+ };
+
return (
-		<Sider collapsed={collapsed}>
-			<StyledPrimaryMenu items={menuItems} onClick={onClickMenuHandler} />
-			<StyledSecondaryMenu items={secondaryMenuItems} onClick={onClickMenuHandler} />
-		</Sider>
+		<div className={cx('sideNav', collapsed ? 'collapsed' : '')}>
+			<div className="brand">
+				<div
+					className="brand-logo"
+					onClick={(): void => {
+						// Current home page
+						onClickHandler(ROUTES.APPLICATION);
+					}}
+				>
+					<img src="/Logos/signoz-brand-logo.svg" alt="SigNoz" />
+
+					{!collapsed && <span className="brand-logo-name"> SigNoz </span>}
+				</div>
+
+				{!collapsed && (
+					<>
+						<div className="license tag">
+							{!isEnterprise ? 'Free' : 'Enterprise'}
+						</div>
+
+						<ToggleButton checked={isDarkMode} onChange={toggleTheme} />
+					</>
+				)}
+			</div>
+
+			{isCloudUserVal && (
+				<div className="get-started-nav-items">
+					<Button className="get-started-btn" onClick={onClickGetStarted}>
+						<RocketIcon size={16} />
+
+						{!collapsed && <> Get Started </>}
+					</Button>
+				</div>
+			)}
+
+			<div className="primary-nav-items">
+				{menuItems.map((item, index) => (
+					<NavItem
+						key={item.key || index}
+						isCollapsed={collapsed}
+						item={item}
+						isActive={activeMenuKey === item.key}
+						onClick={(): void => {
+							if (item) {
+								onClickHandler(item?.key as string);
+							}
+						}}
+					/>
+				))}
+			</div>
+
+			<div className="secondary-nav-items">
+				{licenseData && !isLicenseActive && (
+					<NavItem
+						isCollapsed={collapsed}
+						item={trySignozCloudMenuItem}
+						isActive={false}
+						onClick={onClickSignozCloud}
+					/>
+				)}
+
+				{userManagementMenuItems.map(
+					(item, index): JSX.Element => (
+						<NavItem
+							key={item?.key || index}
+							isCollapsed={collapsed}
+							item={item}
+							isActive={activeMenuKey === item?.key}
+							onClick={(): void => {
+								handleUserManagentMenuItemClick(item?.key as string);
+							}}
+						/>
+					),
+				)}
+
+				{inviteMembers && (
+					<NavItem
+						isCollapsed={collapsed}
+						item={inviteMemberMenuItem}
+						isActive={false}
+						onClick={(): void => {
+							history.push(`${inviteMemberMenuItem.key}`);
+						}}
+					/>
+				)}
+
+				{user && (
+					<NavItem
+						isCollapsed={collapsed}
+						item={userSettingsMenuItem}
+						isActive={activeMenuKey === userSettingsMenuItem?.key}
+						onClick={(): void => {
+							handleUserManagentMenuItemClick(userSettingsMenuItem?.key as string);
+						}}
+					/>
+				)}
+
+				<div
+					className="collapse-expand-handlers"
+					onClick={(): void => setCollapsed(!collapsed)}
+				>
+					{collapsed ? (
+						<ChevronRightCircle size={18} />
+					) : (
+						<ChevronLeftCircle size={18} />
+					)}
+				</div>
+			</div>
+		</div>
+	);
}
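
SideNav now receives license data as props instead of fetching it itself (note the removed `const { data, isFetching } = useLicense()`), so the query runs once in the parent layout. A hedged sketch of the new call site (the parent component is not part of this diff):

import useLicense from 'hooks/useLicense';
import SideNav from 'container/SideNav/SideNav';

// Hypothetical parent render; prop names match the SideNav signature above.
function AppLayoutSketch(): JSX.Element {
	const { data: licenseData, isFetching } = useLicense();
	return <SideNav licenseData={licenseData} isFetching={isFetching} />;
}
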
diff --git a/frontend/src/container/SideNav/config.ts b/frontend/src/container/SideNav/config.ts
index 2fbfa1e244..95028b0a05 100644
--- a/frontend/src/container/SideNav/config.ts
+++ b/frontend/src/container/SideNav/config.ts
@@ -31,6 +31,7 @@ export const routeConfig: Record<string, QueryParams[]> = {
[ROUTES.LIST_LICENSES]: [QueryParams.resourceAttributes],
[ROUTES.LOGIN]: [QueryParams.resourceAttributes],
[ROUTES.LOGS]: [QueryParams.resourceAttributes],
+ [ROUTES.LOGS_BASE]: [QueryParams.resourceAttributes],
[ROUTES.MY_SETTINGS]: [QueryParams.resourceAttributes],
[ROUTES.NOT_FOUND]: [QueryParams.resourceAttributes],
[ROUTES.ORG_SETTINGS]: [QueryParams.resourceAttributes],
diff --git a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx
index ae0acdd8c6..00ac98d259 100644
--- a/frontend/src/container/SideNav/menuItems.tsx
+++ b/frontend/src/container/SideNav/menuItems.tsx
@@ -1,88 +1,111 @@
-import {
- AlertOutlined,
- AlignLeftOutlined,
- BarChartOutlined,
- BugOutlined,
- DashboardFilled,
- DeploymentUnitOutlined,
- FileDoneOutlined,
- LineChartOutlined,
- MenuOutlined,
- RocketOutlined,
- SearchOutlined,
- SettingOutlined,
-} from '@ant-design/icons';
+import { RocketOutlined } from '@ant-design/icons';
import ROUTES from 'constants/routes';
+import {
+ AreaChart,
+ BarChart2,
+ BellDot,
+ BugIcon,
+ Cloudy,
+ DraftingCompass,
+ FileKey2,
+ LayoutGrid,
+ MessageSquare,
+ Receipt,
+ Route,
+ ScrollText,
+ Settings,
+ Slack,
+ UserPlus,
+} from 'lucide-react';
-import { SidebarMenu } from './sideNav.types';
+import { SecondaryMenuItemKey, SidebarItem } from './sideNav.types';
-const menuItems: SidebarMenu[] = [
- {
- key: ROUTES.GET_STARTED,
- label: 'Get Started',
- icon: ,
- },
+export const getStartedMenuItem = {
+ key: ROUTES.GET_STARTED,
+ label: 'Get Started',
+	icon: <RocketOutlined />,
+};
+
+export const inviteMemberMenuItem = {
+ key: `${ROUTES.ORG_SETTINGS}#invite-team-members`,
+ label: 'Invite Team Member',
+	icon: <UserPlus size={16} />,
+};
+
+export const manageLicenseMenuItem = {
+ key: ROUTES.LIST_LICENSES,
+ label: 'Manage Licenses',
+	icon: <FileKey2 size={16} />,
+};
+
+export const helpSupportMenuItem = {
+ key: ROUTES.SUPPORT,
+ label: 'Help & Support',
+	icon: <MessageSquare size={16} />,
+};
+
+export const slackSupportMenuItem = {
+ key: SecondaryMenuItemKey.Slack,
+ label: 'Slack Support',
+	icon: <Slack size={16} />,
+};
+
+export const trySignozCloudMenuItem: SidebarItem = {
+ key: 'trySignozCloud',
+ label: 'Try Signoz Cloud',
+	icon: <Cloudy size={16} />,
+};
+
+const menuItems: SidebarItem[] = [
{
key: ROUTES.APPLICATION,
label: 'Services',
- icon: ,
+		icon: <BarChart2 size={16} />,
},
{
key: ROUTES.TRACE,
label: 'Traces',
- icon: ,
+		icon: <DraftingCompass size={16} />,
},
{
- key: ROUTES.LOGS_EXPLORER,
+ key: ROUTES.LOGS,
label: 'Logs',
- icon: ,
- children: [
- {
- key: ROUTES.LOGS_EXPLORER,
- icon: ,
- label: 'Logs Explorer',
- },
- {
- key: ROUTES.LOGS_PIPELINES,
- icon: ,
- label: 'Logs Pipelines',
- },
- ],
+		icon: <ScrollText size={16} />,
},
{
key: ROUTES.ALL_DASHBOARD,
label: 'Dashboards',
- icon: ,
+		icon: <LayoutGrid size={16} />,
},
{
key: ROUTES.LIST_ALL_ALERT,
label: 'Alerts',
- icon: ,
+		icon: <BellDot size={16} />,
},
{
key: ROUTES.ALL_ERROR,
label: 'Exceptions',
- icon: ,
+		icon: <BugIcon size={16} />,
},
{
key: ROUTES.SERVICE_MAP,
label: 'Service Map',
- icon: ,
+		icon: <Route size={16} />,
},
{
key: ROUTES.USAGE_EXPLORER,
label: 'Usage Explorer',
- icon: ,
+		icon: <AreaChart size={16} />,
},
{
key: ROUTES.BILLING,
label: 'Billing',
- icon: ,
+		icon: <Receipt size={16} />,
},
{
key: ROUTES.SETTINGS,
label: 'Settings',
- icon: ,
+		icon: <Settings size={16} />,
},
];
@@ -90,7 +113,7 @@ const menuItems: SidebarMenu[] = [
export const NEW_ROUTES_MENU_ITEM_KEY_MAP = {
[ROUTES.TRACES_EXPLORER]: ROUTES.TRACE,
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACE,
- [ROUTES.LOGS_EXPLORER]: ROUTES.LOGS_EXPLORER,
+ [ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
};
export default menuItems;
diff --git a/frontend/src/container/SideNav/sideNav.types.ts b/frontend/src/container/SideNav/sideNav.types.ts
index 804cad8d18..8bc7860478 100644
--- a/frontend/src/container/SideNav/sideNav.types.ts
+++ b/frontend/src/container/SideNav/sideNav.types.ts
@@ -8,10 +8,9 @@ export type SidebarMenu = MenuItem & {
};
export interface SidebarItem {
- onClick: VoidFunction;
icon?: ReactNode;
text?: ReactNode;
- key: string;
+ key: string | number;
label?: ReactNode;
}
diff --git a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
index c1dd107f3f..767bf16d34 100644
--- a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
+++ b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx
@@ -60,6 +60,8 @@ function TimeSeriesView({
isDarkMode,
minTimeScale,
maxTimeScale,
+ softMax: null,
+ softMin: null,
});
return (
diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx
index f7b8bf5f21..d5e4941142 100644
--- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx
+++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx
@@ -22,6 +22,7 @@ const breadcrumbNameMap = {
[ROUTES.ALL_DASHBOARD]: 'Dashboard',
[ROUTES.LOGS]: 'Logs',
[ROUTES.LOGS_EXPLORER]: 'Logs Explorer',
+ [ROUTES.OLD_LOGS_EXPLORER]: 'Old Logs Explorer',
[ROUTES.LIVE_LOGS]: 'Live View',
[ROUTES.LOGS_PIPELINES]: 'Logs Pipelines',
[ROUTES.BILLING]: 'Billing',
diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts
index b99f6f6ae2..0ece952909 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/config.ts
+++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts
@@ -90,6 +90,9 @@ export const routesToSkip = [
ROUTES.BILLING,
ROUTES.SUPPORT,
ROUTES.WORKSPACE_LOCKED,
+ ROUTES.LOGS,
+ ROUTES.MY_SETTINGS,
+ ROUTES.LIST_LICENSES,
];
export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS];
diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
index b36a1ebaba..c72ff7b82d 100644
--- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx
+++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx
@@ -1,7 +1,8 @@
import { SyncOutlined } from '@ant-design/icons';
-import { Button, Select as DefaultSelect } from 'antd';
+import { Button } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageKey from 'api/browser/localstorage/set';
+import CustomTimePicker from 'components/CustomTimePicker/CustomTimePicker';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
@@ -21,7 +22,6 @@ import { GlobalTimeLoading, UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { GlobalReducer } from 'types/reducer/globalTime';
-import { popupContainer } from 'utils/selectPopupContainer';
import AutoRefresh from '../AutoRefresh';
import CustomDateTimeModal, { DateTimeRangeType } from '../CustomDateTimeModal';
@@ -29,8 +29,6 @@ import { getDefaultOption, getOptions, Time } from './config';
import RefreshText from './Refresh';
import { Form, FormContainer, FormItem } from './styles';
-const { Option } = DefaultSelect;
-
function DateTimeSelection({
location,
updateTimeInterval,
@@ -38,6 +36,8 @@ function DateTimeSelection({
}: Props): JSX.Element {
const [formSelector] = Form.useForm();
+ const [hasSelectedTimeError, setHasSelectedTimeError] = useState(false);
+
const urlQuery = useUrlQuery();
const searchStartTime = urlQuery.get('startTime');
const searchEndTime = urlQuery.get('endTime');
@@ -196,7 +196,7 @@ function DateTimeSelection({
urlQuery.set(QueryParams.startTime, minTime.toString());
urlQuery.set(QueryParams.endTime, maxTime.toString());
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
+ history.push(generatedUrl);
}
if (!stagedQuery) {
@@ -211,6 +211,7 @@ function DateTimeSelection({
};
const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => {
+ console.log('dateTimeRange', dateTimeRange);
if (dateTimeRange !== null) {
const [startTimeMoment, endTimeMoment] = dateTimeRange;
if (startTimeMoment && endTimeMoment) {
@@ -223,10 +224,16 @@ function DateTimeSelection({
setLocalStorageKey('endTime', endTimeMoment.toString());
updateLocalStorageForRoutes('custom');
if (!isLogsExplorerPage) {
- urlQuery.set(QueryParams.startTime, startTimeMoment.toString());
- urlQuery.set(QueryParams.endTime, endTimeMoment.toString());
+ urlQuery.set(
+ QueryParams.startTime,
+ startTimeMoment?.toDate().getTime().toString(),
+ );
+ urlQuery.set(
+ QueryParams.endTime,
+ endTimeMoment?.toDate().getTime().toString(),
+ );
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
- history.replace(generatedUrl);
+ history.push(generatedUrl);
}
}
}
@@ -283,25 +290,25 @@ function DateTimeSelection({
initialValues={{ interval: selectedTime }}
>
- onSelectHandler(value as Time)}
- value={getInputLabel(
+ {
+ onSelectHandler(value as Time);
+ }}
+ onError={(hasError: boolean): void => {
+ setHasSelectedTimeError(hasError);
+ }}
+ selectedTime={selectedTime}
+ onValidCustomDateChange={(dateTime): void =>
+ onCustomDateHandler(dateTime as DateTimeRangeType)
+ }
+ selectedValue={getInputLabel(
dayjs(minTime / 1000000),
dayjs(maxTime / 1000000),
selectedTime,
)}
data-testid="dropDown"
- style={{
- minWidth: 120,
- }}
- >
- {options.map(({ value, label }) => (
-
- {label}
-
- ))}
-
+ items={options}
+ />
-
+ {!hasSelectedTimeError && (
+
+ )}
diff --git a/frontend/src/container/TopNav/index.tsx b/frontend/src/container/TopNav/index.tsx
--- a/frontend/src/container/TopNav/index.tsx
+++ b/frontend/src/container/TopNav/index.tsx
-	const isRouteToHideBreadCrumbs = useMemo(
-		() =>
-			routesToHideBreadCrumbs.some((route) =>
-				matchPath(location.pathname, { path: route, exact: true }),
-			),
-		[location.pathname],
-	);
-
const isDisabled = useMemo(
() =>
routesToDisable.some((route) =>
@@ -50,15 +36,9 @@ function TopNav(): JSX.Element | null {
}
return (
-
- {!isRouteToHideBreadCrumbs && (
-
-
-
- )}
-
+
{!isRouteToSkip && (
-
+
@@ -69,7 +49,7 @@ function TopNav(): JSX.Element | null {
)}
-
+
);
}
diff --git a/frontend/src/container/TopNav/styles.ts b/frontend/src/container/TopNav/styles.ts
index ef3cb15c37..4c88c63246 100644
--- a/frontend/src/container/TopNav/styles.ts
+++ b/frontend/src/container/TopNav/styles.ts
@@ -3,6 +3,6 @@ import styled from 'styled-components';
export const Container = styled(Row)`
&&& {
- margin-top: 2rem;
+ margin-top: 1rem;
}
`;
diff --git a/frontend/src/hooks/analytics/useAnalytics.tsx b/frontend/src/hooks/analytics/useAnalytics.tsx
index 23d535063d..28213c9579 100644
--- a/frontend/src/hooks/analytics/useAnalytics.tsx
+++ b/frontend/src/hooks/analytics/useAnalytics.tsx
@@ -26,8 +26,8 @@ const useAnalytics = (): any => {
},
};
- const updatedPropertes = { ...properties };
- updatedPropertes.userId = user.email;
+ const updatedProperties = { ...properties };
+ updatedProperties.userId = user.email;
window.analytics.track(eventName, properties, context);
}
};
diff --git a/frontend/src/hooks/apDex/useGetMetricMeta.ts b/frontend/src/hooks/apDex/useGetMetricMeta.ts
index fd364e598d..6bf28c35a0 100644
--- a/frontend/src/hooks/apDex/useGetMetricMeta.ts
+++ b/frontend/src/hooks/apDex/useGetMetricMeta.ts
@@ -5,8 +5,9 @@ import { MetricMetaProps } from 'types/api/metrics/getApDex';
export const useGetMetricMeta = (
metricName: string,
+ servicename: string,
): UseQueryResult<SuccessResponse<MetricMetaProps>, AxiosError> =>
	useQuery<SuccessResponse<MetricMetaProps>, AxiosError>({
- queryKey: [{ metricName }],
- queryFn: async () => getMetricMeta(metricName),
+ queryKey: [{ metricName, servicename }],
+ queryFn: async () => getMetricMeta(metricName, servicename),
});
diff --git a/frontend/src/hooks/dashboard/utils.ts b/frontend/src/hooks/dashboard/utils.ts
index 930fc080b2..4dfb8ce9c3 100644
--- a/frontend/src/hooks/dashboard/utils.ts
+++ b/frontend/src/hooks/dashboard/utils.ts
@@ -33,6 +33,8 @@ export const addEmptyWidgetInDashboardJSONWithQuery = (
title: '',
timePreferance: 'GLOBAL_TIME',
panelTypes: panelTypes || PANEL_TYPES.TIME_SERIES,
+ softMax: null,
+ softMin: null,
},
],
},
diff --git a/frontend/src/hooks/logs/useActiveLog.ts b/frontend/src/hooks/logs/useActiveLog.ts
index 8dbd58976b..a56c13c72e 100644
--- a/frontend/src/hooks/logs/useActiveLog.ts
+++ b/frontend/src/hooks/logs/useActiveLog.ts
@@ -36,7 +36,9 @@ export const useActiveLog = (): UseActiveLog => {
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const { notifications } = useNotifications();
- const isLogsPage = useMemo(() => pathname === ROUTES.LOGS, [pathname]);
+ const isLogsPage = useMemo(() => pathname === ROUTES.OLD_LOGS_EXPLORER, [
+ pathname,
+ ]);
	const [activeLog, setActiveLog] = useState<ILog | null>(null);
@@ -135,7 +137,7 @@ export const useActiveLog = (): UseActiveLog => {
queryString,
);
- history.replace(`${ROUTES.LOGS}?q=${updatedQueryString}`);
+ history.replace(`${ROUTES.OLD_LOGS_EXPLORER}?q=${updatedQueryString}`);
},
[history, queryString],
);
diff --git a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
index d3796f96cf..c874f5e6db 100644
--- a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
+++ b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts
@@ -32,7 +32,7 @@ export const useGetExplorerQueryRange = (
const isEnabled = useMemo(() => {
if (!options) return isEnabledQuery;
if (typeof options.enabled === 'boolean') {
- return isEnabledQuery || options.enabled;
+ return isEnabledQuery && options.enabled;
}
return isEnabledQuery;
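
The one-character fix above changes the gating semantics: with `||`, a caller passing `enabled: false` could not disable the query once the internal readiness check passed; with `&&`, both conditions must hold. Reduced to a predicate:

// isEnabledQuery: internal readiness (e.g. a staged query exists).
// optionsEnabled: the caller's react-query `enabled` flag, if provided.
function resolveEnabled(
	isEnabledQuery: boolean,
	optionsEnabled?: boolean,
): boolean {
	if (typeof optionsEnabled === 'boolean') {
		return isEnabledQuery && optionsEnabled; // the caller can now veto the fetch
	}
	return isEnabledQuery;
}
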
diff --git a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
index e87d7007a7..8e883852cb 100644
--- a/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
+++ b/frontend/src/hooks/queryBuilder/useQueryBuilderOperations.ts
@@ -1,3 +1,4 @@
+import { LEGEND } from 'constants/global';
import {
initialAutocompleteData,
initialQueryBuilderFormValuesMap,
@@ -21,6 +22,7 @@ import {
} from 'types/common/operations.types';
import { DataSource } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
+import { getFormatedLegend } from 'utils/getFormatedLegend';
export const useQueryOperations: UseQueryOperations = ({
query,
@@ -157,7 +159,10 @@ export const useQueryOperations: UseQueryOperations = ({
(key, value) => {
const newQuery: IBuilderQuery = {
...query,
- [key]: value,
+ [key]:
+ key === LEGEND && typeof value === 'string'
+ ? getFormatedLegend(value)
+ : value,
};
handleSetQueryData(index, newQuery);
diff --git a/frontend/src/hooks/useDarkMode/index.tsx b/frontend/src/hooks/useDarkMode/index.tsx
index 069e08b2de..baf0c21511 100644
--- a/frontend/src/hooks/useDarkMode/index.tsx
+++ b/frontend/src/hooks/useDarkMode/index.tsx
@@ -76,6 +76,11 @@ export const useThemeConfig = (): ThemeConfig => {
borderRadiusXS: 2,
fontFamily: 'Inter',
fontSize: 13,
+ colorPrimary: '#4E74F8',
+ colorBgBase: isDarkMode ? '#0B0C0E' : '#fff',
+ colorBgContainer: isDarkMode ? '#121317' : '#fff',
+ colorLink: '#4E74F8',
+ colorPrimaryText: '#3F5ECC',
},
};
};
diff --git a/frontend/src/index.html.ejs b/frontend/src/index.html.ejs
index f46fd07f01..8d756463cd 100644
--- a/frontend/src/index.html.ejs
+++ b/frontend/src/index.html.ejs
@@ -67,7 +67,7 @@
diff --git a/frontend/src/index.tsx b/frontend/src/index.tsx
index 17320546af..405a9c6bc4 100644
--- a/frontend/src/index.tsx
+++ b/frontend/src/index.tsx
@@ -8,7 +8,6 @@ import { createRoot } from 'react-dom/client';
import { ErrorBoundary } from 'react-error-boundary';
import { HelmetProvider } from 'react-helmet-async';
import { QueryClient, QueryClientProvider } from 'react-query';
-import { ReactQueryDevtools } from 'react-query/devtools';
import { Provider } from 'react-redux';
import store from 'store';
@@ -33,7 +32,6 @@ if (container) {
- {process.env.NODE_ENV === 'development' && <ReactQueryDevtools />}
diff --git a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
index a86c1ee49c..b6e61a2ae6 100644
--- a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
+++ b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts
@@ -35,6 +35,8 @@ interface GetUPlotChartOptions {
fillSpans?: boolean;
minTimeScale?: number;
maxTimeScale?: number;
+ softMin: number | null;
+ softMax: number | null;
}
export const getUPlotChartOptions = ({
@@ -51,6 +53,8 @@ export const getUPlotChartOptions = ({
setGraphsVisibilityStates,
thresholds,
fillSpans,
+ softMax,
+ softMin,
}: GetUPlotChartOptions): uPlot.Options => {
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
@@ -87,11 +91,13 @@ export const getUPlotChartOptions = ({
...timeScaleProps,
},
y: {
- ...getYAxisScale(
+ ...getYAxisScale({
thresholds,
- apiResponse?.data.newResult.data.result,
+ series: apiResponse?.data.newResult.data.result,
yAxisUnit,
- ),
+ softMax,
+ softMin,
+ }),
},
},
plugins: [
diff --git a/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts
new file mode 100644
index 0000000000..ecd70084e8
--- /dev/null
+++ b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts
@@ -0,0 +1,211 @@
+import { PANEL_TYPES } from 'constants/queryBuilder';
+import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
+import { QueryDataV3 } from 'types/api/widgets/getQuery';
+
+import { GetYAxisScale, getYAxisScale } from './getYAxisScale';
+
+describe('getYAxisScale', () => {
+ const mockThresholds: ThresholdProps[] = [
+ {
+ index: '1',
+ keyIndex: 1,
+ thresholdValue: 10,
+ thresholdUnit: 'percentunit',
+ moveThreshold(dragIndex, hoverIndex): void {
+ console.log(dragIndex, hoverIndex);
+ },
+ selectedGraph: PANEL_TYPES.TIME_SERIES,
+ },
+ {
+ index: '2',
+ keyIndex: 2,
+ thresholdValue: 20,
+ thresholdUnit: 'percentunit',
+ moveThreshold(dragIndex, hoverIndex): void {
+ console.log(dragIndex, hoverIndex);
+ },
+ selectedGraph: PANEL_TYPES.TIME_SERIES,
+ },
+ ];
+
+ const mockSeriesData: QueryDataV3[] = [
+ {
+ list: null,
+ queryName: 'Mock Query',
+ series: [
+ {
+ labels: {},
+ values: [
+ { timestamp: 1, value: '15' },
+ { timestamp: 2, value: '25' },
+ ],
+ },
+ ],
+ },
+ ];
+
+ const mockYAxisUnit = 'percentunit';
+ const mockSoftMin = 5;
+ const mockSoftMax = 30;
+
+ it('threshold absent, series data absent and softmin and softmax are absent', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: [],
+ yAxisUnit: undefined,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({ auto: true });
+ });
+
+ it('Threshold absent, series data present, softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+
+ it('Only series data present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({ auto: true });
+ });
+
+ it('Threshold absent, series data present, softmin present and softmax absent', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 25],
+ });
+ });
+
+ it('Threshold absent, series data present, softmin absent and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [15, 30],
+ });
+ });
+
+ it('Threshold present, series absent and softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+
+ it('Only threshold data present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [10, 20],
+ });
+ });
+
+ it('Threshold present, series absent, softmin absent and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: null,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [10, 30],
+ });
+ });
+
+ it('Threshold data present, series data absent, softmin present and softmax absent', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: null,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 20],
+ });
+ });
+
+ it('Threshold data absent, series absent, softmin and softmax present', () => {
+ const result = getYAxisScale({
+ thresholds: [],
+ series: [],
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ range: {
+ min: { soft: mockSoftMin, mode: 2 },
+ max: { soft: mockSoftMax, mode: 2 },
+ },
+ });
+ });
+
+ it('All data present', () => {
+ const result = getYAxisScale({
+ thresholds: mockThresholds,
+ series: mockSeriesData,
+ yAxisUnit: mockYAxisUnit,
+ softMin: mockSoftMin,
+ softMax: mockSoftMax,
+ } as GetYAxisScale);
+
+ expect(result).toEqual({
+ auto: false,
+ range: [5, 30],
+ });
+ });
+});
diff --git a/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts b/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
index 503abd5790..42860ea8c8 100644
--- a/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
+++ b/frontend/src/lib/uPlotLib/utils/getYAxisScale.ts
@@ -2,6 +2,7 @@ import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/typ
import { convertValue } from 'lib/getConvertedValue';
import { isFinite } from 'lodash-es';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
+import uPlot from 'uplot';
function findMinMaxValues(data: QueryDataV3[]): [number, number] {
let min = Number.MAX_SAFE_INTEGER;
@@ -71,23 +72,167 @@ function areAllSeriesEmpty(series: QueryDataV3[]): boolean {
});
}
-export const getYAxisScale = (
- thresholds?: ThresholdProps[],
- series?: QueryDataV3[],
- yAxisUnit?: string,
-): {
- auto: boolean;
- range?: [number, number];
-} => {
- if (!thresholds || !series || thresholds.length === 0) return { auto: true };
+function configSoftMinMax(
+ softMin: number | null,
+ softMax: number | null,
+): { range: uPlot.Scale.Range } {
+ return {
+ range: {
+ min: {
+ soft: softMin !== null ? softMin : undefined,
+ mode: 2,
+ },
+ max: {
+ soft: softMax !== null ? softMax : undefined,
+ mode: 2,
+ },
+ },
+ };
+}
- if (areAllSeriesEmpty(series)) return { auto: true };
+export const getYAxisScale = ({
+ thresholds,
+ series,
+ yAxisUnit,
+ softMin,
+ softMax,
+}: // eslint-disable-next-line sonarjs/cognitive-complexity
+GetYAxisScale): { auto?: boolean; range?: uPlot.Scale.Range } => {
+ // Situation: thresholds and series data is absent
+ if (
+ (!thresholds || thresholds.length === 0) &&
+ (!series || areAllSeriesEmpty(series))
+ ) {
+ // Situation: softMin is not null and softMax is null
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
- const [min, max] = getRange(thresholds, series, yAxisUnit);
+ // Situation: softMin is null and softMax is not null
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
- // Min and Max value can be same if the value is same for all the series
- if (min === max) {
+ // Situation: softMin is not null and softMax is not null
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ // Situation: softMin and softMax are null and no threshold and no series data
return { auto: true };
}
+
+ // Situation: thresholds are absent
+ if (!thresholds || thresholds.length === 0) {
+ // Situation: No thresholds data but series data is present
+ if (series && !areAllSeriesEmpty(series)) {
+ // Situation: softMin and softMax are null
+ if (softMin === null && softMax === null) {
+ return { auto: true };
+ }
+
+ // Situation: either softMin or softMax is not null
+ let [min, max] = findMinMaxValues(series);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the series
+ return { auto: true };
+ }
+
+ return { auto: false, range: [min, max] };
+ }
+
+ // Situation: No thresholds and no series data, but softMin and/or softMax is present
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
+
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
+
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ return { auto: true };
+ }
+
+ if (!series || areAllSeriesEmpty(series)) {
+ // series data is absent but threshold is present
+ if (thresholds.length > 0) {
+ // Situation: thresholds are present and series data is absent
+ let [min, max] = findMinMaxThresholdValues(thresholds, yAxisUnit);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the series
+ return { auto: true };
+ }
+
+ return { auto: false, range: [min, max] };
+ }
+
+ // Situation: softMin or softMax is not null
+ if (softMin !== null && softMax === null) {
+ return configSoftMinMax(softMin, softMin + 100);
+ }
+
+ if (softMin === null && softMax !== null) {
+ return configSoftMinMax(softMax - 100, softMax);
+ }
+
+ if (softMin !== null && softMax !== null) {
+ return configSoftMinMax(softMin, softMax);
+ }
+
+ return { auto: true };
+ }
+
+ // Situation: thresholds and series data are present
+ let [min, max] = getRange(thresholds, series, yAxisUnit);
+
+ if (softMin !== null) {
+ // Compare with softMin if it is not null
+ min = Math.min(min, softMin);
+ }
+
+ if (softMax !== null) {
+ // Compare with softMax if it is not null
+ max = Math.max(max, softMax);
+ }
+
+ if (min === max) {
+ // Min and Max value can be same if the value is same for all the series
+ return { auto: true };
+ }
+
return { auto: false, range: [min, max] };
};
+
+export type GetYAxisScale = {
+ thresholds?: ThresholdProps[];
+ series?: QueryDataV3[];
+ yAxisUnit?: string;
+ softMin: number | null;
+ softMax: number | null;
+};
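When both thresholds and series are absent, a lone soft bound is padded with a 100-unit window (softMin..softMin + 100, or softMax - 100..softMax) and passed to uPlot as a mode-2 soft range; in every other branch the soft bounds only widen the computed min/max. A quick sketch of the exported behavior, with illustrative values:

import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale';

// Only softMin is set, with no thresholds or series data:
// the max side is padded to softMin + 100.
const scale = getYAxisScale({ thresholds: [], series: [], softMin: 5, softMax: null });
// => { range: { min: { soft: 5, mode: 2 }, max: { soft: 105, mode: 2 } } }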
diff --git a/frontend/src/pages/Logs/index.tsx b/frontend/src/pages/Logs/index.tsx
index d690f40f9c..cbbcc22c4c 100644
--- a/frontend/src/pages/Logs/index.tsx
+++ b/frontend/src/pages/Logs/index.tsx
@@ -30,7 +30,7 @@ import { useSelectedLogView } from './hooks';
import PopoverContent from './PopoverContent';
import SpaceContainer from './styles';
-function Logs(): JSX.Element {
+function OldLogsExplorer(): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>();
const { order } = useSelector<AppState, ILogsReducer>((store) => store.logs);
const location = useLocation();
@@ -148,4 +148,4 @@ function Logs(): JSX.Element {
);
}
-export default Logs;
+export default OldLogsExplorer;
diff --git a/frontend/src/pages/LogsModulePage/LogsModulePage.tsx b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx
new file mode 100644
index 0000000000..ecd8d2dcfc
--- /dev/null
+++ b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx
@@ -0,0 +1,28 @@
+import RouteTab from 'components/RouteTab';
+import ROUTES from 'constants/routes';
+import history from 'lib/history';
+import LogsExplorer from 'pages/LogsExplorer';
+import Pipelines from 'pages/Pipelines';
+import { useLocation } from 'react-use';
+
+export const logsExplorer = {
+ Component: LogsExplorer,
+ name: 'Explorer',
+ route: ROUTES.LOGS,
+ key: ROUTES.LOGS,
+};
+
+export const logsPipelines = {
+ Component: Pipelines,
+ name: 'Pipelines',
+ route: ROUTES.LOGS_PIPELINES,
+ key: ROUTES.LOGS_PIPELINES,
+};
+
+export default function LogsModulePage(): JSX.Element {
+ const { pathname } = useLocation();
+
+ const routes = [logsExplorer, logsPipelines];
+
+ return <RouteTab routes={routes} activeKey={pathname} history={history} />;
+}
diff --git a/frontend/src/pages/LogsModulePage/index.tsx b/frontend/src/pages/LogsModulePage/index.tsx
new file mode 100644
index 0000000000..680368481f
--- /dev/null
+++ b/frontend/src/pages/LogsModulePage/index.tsx
@@ -0,0 +1,3 @@
+import LogsModulePage from './LogsModulePage';
+
+export default LogsModulePage;
diff --git a/frontend/src/pages/Pipelines/index.tsx b/frontend/src/pages/Pipelines/index.tsx
index 1a05a4010a..d646390dda 100644
--- a/frontend/src/pages/Pipelines/index.tsx
+++ b/frontend/src/pages/Pipelines/index.tsx
@@ -81,7 +81,7 @@ function Pipelines(): JSX.Element {
return (
- ;
+
);
}
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
index f80a4925bc..c35284241a 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss
@@ -1,7 +1,7 @@
.workspace-locked-container {
text-align: center;
padding: 48px;
- margin: 48px;
+ margin: 24px;
}
.workpace-locked-details {
@@ -9,11 +9,19 @@
margin: 0 auto;
}
-.update-credit-card-btn {
- margin: 24px 0;
- border-radius: 5px;
-}
-
.contact-us {
margin-top: 48px;
}
+
+.cta {
+ display: flex;
+ gap: 8px;
+ align-items: center;
+ justify-content: center;
+
+ .update-credit-card-btn,
+ .extend-trial-btn {
+ margin: 24px 0;
+ border-radius: 5px;
+ }
+}
diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
index 924509de82..d9df5265df 100644
--- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
+++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx
@@ -1,11 +1,17 @@
/* eslint-disable react/no-unescaped-entities */
import './WorkspaceLocked.styles.scss';
-import { CreditCardOutlined, LockOutlined } from '@ant-design/icons';
+import {
+ CreditCardOutlined,
+ LockOutlined,
+ SendOutlined,
+} from '@ant-design/icons';
import { Button, Card, Skeleton, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import ROUTES from 'constants/routes';
+import FullViewHeader from 'container/FullViewHeader/FullViewHeader';
+import useAnalytics from 'hooks/analytics/useAnalytics';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
@@ -21,6 +27,7 @@ export default function WorkspaceBlocked(): JSX.Element {
const { role } = useSelector<AppState, AppReducer>((state) => state.app);
const isAdmin = role === 'ADMIN';
const [activeLicense, setActiveLicense] = useState<License | null>(null);
+ const { trackEvent } = useAnalytics();
const { notifications } = useNotifications();
@@ -67,50 +74,90 @@ export default function WorkspaceBlocked(): JSX.Element {
);
const handleUpdateCreditCard = useCallback(async () => {
+ trackEvent('Workspace Blocked: User Clicked Update Credit Card');
+
updateCreditCard({
licenseKey: activeLicense?.key || '',
successURL: window.location.origin,
cancelURL: window.location.origin,
});
+ // eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeLicense?.key, updateCreditCard]);
+ const handleExtendTrial = (): void => {
+ trackEvent('Workspace Blocked: User Clicked Extend Trial');
+
+ const recipient = 'cloud-support@signoz.io';
+ const subject = 'Extend SigNoz Cloud Trial';
+ const body = `I'd like to request an extension for SigNoz Cloud for my account. Please find my account details below
+
+ SigNoz URL:
+ Admin Email:
+ `;
+
+ // Create the mailto link
+ const mailtoLink = `mailto:${recipient}?subject=${encodeURIComponent(
+ subject,
+ )}&body=${encodeURIComponent(body)}`;
+
+ // Open the default email client
+ window.location.href = mailtoLink;
+ };
+
return (
-
- {isLoadingLicenseData || !licensesData?.payload?.workSpaceBlock ? (
-
- ) : (
- <>
-
- Workspace Locked
-
- You have been locked out of your workspace because your trial ended
- without an upgrade to a paid plan. Your data will continue to be ingested
- till{' '}
- {getFormattedDate(licensesData?.payload?.gracePeriodEnd || Date.now())} ,
- at which point we will drop all the ingested data and terminate the
- account.
- {!isAdmin && 'Please contact your administrator for further help'}
-
- {isAdmin && (
- }
- size="middle"
- loading={isLoading}
- onClick={handleUpdateCreditCard}
- >
- Update Credit Card
-
- )}
-
- >
- )}
-
+ <>
+
+
+
+ {isLoadingLicenseData || !licensesData?.payload?.workSpaceBlock ? (
+
+ ) : (
+ <>
+
+ Workspace Locked
+
+ You have been locked out of your workspace because your trial ended
+ without an upgrade to a paid plan. Your data will continue to be ingested
+ till{' '}
+ {getFormattedDate(licensesData?.payload?.gracePeriodEnd || Date.now())} ,
+ at which point we will drop all the ingested data and terminate the
+ account.
+ {!isAdmin && 'Please contact your administrator for further help'}
+
+
+
+ {isAdmin && (
+ }
+ size="middle"
+ loading={isLoading}
+ onClick={handleUpdateCreditCard}
+ >
+ Update Credit Card
+
+ )}
+
+ }
+ size="middle"
+ onClick={handleExtendTrial}
+ >
+ Extend Trial
+
+
+
+ >
+ )}
+
+ >
);
}
diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss
index 6712b4c59a..71ce16d17d 100644
--- a/frontend/src/styles.scss
+++ b/frontend/src/styles.scss
@@ -1,3 +1,5 @@
+@import '@signozhq/design-tokens';
+
#root,
html,
body {
@@ -128,3 +130,9 @@ body {
--tw-bg-opacity: 1;
background-color: rgba(236, 236, 241, var(--tw-bg-opacity));
}
+
+.flexBtn {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts
index 3ad1c70f73..bb302c152b 100644
--- a/frontend/src/types/api/dashboard/getAll.ts
+++ b/frontend/src/types/api/dashboard/getAll.ts
@@ -73,6 +73,8 @@ export interface IBaseWidget {
stepSize?: number;
yAxisUnit?: string;
thresholds?: ThresholdProps[];
+ softMin: number | null;
+ softMax: number | null;
fillSpans?: boolean;
}
export interface Widgets extends IBaseWidget {
diff --git a/frontend/src/utils/getFormatedLegend.ts b/frontend/src/utils/getFormatedLegend.ts
new file mode 100644
index 0000000000..1b3a5ea988
--- /dev/null
+++ b/frontend/src/utils/getFormatedLegend.ts
@@ -0,0 +1,2 @@
+export const getFormatedLegend = (value: string): string =>
+ value.replace(/\{\s*\{\s*(.*?)\s*\}\s*\}/g, '{{$1}}');
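The regex collapses whitespace inside doubly-braced legend variables, so user-typed variants normalize to the same template token. For example (an illustrative call, not part of the patch):

getFormatedLegend('{ { service_name } } - { {status} }');
// => '{{service_name}} - {{status}}'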
diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts
index ee1a7a09e9..91372d237b 100644
--- a/frontend/src/utils/permission/index.ts
+++ b/frontend/src/utils/permission/index.ts
@@ -86,4 +86,6 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
BILLING: ['ADMIN', 'EDITOR', 'VIEWER'],
SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'],
SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'],
+ LOGS_BASE: [],
+ OLD_LOGS_EXPLORER: [],
};
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index 2099c438bb..fe33785fb5 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -3082,6 +3082,13 @@
resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz"
integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==
+"@signozhq/design-tokens@0.0.6":
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-0.0.6.tgz#42449052dca644c4d52448f9c2c521d39e535720"
+ integrity sha512-i+aG0YCuYL2KVUtRFj3qgAVDU6GbKmTdFXpqCqLUQp8diKMWH5Svzzxj4B14Q6+yE79+wbm1iZ0Nr6nYgkBA8Q==
+ dependencies:
+ style-dictionary "3.8.0"
+
"@sinclair/typebox@^0.25.16":
version "0.25.24"
resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz"
@@ -5422,6 +5429,15 @@ canvas-color-tracker@1:
dependencies:
tinycolor2 "^1.6.0"
+capital-case@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669"
+ integrity sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case-first "^2.0.2"
+
cardboard-vr-display@^1.0.19:
version "1.0.19"
resolved "https://registry.npmjs.org/cardboard-vr-display/-/cardboard-vr-display-1.0.19.tgz"
@@ -5461,6 +5477,24 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
+change-case@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12"
+ integrity sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==
+ dependencies:
+ camel-case "^4.1.2"
+ capital-case "^1.0.4"
+ constant-case "^3.0.4"
+ dot-case "^3.0.4"
+ header-case "^2.0.4"
+ no-case "^3.0.4"
+ param-case "^3.0.4"
+ pascal-case "^3.1.2"
+ path-case "^3.0.4"
+ sentence-case "^3.0.4"
+ snake-case "^3.0.4"
+ tslib "^2.0.3"
+
char-regex@^1.0.2:
version "1.0.2"
resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz"
@@ -5842,6 +5876,15 @@ connect-history-api-fallback@^2.0.0:
resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz"
integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==
+constant-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1"
+ integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case "^2.0.2"
+
content-disposition@0.5.4:
version "0.5.4"
resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz"
@@ -7993,7 +8036,7 @@ glob-to-regexp@^0.4.1:
resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz"
integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
-glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6:
+glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.0:
version "7.2.3"
resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz"
integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
@@ -8329,6 +8372,14 @@ he@^1.2.0:
resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+header-case@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/header-case/-/header-case-2.0.4.tgz#5a42e63b55177349cf405beb8d775acabb92c063"
+ integrity sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==
+ dependencies:
+ capital-case "^1.0.4"
+ tslib "^2.0.3"
+
headers-polyfill@3.2.5:
version "3.2.5"
resolved "https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-3.2.5.tgz#6e67d392c9d113d37448fe45014e0afdd168faed"
@@ -9871,6 +9922,11 @@ json5@^1.0.2:
dependencies:
minimist "^1.2.0"
+jsonc-parser@^3.0.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76"
+ integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==
+
jsonfile@^6.0.1:
version "6.1.0"
resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz"
@@ -11789,6 +11845,14 @@ pascal-case@^3.1.2:
no-case "^3.0.4"
tslib "^2.0.3"
+path-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f"
+ integrity sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
path-exists@^4.0.0:
version "4.0.0"
resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz"
@@ -13841,6 +13905,15 @@ send@0.18.0:
range-parser "~1.2.1"
statuses "2.0.1"
+sentence-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-3.0.4.tgz#3645a7b8c117c787fde8702056225bb62a45131f"
+ integrity sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+ upper-case-first "^2.0.2"
+
serialize-javascript@^5.0.1:
version "5.0.1"
resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz"
@@ -14017,6 +14090,14 @@ slice-ansi@^5.0.0:
ansi-styles "^6.0.0"
is-fullwidth-code-point "^4.0.0"
+snake-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c"
+ integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
sockjs@^0.3.24:
version "0.3.24"
resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz"
@@ -14397,6 +14478,21 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+style-dictionary@3.8.0:
+ version "3.8.0"
+ resolved "https://registry.yarnpkg.com/style-dictionary/-/style-dictionary-3.8.0.tgz#7cb8d64360c53431f768d44def665f61e971a73e"
+ integrity sha512-wHlB/f5eO3mDcYv6WtOz6gvQC477jBKrwuIXe+PtHskTCBsJdAOvL8hCquczJxDui2TnwpeNE+2msK91JJomZg==
+ dependencies:
+ chalk "^4.0.0"
+ change-case "^4.1.2"
+ commander "^8.3.0"
+ fs-extra "^10.0.0"
+ glob "^7.2.0"
+ json5 "^2.2.2"
+ jsonc-parser "^3.0.0"
+ lodash "^4.17.15"
+ tinycolor2 "^1.4.1"
+
style-loader@1.3.0:
version "1.3.0"
resolved "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz"
@@ -14698,7 +14794,7 @@ tiny-warning@^1.0.0:
resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz"
integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
-tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.6.0:
+tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.4.1, tinycolor2@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.6.0.tgz#f98007460169b0263b97072c5ae92484ce02d09e"
integrity sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==
@@ -15156,6 +15252,20 @@ uplot@1.6.26:
resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.26.tgz#a6012fd141ad4a71741c75af0c71283d0ade45a7"
integrity sha512-qN0mveL6UsP40TnHzHAJkUQvpfA3y8zSLXtXKVlJo/sLfj2+vjan/Z3g81MCZjy/hEDUFNtnLftPmETDA4s7Rg==
+upper-case-first@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324"
+ integrity sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==
+ dependencies:
+ tslib "^2.0.3"
+
+upper-case@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-2.0.2.tgz#d89810823faab1df1549b7d97a76f8662bae6f7a"
+ integrity sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==
+ dependencies:
+ tslib "^2.0.3"
+
uri-js@^4.2.2:
version "4.4.1"
resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz"
diff --git a/go.mod b/go.mod
index 9ea23cfd4f..23505f3f98 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.21
require (
github.com/ClickHouse/clickhouse-go/v2 v2.15.0
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb
- github.com/SigNoz/signoz-otel-collector v0.88.6
+ github.com/SigNoz/signoz-otel-collector v0.88.8
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3
diff --git a/go.sum b/go.sum
index 0d385a233b..b7c5de4e09 100644
--- a/go.sum
+++ b/go.sum
@@ -56,8 +56,8 @@ cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq
contrib.go.opencensus.io/exporter/prometheus v0.4.2 h1:sqfsYl5GIY/L570iT+l93ehxaWJs2/OwXtiWwew3oAg=
contrib.go.opencensus.io/exporter/prometheus v0.4.2/go.mod h1:dvEHbiKmgvbr5pjaF9fpw1KeYcjrnC1J8B+JKjsZyRQ=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Azure/azure-sdk-for-go v65.0.0+incompatible h1:HzKLt3kIwMm4KeJYTdx9EbjRYTySD/t8i1Ee/W5EGXw=
-github.com/Azure/azure-sdk-for-go v65.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
+github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg=
@@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFm
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
-github.com/SigNoz/signoz-otel-collector v0.88.6 h1:rvXm9bz4b9GsYeT8c3+F/g56DHPf0IN8mK8tUfZfnw8=
-github.com/SigNoz/signoz-otel-collector v0.88.6/go.mod h1:6lR8Uy99zBd0JGPg9zt0aEBW4A4GpblUtpcbszGmg8E=
+github.com/SigNoz/signoz-otel-collector v0.88.8 h1:oa/0gSfkGhjzXtz1htzWBQx3p4VhBPs5iwMRxqfa2uo=
+github.com/SigNoz/signoz-otel-collector v0.88.8/go.mod h1:7I4FWwraVSnDywsPNbo8TdHDsPxShtYkGU5usr6dTtk=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
diff --git a/pkg/query-service/app/apdex.go b/pkg/query-service/app/apdex.go
index 6854a91367..e3498d9c80 100644
--- a/pkg/query-service/app/apdex.go
+++ b/pkg/query-service/app/apdex.go
@@ -36,7 +36,8 @@ func (aH *APIHandler) getApdexSettings(w http.ResponseWriter, r *http.Request) {
func (aH *APIHandler) getLatencyMetricMetadata(w http.ResponseWriter, r *http.Request) {
metricName := r.URL.Query().Get("metricName")
- metricMetadata, err := aH.reader.GetLatencyMetricMetadata(r.Context(), metricName, aH.preferDelta)
+ serviceName := r.URL.Query().Get("serviceName")
+ metricMetadata, err := aH.reader.GetLatencyMetricMetadata(r.Context(), metricName, serviceName, aH.preferDelta)
if err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index 0340474596..3f11a4823a 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -4057,8 +4057,8 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
return &attributeValues, nil
}
-func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricName string, preferDelta bool) (*v3.LatencyMetricMetadataResponse, error) {
- query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s'", signozMetricDBName, signozTSTableName, metricName)
+func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricName, serviceName string, preferDelta bool) (*v3.LatencyMetricMetadataResponse, error) {
+ query := fmt.Sprintf("SELECT DISTINCT(temporality) from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s'", signozMetricDBName, signozTSTableName, metricName, serviceName)
rows, err := r.db.Query(ctx, query, metricName)
if err != nil {
zap.S().Error(err)
@@ -4077,7 +4077,7 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN
}
}
- query = fmt.Sprintf("SELECT DISTINCT(toFloat64(JSONExtractString(labels, 'le'))) as le from %s.%s WHERE metric_name='%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName)
+ query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name='%s' AND JSONExtractString(labels, 'service_name') = '%s' ORDER BY le", signozMetricDBName, signozTSTableName, metricName, serviceName)
rows, err = r.db.Query(ctx, query, metricName)
if err != nil {
zap.S().Error(err)
@@ -4087,10 +4087,18 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN
var leFloat64 []float64
for rows.Next() {
- var le float64
- if err := rows.Scan(&le); err != nil {
+ var leStr string
+ if err := rows.Scan(&leStr); err != nil {
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
}
+ le, err := strconv.ParseFloat(leStr, 64)
+ // Ignore the error and continue if the value is not a float.
+ // Ideally this should not happen, but we have seen ClickHouse
+ // return an empty string for some values.
+ if err != nil {
+ zap.S().Error("error while parsing le value: ", err)
+ continue
+ }
if math.IsInf(le, 0) {
continue
}
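strconv.ParseFloat rejects the empty strings ClickHouse occasionally returns for the 'le' label, so the loop now logs and skips those rows instead of failing the whole request; "+Inf" still parses but is dropped by the existing IsInf check. A standalone sketch of that filtering:

package main

import (
	"fmt"
	"math"
	"strconv"
)

func main() {
	// Mirrors the skip-on-parse-error behavior above: "" is skipped,
	// "+Inf" parses but is excluded, finite values pass through.
	for _, leStr := range []string{"0.1", "", "+Inf", "2.5"} {
		le, err := strconv.ParseFloat(leStr, 64)
		if err != nil || math.IsInf(le, 0) {
			continue
		}
		fmt.Println(le) // prints 0.1, then 2.5
	}
}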
diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go
index 1d01267860..173fe39ef4 100644
--- a/pkg/query-service/app/http_handler.go
+++ b/pkg/query-service/app/http_handler.go
@@ -29,6 +29,7 @@ import (
metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
"go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/app/querier"
+ querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/auth"
@@ -78,6 +79,7 @@ type APIHandler struct {
featureFlags interfaces.FeatureLookup
ready func(http.HandlerFunc) http.HandlerFunc
querier interfaces.Querier
+ querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
preferDelta bool
preferSpanMetrics bool
@@ -142,7 +144,16 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
FeatureLookup: opts.FeatureFlags,
}
+ querierOptsV2 := querierV2.QuerierOptions{
+ Reader: opts.Reader,
+ Cache: opts.Cache,
+ KeyGenerator: queryBuilder.NewKeyGenerator(),
+ FluxInterval: opts.FluxInterval,
+ FeatureLookup: opts.FeatureFlags,
+ }
+
querier := querier.NewQuerier(querierOpts)
+ querierv2 := querierV2.NewQuerier(querierOptsV2)
aH := &APIHandler{
reader: opts.Reader,
@@ -158,6 +169,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
featureFlags: opts.FeatureFlags,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
querier: querier,
+ querierV2: querierv2,
}
builderOpts := queryBuilder.QueryBuilderOptions{
@@ -320,6 +332,11 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMid
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.liveTailLogs)).Methods(http.MethodGet)
}
+func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *AuthMiddleware) {
+ subRouter := router.PathPrefix("/api/v4").Subrouter()
+ subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeV4)).Methods(http.MethodPost)
+}
+
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)
}
@@ -542,7 +559,7 @@ func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParam
if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 {
for name := range qp.CompositeQuery.BuilderQueries {
query := qp.CompositeQuery.BuilderQueries[name]
- if query.DataSource == v3.DataSourceMetrics {
+ if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" {
if aH.preferDelta && metricNameToTemporality[query.AggregateAttribute.Key][v3.Delta] {
query.Temporality = v3.Delta
} else if metricNameToTemporality[query.AggregateAttribute.Key][v3.Cumulative] {
@@ -3241,3 +3258,67 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) {
}
}
}
+
+func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3, w http.ResponseWriter, r *http.Request) {
+
+ var result []*v3.Result
+ var err error
+ var errQueriesByName map[string]string
+ var spanKeys map[string]v3.AttributeKey
+ if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+ // check if any enrichment is required for logs if yes then enrich them
+ if logsv3.EnrichmentRequired(queryRangeParams) {
+ // get the fields if any logs query is present
+ var fields map[string]v3.AttributeKey
+ fields, err = aH.getLogFieldsV3(ctx, queryRangeParams)
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
+ RespondError(w, apiErrObj, errQueriesByName)
+ return
+ }
+ logsv3.Enrich(queryRangeParams, fields)
+ }
+
+ spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
+ RespondError(w, apiErrObj, errQueriesByName)
+ return
+ }
+ }
+
+ result, err, errQueriesByName = aH.querierV2.QueryRange(ctx, queryRangeParams, spanKeys)
+
+ if err != nil {
+ apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
+ RespondError(w, apiErrObj, errQueriesByName)
+ return
+ }
+
+ resp := v3.QueryRangeResponse{
+ Result: result,
+ }
+
+ aH.Respond(w, resp)
+}
+
+func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
+ queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
+
+ if apiErrorObj != nil {
+ zap.S().Errorf(apiErrorObj.Err.Error())
+ RespondError(w, apiErrorObj, nil)
+ return
+ }
+
+ // add temporality for each metric
+
+ temporalityErr := aH.addTemporality(r.Context(), queryRangeParams)
+ if temporalityErr != nil {
+ zap.S().Errorf("Error while adding temporality for metrics: %v", temporalityErr)
+ RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
+ return
+ }
+
+ aH.queryRangeV4(r.Context(), queryRangeParams, w, r)
+}
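RegisterQueryRangeV4Routes mounts the new endpoint beside the existing v3 routes, so server wiring is one extra call. A sketch under the assumption that router and am are constructed as elsewhere in this package:

// Hypothetical startup wiring.
router := mux.NewRouter()
apiHandler.RegisterQueryRangeV3Routes(router, am)
apiHandler.RegisterQueryRangeV4Routes(router, am) // adds POST /api/v4/query_range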
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table.go b/pkg/query-service/app/metrics/v4/cumulative/table.go
new file mode 100644
index 0000000000..3e021a5811
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/cumulative/table.go
@@ -0,0 +1,50 @@
+package cumulative
+
+import (
+ "fmt"
+
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+// PrepareMetricQueryCumulativeTable prepares the query to be used for fetching metrics
+func PrepareMetricQueryCumulativeTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
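The switch repeats an identical Sprintf per space aggregation; only the ClickHouse function name differs. A behavior-preserving compaction (a refactor sketch, not what the patch ships) maps the aggregation to its function name:

// Hypothetical compaction of the switch above, using the same v3 constants.
var spaceAggFunc = map[v3.SpaceAggregation]string{
	v3.SpaceAggregationAvg:   "avg",
	v3.SpaceAggregationSum:   "sum",
	v3.SpaceAggregationMin:   "min",
	v3.SpaceAggregationMax:   "max",
	v3.SpaceAggregationCount: "count",
}

if fn, ok := spaceAggFunc[mq.SpaceAggregation]; ok {
	op := fmt.Sprintf("%s(per_series_value)", fn)
	query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
}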
diff --git a/pkg/query-service/app/metrics/v4/cumulative/table_test.go b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
new file mode 100644
index 0000000000..d562b5d93a
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/cumulative/table_test.go
@@ -0,0 +1,112 @@
+package cumulative
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTableQuery(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQueryCumulativeTable(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
index 78d22be4aa..7dfa8fef87 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
@@ -3,7 +3,7 @@ package cumulative
import (
"fmt"
- v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
@@ -104,10 +104,10 @@ const (
// value to be reset to 0. This will produce an inaccurate result. The max is the best approximation we can get.
// We don't expect the process to restart very often, so this should be a good approximation.
-func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
- timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
if err != nil {
return "", err
}
@@ -127,15 +127,8 @@ func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.Bui
" GROUP BY fingerprint, ts" +
" ORDER BY fingerprint, ts"
- var selectLabelsAny string
- for _, tag := range mq.GroupBy {
- selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
- }
-
- var selectLabels string
- for _, tag := range mq.GroupBy {
- selectLabels += tag.Key + ","
- }
+ selectLabelsAny := helpers.SelectLabelsAny(mq.GroupBy)
+ selectLabels := helpers.SelectLabels(mq.GroupBy)
switch mq.TimeAggregation {
case v3.TimeAggregationAvg:
@@ -177,18 +170,18 @@ func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.Bui
return subQuery, nil
}
-// prepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
-func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+// PrepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
+func PrepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var query string
- temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
if err != nil {
return "", err
}
- groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
- orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
- selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
index 671af6ac69..91dd1c4a1e 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go
@@ -113,7 +113,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareTimeAggregationSubQueryTimeSeries(
+ query, err := prepareTimeAggregationSubQuery(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
@@ -216,7 +216,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := prepareMetricQueryCumulativeTimeSeries(
+ query, err := PrepareMetricQueryCumulativeTimeSeries(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
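helpers.SelectLabelsAny and helpers.SelectLabels replace the inline loops deleted above, so their expected output follows from that removed code. For a GroupBy of service_name and operation (an illustration inferred from the old loops, not from the helpers' source):

keys := []v3.AttributeKey{{Key: "service_name"}, {Key: "operation"}}
fmt.Println(helpers.SelectLabelsAny(keys)) // any(service_name) as service_name,any(operation) as operation,
fmt.Println(helpers.SelectLabels(keys))    // service_name,operation,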
diff --git a/pkg/query-service/app/metrics/v4/delta/table.go b/pkg/query-service/app/metrics/v4/delta/table.go
new file mode 100644
index 0000000000..bec10772f5
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/table.go
@@ -0,0 +1,55 @@
+package delta
+
+import (
+ "fmt"
+
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+// PrepareMetricQueryDeltaTable builds the query to be used for fetching metrics
+func PrepareMetricQueryDeltaTable(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ if canShortCircuit(mq) {
+ return prepareQueryOptimized(start, end, step, mq)
+ }
+
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/table_test.go b/pkg/query-service/app/metrics/v4/delta/table_test.go
new file mode 100644
index 0000000000..c7bce4268c
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/table_test.go
@@ -0,0 +1,114 @@
+package delta
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTableQuery(t *testing.T) {
+ // The table query is almost the same as the time series query, except that
+ // each row will be reduced to a single value using the `ReduceTo` aggregation
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQueryDeltaTable(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
new file mode 100644
index 0000000000..024371d328
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go
@@ -0,0 +1,229 @@
+package delta
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func TestPrepareTimeAggregationSubQuery(t *testing.T) {
+	// Time aggregation is performed per unique series: since the fingerprint is the
+	// unique hash of a label set, we always group by fingerprint regardless of the GroupBy.
+	// This sub-result is then aggregated across dimensions using the provided GroupBy keys.
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "payment_service",
+ },
+ {
+ Key: v3.AttributeKey{
+ Key: "endpoint",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorIn,
+ Value: []interface{}{"/paycallback", "/payme", "/paypal"},
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
+ },
+ {
+ name: "test time aggregation = rate, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := prepareTimeAggregationSubQuery(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareTimeseriesQuery(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ start int64
+ end int64
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_memory_usage",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "state",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorNotEqual,
+ Value: "idle",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "http_requests",
+ DataType: v3.AttributeKeyDataTypeFloat64,
+ Type: v3.AttributeKeyTypeUnspecified,
+ IsColumn: true,
+ IsJSON: false,
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "payment_service",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ start: 1701794980000,
+ end: 1701796780000,
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQueryDeltaTimeSeries(
+ testCase.start,
+ testCase.end,
+ testCase.builderQuery.StepInterval,
+ testCase.builderQuery,
+ )
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
diff --git a/pkg/query-service/app/metrics/v4/delta/timeseries.go b/pkg/query-service/app/metrics/v4/delta/timeseries.go
new file mode 100644
index 0000000000..3d6999f425
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/delta/timeseries.go
@@ -0,0 +1,196 @@
+package delta
+
+import (
+ "fmt"
+
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+)
+
+// prepareTimeAggregationSubQuery builds the sub-query to be used for temporal aggregation
+func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ var subQuery string
+
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
+ if err != nil {
+ return "", err
+ }
+
+ samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
+
+ // Select the aggregate value for interval
+ queryTmpl :=
+ "SELECT fingerprint, %s" +
+ " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
+ " %s as per_series_value" +
+ " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
+ " INNER JOIN" +
+ " (%s) as filtered_time_series" +
+ " USING fingerprint" +
+ " WHERE " + samplesTableFilter +
+ " GROUP BY fingerprint, ts" +
+ " ORDER BY fingerprint, ts"
+
+ selectLabelsAny := helpers.SelectLabelsAny(mq.GroupBy)
+
+ switch mq.TimeAggregation {
+ case v3.TimeAggregationAvg:
+ op := "avg(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationSum:
+ op := "sum(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationMin:
+ op := "min(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationMax:
+ op := "max(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationCount:
+ op := "count(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationCountDistinct:
+ op := "count(distinct(value))"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationAnyLast:
+ op := "anyLast(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationRate:
+ op := fmt.Sprintf("sum(value)/%d", step)
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ case v3.TimeAggregationIncrease:
+ op := "sum(value)"
+ subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
+ }
+ return subQuery, nil
+}
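+
+// Each case above differs only in the aggregate expression. timeAggOp below is a
+// hypothetical refactor of that switch, shown for clarity only (it is not part of
+// this PR): it maps a time aggregation to its ClickHouse expression, with "" for
+// unrecognized values, matching the switch's fall-through behavior.
+//
+//	func timeAggOp(agg v3.TimeAggregation, step int64) string {
+//		switch agg {
+//		case v3.TimeAggregationAvg:
+//			return "avg(value)"
+//		case v3.TimeAggregationSum, v3.TimeAggregationIncrease:
+//			// For delta metrics, increase over a window is just the window sum.
+//			return "sum(value)"
+//		case v3.TimeAggregationMin:
+//			return "min(value)"
+//		case v3.TimeAggregationMax:
+//			return "max(value)"
+//		case v3.TimeAggregationCount:
+//			return "count(value)"
+//		case v3.TimeAggregationCountDistinct:
+//			return "count(distinct(value))"
+//		case v3.TimeAggregationAnyLast:
+//			return "anyLast(value)"
+//		case v3.TimeAggregationRate:
+//			// Delta samples are already per-report increments, so the rate is
+//			// the window sum divided by the step size in seconds.
+//			return fmt.Sprintf("sum(value)/%d", step)
+//		}
+//		return ""
+//	}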
+
+// See `canShortCircuit` below for details
+func prepareQueryOptimized(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.SelectLabels(mq.GroupBy)
+
+ var query string
+
+ timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq)
+ if err != nil {
+ return "", err
+ }
+
+ samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
+
+ // Select the aggregate value for interval
+ queryTmpl :=
+ "SELECT %s" +
+ " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
+ " %s as value" +
+ " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
+ " INNER JOIN" +
+ " (%s) as filtered_time_series" +
+ " USING fingerprint" +
+ " WHERE " + samplesTableFilter +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationSum:
+ op := "sum(value)"
+ if mq.TimeAggregation == v3.TimeAggregationRate {
+ op = "sum(value)/" + fmt.Sprintf("%d", step)
+ }
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, step, op, timeSeriesSubQuery, groupBy, orderBy)
+ }
+ return query, nil
+}
+
+// PrepareMetricQueryDeltaTimeSeries builds the time series query for fetching delta metrics
+func PrepareMetricQueryDeltaTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
+
+ if canShortCircuit(mq) {
+ return prepareQueryOptimized(start, end, step, mq)
+ }
+
+ var query string
+
+ temporalAggSubQuery, err := prepareTimeAggregationSubQuery(start, end, step, mq)
+ if err != nil {
+ return "", err
+ }
+
+ groupBy := helpers.GroupingSetsByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+ selectLabels := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
+
+ queryTmpl :=
+ "SELECT %s," +
+ " %s as value" +
+ " FROM (%s)" +
+ " WHERE isNaN(per_series_value) = 0" +
+ " GROUP BY %s" +
+ " ORDER BY %s"
+
+ switch mq.SpaceAggregation {
+ case v3.SpaceAggregationAvg:
+ op := "avg(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationSum:
+ op := "sum(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMin:
+ op := "min(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationMax:
+ op := "max(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ case v3.SpaceAggregationCount:
+ op := "count(per_series_value)"
+ query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
+ }
+
+ return query, nil
+}
+
+// canShortCircuit returns true if the optimized single-pass query can be
+// used for the given query.
+// Skipping the intermediate group-by-fingerprint pass improves performance
+// for these queries.
+// Cases where we can short-circuit:
+// 1. time aggregation = (rate|increase) and space aggregation = sum
+// - rate = sum(value)/step, increase = sum(value); a sum of sums equals the sum of all values
+//
+// 2. time aggregation = sum and space aggregation = sum
+// - a sum of sums equals the sum of all values
+//
+// 3. time aggregation = min and space aggregation = min
+// - a min of mins equals the min of all values
+//
+// 4. time aggregation = max and space aggregation = max
+// - a max of maxes equals the max of all values
+//
+// All of this holds only for delta metrics.
+func canShortCircuit(mq *v3.BuilderQuery) bool {
+ if (mq.TimeAggregation == v3.TimeAggregationRate || mq.TimeAggregation == v3.TimeAggregationIncrease) && mq.SpaceAggregation == v3.SpaceAggregationSum {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationSum && mq.SpaceAggregation == v3.SpaceAggregationSum {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationMin && mq.SpaceAggregation == v3.SpaceAggregationMin {
+ return true
+ }
+ if mq.TimeAggregation == v3.TimeAggregationMax && mq.SpaceAggregation == v3.SpaceAggregationMax {
+ return true
+ }
+ return false
+}
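
The short-circuit is sound because these aggregation pairs are associative: aggregating per-series partials and then aggregating across series gives the same answer as aggregating every raw value in one pass. A small self-contained check of the sum/sum case:

```go
package main

import "fmt"

// Demonstrates the associativity canShortCircuit relies on: summing
// per-series sums equals summing every raw value directly, so the
// intermediate group-by-fingerprint pass can be skipped.
func main() {
	series := map[string][]float64{
		"fp1": {1, 2, 3},
		"fp2": {4, 5},
	}

	twoPass := 0.0 // time aggregation per series, then space aggregation
	onePass := 0.0 // aggregate every sample directly
	for _, values := range series {
		perSeries := 0.0
		for _, v := range values {
			perSeries += v
			onePass += v
		}
		twoPass += perSeries
	}

	fmt.Println(twoPass == onePass) // true: both are 15
}
```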
diff --git a/pkg/query-service/app/metrics/v4/cumulative/helper.go b/pkg/query-service/app/metrics/v4/helpers/clauses.go
similarity index 58%
rename from pkg/query-service/app/metrics/v4/cumulative/helper.go
rename to pkg/query-service/app/metrics/v4/helpers/clauses.go
index 6e692d3f37..06f4b13cea 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/helper.go
+++ b/pkg/query-service/app/metrics/v4/helpers/clauses.go
@@ -1,4 +1,4 @@
-package cumulative
+package helpers
import (
"fmt"
@@ -18,8 +18,8 @@ func groupingSets(tags ...string) string {
}
}
-// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
-func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
+// GroupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
+func GroupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
@@ -27,8 +27,8 @@ func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
return groupingSets(groupTags...)
}
-// groupBy returns a string of comma separated tags for group by clause
-func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
+// GroupByAttributeKeyTags returns a string of comma separated tags for group by clause
+func GroupByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
@@ -37,9 +37,9 @@ func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
return strings.Join(groupTags, ", ")
}
-// orderBy returns a string of comma separated tags for order by clause
+// OrderByAttributeKeyTags returns a string of comma separated tags for order by clause
// if the order is not specified, it defaults to ASC
-func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
+func OrderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
var orderBy []string
for _, tag := range tags {
found := false
@@ -59,3 +59,19 @@ func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string
return strings.Join(orderBy, ", ")
}
+
+func SelectLabelsAny(tags []v3.AttributeKey) string {
+ var selectLabelsAny []string
+ for _, tag := range tags {
+ selectLabelsAny = append(selectLabelsAny, fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key))
+ }
+ return strings.Join(selectLabelsAny, " ")
+}
+
+func SelectLabels(tags []v3.AttributeKey) string {
+ var selectLabels []string
+ for _, tag := range tags {
+ selectLabels = append(selectLabels, fmt.Sprintf("%s,", tag.Key))
+ }
+ return strings.Join(selectLabels, " ")
+}
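
The two new helpers render the label fragments that the query templates splice in before the `ts` column; the trailing comma in each fragment is intentional for that reason. A usage sketch (expected output inferred from the code above):

```go
package main

import (
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func main() {
	tags := []v3.AttributeKey{{Key: "service_name"}, {Key: "endpoint"}}

	// The temporal sub-query keeps one label value per fingerprint via any().
	fmt.Println(helpers.SelectLabelsAny(tags))
	// any(service_name) as service_name, any(endpoint) as endpoint,

	// The optimized single-pass query selects the labels directly.
	fmt.Println(helpers.SelectLabels(tags))
	// service_name, endpoint,
}
```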
diff --git a/pkg/query-service/app/metrics/v4/helpers/sub_query.go b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
new file mode 100644
index 0000000000..97176e54bd
--- /dev/null
+++ b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
@@ -0,0 +1,86 @@
+package helpers
+
+import (
+ "fmt"
+ "strings"
+
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.signoz.io/signoz/pkg/query-service/utils"
+)
+
+// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria
+func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
+ var conditions []string
+ var fs *v3.FilterSet = mq.Filters
+ var groupTags []v3.AttributeKey = mq.GroupBy
+
+ conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key)))
+ conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality))
+
+ if fs != nil && len(fs.Items) != 0 {
+ for _, item := range fs.Items {
+ toFormat := item.Value
+ op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
+ if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains {
+ toFormat = fmt.Sprintf("%%%s%%", toFormat)
+ }
+ fmtVal := utils.ClickHouseFormattedValue(toFormat)
+ switch op {
+ case v3.FilterOperatorEqual:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotEqual:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorIn:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotIn:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLike:
+ conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotLike:
+ conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorRegex:
+ conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotRegex:
+ conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorGreaterThan:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorGreaterThanOrEq:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLessThan:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorLessThanOrEq:
+ conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal))
+ case v3.FilterOperatorContains:
+ conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorNotContains:
+ conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
+ case v3.FilterOperatorExists:
+ conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key))
+ case v3.FilterOperatorNotExists:
+ conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key))
+ default:
+ return "", fmt.Errorf("unsupported filter operator")
+ }
+ }
+ }
+ whereClause := strings.Join(conditions, " AND ")
+
+ var selectLabels string
+ for _, tag := range groupTags {
+ selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key)
+ }
+
+ // The table JOIN key always exists
+ selectLabels += "fingerprint"
+
+ filterSubQuery := fmt.Sprintf(
+ "SELECT DISTINCT %s FROM %s.%s WHERE %s",
+ selectLabels,
+ constants.SIGNOZ_METRIC_DBNAME,
+ constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME,
+ whereClause,
+ )
+
+ return filterSubQuery, nil
+}
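
A usage sketch of the relocated filter builder; the rendered SQL in the comment mirrors the expected strings in this PR's tests:

```go
package main

import (
	"fmt"

	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func main() {
	mq := &v3.BuilderQuery{
		AggregateAttribute: v3.AttributeKey{Key: "http_requests"},
		Temporality:        v3.Delta,
		Filters: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{{
				Key:      v3.AttributeKey{Key: "service_name", Type: v3.AttributeKeyTypeTag},
				Operator: v3.FilterOperatorNotEqual,
				Value:    "payment_service",
			}},
		},
		GroupBy: []v3.AttributeKey{{Key: "service_name"}},
	}

	query, err := helpers.PrepareTimeseriesFilterQuery(mq)
	if err != nil {
		panic(err)
	}
	fmt.Println(query)
	// SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint
	// FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests'
	// AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') != 'payment_service'
}
```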
diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go
index 70d35e8e08..f54f2ff059 100644
--- a/pkg/query-service/app/metrics/v4/query_builder.go
+++ b/pkg/query-service/app/metrics/v4/query_builder.go
@@ -2,85 +2,73 @@ package v4
import (
"fmt"
- "strings"
+ "time"
- "go.signoz.io/signoz/pkg/query-service/constants"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/cumulative"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/delta"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
+ "go.signoz.io/signoz/pkg/query-service/common"
+ "go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
- "go.signoz.io/signoz/pkg/query-service/utils"
)
-// PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria
-func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) {
- var conditions []string
- var fs *v3.FilterSet = mq.Filters
- var groupTags []v3.AttributeKey = mq.GroupBy
+// PrepareMetricQuery prepares the query to be used for fetching metrics
+// from the database.
+// start and end are in milliseconds; step (mq.StepInterval) is in seconds.
+func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options metricsV3.Options) (string, error) {
- conditions = append(conditions, fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key)))
- conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality))
+ start, end = common.AdjustedMetricTimeRange(start, end, mq.StepInterval, mq.TimeAggregation)
- if fs != nil && len(fs.Items) != 0 {
- for _, item := range fs.Items {
- toFormat := item.Value
- op := v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
- if op == v3.FilterOperatorContains || op == v3.FilterOperatorNotContains {
- toFormat = fmt.Sprintf("%%%s%%", toFormat)
- }
- fmtVal := utils.ClickHouseFormattedValue(toFormat)
- switch op {
- case v3.FilterOperatorEqual:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') = %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotEqual:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') != %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorIn:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') IN %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotIn:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') NOT IN %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLike:
- conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotLike:
- conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorRegex:
- conditions = append(conditions, fmt.Sprintf("match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotRegex:
- conditions = append(conditions, fmt.Sprintf("not match(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorGreaterThan:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') > %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorGreaterThanOrEq:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') >= %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLessThan:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') < %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorLessThanOrEq:
- conditions = append(conditions, fmt.Sprintf("JSONExtractString(labels, '%s') <= %s", item.Key.Key, fmtVal))
- case v3.FilterOperatorContains:
- conditions = append(conditions, fmt.Sprintf("like(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorNotContains:
- conditions = append(conditions, fmt.Sprintf("notLike(JSONExtractString(labels, '%s'), %s)", item.Key.Key, fmtVal))
- case v3.FilterOperatorExists:
- conditions = append(conditions, fmt.Sprintf("has(JSONExtractKeys(labels), '%s')", item.Key.Key))
- case v3.FilterOperatorNotExists:
- conditions = append(conditions, fmt.Sprintf("not has(JSONExtractKeys(labels), '%s')", item.Key.Key))
- default:
- return "", fmt.Errorf("unsupported filter operator")
- }
+ groupBy := helpers.GroupByAttributeKeyTags(mq.GroupBy...)
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
+
+ if mq.Quantile != 0 {
+ // If a quantile is set, we must group by 'le', force the space
+ // aggregation to sum, and the time aggregation to rate.
+ mq.TimeAggregation = v3.TimeAggregationRate
+ mq.SpaceAggregation = v3.SpaceAggregationSum
+ mq.GroupBy = append(mq.GroupBy, v3.AttributeKey{
+ Key: "le",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ })
+ }
+
+ var query string
+ var err error
+ if mq.Temporality == v3.Delta {
+ if panelType == v3.PanelTypeTable {
+ query, err = delta.PrepareMetricQueryDeltaTable(start, end, mq.StepInterval, mq)
+ } else {
+ query, err = delta.PrepareMetricQueryDeltaTimeSeries(start, end, mq.StepInterval, mq)
+ }
+ } else {
+ if panelType == v3.PanelTypeTable {
+ query, err = cumulative.PrepareMetricQueryCumulativeTable(start, end, mq.StepInterval, mq)
+ } else {
+ query, err = cumulative.PrepareMetricQueryCumulativeTimeSeries(start, end, mq.StepInterval, mq)
}
}
- whereClause := strings.Join(conditions, " AND ")
- var selectLabels string
- for _, tag := range groupTags {
- selectLabels += fmt.Sprintf("JSONExtractString(labels, '%s') as %s, ", tag.Key, tag.Key)
+ if err != nil {
+ return "", err
}
- // The table JOIN key always exists
- selectLabels += "fingerprint"
+ if mq.Quantile != 0 {
+ query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, mq.Quantile, query, groupBy, orderBy)
+ }
- filterSubQuery := fmt.Sprintf(
- "SELECT DISTINCT %s FROM %s.%s WHERE %s",
- selectLabels,
- constants.SIGNOZ_METRIC_DBNAME,
- constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME,
- whereClause,
- )
-
- return filterSubQuery, nil
+ return query, nil
+}
+
+func BuildPromQuery(promQuery *v3.PromQuery, step, start, end int64) *model.QueryRangeParams {
+ return &model.QueryRangeParams{
+ Query: promQuery.Query,
+ Start: time.UnixMilli(start),
+ End: time.UnixMilli(end),
+ Step: time.Duration(step * int64(time.Second)),
+ }
}
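
Putting it together, a caller sketch for the histogram quantile path: with a non-zero `Quantile`, `PrepareMetricQuery` forces rate/sum aggregation, appends `le` to the group-by keys, and wraps the inner query in `histogramQuantile`. The arguments mirror the quantile tests in the next file:

```go
package main

import (
	"fmt"

	metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
	v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func main() {
	mq := &v3.BuilderQuery{
		QueryName:          "A",
		StepInterval:       60,
		DataSource:         v3.DataSourceMetrics,
		AggregateAttribute: v3.AttributeKey{Key: "signoz_latency_bucket"},
		Temporality:        v3.Delta,
		Filters:            &v3.FilterSet{Operator: "AND"},
		Expression:         "A",
		Quantile:           0.99, // non-zero quantile triggers the histogram path
	}

	query, err := v4.PrepareMetricQuery(1650991982000, 1651078382000,
		v3.QueryTypeBuilder, v3.PanelTypeGraph, mq, metricsV3.Options{})
	if err != nil {
		panic(err)
	}
	fmt.Println(query) // ... histogramQuantile(..., 0.990) as value ...
}
```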
diff --git a/pkg/query-service/app/metrics/v4/query_builder_test.go b/pkg/query-service/app/metrics/v4/query_builder_test.go
index eb071ecb2f..429c25b8e8 100644
--- a/pkg/query-service/app/metrics/v4/query_builder_test.go
+++ b/pkg/query-service/app/metrics/v4/query_builder_test.go
@@ -4,6 +4,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ "go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
@@ -142,7 +144,385 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
- query, err := PrepareTimeseriesFilterQuery(testCase.builderQuery)
+ query, err := helpers.PrepareTimeseriesFilterQuery(testCase.builderQuery)
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryCumulativeRate(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{
+ {
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ {
+ Key: "endpoint",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryDeltaRate(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta, no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_calls_total",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ TimeAggregation: v3.TimeAggregationRate,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ },
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryCumulativeQuantile(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test temporality = cumulative, quantile = 0.99",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test temporality = cumulative, quantile = 0.99 without group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Cumulative,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryDeltaQuantile(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test temporality = delta, quantile = 0.99 group by service_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "service_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ },
+ {
+ name: "test temporality = delta, quantile = 0.99 no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "signoz_latency_bucket",
+ },
+ Temporality: v3.Delta,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{
+ {
+ Key: v3.AttributeKey{
+ Key: "service_name",
+ Type: v3.AttributeKeyTypeTag,
+ DataType: v3.AttributeKeyDataTypeString,
+ },
+ Operator: v3.FilterOperatorContains,
+ Value: "frontend",
+ },
+ },
+ },
+ Expression: "A",
+ Disabled: false,
+ Quantile: 0.99,
+ },
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
+ assert.Nil(t, err)
+ assert.Contains(t, query, testCase.expectedQueryContains)
+ })
+ }
+}
+
+func TestPrepareMetricQueryGauge(t *testing.T) {
+ testCases := []struct {
+ name string
+ builderQuery *v3.BuilderQuery
+ expectedQueryContains string
+ }{
+ {
+ name: "test gauge query with no group by",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_cpu_usage",
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ Expression: "A",
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ Disabled: false,
+ },
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ },
+ {
+ name: "test gauge query with group by host_name",
+ builderQuery: &v3.BuilderQuery{
+ QueryName: "A",
+ StepInterval: 60,
+ DataSource: v3.DataSourceMetrics,
+ AggregateAttribute: v3.AttributeKey{
+ Key: "system_cpu_usage",
+ },
+ Temporality: v3.Unspecified,
+ Filters: &v3.FilterSet{
+ Operator: "AND",
+ Items: []v3.FilterItem{},
+ },
+ GroupBy: []v3.AttributeKey{{
+ Key: "host_name",
+ DataType: v3.AttributeKeyDataTypeString,
+ Type: v3.AttributeKeyTypeTag,
+ }},
+ TimeAggregation: v3.TimeAggregationAvg,
+ SpaceAggregation: v3.SpaceAggregationSum,
+ Expression: "A",
+ Disabled: false,
+ },
+ expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC",
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{})
assert.Nil(t, err)
assert.Contains(t, query, testCase.expectedQueryContains)
})
diff --git a/pkg/query-service/app/opamp/config.yaml b/pkg/query-service/app/opamp/config.yaml
deleted file mode 100644
index d5ef74e00f..0000000000
--- a/pkg/query-service/app/opamp/config.yaml
+++ /dev/null
@@ -1,76 +0,0 @@
-receivers:
- otlp/spanmetrics:
- protocols:
- grpc:
- endpoint: "localhost:12345"
- otlp:
- protocols:
- grpc:
- http:
- jaeger:
- protocols:
- grpc:
- thrift_http:
- hostmetrics:
- collection_interval: 30s
- scrapers:
- cpu:
- load:
- memory:
- disk:
- filesystem:
- network:
-processors:
- batch:
- send_batch_size: 1000
- timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
- latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
- dimensions_cache_size: 10000
- dimensions:
- - name: service.namespace
- default: default
- - name: deployment.environment
- default: default
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-extensions:
- zpages: {}
-exporters:
- clickhousetraces:
- datasource: tcp://localhost:9000/?database=signoz_traces
- migrations: exporter/clickhousetracesexporter/migrations
- clickhousemetricswrite:
- endpoint: tcp://localhost:9000/?database=signoz_metrics
- resource_to_telemetry_conversion:
- enabled: true
- prometheus:
- endpoint: "0.0.0.0:8889"
-service:
- extensions: [zpages]
- pipelines:
- traces:
- receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
- exporters: [clickhousetraces]
- metrics:
- receivers: [otlp, hostmetrics]
- processors: [batch]
- exporters: [clickhousemetricswrite]
- metrics/spanmetrics:
- receivers: [otlp/spanmetrics]
- exporters: [prometheus]
\ No newline at end of file
diff --git a/pkg/query-service/app/opamp/model/agent.go b/pkg/query-service/app/opamp/model/agent.go
index 4bc5f2f3f0..1eef7bb4cf 100644
--- a/pkg/query-service/app/opamp/model/agent.go
+++ b/pkg/query-service/app/opamp/model/agent.go
@@ -4,7 +4,6 @@ import (
"bytes"
"context"
"crypto/sha256"
- "fmt"
"sync"
"time"
@@ -259,7 +258,7 @@ func (agent *Agent) processStatusUpdate(
// If remote config is changed and different from what the Agent has then
// send the new remote config to the Agent.
if configChanged ||
- (agent.Status.RemoteConfigStatus != nil &&
+ (agent.Status.RemoteConfigStatus != nil && agent.remoteConfig != nil &&
!bytes.Equal(agent.Status.RemoteConfigStatus.LastRemoteConfigHash, agent.remoteConfig.ConfigHash)) {
// The new status resulted in a change in the config of the Agent or the Agent
// does not have this config (hash is different). Send the new config the Agent.
@@ -277,8 +276,8 @@ func (agent *Agent) processStatusUpdate(
func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool {
recommendedConfig, confId, err := configProvider.RecommendAgentConfig([]byte(agent.EffectiveConfig))
if err != nil {
- // The server must always recommend a config.
- panic(fmt.Errorf("could not generate config recommendation for agent %s: %w", agent.ID, err))
+ zap.S().Error("could not generate config recommendation for agent:", agent.ID, err)
+ return false
}
cfg := protobufs.AgentRemoteConfig{
diff --git a/pkg/query-service/app/opamp/otelconfig/config_parser_test.go b/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
index f4a3ed0b1b..0a0d3c15b7 100644
--- a/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
+++ b/pkg/query-service/app/opamp/otelconfig/config_parser_test.go
@@ -34,7 +34,7 @@ func TestServiceConfig(t *testing.T) {
"traces": map[string]interface{}{
"receivers": []interface{}{"jaeger", "otlp"},
"processors": []interface{}{
- "signozspanmetrics/prometheus", "batch",
+ "signozspanmetrics/cumulative", "batch",
},
"exporters": []interface{}{
"clickhousetraces",
diff --git a/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml b/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
index d5ef74e00f..e8259a27e9 100644
--- a/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
+++ b/pkg/query-service/app/opamp/otelconfig/testdata/basic.yaml
@@ -24,8 +24,8 @@ processors:
batch:
send_batch_size: 1000
timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000
dimensions:
@@ -65,7 +65,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp, hostmetrics]
diff --git a/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml b/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
index dd562fba0d..7e7ca3f9cb 100644
--- a/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
+++ b/pkg/query-service/app/opamp/otelconfig/testdata/service.yaml
@@ -3,7 +3,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp, hostmetrics]
diff --git a/pkg/query-service/app/querier/helper.go b/pkg/query-service/app/querier/helper.go
index 82bf2cc464..5bb3cc81f8 100644
--- a/pkg/query-service/app/querier/helper.go
+++ b/pkg/query-service/app/querier/helper.go
@@ -160,7 +160,7 @@ func (q *querier) runBuilderQuery(
if !params.NoCache && q.cache != nil {
var retrieveStatus status.RetrieveStatus
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
@@ -254,7 +254,7 @@ func (q *querier) runBuilderExpression(
if !params.NoCache && q.cache != nil {
var retrieveStatus status.RetrieveStatus
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go
index d51e5713bf..16ef778d20 100644
--- a/pkg/query-service/app/querier/querier.go
+++ b/pkg/query-service/app/querier/querier.go
@@ -145,7 +145,7 @@ func (q *querier) execPromQuery(ctx context.Context, params *model.QueryRangePar
//
// The [End - fluxInterval, End] is always added to the list of misses, because
// the data might still be in flux and not yet available in the database.
-func findMissingTimeRanges(start, end int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
+func findMissingTimeRanges(start, end, step int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
var cachedStart, cachedEnd int64
for idx := range seriesList {
series := seriesList[idx]
@@ -160,11 +160,15 @@ func findMissingTimeRanges(start, end int64, seriesList []*v3.Series, fluxInterv
}
}
+ endMillis := time.Now().UnixMilli()
+ adjustStep := int64(math.Min(float64(step), 60))
+ roundedMillis := endMillis - (endMillis % (adjustStep * 1000))
+
// Exclude the flux interval from the cached end time
cachedEnd = int64(
math.Min(
float64(cachedEnd),
- float64(time.Now().UnixMilli()-fluxInterval.Milliseconds()),
+ float64(roundedMillis-fluxInterval.Milliseconds()),
),
)
@@ -215,7 +219,7 @@ func (q *querier) findMissingTimeRanges(start, end, step int64, cachedData []byt
// In case of error, we return the entire range as a miss
return []missInterval{{start: start, end: end}}
}
- return findMissingTimeRanges(start, end, cachedSeriesList, q.fluxInterval)
+ return findMissingTimeRanges(start, end, step, cachedSeriesList, q.fluxInterval)
}
func labelsToString(labels map[string]string) string {
@@ -258,6 +262,7 @@ func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
for idx := range seriesesByLabels {
series := seriesesByLabels[idx]
series.SortPoints()
+ series.RemoveDuplicatePoints()
mergedSeries = append(mergedSeries, series)
}
return mergedSeries
@@ -326,7 +331,7 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
// Ensure NoCache is not set and cache is not nil
if !params.NoCache && q.cache != nil {
data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
- zap.S().Debug("cache retrieve status", zap.String("status", retrieveStatus.String()))
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
if err == nil {
cachedData = data
}
@@ -502,6 +507,16 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
err = fmt.Errorf("invalid query type")
}
}
+
+ // return error if the number of series is more than one for value type panel
+ if params.CompositeQuery != nil && params.CompositeQuery.PanelType == v3.PanelTypeValue {
+ if len(results) > 1 {
+ err = fmt.Errorf("there can be only one active query for value type panel")
+ } else if len(results) == 1 && len(results[0].Series) > 1 {
+ err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))
+ }
+ }
+
return results, err, errQueriesByName
}
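The rounding added to `findMissingTimeRanges` snaps the wall-clock end down to a step boundary (capped at 60s) before excluding the flux interval, so repeated queries issued within the same step window agree on the cached end and hit the same cache entries. A self-contained sketch of the arithmetic, with assumed millisecond inputs:

```go
package main

import (
	"fmt"
	"math"
	"time"
)

// roundedCacheEnd mirrors the new boundary computation: snap "now"
// down to min(step, 60) seconds, then exclude the flux interval during
// which data may still be arriving.
func roundedCacheEnd(nowMillis, step int64, fluxInterval time.Duration) int64 {
	adjustStep := int64(math.Min(float64(step), 60))
	roundedMillis := nowMillis - (nowMillis % (adjustStep * 1000))
	return roundedMillis - fluxInterval.Milliseconds()
}

func main() {
	// With step=60s, 1675115596722 snaps down to 1675115580000; a
	// 5-minute flux interval then pulls the cached end back to
	// 1675115280000.
	fmt.Println(roundedCacheEnd(1675115596722, 60, 5*time.Minute))
}
```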
diff --git a/pkg/query-service/app/querier/querier_test.go b/pkg/query-service/app/querier/querier_test.go
index f08ae82dcd..605d2f5180 100644
--- a/pkg/query-service/app/querier/querier_test.go
+++ b/pkg/query-service/app/querier/querier_test.go
@@ -23,6 +23,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name string
requestedStart int64 // in milliseconds
requestedEnd int64 // in milliseconds
+ requestedStep int64 // in seconds
cachedSeries []*v3.Series
expectedMiss []missInterval
}{
@@ -30,6 +31,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a subset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -62,6 +64,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a superset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -93,6 +96,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a left overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -125,6 +129,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a right overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -157,6 +162,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
name: "cached time range is a disjoint of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -189,7 +195,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
- misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.cachedSeries, 0*time.Minute)
+ misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, tc.cachedSeries, 0*time.Minute)
if len(misses) != len(tc.expectedMiss) {
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
}
@@ -211,6 +217,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name string
requestedStart int64
requestedEnd int64
+ requestedStep int64
cachedSeries []*v3.Series
fluxInterval time.Duration
expectedMiss []missInterval
@@ -219,6 +226,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cached time range is a subset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -252,6 +260,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cached time range is a superset of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -284,6 +293,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a left overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -317,6 +327,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a right overlap of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -350,6 +361,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
name: "cache time range is a disjoint of the requested time range",
requestedStart: 1675115596722,
requestedEnd: 1675115596722 + 180*60*1000,
+ requestedStep: 60,
cachedSeries: []*v3.Series{
{
Labels: map[string]string{
@@ -383,7 +395,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
- misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.cachedSeries, tc.fluxInterval)
+ misses := findMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, tc.cachedSeries, tc.fluxInterval)
if len(misses) != len(tc.expectedMiss) {
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
}
@@ -404,6 +416,7 @@ func TestQueryRange(t *testing.T) {
{
Start: 1675115596722,
End: 1675115596722 + 120*60*1000,
+ Step: 60,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
PanelType: v3.PanelTypeGraph,
@@ -436,6 +449,7 @@ func TestQueryRange(t *testing.T) {
{
Start: 1675115596722 + 60*60*1000,
End: 1675115596722 + 180*60*1000,
+ Step: 60,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
PanelType: v3.PanelTypeGraph,
diff --git a/pkg/query-service/app/querier/v2/helper.go b/pkg/query-service/app/querier/v2/helper.go
new file mode 100644
index 0000000000..61ab056251
--- /dev/null
+++ b/pkg/query-service/app/querier/v2/helper.go
@@ -0,0 +1,306 @@
+package v2
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "strings"
+ "sync"
+ "time"
+
+ logsV3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
+ metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
+ metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+ "go.signoz.io/signoz/pkg/query-service/cache/status"
+ "go.signoz.io/signoz/pkg/query-service/constants"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.uber.org/zap"
+)
+
+func (q *querier) runBuilderQuery(
+ ctx context.Context,
+ builderQuery *v3.BuilderQuery,
+ params *v3.QueryRangeParamsV3,
+ keys map[string]v3.AttributeKey,
+ cacheKeys map[string]string,
+ ch chan channelResult,
+ wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+ queryName := builderQuery.QueryName
+
+ var preferRPM bool
+
+ if q.featureLookUp != nil {
+ preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil
+ }
+
+ // TODO: handle other data sources
+ if builderQuery.DataSource == v3.DataSourceLogs {
+ var query string
+ var err error
+ // for a ts query with group by and limit, form two queries (limit + placeholder)
+ if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
+ limitQuery, err := logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ placeholderQuery, err := logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: placeholderQuery, Series: nil}
+ return
+ }
+ query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
+ } else {
+ query, err = logsV3.PrepareLogsQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ logsV3.Options{PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ }
+
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ if builderQuery.DataSource == v3.DataSourceTraces {
+
+ var query string
+ var err error
+ // for ts query with group by and limit form two queries
+ if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
+ limitQuery, err := tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ placeholderQuery, err := tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
+ return
+ }
+ query = fmt.Sprintf(placeholderQuery, limitQuery)
+ } else {
+ query, err = tracesV3.PrepareTracesQuery(
+ params.Start,
+ params.End,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ keys,
+ tracesV3.Options{PreferRPM: preferRPM},
+ )
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ }
+
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ // Only graph panel queries are cached. A non-existent cache key means
+ // the query is not cached; in that case, execute the query and return
+ // the result without caching it.
+ if _, ok := cacheKeys[queryName]; !ok {
+ query, err := metricsV4.PrepareMetricQuery(params.Start, params.End, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{PreferRPM: preferRPM})
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ if !params.NoCache && q.cache != nil {
+ var retrieveStatus status.RetrieveStatus
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ query, err := metricsV4.PrepareMetricQuery(
+ miss.start,
+ miss.end,
+ params.CompositeQuery.QueryType,
+ params.CompositeQuery.PanelType,
+ builderQuery,
+ metricsV3.Options{},
+ )
+ if err != nil {
+ ch <- channelResult{
+ Err: err,
+ Name: queryName,
+ Query: query,
+ Series: nil,
+ }
+ return
+ }
+ series, err := q.execClickHouseQuery(ctx, query)
+ if err != nil {
+ ch <- channelResult{
+ Err: err,
+ Name: queryName,
+ Query: query,
+ Series: nil,
+ }
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ ch <- channelResult{
+ Err: nil,
+ Name: queryName,
+ Series: mergedSeries,
+ }
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+}
+
+func (q *querier) runBuilderExpression(
+ ctx context.Context,
+ builderQuery *v3.BuilderQuery,
+ params *v3.QueryRangeParamsV3,
+ keys map[string]v3.AttributeKey,
+ cacheKeys map[string]string,
+ ch chan channelResult,
+ wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+
+ queryName := builderQuery.QueryName
+
+ queries, err := q.builder.PrepareQueries(params, keys)
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: "", Series: nil}
+ return
+ }
+
+ if _, ok := cacheKeys[queryName]; !ok {
+ query := queries[queryName]
+ series, err := q.execClickHouseQuery(ctx, query)
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
+ return
+ }
+
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ if !params.NoCache && q.cache != nil {
+ var retrieveStatus status.RetrieveStatus
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ missQueries, _ := q.builder.PrepareQueries(&v3.QueryRangeParamsV3{
+ Start: miss.start,
+ End: miss.end,
+ Step: params.Step,
+ NoCache: params.NoCache,
+ CompositeQuery: params.CompositeQuery,
+ Variables: params.Variables,
+ }, keys)
+ query := missQueries[queryName]
+ series, err := q.execClickHouseQuery(ctx, query)
+ if err != nil {
+ ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ ch <- channelResult{
+ Err: nil,
+ Name: queryName,
+ Series: mergedSeries,
+ }
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+}
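For graph panels that combine a group-by with a limit, the logs branch above forms two queries: the first computes the top-N group values, and the second embeds it at the `#LIMIT_PLACEHOLDER` marker. A toy illustration of the splice with hypothetical SQL shapes (the real queries come from `logsV3.PrepareLogsQuery`):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical query shapes, only to show the splice: the limit
	// query picks the top-N series, and the placeholder query embeds
	// it as a subquery filter.
	limitQuery := `SELECT host FROM logs GROUP BY host ORDER BY count() DESC LIMIT 10`
	placeholderQuery := `SELECT ts, host, count() FROM logs WHERE host IN (#LIMIT_PLACEHOLDER) GROUP BY ts, host`

	query := strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1)
	fmt.Println(query)
}
```

The traces branch does the same thing but splices with `fmt.Sprintf` and a `%s` placeholder instead of a named marker.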
diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go
new file mode 100644
index 0000000000..86a472c064
--- /dev/null
+++ b/pkg/query-service/app/querier/v2/querier.go
@@ -0,0 +1,525 @@
+package v2
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "math"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ logsV3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
+ metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
+ "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
+ tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
+
+ "go.signoz.io/signoz/pkg/query-service/cache"
+ "go.signoz.io/signoz/pkg/query-service/interfaces"
+ "go.signoz.io/signoz/pkg/query-service/model"
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+ "go.uber.org/multierr"
+ "go.uber.org/zap"
+)
+
+type channelResult struct {
+ Series []*v3.Series
+ List []*v3.Row
+ Err error
+ Name string
+ Query string
+}
+
+type missInterval struct {
+ start, end int64 // in milliseconds
+}
+
+type querier struct {
+ cache cache.Cache
+ reader interfaces.Reader
+ keyGenerator cache.KeyGenerator
+
+ fluxInterval time.Duration
+
+ builder *queryBuilder.QueryBuilder
+ featureLookUp interfaces.FeatureLookup
+
+ // used for testing
+ // TODO(srikanthccv): remove this once we have a proper mock
+ testingMode bool
+ queriesExecuted []string
+ returnedSeries []*v3.Series
+ returnedErr error
+}
+
+type QuerierOptions struct {
+ Reader interfaces.Reader
+ Cache cache.Cache
+ KeyGenerator cache.KeyGenerator
+ FluxInterval time.Duration
+ FeatureLookup interfaces.FeatureLookup
+
+ // used for testing
+ TestingMode bool
+ ReturnedSeries []*v3.Series
+ ReturnedErr error
+}
+
+func NewQuerier(opts QuerierOptions) interfaces.Querier {
+ return &querier{
+ cache: opts.Cache,
+ reader: opts.Reader,
+ keyGenerator: opts.KeyGenerator,
+ fluxInterval: opts.FluxInterval,
+
+ builder: queryBuilder.NewQueryBuilder(queryBuilder.QueryBuilderOptions{
+ BuildTraceQuery: tracesV3.PrepareTracesQuery,
+ BuildLogQuery: logsV3.PrepareLogsQuery,
+ BuildMetricQuery: metricsV4.PrepareMetricQuery,
+ }, opts.FeatureLookup),
+ featureLookUp: opts.FeatureLookup,
+
+ testingMode: opts.TestingMode,
+ returnedSeries: opts.ReturnedSeries,
+ returnedErr: opts.ReturnedErr,
+ }
+}
+
+func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.Series, error) {
+ q.queriesExecuted = append(q.queriesExecuted, query)
+ if q.testingMode && q.reader == nil {
+ return q.returnedSeries, q.returnedErr
+ }
+ result, err := q.reader.GetTimeSeriesResultV3(ctx, query)
+ var pointsWithNegativeTimestamps int
+ // Filter out the points with negative or zero timestamps
+ for idx := range result {
+ series := result[idx]
+ points := make([]v3.Point, 0)
+ for pointIdx := range series.Points {
+ point := series.Points[pointIdx]
+ if point.Timestamp > 0 {
+ points = append(points, point)
+ } else {
+ pointsWithNegativeTimestamps++
+ }
+ }
+ series.Points = points
+ }
+ if pointsWithNegativeTimestamps > 0 {
+ zap.S().Errorf("found points with negative timestamps for query %s", query)
+ }
+ return result, err
+}
+
+func (q *querier) execPromQuery(ctx context.Context, params *model.QueryRangeParams) ([]*v3.Series, error) {
+ q.queriesExecuted = append(q.queriesExecuted, params.Query)
+ if q.testingMode && q.reader == nil {
+ return q.returnedSeries, q.returnedErr
+ }
+ promResult, _, err := q.reader.GetQueryRangeResult(ctx, params)
+ if err != nil {
+ return nil, err
+ }
+ matrix, promErr := promResult.Matrix()
+ if promErr != nil {
+ return nil, promErr
+ }
+ var seriesList []*v3.Series
+ for _, v := range matrix {
+ var s v3.Series
+ s.Labels = v.Metric.Copy().Map()
+ for idx := range v.Floats {
+ p := v.Floats[idx]
+ s.Points = append(s.Points, v3.Point{Timestamp: p.T, Value: p.F})
+ }
+ seriesList = append(seriesList, &s)
+ }
+ return seriesList, nil
+}
+
+// findMissingTimeRanges finds the missing time ranges in the seriesList
+// and returns a list of miss structs, It takes the fluxInterval into
+// account to find the missing time ranges.
+//
+// The [End - fluxInterval, End] is always added to the list of misses, because
+// the data might still be in flux and not yet available in the database.
+func findMissingTimeRanges(start, end, step int64, seriesList []*v3.Series, fluxInterval time.Duration) (misses []missInterval) {
+ var cachedStart, cachedEnd int64
+ for idx := range seriesList {
+ series := seriesList[idx]
+ for pointIdx := range series.Points {
+ point := series.Points[pointIdx]
+ if cachedStart == 0 || point.Timestamp < cachedStart {
+ cachedStart = point.Timestamp
+ }
+ if cachedEnd == 0 || point.Timestamp > cachedEnd {
+ cachedEnd = point.Timestamp
+ }
+ }
+ }
+
+ endMillis := time.Now().UnixMilli()
+ adjustStep := int64(math.Min(float64(step), 60))
+ roundedMillis := endMillis - (endMillis % (adjustStep * 1000))
+
+ // Exclude the flux interval from the cached end time
+ cachedEnd = int64(
+ math.Min(
+ float64(cachedEnd),
+ float64(roundedMillis-fluxInterval.Milliseconds()),
+ ),
+ )
+
+ // There are five cases to consider
+ // 1. Cached time range is a subset of the requested time range
+ // 2. Cached time range is a superset of the requested time range
+ // 3. Cached time range is a left overlap of the requested time range
+ // 4. Cached time range is a right overlap of the requested time range
+ // 5. Cached time range is a disjoint of the requested time range
+ if cachedStart >= start && cachedEnd <= end {
+ // Case 1: Cached time range is a subset of the requested time range
+ // Add misses for the left and right sides of the cached time range
+ misses = append(misses, missInterval{start: start, end: cachedStart - 1})
+ misses = append(misses, missInterval{start: cachedEnd + 1, end: end})
+ } else if cachedStart <= start && cachedEnd >= end {
+ // Case 2: Cached time range is a superset of the requested time range
+ // No misses
+ } else if cachedStart <= start && cachedEnd >= start {
+ // Case 3: Cached time range is a left overlap of the requested time range
+ // Add a miss for the left side of the cached time range
+ misses = append(misses, missInterval{start: cachedEnd + 1, end: end})
+ } else if cachedStart <= end && cachedEnd >= end {
+ // Case 4: Cached time range is a right overlap of the requested time range
+ // Add a miss for the right side of the cached time range
+ misses = append(misses, missInterval{start: start, end: cachedStart - 1})
+ } else {
+ // Case 5: Cached time range is a disjoint of the requested time range
+ // Add a miss for the entire requested time range
+ misses = append(misses, missInterval{start: start, end: end})
+ }
+
+ // remove the intervals with start >= end
+ var validMisses []missInterval
+ for idx := range misses {
+ miss := misses[idx]
+ if miss.start < miss.end {
+ validMisses = append(validMisses, miss)
+ }
+ }
+ return validMisses
+}
+
+// findMissingTimeRanges finds the missing time ranges in the cached data
+// and returns them as a list of misses
+func (q *querier) findMissingTimeRanges(start, end, step int64, cachedData []byte) (misses []missInterval) {
+ var cachedSeriesList []*v3.Series
+ if err := json.Unmarshal(cachedData, &cachedSeriesList); err != nil {
+ // In case of error, we return the entire range as a miss
+ return []missInterval{{start: start, end: end}}
+ }
+ return findMissingTimeRanges(start, end, step, cachedSeriesList, q.fluxInterval)
+}
+
+func labelsToString(labels map[string]string) string {
+ type label struct {
+ Key string
+ Value string
+ }
+ var labelsList []label
+ for k, v := range labels {
+ labelsList = append(labelsList, label{Key: k, Value: v})
+ }
+ sort.Slice(labelsList, func(i, j int) bool {
+ return labelsList[i].Key < labelsList[j].Key
+ })
+ labelKVs := make([]string, len(labelsList))
+ for idx := range labelsList {
+ labelKVs[idx] = labelsList[idx].Key + "=" + labelsList[idx].Value
+ }
+ return fmt.Sprintf("{%s}", strings.Join(labelKVs, ","))
+}
+
+func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
+ // Merge the missed series with the cached series by timestamp
+ mergedSeries := make([]*v3.Series, 0)
+ seriesesByLabels := make(map[string]*v3.Series)
+ for idx := range cachedSeries {
+ series := cachedSeries[idx]
+ seriesesByLabels[labelsToString(series.Labels)] = series
+ }
+
+ for idx := range missedSeries {
+ series := missedSeries[idx]
+ if _, ok := seriesesByLabels[labelsToString(series.Labels)]; !ok {
+ seriesesByLabels[labelsToString(series.Labels)] = series
+ continue
+ }
+ seriesesByLabels[labelsToString(series.Labels)].Points = append(seriesesByLabels[labelsToString(series.Labels)].Points, series.Points...)
+ }
+ // Sort the points in each series by timestamp
+ for idx := range seriesesByLabels {
+ series := seriesesByLabels[idx]
+ series.SortPoints()
+ series.RemoveDuplicatePoints()
+ mergedSeries = append(mergedSeries, series)
+ }
+ return mergedSeries
+}
+
+func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+
+ cacheKeys := q.keyGenerator.GenerateKeys(params)
+
+ ch := make(chan channelResult, len(params.CompositeQuery.BuilderQueries))
+ var wg sync.WaitGroup
+
+ for queryName, builderQuery := range params.CompositeQuery.BuilderQueries {
+ if builderQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ if queryName == builderQuery.Expression {
+ go q.runBuilderQuery(ctx, builderQuery, params, keys, cacheKeys, ch, &wg)
+ } else {
+ go q.runBuilderExpression(ctx, builderQuery, params, keys, cacheKeys, ch, &wg)
+ }
+ }
+
+ wg.Wait()
+ close(ch)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range ch {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in builder queries")
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+ channelResults := make(chan channelResult, len(params.CompositeQuery.PromQueries))
+ var wg sync.WaitGroup
+ cacheKeys := q.keyGenerator.GenerateKeys(params)
+
+ for queryName, promQuery := range params.CompositeQuery.PromQueries {
+ if promQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ go func(queryName string, promQuery *v3.PromQuery) {
+ defer wg.Done()
+ cacheKey := cacheKeys[queryName]
+ var cachedData []byte
+ // Ensure NoCache is not set and cache is not nil
+ if !params.NoCache && q.cache != nil {
+ data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true)
+ zap.S().Infof("cache retrieve status: %s", retrieveStatus.String())
+ if err == nil {
+ cachedData = data
+ }
+ }
+ misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData)
+ missedSeries := make([]*v3.Series, 0)
+ cachedSeries := make([]*v3.Series, 0)
+ for _, miss := range misses {
+ query := metricsV4.BuildPromQuery(promQuery, params.Step, miss.start, miss.end)
+ series, err := q.execPromQuery(ctx, query)
+ if err != nil {
+ channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: nil}
+ return
+ }
+ missedSeries = append(missedSeries, series...)
+ }
+ if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil {
+ // ideally we should not be getting an error here
+ zap.S().Error("error unmarshalling cached data", zap.Error(err))
+ }
+ mergedSeries := mergeSerieses(cachedSeries, missedSeries)
+
+ channelResults <- channelResult{Err: nil, Name: queryName, Query: promQuery.Query, Series: mergedSeries}
+
+ // Cache the seriesList for future queries
+ if len(missedSeries) > 0 && !params.NoCache && q.cache != nil {
+ mergedSeriesData, err := json.Marshal(mergedSeries)
+ if err != nil {
+ zap.S().Error("error marshalling merged series", zap.Error(err))
+ return
+ }
+ err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour)
+ if err != nil {
+ zap.S().Error("error storing merged series", zap.Error(err))
+ return
+ }
+ }
+ }(queryName, promQuery)
+ }
+ wg.Wait()
+ close(channelResults)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range channelResults {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in prom queries")
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, error, map[string]string) {
+ channelResults := make(chan channelResult, len(params.CompositeQuery.ClickHouseQueries))
+ var wg sync.WaitGroup
+ for queryName, clickHouseQuery := range params.CompositeQuery.ClickHouseQueries {
+ if clickHouseQuery.Disabled {
+ continue
+ }
+ wg.Add(1)
+ go func(queryName string, clickHouseQuery *v3.ClickHouseQuery) {
+ defer wg.Done()
+ series, err := q.execClickHouseQuery(ctx, clickHouseQuery.Query)
+ channelResults <- channelResult{Err: err, Name: queryName, Query: clickHouseQuery.Query, Series: series}
+ }(queryName, clickHouseQuery)
+ }
+ wg.Wait()
+ close(channelResults)
+
+ results := make([]*v3.Result, 0)
+ errQueriesByName := make(map[string]string)
+ var errs []error
+
+ for result := range channelResults {
+ if result.Err != nil {
+ errs = append(errs, result.Err)
+ errQueriesByName[result.Name] = result.Err.Error()
+ continue
+ }
+ results = append(results, &v3.Result{
+ QueryName: result.Name,
+ Series: result.Series,
+ })
+ }
+
+ var err error
+ if len(errs) > 0 {
+ err = fmt.Errorf("error in clickhouse queries")
+ }
+ return results, err, errQueriesByName
+}
+
+func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+
+ queries, err := q.builder.PrepareQueries(params, keys)
+
+ if err != nil {
+ return nil, err, nil
+ }
+
+ ch := make(chan channelResult, len(queries))
+ var wg sync.WaitGroup
+
+ for name, query := range queries {
+ wg.Add(1)
+ go func(name, query string) {
+ defer wg.Done()
+ rowList, err := q.reader.GetListResultV3(ctx, query)
+
+ if err != nil {
+ ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
+ return
+ }
+ ch <- channelResult{List: rowList, Name: name, Query: query}
+ }(name, query)
+ }
+
+ wg.Wait()
+ close(ch)
+
+ var errs []error
+ errQueriesByName := make(map[string]string)
+ res := make([]*v3.Result, 0)
+ // read values from the channel
+ for r := range ch {
+ if r.Err != nil {
+ errs = append(errs, r.Err)
+ errQueriesByName[r.Name] = r.Query
+ continue
+ }
+ res = append(res, &v3.Result{
+ QueryName: r.Name,
+ List: r.List,
+ })
+ }
+ if len(errs) != 0 {
+ return nil, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...)), errQueriesByName
+ }
+ return res, nil, nil
+}
+
+func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) ([]*v3.Result, error, map[string]string) {
+ var results []*v3.Result
+ var err error
+ var errQueriesByName map[string]string
+ if params.CompositeQuery != nil {
+ switch params.CompositeQuery.QueryType {
+ case v3.QueryTypeBuilder:
+ if params.CompositeQuery.PanelType == v3.PanelTypeList || params.CompositeQuery.PanelType == v3.PanelTypeTrace {
+ results, err, errQueriesByName = q.runBuilderListQueries(ctx, params, keys)
+ } else {
+ results, err, errQueriesByName = q.runBuilderQueries(ctx, params, keys)
+ }
+ case v3.QueryTypePromQL:
+ results, err, errQueriesByName = q.runPromQueries(ctx, params)
+ case v3.QueryTypeClickHouseSQL:
+ results, err, errQueriesByName = q.runClickHouseQueries(ctx, params)
+ default:
+ err = fmt.Errorf("invalid query type")
+ }
+ }
+
+ // return error if the number of series is more than one for value type panel
+ if params.CompositeQuery != nil && params.CompositeQuery.PanelType == v3.PanelTypeValue {
+ if len(results) > 1 {
+ err = fmt.Errorf("there can be only one active query for value type panel")
+ } else if len(results) == 1 && len(results[0].Series) > 1 {
+ err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))
+ }
+ }
+
+ return results, err, errQueriesByName
+}
+
+func (q *querier) QueriesExecuted() []string {
+ return q.queriesExecuted
+}
diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go
index f7fa328b9f..eb50a775ce 100644
--- a/pkg/query-service/app/server.go
+++ b/pkg/query-service/app/server.go
@@ -267,6 +267,7 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
api.RegisterMetricsRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
+ api.RegisterQueryRangeV4Routes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
diff --git a/pkg/query-service/common/metrics.go b/pkg/query-service/common/metrics.go
new file mode 100644
index 0000000000..8596ba9d7c
--- /dev/null
+++ b/pkg/query-service/common/metrics.go
@@ -0,0 +1,19 @@
+package common
+
+import (
+ "math"
+
+ v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
+)
+
+func AdjustedMetricTimeRange(start, end, step int64, aggregateOperator v3.TimeAggregation) (int64, int64) {
+ start = start - (start % (step * 1000))
+ // if the query is a rate query, we adjust the start time by one more step
+ // so that we can calculate the rate for the first data point
+ if aggregateOperator.IsRateOperator() {
+ start -= step * 1000
+ }
+ adjustStep := int64(math.Min(float64(step), 60))
+ end = end - (end % (adjustStep * 1000))
+ return start, end
+}
diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go
index e2b2b49481..a75a2f5f30 100644
--- a/pkg/query-service/interfaces/interface.go
+++ b/pkg/query-service/interfaces/interface.go
@@ -98,7 +98,7 @@ type Reader interface {
QueryDashboardVars(ctx context.Context, query string) (*model.DashboardVar, error)
CheckClickHouse(ctx context.Context) error
- GetLatencyMetricMetadata(context.Context, string, bool) (*v3.LatencyMetricMetadataResponse, error)
+ GetLatencyMetricMetadata(context.Context, string, string, bool) (*v3.LatencyMetricMetadataResponse, error)
}
type Querier interface {
diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go
index 453c6475a8..a11e888c15 100644
--- a/pkg/query-service/model/v3/v3.go
+++ b/pkg/query-service/model/v3/v3.go
@@ -462,6 +462,15 @@ const (
TimeAggregationIncrease TimeAggregation = "increase"
)
+func (t TimeAggregation) IsRateOperator() bool {
+ switch t {
+ case TimeAggregationRate, TimeAggregationIncrease:
+ return true
+ default:
+ return false
+ }
+}
+
type SpaceAggregation string
const (
@@ -500,6 +509,7 @@ type BuilderQuery struct {
SelectColumns []AttributeKey `json:"selectColumns,omitempty"`
TimeAggregation TimeAggregation `json:"timeAggregation,omitempty"`
SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"`
+ Quantile float64 `json:"quantile,omitempty"`
Functions []Function `json:"functions,omitempty"`
}
@@ -517,8 +527,16 @@ func (b *BuilderQuery) Validate() error {
if err := b.DataSource.Validate(); err != nil {
return fmt.Errorf("data source is invalid: %w", err)
}
- if err := b.AggregateOperator.Validate(); err != nil {
- return fmt.Errorf("aggregate operator is invalid: %w", err)
+ if b.DataSource == DataSourceMetrics {
+ if b.TimeAggregation == TimeAggregationUnspecified && b.Quantile == 0 {
+ if err := b.AggregateOperator.Validate(); err != nil {
+ return fmt.Errorf("aggregate operator is invalid: %w", err)
+ }
+ }
+ } else {
+ if err := b.AggregateOperator.Validate(); err != nil {
+ return fmt.Errorf("aggregate operator is invalid: %w", err)
+ }
}
if b.AggregateAttribute == (AttributeKey{}) && b.AggregateOperator.RequireAttribute(b.DataSource) {
return fmt.Errorf("aggregate attribute is required")
@@ -680,6 +698,35 @@ func (s *Series) SortPoints() {
})
}
+func (s *Series) RemoveDuplicatePoints() {
+ if len(s.Points) == 0 {
+ return
+ }
+
+ // prioritize the last point: when the same timestamp appears twice,
+ // keep the most recent point (already adjusted for the flux interval)
+
+ newPoints := make([]Point, 0)
+ for i := len(s.Points) - 1; i >= 0; i-- {
+ if len(newPoints) == 0 {
+ newPoints = append(newPoints, s.Points[i])
+ continue
+ }
+ if newPoints[len(newPoints)-1].Timestamp != s.Points[i].Timestamp {
+ newPoints = append(newPoints, s.Points[i])
+ }
+ }
+
+ // reverse the points
+ for i := len(newPoints)/2 - 1; i >= 0; i-- {
+ opp := len(newPoints) - 1 - i
+ newPoints[i], newPoints[opp] = newPoints[opp], newPoints[i]
+ }
+
+ s.Points = newPoints
+}
+
type Row struct {
Timestamp time.Time `json:"timestamp"`
Data map[string]interface{} `json:"data"`
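`RemoveDuplicatePoints` walks the sorted points from the end so that when the same timestamp appears in both cached and freshly queried data, the most recent point wins, then restores ascending order. A standalone sketch of the same pass on a plain slice:

```go
package main

import "fmt"

type point struct {
	ts  int64
	val float64
}

// dedupKeepLast mirrors Series.RemoveDuplicatePoints on an
// already-sorted slice: scan backwards so the last point for a
// duplicated timestamp wins, then reverse back to ascending order.
func dedupKeepLast(points []point) []point {
	if len(points) == 0 {
		return points
	}
	out := make([]point, 0, len(points))
	for i := len(points) - 1; i >= 0; i-- {
		if len(out) == 0 || out[len(out)-1].ts != points[i].ts {
			out = append(out, points[i])
		}
	}
	for i, j := 0, len(out)-1; i < j; i, j = i+1, j-1 {
		out[i], out[j] = out[j], out[i]
	}
	return out
}

func main() {
	pts := []point{{1, 10}, {2, 20}, {2, 25}, {3, 30}}
	fmt.Println(dedupKeepLast(pts)) // [{1 10} {2 25} {3 30}]
}
```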
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index 52b213713d..9017f9326e 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -158,7 +158,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
- "--prefer-delta=true"
+ # "--prefer-delta=true"
]
# ports:
# - "6060:6060" # pprof port
@@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
- image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
+ image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.8}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
- image: signoz/signoz-otel-collector:0.88.6
+ image: signoz/signoz-otel-collector:0.88.8
container_name: signoz-otel-collector
command:
[
@@ -244,24 +244,6 @@ services:
query-service:
condition: service_healthy
- otel-collector-metrics:
- image: signoz/signoz-otel-collector:0.88.6
- container_name: signoz-otel-collector-metrics
- command:
- [
- "--config=/etc/otel-collector-metrics-config.yaml",
- "--feature-gates=-pkg.translator.prometheus.NormalizeName"
- ]
- volumes:
- - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
- # ports:
- # - "1777:1777" # pprof extension
- # - "8888:8888" # OtelCollector internal metrics
- # - "13133:13133" # Health check extension
- # - "55679:55679" # zPages extension
- restart: on-failure
- <<: *db-depend
-
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
diff --git a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
index cc839e737f..8a0e899826 100644
--- a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
+++ b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml
@@ -15,7 +15,7 @@ receivers:
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
- expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|otel-collector-metrics|clickhouse|zookeeper)"'
+ expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"'
opencensus:
endpoint: 0.0.0.0:55678
otlp/spanmetrics:
@@ -63,8 +63,8 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
- signozspanmetrics/prometheus:
- metrics_exporter: prometheus
+ signozspanmetrics/cumulative:
+ metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
@@ -135,7 +135,7 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
- processors: [signozspanmetrics/prometheus, batch]
+ processors: [signozspanmetrics/cumulative, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
diff --git a/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml
deleted file mode 100644
index aecad4eaaf..0000000000
--- a/pkg/query-service/tests/test-deploy/otel-collector-metrics-config.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-receivers:
- otlp:
- protocols:
- grpc:
- http:
- prometheus:
- config:
- scrape_configs:
- # otel-collector-metrics internal metrics
- - job_name: otel-collector-metrics
- scrape_interval: 60s
- static_configs:
- - targets:
- - localhost:8888
- # SigNoz span metrics
- - job_name: signozspanmetrics-collector
- scrape_interval: 60s
- static_configs:
- - targets:
- - otel-collector:8889
-
-processors:
- batch:
- send_batch_size: 10000
- send_batch_max_size: 11000
- timeout: 10s
- # memory_limiter:
- # # 80% of maximum memory up to 2G
- # limit_mib: 1500
- # # 25% of limit up to 2G
- # spike_limit_mib: 512
- # check_interval: 5s
- #
- # # 50% of the maximum memory
- # limit_percentage: 50
- # # 20% of max memory usage spike expected
- # spike_limit_percentage: 20
- # queued_retry:
- # num_workers: 4
- # queue_size: 100
- # retry_on_failure: true
-
-extensions:
- health_check:
- endpoint: 0.0.0.0:13133
- zpages:
- endpoint: 0.0.0.0:55679
- pprof:
- endpoint: 0.0.0.0:1777
-
-exporters:
- clickhousemetricswrite:
- endpoint: tcp://clickhouse:9000/?database=signoz_metrics
-
-service:
- telemetry:
- metrics:
- address: 0.0.0.0:8888
- extensions:
- - health_check
- - zpages
- - pprof
- pipelines:
- metrics:
- receivers: [prometheus]
- processors: [batch]
- exporters: [clickhousemetricswrite]