Merge pull request #4973 from SigNoz/release/v0.45

Release/v0.45
Prashant Shahi 2024-05-09 08:46:58 +05:30 committed by GitHub
commit a544723bb8
GPG Key ID: B5690EEEBB952194
72 changed files with 19999 additions and 180 deletions

View File

@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.44.0
image: signoz/query-service:0.45.0
command:
[
"-config=/root/config/prometheus.yml",
@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:0.44.0
image: signoz/frontend:0.45.0
deploy:
restart_policy:
condition: on-failure
@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.88.21
image: signoz/signoz-otel-collector:0.88.22
command:
[
"--config=/etc/otel-collector-config.yaml",
@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.88.21
image: signoz/signoz-schema-migrator:0.88.22
deploy:
restart_policy:
condition: on-failure

View File

@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.21}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.88.21
image: signoz/signoz-otel-collector:0.88.22
command:
[
"--config=/etc/otel-collector-config.yaml",

View File

@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.44.0}
image: signoz/query-service:${DOCKER_TAG:-0.45.0}
container_name: signoz-query-service
command:
[
@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.44.0}
image: signoz/frontend:${DOCKER_TAG:-0.45.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.21}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.21}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.22}
container_name: signoz-otel-collector
command:
[

View File

@ -0,0 +1,64 @@
<clickhouse>
<logger>
<!-- Possible levels [1]:
- none (turns off logging)
- fatal
- critical
- error
- warning
- notice
- information
- debug
- trace
[1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
-->
<level>information</level>
<log>/var/log/clickhouse-keeper/clickhouse-keeper.log</log>
<errorlog>/var/log/clickhouse-keeper/clickhouse-keeper.err.log</errorlog>
<!-- Rotation policy
See https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/FileChannel.h#L54-L85
-->
<size>1000M</size>
<count>10</count>
<!-- <console>1</console> --> <!-- Default behavior is autodetection (log to console if not daemon mode and is tty) -->
</logger>
<listen_host>0.0.0.0</listen_host>
<max_connections>4096</max_connections>
<keeper_server>
<tcp_port>9181</tcp_port>
<!-- Must be unique among all keeper servers -->
<server_id>1</server_id>
<log_storage_path>/var/lib/clickhouse/coordination/logs</log_storage_path>
<snapshot_storage_path>/var/lib/clickhouse/coordination/snapshots</snapshot_storage_path>
<coordination_settings>
<operation_timeout_ms>10000</operation_timeout_ms>
<min_session_timeout_ms>10000</min_session_timeout_ms>
<session_timeout_ms>100000</session_timeout_ms>
<raft_logs_level>information</raft_logs_level>
<compress_logs>false</compress_logs>
<!-- All settings listed in https://github.com/ClickHouse/ClickHouse/blob/master/src/Coordination/CoordinationSettings.h -->
</coordination_settings>
<!-- enable sanity hostname checks for cluster configuration (e.g. if localhost is used with remote endpoints) -->
<hostname_checks_enabled>true</hostname_checks_enabled>
<raft_configuration>
<server>
<id>1</id>
<!-- Internal port and hostname -->
<hostname>clickhouses-keeper-1</hostname>
<port>9234</port>
</server>
<!-- Add more servers here -->
</raft_configuration>
</keeper_server>
</clickhouse>

View File

@ -1,4 +1,4 @@
FROM nginx:1.25.2-alpine
FROM nginx:1.26-alpine
# Add Maintainer Info
LABEL maintainer="signoz"

View File

@ -4,6 +4,7 @@ const config: Config.InitialOptions = {
clearMocks: true,
coverageDirectory: 'coverage',
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
moduleFileExtensions: ['ts', 'tsx', 'js', 'json'],
modulePathIgnorePatterns: ['dist'],
moduleNameMapper: {

View File

@ -53,7 +53,7 @@
"ansi-to-html": "0.7.2",
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.6.2",
"axios": "1.6.4",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",

View File

@ -16,6 +16,7 @@
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"variable_updated_successfully": "Variable updated successfully",
"error_while_updating_variable": "Error while updating variable",

View File

@ -16,6 +16,7 @@
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"full_view": "Full Screen View",
"variable_updated_successfully": "Variable updated successfully",

View File

@ -157,8 +157,8 @@ function ListLogView({
const timestampValue = useMemo(
() =>
typeof flattenLogData.timestamp === 'string'
? dayjs(flattenLogData.timestamp).format()
: dayjs(flattenLogData.timestamp / 1e6).format(),
? dayjs(flattenLogData.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(flattenLogData.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'),
[flattenLogData.timestamp],
);

View File

@ -90,12 +90,12 @@ function RawLogView({
const text = useMemo(
() =>
typeof data.timestamp === 'string'
? `${dayjs(data.timestamp).format()} | ${attributesText} ${severityText} ${
data.body
}`
: `${dayjs(
data.timestamp / 1e6,
).format()} | ${attributesText} ${severityText} ${data.body}`,
? `${dayjs(data.timestamp).format(
'YYYY-MM-DD HH:mm:ss.SSS',
)} | ${attributesText} ${severityText} ${data.body}`
: `${dayjs(data.timestamp / 1e6).format(
'YYYY-MM-DD HH:mm:ss.SSS',
)} | ${attributesText} ${severityText} ${data.body}`,
[data.timestamp, data.body, severityText, attributesText],
);

View File

@ -76,8 +76,8 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
render: (field, item): ColumnTypeRender<Record<string, unknown>> => {
const date =
typeof field === 'string'
? dayjs(field).format()
: dayjs(field / 1e6).format();
? dayjs(field).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(field / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
return {
children: (
<div className="table-timestamp">

View File

@ -1,6 +1,6 @@
import './FacingIssueBtn.style.scss';
import { Button } from 'antd';
import { Button, Tooltip } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { FeatureKeys } from 'constants/features';
@ -15,6 +15,7 @@ export interface FacingIssueBtnProps {
message?: string;
buttonText?: string;
className?: string;
onHoverText?: string;
}
function FacingIssueBtn({
@ -23,6 +24,7 @@ function FacingIssueBtn({
message = '',
buttonText = '',
className = '',
onHoverText = '',
}: FacingIssueBtnProps): JSX.Element | null {
const handleFacingIssuesClick = (): void => {
logEvent(eventName, attributes);
@ -37,13 +39,15 @@ function FacingIssueBtn({
return isCloudUserVal && isChatSupportEnabled ? ( // Note: we would need to move this condition to be license-based in the future
<div className="facing-issue-button">
<Button
className={cx('periscope-btn', 'facing-issue-button', className)}
onClick={handleFacingIssuesClick}
icon={<HelpCircle size={14} />}
>
{buttonText || 'Facing issues?'}
</Button>
<Tooltip title={onHoverText} autoAdjustOverflow>
<Button
className={cx('periscope-btn', 'facing-issue-button', className)}
onClick={handleFacingIssuesClick}
icon={<HelpCircle size={14} />}
>
{buttonText || 'Facing issues?'}
</Button>
</Tooltip>
</div>
) : null;
}
@ -52,6 +56,7 @@ FacingIssueBtn.defaultProps = {
message: '',
buttonText: '',
className: '',
onHoverText: '',
};
export default FacingIssueBtn;

View File

@ -0,0 +1,57 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { AlertDef } from 'types/api/alerts/def';
import { Dashboard, DashboardData } from 'types/api/dashboard/getAll';
export const chartHelpMessage = (
selectedDashboard: Dashboard | undefined,
graphType: PANEL_TYPES,
): string => `
Hi Team,
I need help in creating this chart. Here are my dashboard details
Name: ${selectedDashboard?.data.title || ''}
Panel type: ${graphType}
Dashboard Id: ${selectedDashboard?.uuid || ''}
Thanks`;
export const dashboardHelpMessage = (
data: DashboardData | undefined,
selectedDashboard: Dashboard | undefined,
): string => `
Hi Team,
I need help with this dashboard. Here are my dashboard details
Name: ${data?.title || ''}
Dashboard Id: ${selectedDashboard?.uuid || ''}
Thanks`;
export const dashboardListMessage = `Hi Team,
I need help with dashboards.
Thanks`;
export const listAlertMessage = `Hi Team,
I need help with managing alerts.
Thanks`;
export const alertHelpMessage = (
alertDef: AlertDef,
ruleId: number,
): string => `
Hi Team,
I need help in configuring this alert. Here are my alert rule details
Name: ${alertDef?.alert || ''}
Alert Type: ${alertDef?.alertType || ''}
State: ${(alertDef as any)?.state || ''}
Alert Id: ${ruleId}
Thanks`;

View File

@ -30,4 +30,5 @@ export enum QueryParams {
integration = 'integration',
pagination = 'pagination',
relativeTime = 'relativeTime',
alertType = 'alertType',
}

View File

@ -289,6 +289,11 @@ export enum PANEL_TYPES {
EMPTY_WIDGET = 'EMPTY_WIDGET',
}
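// Group panel types: a 'row' groups the dashboard widgets placed beneath it.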
// eslint-disable-next-line @typescript-eslint/naming-convention
export enum PANEL_GROUP_TYPES {
ROW = 'row',
}
// eslint-disable-next-line @typescript-eslint/naming-convention
export enum ATTRIBUTE_TYPES {
SUM = 'Sum',

View File

@ -1,8 +1,9 @@
import { Form, Row } from 'antd';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { QueryParams } from 'constants/query';
import FormAlertRules from 'container/FormAlertRules';
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
import { isEqual } from 'lodash-es';
import history from 'lib/history';
import { useEffect, useState } from 'react';
import { useLocation } from 'react-router-dom';
import { AlertTypes } from 'types/api/alerts/alertTypes';
@ -19,13 +20,25 @@ import SelectAlertType from './SelectAlertType';
function CreateRules(): JSX.Element {
const [initValues, setInitValues] = useState<AlertDef | null>(null);
const [alertType, setAlertType] = useState<AlertTypes>();
const location = useLocation();
const queryParams = new URLSearchParams(location.search);
const version = queryParams.get('version');
const alertTypeFromParams = queryParams.get(QueryParams.alertType);
const compositeQuery = useGetCompositeQueryParam();
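// Derive the alert type from the data source of the first query in the
// composite query carried in the URL, if one is present.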
function getAlertTypeFromDataSource(): AlertTypes | null {
if (!compositeQuery) {
return null;
}
const dataSource = compositeQuery?.builder?.queryData[0]?.dataSource;
return ALERT_TYPE_VS_SOURCE_MAPPING[dataSource];
}
const [alertType, setAlertType] = useState<AlertTypes>(
(alertTypeFromParams as AlertTypes) || getAlertTypeFromDataSource(),
);
const [formInstance] = Form.useForm();
@ -47,21 +60,17 @@ function CreateRules(): JSX.Element {
version: version || ENTITY_VERSION_V4,
});
}
queryParams.set(QueryParams.alertType, typ);
const generatedUrl = `${location.pathname}?${queryParams.toString()}`;
history.replace(generatedUrl);
};
useEffect(() => {
if (!compositeQuery) {
return;
}
const dataSource = compositeQuery?.builder?.queryData[0]?.dataSource;
const alertTypeFromQuery = ALERT_TYPE_VS_SOURCE_MAPPING[dataSource];
if (alertTypeFromQuery && !isEqual(alertType, alertTypeFromQuery)) {
onSelectType(alertTypeFromQuery);
if (alertType) {
onSelectType(alertType);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [compositeQuery]);
}, [alertType]);
if (!initValues) {
return (

View File

@ -12,6 +12,7 @@ import {
import saveAlertApi from 'api/alerts/save';
import testAlertApi from 'api/alerts/testAlert';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { alertHelpMessage } from 'components/facingIssueBtn/util';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
@ -523,6 +524,7 @@ function FormAlertRules({
runQuery={handleRunQuery}
alertDef={alertDef}
panelType={panelType || PANEL_TYPES.TIME_SERIES}
key={currentQuery.queryType}
/>
<RuleOptions
@ -584,17 +586,9 @@ function FormAlertRules({
}}
className="facing-issue-btn"
eventName="Alert: Facing Issues in alert"
buttonText="Facing Issues in alert"
message={`Hi Team,
I am facing issues configuring alerts in SigNoz. Here are my alert rule details
Name: ${alertDef?.alert || ''}
Alert Type: ${alertDef?.alertType || ''}
State: ${(alertDef as any)?.state || ''}
Alert Id: ${ruleId}
Thanks`}
buttonText="Need help with this alert?"
message={alertHelpMessage(alertDef, ruleId)}
onHoverText="Click here to get help with this alert"
/>
</Col>
</PanelContainer>

View File

@ -59,7 +59,7 @@ function WidgetGraphComponent({
const lineChartRef = useRef<ToggleGraphProps>();
const [graphVisibility, setGraphVisibility] = useState<boolean[]>(
Array(queryResponse.data?.payload?.data.result.length || 0).fill(true),
Array(queryResponse.data?.payload?.data?.result?.length || 0).fill(true),
);
const graphRef = useRef<HTMLDivElement>(null);
@ -135,7 +135,7 @@ function WidgetGraphComponent({
i: uuid,
w: 6,
x: 0,
h: 3,
h: 6,
y: 0,
},
];

View File

@ -1,11 +1,14 @@
import './GridCardLayout.styles.scss';
import { PlusOutlined } from '@ant-design/icons';
import { Flex, Tooltip } from 'antd';
import { Flex, Form, Input, Modal, Tooltip, Typography } from 'antd';
import { useForm } from 'antd/es/form/Form';
import cx from 'classnames';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { dashboardHelpMessage } from 'components/facingIssueBtn/util';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
@ -13,12 +16,21 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { defaultTo } from 'lodash-es';
import isEqual from 'lodash-es/isEqual';
import { FullscreenIcon } from 'lucide-react';
import {
FullscreenIcon,
GripVertical,
MoveDown,
MoveUp,
Settings,
Trash2,
} from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { sortLayout } from 'providers/Dashboard/util';
import { useCallback, useEffect, useState } from 'react';
import { FullScreen, useFullScreenHandle } from 'react-full-screen';
import { Layout } from 'react-grid-layout';
import { ItemCallback, Layout } from 'react-grid-layout';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
@ -28,6 +40,7 @@ import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import AppReducer from 'types/reducer/app';
import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';
import { EditMenuAction, ViewMenuAction } from './config';
import GridCard from './GridCard';
@ -46,6 +59,8 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
selectedDashboard,
layouts,
setLayouts,
panelMap,
setPanelMap,
setSelectedDashboard,
isDashboardLocked,
} = useDashboard();
@ -66,6 +81,26 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
const [dashboardLayout, setDashboardLayout] = useState<Layout[]>([]);
const [isSettingsModalOpen, setIsSettingsModalOpen] = useState<boolean>(false);
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState<boolean>(false);
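// Row currently targeted by the settings/delete modals, plus a local working
// copy of the dashboard's panelMap that is edited before being persisted.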
const [currentSelectRowId, setCurrentSelectRowId] = useState<string | null>(
null,
);
const [currentPanelMap, setCurrentPanelMap] = useState<
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
const [form] = useForm<{
title: string;
}>();
const updateDashboardMutation = useUpdateDashboard();
const { notifications } = useNotifications();
@ -88,7 +123,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
);
useEffect(() => {
setDashboardLayout(layouts);
setDashboardLayout(sortLayout(layouts));
}, [layouts]);
const onSaveHandler = (): void => {
@ -98,6 +133,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
...selectedDashboard,
data: {
...selectedDashboard.data,
panelMap: { ...currentPanelMap },
layout: dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
},
uuid: selectedDashboard.uuid,
@ -107,8 +143,9 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
onSuccess: (updatedDashboard) => {
if (updatedDashboard.payload) {
if (updatedDashboard.payload.data.layout)
setLayouts(updatedDashboard.payload.data.layout);
setLayouts(sortLayout(updatedDashboard.payload.data.layout));
setSelectedDashboard(updatedDashboard.payload);
setPanelMap(updatedDashboard.payload?.data?.panelMap || {});
}
featureResponse.refetch();
@ -131,7 +168,8 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
dashboardLayout,
);
if (!isEqual(filterLayout, filterDashboardLayout)) {
setDashboardLayout(layout);
const updatedLayout = sortLayout(layout);
setDashboardLayout(updatedLayout);
}
};
@ -168,6 +206,283 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dashboardLayout]);
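// Add a new row panel at the top of the layout and group the ungrouped
// widgets above the first existing row under it.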
function handleAddRow(): void {
if (!selectedDashboard) return;
const id = uuid();
const newRowWidgetMap: { widgets: Layout[]; collapsed: boolean } = {
widgets: [],
collapsed: false,
};
const currentRowIdx = 0;
for (let j = currentRowIdx; j < dashboardLayout.length; j++) {
if (!currentPanelMap[dashboardLayout[j].i]) {
newRowWidgetMap.widgets.push(dashboardLayout[j]);
} else {
break;
}
}
const updatedDashboard: Dashboard = {
...selectedDashboard,
data: {
...selectedDashboard.data,
layout: [
{
i: id,
w: 12,
minW: 12,
minH: 1,
maxH: 1,
x: 0,
h: 1,
y: 0,
},
...dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
],
panelMap: { ...currentPanelMap, [id]: newRowWidgetMap },
widgets: [
...(selectedDashboard.data.widgets || []),
{
id,
title: 'Sample Row',
description: '',
panelTypes: PANEL_GROUP_TYPES.ROW,
},
],
},
uuid: selectedDashboard.uuid,
};
updateDashboardMutation.mutate(updatedDashboard, {
// eslint-disable-next-line sonarjs/no-identical-functions
onSuccess: (updatedDashboard) => {
if (updatedDashboard.payload) {
if (updatedDashboard.payload.data.layout)
setLayouts(sortLayout(updatedDashboard.payload.data.layout));
setSelectedDashboard(updatedDashboard.payload);
setPanelMap(updatedDashboard.payload?.data?.panelMap || {});
}
featureResponse.refetch();
},
// eslint-disable-next-line sonarjs/no-identical-functions
onError: () => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
},
});
}
const handleRowSettingsClick = (id: string): void => {
setIsSettingsModalOpen(true);
setCurrentSelectRowId(id);
};
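// Rename the selected row with the title entered in the settings modal and
// persist the updated dashboard.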
const onSettingsModalSubmit = (): void => {
const newTitle = form.getFieldValue('title');
if (!selectedDashboard) return;
if (!currentSelectRowId) return;
const currentWidget = selectedDashboard?.data?.widgets?.find(
(e) => e.id === currentSelectRowId,
);
if (!currentWidget) return;
currentWidget.title = newTitle;
const updatedWidgets = selectedDashboard?.data?.widgets?.filter(
(e) => e.id !== currentSelectRowId,
);
updatedWidgets?.push(currentWidget);
const updatedSelectedDashboard: Dashboard = {
...selectedDashboard,
data: {
...selectedDashboard.data,
widgets: updatedWidgets,
},
uuid: selectedDashboard.uuid,
};
updateDashboardMutation.mutateAsync(updatedSelectedDashboard, {
onSuccess: (updatedDashboard) => {
if (setLayouts) setLayouts(updatedDashboard.payload?.data?.layout || []);
if (setSelectedDashboard && updatedDashboard.payload) {
setSelectedDashboard(updatedDashboard.payload);
}
if (setPanelMap)
setPanelMap(updatedDashboard.payload?.data?.panelMap || {});
form.setFieldValue('title', '');
setIsSettingsModalOpen(false);
setCurrentSelectRowId(null);
featureResponse.refetch();
},
// eslint-disable-next-line sonarjs/no-identical-functions
onError: () => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
},
});
};
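// Expand or collapse a row: expanding re-inserts the widgets stored for the
// row into the layout, collapsing stashes the panels beneath the row in the
// panel map and removes them from the visible layout, adjusting the y offsets
// of the items below in both cases.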
// eslint-disable-next-line sonarjs/cognitive-complexity
const handleRowCollapse = (id: string): void => {
if (!selectedDashboard) return;
const rowProperties = { ...currentPanelMap[id] };
const updatedPanelMap = { ...currentPanelMap };
let updatedDashboardLayout = [...dashboardLayout];
if (rowProperties.collapsed === true) {
rowProperties.collapsed = false;
const widgetsInsideTheRow = rowProperties.widgets;
let maxY = 0;
widgetsInsideTheRow.forEach((w) => {
maxY = Math.max(maxY, w.y + w.h);
});
const currentRowWidget = dashboardLayout.find((w) => w.i === id);
if (currentRowWidget && widgetsInsideTheRow.length) {
maxY -= currentRowWidget.h + currentRowWidget.y;
}
const idxCurrentRow = dashboardLayout.findIndex((w) => w.i === id);
for (let j = idxCurrentRow + 1; j < dashboardLayout.length; j++) {
updatedDashboardLayout[j].y += maxY;
if (updatedPanelMap[updatedDashboardLayout[j].i]) {
updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
updatedDashboardLayout[j].i
// eslint-disable-next-line @typescript-eslint/no-loop-func
].widgets.map((w) => ({
...w,
y: w.y + maxY,
}));
}
}
updatedDashboardLayout = [...updatedDashboardLayout, ...widgetsInsideTheRow];
} else {
rowProperties.collapsed = true;
const currentIdx = dashboardLayout.findIndex((w) => w.i === id);
let widgetsInsideTheRow: Layout[] = [];
let isPanelMapUpdated = false;
for (let j = currentIdx + 1; j < dashboardLayout.length; j++) {
if (currentPanelMap[dashboardLayout[j].i]) {
rowProperties.widgets = widgetsInsideTheRow;
widgetsInsideTheRow = [];
isPanelMapUpdated = true;
break;
} else {
widgetsInsideTheRow.push(dashboardLayout[j]);
}
}
if (!isPanelMapUpdated) {
rowProperties.widgets = widgetsInsideTheRow;
}
let maxY = 0;
widgetsInsideTheRow.forEach((w) => {
maxY = Math.max(maxY, w.y + w.h);
});
const currentRowWidget = dashboardLayout[currentIdx];
if (currentRowWidget && widgetsInsideTheRow.length) {
maxY -= currentRowWidget.h + currentRowWidget.y;
}
for (let j = currentIdx + 1; j < updatedDashboardLayout.length; j++) {
updatedDashboardLayout[j].y += maxY;
if (updatedPanelMap[updatedDashboardLayout[j].i]) {
updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
updatedDashboardLayout[j].i
// eslint-disable-next-line @typescript-eslint/no-loop-func
].widgets.map((w) => ({
...w,
y: w.y + maxY,
}));
}
}
updatedDashboardLayout = updatedDashboardLayout.filter(
(widget) => !rowProperties.widgets.some((w: Layout) => w.i === widget.i),
);
}
setCurrentPanelMap((prev) => ({
...prev,
...updatedPanelMap,
[id]: {
...rowProperties,
},
}));
setDashboardLayout(sortLayout(updatedDashboardLayout));
};
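// When a row header is dragged, shift the widgets grouped under it by the
// same vertical offset so they stay attached to the row.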
const handleDragStop: ItemCallback = (_, oldItem, newItem): void => {
if (currentPanelMap[oldItem.i]) {
const differenceY = newItem.y - oldItem.y;
const widgetsInsideRow = currentPanelMap[oldItem.i].widgets.map((w) => ({
...w,
y: w.y + differenceY,
}));
setCurrentPanelMap((prev) => ({
...prev,
[oldItem.i]: {
...prev[oldItem.i],
widgets: widgetsInsideRow,
},
}));
}
};
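// Remove the selected row from the widgets, layout and panel map, then save
// the dashboard.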
const handleRowDelete = (): void => {
if (!selectedDashboard) return;
if (!currentSelectRowId) return;
const updatedWidgets = selectedDashboard?.data?.widgets?.filter(
(e) => e.id !== currentSelectRowId,
);
const updatedLayout =
selectedDashboard.data.layout?.filter((e) => e.i !== currentSelectRowId) ||
[];
const updatedPanelMap = { ...currentPanelMap };
delete updatedPanelMap[currentSelectRowId];
const updatedSelectedDashboard: Dashboard = {
...selectedDashboard,
data: {
...selectedDashboard.data,
widgets: updatedWidgets,
layout: updatedLayout,
panelMap: updatedPanelMap,
},
uuid: selectedDashboard.uuid,
};
updateDashboardMutation.mutateAsync(updatedSelectedDashboard, {
onSuccess: (updatedDashboard) => {
if (setLayouts) setLayouts(updatedDashboard.payload?.data?.layout || []);
if (setSelectedDashboard && updatedDashboard.payload) {
setSelectedDashboard(updatedDashboard.payload);
}
if (setPanelMap)
setPanelMap(updatedDashboard.payload?.data?.panelMap || {});
setIsDeleteModalOpen(false);
setCurrentSelectRowId(null);
featureResponse.refetch();
},
// eslint-disable-next-line sonarjs/no-identical-functions
onError: () => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
},
});
};
return (
<>
<Flex justify="flex-end" gap={8} align="center">
@ -178,15 +493,9 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
screen: 'Dashboard Details',
}}
eventName="Dashboard: Facing Issues in dashboard"
buttonText="Facing Issues in dashboard"
message={`Hi Team,
I am facing issues configuring dashboard in SigNoz. Here are my dashboard details
Name: ${data?.title || ''}
Dashboard Id: ${selectedDashboard?.uuid || ''}
Thanks`}
buttonText="Need help with this dashboard?"
message={dashboardHelpMessage(data, selectedDashboard)}
onHoverText="Click here to get help for this dashboard"
/>
<ButtonContainer>
<Tooltip title="Open in Full Screen">
@ -209,13 +518,23 @@ Thanks`}
{t('dashboard:add_panel')}
</Button>
)}
{!isDashboardLocked && addPanelPermission && (
<Button
className="periscope-btn"
onClick={(): void => handleAddRow()}
icon={<PlusOutlined />}
data-testid="add-row"
>
{t('dashboard:add_row')}
</Button>
)}
</ButtonContainer>
</Flex>
<FullScreen handle={handle} className="fullscreen-grid-container">
<ReactGridLayout
cols={12}
rowHeight={100}
rowHeight={45}
autoSize
width={100}
useCSSTransforms
@ -224,6 +543,7 @@ Thanks`}
isResizable={!isDashboardLocked && addPanelPermission}
allowOverlap={false}
onLayoutChange={handleLayoutChange}
onDragStop={handleDragStop}
draggableHandle=".drag-handle"
layout={dashboardLayout}
style={{ backgroundColor: isDarkMode ? '' : themeColors.snowWhite }}
@ -232,6 +552,58 @@ Thanks`}
const { i: id } = layout;
const currentWidget = (widgets || [])?.find((e) => e.id === id);
if (currentWidget?.panelTypes === PANEL_GROUP_TYPES.ROW) {
const rowWidgetProperties = currentPanelMap[id] || {};
return (
<CardContainer
className="row-card"
isDarkMode={isDarkMode}
key={id}
data-grid={JSON.stringify(currentWidget)}
>
<div className={cx('row-panel')}>
<div style={{ display: 'flex', gap: '10px', alignItems: 'center' }}>
<Button
disabled={updateDashboardMutation.isLoading}
icon={
rowWidgetProperties.collapsed ? (
<MoveDown size={14} />
) : (
<MoveUp size={14} />
)
}
type="text"
onClick={(): void => handleRowCollapse(id)}
/>
<Typography.Text>{currentWidget.title}</Typography.Text>
<Button
icon={<Settings size={14} />}
type="text"
onClick={(): void => handleRowSettingsClick(id)}
/>
</div>
{rowWidgetProperties.collapsed && (
<Button
type="text"
icon={<GripVertical size={14} />}
className="drag-handle"
/>
)}
{!rowWidgetProperties.collapsed && (
<Button
type="text"
icon={<Trash2 size={14} />}
onClick={(): void => {
setIsDeleteModalOpen(true);
setCurrentSelectRowId(id);
}}
/>
)}
</div>
</CardContainer>
);
}
return (
<CardContainer
className={isDashboardLocked ? '' : 'enable-resize'}
@ -244,7 +616,7 @@ Thanks`}
$panelType={currentWidget?.panelTypes || PANEL_TYPES.TIME_SERIES}
>
<GridCard
widget={currentWidget || ({ id, query: {} } as Widgets)}
widget={(currentWidget as Widgets) || ({ id, query: {} } as Widgets)}
headerMenuList={widgetActions}
variables={variables}
version={selectedDashboard?.data?.version}
@ -255,6 +627,46 @@ Thanks`}
);
})}
</ReactGridLayout>
<Modal
open={isSettingsModalOpen}
title="Row Options"
destroyOnClose
footer={null}
onCancel={(): void => {
setIsSettingsModalOpen(false);
setCurrentSelectRowId(null);
}}
>
<Form form={form} onFinish={onSettingsModalSubmit} requiredMark>
<Form.Item required name={['title']}>
<Input
placeholder="Enter row name here..."
defaultValue={defaultTo(
widgets?.find((widget) => widget.id === currentSelectRowId)
?.title as string,
'Sample Title',
)}
/>
</Form.Item>
<Form.Item>
<Button type="primary" htmlType="submit">
Apply Changes
</Button>
</Form.Item>
</Form>
</Modal>
<Modal
open={isDeleteModalOpen}
title="Delete Row"
destroyOnClose
onCancel={(): void => {
setIsDeleteModalOpen(false);
setCurrentSelectRowId(null);
}}
onOk={(): void => handleRowDelete()}
>
<Typography.Text>Are you sure you want to delete this row?</Typography.Text>
</Modal>
</FullScreen>
</>
);

View File

@ -16,6 +16,6 @@ export const EMPTY_WIDGET_LAYOUT = {
i: PANEL_TYPES.EMPTY_WIDGET,
w: 6,
x: 0,
h: 3,
h: 6,
y: 0,
};

View File

@ -29,6 +29,17 @@ interface Props {
export const CardContainer = styled.div<Props>`
overflow: auto;
&.row-card {
.row-panel {
height: 100%;
display: flex;
justify-content: space-between;
background: var(--bg-ink-400);
align-items: center;
overflow: hidden;
}
}
&.enable-resize {
:hover {
.react-resizable-handle {

View File

@ -4,6 +4,7 @@ import { Input, Typography } from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import DropDown from 'components/DropDown/DropDown';
import { listAlertMessage } from 'components/facingIssueBtn/util';
import {
DynamicColumnsKey,
TableDataSource,
@ -363,12 +364,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
screen: 'Alert list page',
},
eventName: 'Alert: Facing Issues in alert',
buttonText: 'Facing Issues in alert',
message: `Hi Team,
I am facing issues with alerts.
Thanks`,
buttonText: 'Facing issues with alerts?',
message: listAlertMessage,
onHoverText: 'Click here to get help with alerts',
}}
/>
</>

View File

@ -3,6 +3,7 @@ import { Card, Col, Dropdown, Input, Row, TableColumnProps } from 'antd';
import { ItemType } from 'antd/es/menu/hooks/useItems';
import createDashboard from 'api/dashboard/create';
import { AxiosError } from 'axios';
import { dashboardListMessage } from 'components/facingIssueBtn/util';
import {
DynamicColumnsKey,
TableDataSource,
@ -390,12 +391,9 @@ function DashboardsList(): JSX.Element {
screen: 'Dashboard list page',
},
eventName: 'Dashboard: Facing Issues in dashboard',
buttonText: 'Facing Issues in dashboard',
message: `Hi Team,
I am facing issues with dashboards.
Thanks`,
buttonText: 'Facing issues with dashboards?',
message: dashboardListMessage,
onHoverText: 'Click here to get help with dashboards',
}}
/>
</TableContainer>

View File

@ -85,8 +85,8 @@ function LogControls(): JSX.Element | null {
logs.map((log) => {
const timestamp =
typeof log.timestamp === 'string'
? dayjs(log.timestamp).format()
: dayjs(log.timestamp / 1e6).format();
? dayjs(log.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(log.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
return FlatLogData({
...log,

View File

@ -531,8 +531,8 @@ function LogsExplorerViews({
logs.map((log) => {
const timestamp =
typeof log.timestamp === 'string'
? dayjs(log.timestamp).format()
: dayjs(log.timestamp / 1e6).format();
? dayjs(log.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(log.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
return FlatLogData({
timestamp,

View File

@ -67,12 +67,13 @@ function LeftContainer({
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
}));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [stagedQuery, selectedTime]);
}, [stagedQuery, selectedTime, globalSelectedInterval]);
const queryResponse = useGetQueryRange(
requestData,

View File

@ -0,0 +1,4 @@
.facing-issue-btn-container {
display: grid;
grid-template-columns: 1fr max-content;
}

View File

@ -1,7 +1,10 @@
/* eslint-disable sonarjs/cognitive-complexity */
import './NewWidget.styles.scss';
import { LockFilled, WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Tooltip, Typography } from 'antd';
import { Button, Modal, Space, Tooltip, Typography } from 'antd';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { chartHelpMessage } from 'components/facingIssueBtn/util';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
@ -104,7 +107,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
return defaultTo(
selectedWidget,
getDefaultWidgetData(widgetId || '', selectedGraph),
);
) as Widgets;
}, [query, selectedGraph, widgets]);
const [selectedWidget, setSelectedWidget] = useState(getWidget());
@ -257,7 +260,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
i: widgetId || '',
w: 6,
x: 0,
h: 3,
h: 6,
y: 0,
},
...updatedLayout,
@ -402,7 +405,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
return (
<Container>
<Flex justify="space-between" align="center">
<div className="facing-issue-btn-container">
<FacingIssueBtn
attributes={{
uuid: selectedDashboard?.uuid,
@ -410,18 +413,12 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
panelType: graphType,
widgetId: query.get('widgetId'),
queryType: currentQuery.queryType,
screen: 'Dashboard list page',
}}
eventName="Dashboard: Facing Issues in dashboard"
buttonText="Facing Issues in dashboard"
message={`Hi Team,
I am facing issues configuring dashboard in SigNoz. Here are my dashboard details
Name: ${selectedDashboard?.data.title || ''}
Panel type: ${graphType}
Dashboard Id: ${selectedDashboard?.uuid || ''}
Thanks`}
buttonText="Need help with this chart?"
message={chartHelpMessage(selectedDashboard, graphType)}
onHoverText="Click here to get help in creating chart"
/>
<ButtonContainer>
{isSaveDisabled && (
@ -450,7 +447,7 @@ Thanks`}
)}
<Button onClick={onClickDiscardHandler}>Discard Changes</Button>
</ButtonContainer>
</Flex>
</div>
<PanelContainer>
<LeftContainerWrapper flex={5}>

View File

@ -0,0 +1,87 @@
import { render } from '@testing-library/react';
import { I18nextProvider } from 'react-i18next';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import i18n from 'ReactI18';
import store from 'store';
import ChangeHistory from '../index';
import { pipelineData, pipelineDataHistory } from './testUtils';
const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchOnWindowFocus: false,
},
},
});
describe('ChangeHistory test', () => {
it('should render changeHistory correctly', () => {
const { getAllByText, getByText } = render(
<MemoryRouter>
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<ChangeHistory pipelineData={pipelineData} />
</I18nextProvider>
</Provider>
</QueryClientProvider>
</MemoryRouter>,
);
// change History table headers
[
'Version',
'Deployment Stage',
'Last Deploy Message',
'Last Deployed Time',
'Edited by',
].forEach((text) => expect(getByText(text)).toBeInTheDocument());
// table content
expect(getAllByText('test-user').length).toBe(2);
expect(getAllByText('Deployment was successful').length).toBe(2);
});
it('test deployment stage and icon based on history data', () => {
const { getByText, container } = render(
<MemoryRouter>
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<ChangeHistory
pipelineData={{
...pipelineData,
history: pipelineDataHistory,
}}
/>
</I18nextProvider>
</Provider>
</QueryClientProvider>
</MemoryRouter>,
);
// assertion for different deployment stages
expect(container.querySelector('[data-icon="loading"]')).toBeInTheDocument();
expect(getByText('In Progress')).toBeInTheDocument();
expect(
container.querySelector('[data-icon="exclamation-circle"]'),
).toBeInTheDocument();
expect(getByText('Dirty')).toBeInTheDocument();
expect(
container.querySelector('[data-icon="close-circle"]'),
).toBeInTheDocument();
expect(getByText('Failed')).toBeInTheDocument();
expect(
container.querySelector('[data-icon="minus-circle"]'),
).toBeInTheDocument();
expect(getByText('Unknown')).toBeInTheDocument();
expect(container.querySelectorAll('.ant-table-row').length).toBe(5);
});
});

View File

@ -0,0 +1,240 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { Pipeline } from 'types/api/pipeline/def';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
export const pipelineData: Pipeline = {
id: 'test-id-1',
version: 24,
elementType: 'log_pipelines',
active: false,
is_valid: false,
disabled: false,
deployStatus: 'DEPLOYED',
deployResult: 'Deployment was successful',
lastHash: 'log_pipelines:24',
lastConf: 'oiwernveroi',
createdBy: 'test-created-by',
pipelines: [
{
id: 'test-id-2',
orderId: 1,
name: 'hotrod logs parser',
alias: 'hotrodlogsparser',
description: 'Trying to test Logs Pipeline feature',
enabled: true,
filter: {
op: 'AND',
items: [
{
key: {
key: 'container_name',
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
},
id: 'sampleid',
value: 'hotrod',
op: '=',
},
],
},
config: [
{
type: 'regex_parser',
id: 'parsetext(regex)',
output: 'parseattribsjson',
on_error: 'send',
orderId: 1,
enabled: true,
name: 'parse text (regex)',
parse_to: 'attributes',
regex:
'.+\\t+(?P<log_level>.+)\\t+(?P<location>.+)\\t+(?P<message>.+)\\t+(?P<attribs_json>.+)',
parse_from: 'body',
},
{
type: 'json_parser',
id: 'parseattribsjson',
output: 'removetempattribs_json',
orderId: 2,
enabled: true,
name: 'parse attribs json',
parse_to: 'attributes',
parse_from: 'attributes.attribs_json',
},
{
type: 'remove',
id: 'removetempattribs_json',
output: 'c2062723-895e-4614-ba38-29c5d5ee5927',
orderId: 3,
enabled: true,
name: 'remove temp attribs_json',
field: 'attributes.attribs_json',
},
{
type: 'add',
id: 'c2062723-895e-4614-ba38-29c5d5ee5927',
orderId: 4,
enabled: true,
name: 'test add ',
field: 'resource["container.name"]',
value: 'hotrod',
},
],
createdBy: 'test@email',
createdAt: '2024-01-02T13:56:02.858300964Z',
},
{
id: 'tes-id-1',
orderId: 2,
name: 'Logs Parser - test - Customer Service',
alias: 'LogsParser-test-CustomerService',
description: 'Trying to test Logs Pipeline feature',
enabled: true,
filter: {
op: 'AND',
items: [
{
key: {
key: 'service',
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
},
id: 'sample-test-1',
value: 'customer',
op: '=',
},
],
},
config: [
{
type: 'grok_parser',
id: 'Testtest',
on_error: 'send',
orderId: 1,
enabled: true,
name: 'Test test',
parse_to: 'attributes',
pattern:
'^%{DATE:date}Z INFO customer/database.go:73 Loading customer {"service": "customer", "component": "mysql", "trace_id": "test-id", "span_id": "1427a3fcad8b1514", "customer_id": "567"}',
parse_from: 'body',
},
],
createdBy: 'test@email',
createdAt: '2024-01-02T13:56:02.863764227Z',
},
],
history: [
{
id: 'test-id-4',
version: 24,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'DEPLOYED',
deployResult: 'Deployment was successful',
lastHash: 'log_pipelines:24',
lastConf: 'eovineroiv',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2024-01-02T13:56:02Z',
},
{
id: 'test-4',
version: 23,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'DEPLOYED',
deployResult: 'Deployment was successful',
lastHash: 'log_pipelines:23',
lastConf: 'eivrounreovi',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2023-12-29T12:59:20Z',
},
],
};
export const pipelineDataHistory: Pipeline['history'] = [
{
id: 'test-id-4',
version: 24,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'DEPLOYED',
deployResult: 'Deployment was successful',
lastHash: 'log_pipelines:24',
lastConf: 'eovineroiv',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2024-01-02T13:56:02Z',
},
{
id: 'test-4',
version: 23,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'IN_PROGRESS',
deployResult: 'Deployment is in progress',
lastHash: 'log_pipelines:23',
lastConf: 'eivrounreovi',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2023-12-29T12:59:20Z',
},
{
id: 'test-4-1',
version: 25,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'DIRTY',
deployResult: 'Deployment is dirty',
lastHash: 'log_pipelines:23',
lastConf: 'eivrounreovi',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2023-12-29T12:59:20Z',
},
{
id: 'test-4-2',
version: 26,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'FAILED',
deployResult: 'Deployment failed',
lastHash: 'log_pipelines:23',
lastConf: 'eivrounreovi',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2023-12-29T12:59:20Z',
},
{
id: 'test-4-3',
version: 27,
elementType: 'log_pipelines',
active: false,
isValid: false,
disabled: false,
deployStatus: 'UNKNOWN',
deployResult: '',
lastHash: 'log_pipelines:23',
lastConf: 'eivrounreovi',
createdBy: 'test-created-by',
createdByName: 'test-user',
createdAt: '2023-12-29T12:59:20Z',
},
];

View File

@ -1,4 +1,5 @@
import { render } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { I18nextProvider } from 'react-i18next';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
@ -8,8 +9,17 @@ import store from 'store';
import CreatePipelineButton from '../Layouts/Pipeline/CreatePipelineButton';
import { pipelineApiResponseMockData } from '../mocks/pipeline';
const trackEventVar = jest.fn();
jest.mock('hooks/analytics/useAnalytics', () => ({
__esModule: true,
default: jest.fn().mockImplementation(() => ({
trackEvent: trackEventVar,
trackPageView: jest.fn(),
})),
}));
describe('PipelinePage container test', () => {
it('should render CreatePipelineButton section', () => {
it('should render CreatePipelineButton section', async () => {
const { asFragment } = render(
<MemoryRouter>
<Provider store={store}>
@ -26,4 +36,58 @@ describe('PipelinePage container test', () => {
);
expect(asFragment()).toMatchSnapshot();
});
it('CreatePipelineButton - edit mode & tracking', async () => {
const { getByText } = render(
<MemoryRouter>
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<CreatePipelineButton
setActionType={jest.fn()}
isActionMode="viewing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
/>
</I18nextProvider>
</Provider>
</MemoryRouter>,
);
// enter_edit_mode click and track event data
const editButton = getByText('enter_edit_mode');
expect(editButton).toBeInTheDocument();
await userEvent.click(editButton);
expect(trackEventVar).toBeCalledWith('Logs: Pipelines: Entered Edit Mode', {
source: 'signoz-ui',
});
});
it('CreatePipelineButton - add new mode & tracking', async () => {
const { getByText } = render(
<MemoryRouter>
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<CreatePipelineButton
setActionType={jest.fn()}
isActionMode="viewing-mode"
setActionMode={jest.fn()}
pipelineData={{ ...pipelineApiResponseMockData, pipelines: [] }}
/>
</I18nextProvider>
</Provider>
</MemoryRouter>,
);
// new_pipeline click and track event data
const editButton = getByText('new_pipeline');
expect(editButton).toBeInTheDocument();
await userEvent.click(editButton);
expect(trackEventVar).toBeCalledWith(
'Logs: Pipelines: Clicked Add New Pipeline',
{
source: 'signoz-ui',
},
);
});
});

View File

@ -1,4 +1,5 @@
import { render } from '@testing-library/react';
import { fireEvent, render, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { I18nextProvider } from 'react-i18next';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
@ -20,4 +21,43 @@ describe('PipelinePage container test', () => {
);
expect(asFragment()).toMatchSnapshot();
});
it('should handle search', async () => {
const setPipelineValue = jest.fn();
const { getByPlaceholderText, container } = render(
<MemoryRouter>
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<PipelinesSearchSection setPipelineSearchValue={setPipelineValue} />
</I18nextProvider>
</Provider>
</MemoryRouter>,
);
const searchInput = getByPlaceholderText('search_pipeline_placeholder');
// Type into the search input
userEvent.type(searchInput, 'sample_pipeline');
jest.advanceTimersByTime(299);
expect(setPipelineValue).not.toHaveBeenCalled();
// Wait for the debounce delay to pass
await waitFor(() => {
// Expect the callback to be called after debounce delay
expect(setPipelineValue).toHaveBeenCalledWith('sample_pipeline');
});
// clear button
fireEvent.click(
container.querySelector(
'span[class*="ant-input-clear-icon"]',
) as HTMLElement,
);
// Wait for the debounce delay to pass
await waitFor(() => {
expect(setPipelineValue).toHaveBeenCalledWith('');
});
});
});

View File

@ -278,7 +278,7 @@ function SideNav({
}, [isCloudUserVal, isEnterprise, isFetching]);
useEffect(() => {
if (!isCloudUserVal) {
if (!(isCloudUserVal || isEECloudUser())) {
let updatedMenuItems = [...menuItems];
updatedMenuItems = updatedMenuItems.filter(
(item) => item.key !== ROUTES.INTEGRATIONS,

View File

@ -13,11 +13,18 @@ function Events({
return <Typography>No events data in selected span</Typography>;
}
const sortedTraceEvents = events.sort((a, b) => {
// Handle undefined names by treating them as empty strings
const nameA = a.name || '';
const nameB = b.name || '';
return nameA.localeCompare(nameB);
});
return (
<ErrorTag
onToggleHandler={onToggleHandler}
setText={setText}
event={events}
event={sortedTraceEvents}
firstSpanStartTime={firstSpanStartTime}
/>
);

View File

@ -41,8 +41,9 @@ function Tags({
setSearchText(value);
};
const filteredTags = tags.filter((tag) => tag.key.includes(searchText));
const filteredTags = tags
.filter((tag) => tag.key.includes(searchText))
.sort((a, b) => a.key.localeCompare(b.key));
if (tags.length === 0) {
return <Typography>No tags in selected span</Typography>;
}

View File

@ -16,7 +16,7 @@ export const addEmptyWidgetInDashboardJSONWithQuery = (
i: widgetId,
w: 6,
x: 0,
h: 3,
h: 6,
y: 0,
},
...(dashboard?.data?.layout || []),

View File

@ -36,7 +36,7 @@ export const getPaginationQueryData: SetupPaginationQueryData = ({
const updatedFilters: TagFilter = {
...filters,
items: filters.items.filter((item) => item.key?.key !== 'id'),
items: filters?.items?.filter((item) => item.key?.key !== 'id'),
};
const tagFilters: TagFilter = {

View File

@ -100,7 +100,7 @@ export const getUPlotChartOptions = ({
y: {
...getYAxisScale({
thresholds,
series: apiResponse?.data.newResult.data.result,
series: apiResponse?.data?.newResult?.data?.result || [],
yAxisUnit,
softMax,
softMin,

View File

@ -9,6 +9,7 @@ import history from 'lib/history';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useEffect, useState } from 'react';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { Widgets } from 'types/api/dashboard/getAll';
function DashboardWidget(): JSX.Element | null {
const { search } = useLocation();
@ -24,7 +25,7 @@ function DashboardWidget(): JSX.Element | null {
const { data } = selectedDashboard || {};
const { widgets } = data || {};
const selectedWidget = widgets?.find((e) => e.id === widgetId);
const selectedWidget = widgets?.find((e) => e.id === widgetId) as Widgets;
useEffect(() => {
const params = new URLSearchParams(search);

View File

@ -282,7 +282,7 @@ function SaveView(): JSX.Element {
<div className="save-view-content">
<Typography.Title className="title">Views</Typography.Title>
<Typography.Text className="subtitle">
Manage your saved views for logs.
Manage your saved views for {ROUTES_VS_SOURCEPAGE[pathname]}.
</Typography.Text>
<Input
placeholder="Search for views..."

View File

@ -10,6 +10,7 @@ import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashbo
import useAxiosError from 'hooks/useAxiosError';
import useTabVisibility from 'hooks/useTabFocus';
import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout';
import { defaultTo } from 'lodash-es';
import isEqual from 'lodash-es/isEqual';
import isUndefined from 'lodash-es/isUndefined';
import omitBy from 'lodash-es/omitBy';
@ -37,6 +38,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as generateUUID } from 'uuid';
import { IDashboardContext } from './types';
import { sortLayout } from './util';
const DashboardContext = createContext<IDashboardContext>({
isDashboardSliderOpen: false,
@ -47,6 +49,8 @@ const DashboardContext = createContext<IDashboardContext>({
selectedDashboard: {} as Dashboard,
dashboardId: '',
layouts: [],
panelMap: {},
setPanelMap: () => {},
setLayouts: () => {},
setSelectedDashboard: () => {},
updatedTimeRef: {} as React.MutableRefObject<Dayjs | null>,
@ -94,6 +98,10 @@ export function DashboardProvider({
const [layouts, setLayouts] = useState<Layout[]>([]);
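// Maps each row widget id to the panels grouped under it and whether the row
// is collapsed.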
const [panelMap, setPanelMap] = useState<
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
const { isLoggedIn } = useSelector<AppState, AppReducer>((state) => state.app);
const dashboardId =
@ -199,7 +207,9 @@ export function DashboardProvider({
dashboardRef.current = updatedDashboardData;
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(sortLayout(getUpdatedLayout(updatedDashboardData.data.layout)));
setPanelMap(defaultTo(updatedDashboardData?.data?.panelMap, {}));
}
if (
@ -235,7 +245,11 @@ export function DashboardProvider({
updatedTimeRef.current = dayjs(updatedDashboardData.updated_at);
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(
sortLayout(getUpdatedLayout(updatedDashboardData.data.layout)),
);
setPanelMap(defaultTo(updatedDashboardData.data.panelMap, {}));
},
});
@ -256,7 +270,11 @@ export function DashboardProvider({
updatedDashboardData.data.layout,
)
) {
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(
sortLayout(getUpdatedLayout(updatedDashboardData.data.layout)),
);
setPanelMap(defaultTo(updatedDashboardData.data.panelMap, {}));
}
}
},
@ -323,7 +341,9 @@ export function DashboardProvider({
selectedDashboard,
dashboardId,
layouts,
panelMap,
setLayouts,
setPanelMap,
setSelectedDashboard,
updatedTimeRef,
setToScrollWidgetId,
@ -339,6 +359,7 @@ export function DashboardProvider({
selectedDashboard,
dashboardId,
layouts,
panelMap,
toScrollWidgetId,
updateLocalStorageDashboardVariables,
currentDashboard,

View File

@ -12,6 +12,8 @@ export interface IDashboardContext {
selectedDashboard: Dashboard | undefined;
dashboardId: string;
layouts: Layout[];
panelMap: Record<string, { widgets: Layout[]; collapsed: boolean }>;
setPanelMap: React.Dispatch<React.SetStateAction<Record<string, any>>>;
setLayouts: React.Dispatch<React.SetStateAction<Layout[]>>;
setSelectedDashboard: React.Dispatch<
React.SetStateAction<Dashboard | undefined>

View File

@ -1,22 +1,34 @@
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
export const getPreviousWidgets = (
selectedDashboard: Dashboard,
selectedWidgetIndex: number,
): Widgets[] =>
selectedDashboard.data.widgets?.slice(0, selectedWidgetIndex || 0) || [];
(selectedDashboard.data.widgets?.slice(
0,
selectedWidgetIndex || 0,
) as Widgets[]) || [];
export const getNextWidgets = (
selectedDashboard: Dashboard,
selectedWidgetIndex: number,
): Widgets[] =>
selectedDashboard.data.widgets?.slice(
(selectedDashboard.data.widgets?.slice(
(selectedWidgetIndex || 0) + 1, // this is never undefined
selectedDashboard.data.widgets?.length,
) || [];
) as Widgets[]) || [];
export const getSelectedWidgetIndex = (
selectedDashboard: Dashboard,
widgetId: string | null,
): number =>
selectedDashboard.data.widgets?.findIndex((e) => e.id === widgetId) || 0;
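// Order layout items top to bottom, breaking ties left to right, so rows and
// the panels under them render in a stable sequence.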
export const sortLayout = (layout: Layout[]): Layout[] =>
[...layout].sort((a, b) => {
if (a.y === b.y) {
return a.x - b.x;
}
return a.y - b.y;
});

View File

@ -1,4 +1,4 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { ReactNode } from 'react';
@ -59,13 +59,21 @@ export interface DashboardData {
description?: string;
tags?: string[];
name?: string;
widgets?: Widgets[];
widgets?: Array<WidgetRow | Widgets>;
title: string;
layout?: Layout[];
panelMap?: Record<string, { widgets: Layout[]; collapsed: boolean }>;
variables: Record<string, IDashboardVariable>;
version?: string;
}
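// A row entry in the dashboard's widget list; the panels grouped under it are
// tracked in panelMap.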
export interface WidgetRow {
id: string;
panelTypes: PANEL_GROUP_TYPES;
title: ReactNode;
description: string;
}
export interface IBaseWidget {
isStacked: boolean;
id: string;

View File

@ -5813,12 +5813,12 @@ axe-core@^4.6.2:
resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz"
integrity sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==
axios@1.6.2:
version "1.6.2"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2"
integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==
axios@1.6.4:
version "1.6.4"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.4.tgz#184ee1f63d412caffcf30d2c50982253c3ee86e0"
integrity sha512-heJnIs6N4aa1eSthhN9M5ioILu8Wi8vmQW9iHQ9NUvfkJb0lEEDUiIdQNAuBtfUt3FxReaKdpQA5DbmMOqzF/A==
dependencies:
follow-redirects "^1.15.0"
follow-redirects "^1.15.4"
form-data "^4.0.0"
proxy-from-env "^1.1.0"
@ -6333,13 +6333,13 @@ bl@^4.1.0:
inherits "^2.0.4"
readable-stream "^3.4.0"
body-parser@1.20.1:
version "1.20.1"
resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz"
integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==
body-parser@1.20.2:
version "1.20.2"
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
dependencies:
bytes "3.1.2"
content-type "~1.0.4"
content-type "~1.0.5"
debug "2.6.9"
depd "2.0.0"
destroy "1.2.0"
@ -6347,7 +6347,7 @@ body-parser@1.20.1:
iconv-lite "0.4.24"
on-finished "2.4.1"
qs "6.11.0"
raw-body "2.5.1"
raw-body "2.5.2"
type-is "~1.6.18"
unpipe "1.0.0"
@ -7123,7 +7123,7 @@ content-disposition@0.5.4:
dependencies:
safe-buffer "5.2.1"
content-type@~1.0.4:
content-type@~1.0.4, content-type@~1.0.5:
version "1.0.5"
resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz"
integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
@ -7172,10 +7172,10 @@ cookie-signature@1.0.6:
resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz"
integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
cookie@0.5.0:
version "0.5.0"
resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz"
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
cookie@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
cookie@^0.4.2:
version "0.4.2"
@ -8902,16 +8902,16 @@ expect@^29.0.0:
jest-util "^29.5.0"
express@^4.17.3:
version "4.18.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59"
integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==
version "4.19.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
dependencies:
accepts "~1.3.8"
array-flatten "1.1.1"
body-parser "1.20.1"
body-parser "1.20.2"
content-disposition "0.5.4"
content-type "~1.0.4"
cookie "0.5.0"
cookie "0.6.0"
cookie-signature "1.0.6"
debug "2.6.9"
depd "2.0.0"
@ -9204,10 +9204,10 @@ follow-redirects@^1.0.0, follow-redirects@^1.14.0:
resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz"
integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
follow-redirects@^1.15.0:
version "1.15.3"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a"
integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==
follow-redirects@^1.15.4:
version "1.15.4"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.4.tgz#cdc7d308bf6493126b17ea2191ea0ccf3e535adf"
integrity sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==
fontfaceobserver@2.3.0:
version "2.3.0"
@ -14489,10 +14489,10 @@ range-parser@^1.2.1, range-parser@~1.2.1:
resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz"
integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
raw-body@2.5.1:
version "2.5.1"
resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz"
integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
raw-body@2.5.2:
version "2.5.2"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
dependencies:
bytes "3.1.2"
http-errors "2.0.0"

2
go.mod
View File

@ -6,7 +6,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.88.21
github.com/SigNoz/signoz-otel-collector v0.88.22
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3

4
go.sum
View File

@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/prometheus v1.11.0 h1:toX7fU2wqY1TnzvPzDglIYx6OxpqrZ0NNlM/H5S5+u8=
github.com/SigNoz/prometheus v1.11.0/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
github.com/SigNoz/signoz-otel-collector v0.88.21 h1:9K1FLUncUZh7cPfOLDPuT8itU8LyCufk4QwGp18hK88=
github.com/SigNoz/signoz-otel-collector v0.88.21/go.mod h1:sT1EM9PFDaOJLbAz5npWpgXK6OhpWJ9PpSwyhHWs9rU=
github.com/SigNoz/signoz-otel-collector v0.88.22 h1:PW9TpdQ8b8vWnUKWVe/w1bX8/Rq2MUUHGDIsx+KA+o0=
github.com/SigNoz/signoz-otel-collector v0.88.22/go.mod h1:sT1EM9PFDaOJLbAz5npWpgXK6OhpWJ9PpSwyhHWs9rU=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=

View File

@ -163,12 +163,24 @@ func NewReaderFromClickhouseConnection(
os.Exit(1)
}
regex := os.Getenv("ClickHouseOptimizeReadInOrderRegex")
var regexCompiled *regexp.Regexp
if regex != "" {
regexCompiled, err = regexp.Compile(regex)
if err != nil {
zap.L().Error("Incorrect regex for ClickHouseOptimizeReadInOrderRegex")
os.Exit(1)
}
}
wrap := clickhouseConnWrapper{
conn: db,
settings: ClickhouseQuerySettings{
MaxExecutionTimeLeaf: os.Getenv("ClickHouseMaxExecutionTimeLeaf"),
TimeoutBeforeCheckingExecutionSpeed: os.Getenv("ClickHouseTimeoutBeforeCheckingExecutionSpeed"),
MaxBytesToRead: os.Getenv("ClickHouseMaxBytesToRead"),
OptimizeReadInOrderRegex: os.Getenv("ClickHouseOptimizeReadInOrderRegex"),
OptimizeReadInOrderRegexCompiled: regexCompiled,
},
}

View File

@ -3,6 +3,7 @@ package clickhouseReader
import (
"context"
"encoding/json"
"regexp"
"strings"
"github.com/ClickHouse/clickhouse-go/v2"
@ -13,6 +14,8 @@ type ClickhouseQuerySettings struct {
MaxExecutionTimeLeaf string
TimeoutBeforeCheckingExecutionSpeed string
MaxBytesToRead string
OptimizeReadInOrderRegex string
OptimizeReadInOrderRegexCompiled *regexp.Regexp
}
type clickhouseConnWrapper struct {
@ -58,6 +61,11 @@ func (c clickhouseConnWrapper) addClickHouseSettings(ctx context.Context, query
settings["timeout_before_checking_execution_speed"] = c.settings.TimeoutBeforeCheckingExecutionSpeed
}
// disable optimize_read_in_order for queries matching the configured regex
if c.settings.OptimizeReadInOrderRegex != "" && c.settings.OptimizeReadInOrderRegexCompiled.Match([]byte(query)) {
settings["optimize_read_in_order"] = 0
}
ctx = clickhouse.Context(ctx, clickhouse.WithSettings(settings))
return ctx
}

View File

@ -0,0 +1,125 @@
### Collect Clickhouse Logs
You can configure Clickhouse logs collection by providing the required collector config to your collector.
#### Create collector config file
Save the following config for collecting clickhouse logs in a file named `clickhouse-logs-collection-config.yaml`
```yaml
receivers:
filelog/clickhouse:
include: ["${env:CLICKHOUSE_LOG_FILE}"]
operators:
# Parse default clickhouse text log format.
# See https://github.com/ClickHouse/ClickHouse/blob/master/src/Loggers/OwnPatternFormatter.cpp
- type: recombine
source_identifier: attributes["log.file.name"]
is_first_entry: body matches '^\\d{4}\\.\\d{2}\\.\\d{2}\\s+'
combine_field: body
overwrite_with: oldest
- type: regex_parser
parse_from: body
if: body matches '^(?P<ts>\\d{4}\\.\\d{2}\\.\\d{2} \\d{2}:\\d{2}:\\d{2}.?[0-9]*)\\s+\\[\\s+(\\x1b.*?m)?(?P<thread_id>\\d*)(\\x1b.*?m)?\\s+\\]\\s+{((\\x1b.*?m)?(?P<query_id>[0-9a-zA-Z-_]*)(\\x1b.*?m)?)?}\\s+<(\\x1b.*?m)?(?P<log_level>\\w*)(\\x1b.*?m)?>\\s+((\\x1b.*?m)?(?P<clickhouse_component>[a-zA-Z0-9_]+)(\\x1b.*?m)?:)?\\s+(?s)(?P<message>.*)$'
regex: '^(?P<ts>\d{4}\.\d{2}\.\d{2} \d{2}:\d{2}:\d{2}.?[0-9]*)\s+\[\s+(\x1b.*?m)?(?P<thread_id>\d*)(\x1b.*?m)?\s+\]\s+{((\x1b.*?m)?(?P<query_id>[0-9a-zA-Z-_]*)(\x1b.*?m)?)?}\s+<(\x1b.*?m)?(?P<log_level>\w*)(\x1b.*?m)?>\s+((\x1b.*?m)?(?P<clickhouse_component>[a-zA-Z0-9_]+)(\x1b.*?m)?:)?\s+(?s)(?P<message>.*)$'
- type: time_parser
if: attributes.ts != nil
parse_from: attributes.ts
layout_type: gotime
layout: 2006.01.02 15:04:05.999999
location: ${env:CLICKHOUSE_TIMEZONE}
- type: remove
if: attributes.ts != nil
field: attributes.ts
- type: severity_parser
if: attributes.log_level != nil
parse_from: attributes.log_level
overwrite_text: true
# For mapping details, see getPriorityName defined in https://github.com/ClickHouse/ClickHouse/blob/master/src/Interpreters/InternalTextLogsQueue.cpp
mapping:
trace:
- Trace
- Test
debug: Debug
info:
- Information
- Notice
warn: Warning
error: Error
fatal:
- Fatal
- Critical
- type: remove
if: attributes.log_level != nil
field: attributes.log_level
- type: move
if: attributes.message != nil
from: attributes.message
to: body
- type: add
field: attributes.source
value: clickhouse
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
exporters:
# export to SigNoz cloud
otlp/clickhouse-logs:
endpoint: "${env:OTLP_DESTINATION_ENDPOINT}"
tls:
insecure: false
headers:
"signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}"
# export to local collector
# otlp/clickhouse-logs:
# endpoint: "localhost:4317"
# tls:
# insecure: true
service:
pipelines:
logs/clickhouse:
receivers: [filelog/clickhouse]
processors: [batch]
exporters: [otlp/clickhouse-logs]
```
#### Set Environment Variables
Set the following environment variables in your otel-collector environment:
```bash
# Path of the Clickhouse server log file. Must be accessible by the otel collector.
# Typically found at /var/log/clickhouse-server/clickhouse-server.log.
# The log file location can be found in the clickhouse server config.
# See https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#logger
export CLICKHOUSE_LOG_FILE="/var/log/clickhouse-server/server.log"
# Timezone of the clickhouse server.
# Clickhouse logs timestamps in its local timezone without TZ info.
# The timezone setting can be found in the clickhouse config. For details see https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#timezone
# Must be an IANA timezone name like Asia/Kolkata. For examples, see https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
export CLICKHOUSE_TIMEZONE="Etc/UTC"
# region specific SigNoz cloud ingestion endpoint
export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443"
# your SigNoz ingestion key
export SIGNOZ_INGESTION_KEY="signoz-ingestion-key"
```
#### Use collector config file
Make the collector config file available to your otel collector, and use it by adding the following flag to the command that runs your collector:
```bash
--config clickhouse-logs-collection-config.yaml
```
Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag.
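For example, if your collector already runs with a base config, pass both files. This is a minimal sketch; the collector binary path and the base config location are assumptions and will differ per installation.
```bash
# Hypothetical binary path and base config; adjust for your installation.
/usr/local/bin/signoz-otel-collector \
  --config /etc/otel/base-config.yaml \
  --config ./clickhouse-logs-collection-config.yaml
```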

View File

@ -0,0 +1,82 @@
### Collect Clickhouse Metrics
You can configure Clickhouse metrics collection by providing the required collector config to your collector.
#### Create collector config file
Save the following config for collecting Clickhouse metrics in a file named `clickhouse-metrics-collection-config.yaml`
```yaml
receivers:
prometheus/clickhouse:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: clickhouse
static_configs:
- targets:
- ${env:CLICKHOUSE_PROM_METRICS_ENDPOINT}
metrics_path: ${env:CLICKHOUSE_PROM_METRICS_PATH}
processors:
# enriches the data with additional host information
# see https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor#resource-detection-processor
resourcedetection/system:
# add additional detectors if needed
detectors: ["system"]
system:
hostname_sources: ["os"]
exporters:
# export to SigNoz cloud
otlp/clickhouse:
endpoint: "${env:OTLP_DESTINATION_ENDPOINT}"
tls:
insecure: false
headers:
"signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}"
# export to local collector
# otlp/clickhouse:
# endpoint: "localhost:4317"
# tls:
# insecure: true
service:
pipelines:
metrics/clickhouse:
receivers: [prometheus/clickhouse]
# note: remove this processor if the collector is not running on the same host as the clickhouse instance
processors: [resourcedetection/system]
exporters: [otlp/clickhouse]
```
#### Set Environment Variables
Set the following environment variables in your otel-collector environment:
```bash
# Prometheus metrics endpoint on the clickhouse server reachable from the otel collector.
# You can examine clickhouse server configuration to find it. For details see https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#prometheus
export CLICKHOUSE_PROM_METRICS_ENDPOINT="clickhouse:9363"
# Prometheus metrics path on the clickhouse server
# You can examine clickhouse server configuration to find it. For details see https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#prometheus
export CLICKHOUSE_PROM_METRICS_PATH="/metrics"
# region specific SigNoz cloud ingestion endpoint
export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443"
# your SigNoz ingestion key
export SIGNOZ_INGESTION_KEY="signoz-ingestion-key"
```
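Optionally, before starting the collector you can confirm that the endpoint configured above serves Prometheus metrics. A minimal check, assuming `curl` is available on the collector host:
```bash
# Should print Prometheus-format metric lines if the endpoint and path are correct.
curl -s "http://${CLICKHOUSE_PROM_METRICS_ENDPOINT}${CLICKHOUSE_PROM_METRICS_PATH}" | head -n 5
```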
#### Use collector config file
Make the collector config file available to your otel collector, and use it by adding the following flag to the command that runs your collector:
```bash
--config clickhouse-metrics-collection-config.yaml
```
Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag.

View File

@ -0,0 +1,80 @@
### Collect Clickhouse Query Logs
You can configure collection of query logs from the system.query_log table in clickhouse by providing the required collector config to your collector.
#### Create collector config file
Save the following config for collecting clickhouse query logs in a file named `clickhouse-query-logs-collection-config.yaml`
```yaml
receivers:
clickhousesystemtablesreceiver/query_log:
dsn: "${env:CLICKHOUSE_MONITORING_DSN}"
cluster_name: "${env:CLICKHOUSE_CLUSTER_NAME}"
query_log_scrape_config:
scrape_interval_seconds: ${env:QUERY_LOG_SCRAPE_INTERVAL_SECONDS}
min_scrape_delay_seconds: ${env:QUERY_LOG_SCRAPE_DELAY_SECONDS}
exporters:
# export to SigNoz cloud
otlp/clickhouse-query-logs:
endpoint: "${env:OTLP_DESTINATION_ENDPOINT}"
tls:
insecure: false
headers:
"signoz-access-token": "${env:SIGNOZ_INGESTION_KEY}"
# export to local collector
# otlp/clickhouse-query-logs:
# endpoint: "localhost:4317"
# tls:
# insecure: true
service:
pipelines:
logs/clickhouse-query-logs:
receivers: [clickhousesystemtablesreceiver/query_log]
processors: []
exporters: [otlp/clickhouse-query-logs]
```
#### Set Environment Variables
Set the following environment variables in your otel-collector environment:
```bash
# DSN for connecting to clickhouse with the monitoring user
# Replace monitoring:<PASSWORD> with `username:password` for your monitoring user
# Note: The monitoring user must be able to issue select queries on the system.query_log table.
export CLICKHOUSE_MONITORING_DSN="tcp://monitoring:<PASSWORD>@clickhouse:9000/"
# If collecting query logs from a clustered deployment, specify a non-empty cluster name.
export CLICKHOUSE_CLUSTER_NAME=""
# Rows from the query_log table will be collected periodically based on this setting
export QUERY_LOG_SCRAPE_INTERVAL_SECONDS=20
# Must be configured to a value greater than the flush_interval_milliseconds setting for query_log.
# This setting can be found in the clickhouse server config
# For details see https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#query-log
# Setting a large enough value ensures all query logs for a particular time interval have been
# flushed before an attempt to collect them is made.
export QUERY_LOG_SCRAPE_DELAY_SECONDS=8
# region specific SigNoz cloud ingestion endpoint
export OTLP_DESTINATION_ENDPOINT="ingest.us.signoz.cloud:443"
# your SigNoz ingestion key
export SIGNOZ_INGESTION_KEY="signoz-ingestion-key"
```
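Optionally, you can verify that the monitoring user can read system.query_log before starting the collector. A minimal sketch, assuming `clickhouse-client` is installed and using the same host and credentials as the DSN above:
```bash
# A row count without an authentication or authorization error confirms the grant works.
clickhouse-client --host clickhouse --port 9000 \
  --user monitoring --password '<PASSWORD>' \
  --query "SELECT count() FROM system.query_log"
```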
#### Use collector config file
Make the collector config file available to your otel collector, and use it by adding the following flag to the command that runs your collector:
```bash
--config clickhouse-query-logs-collection-config.yaml
```
Note: the collector can use multiple config files, specified by multiple occurrences of the --config flag.

View File

@ -0,0 +1,42 @@
## Before You Begin
To configure metrics and logs collection for a Clickhouse server, you need the following.
### Ensure Clickhouse server is prepared for monitoring
- **Ensure that the Clickhouse server is running a supported version**
Clickhouse versions v23 and newer are supported.
You can use the following SQL statement to determine the server version:
```SQL
SELECT version();
```
- **If collecting metrics, ensure that Clickhouse is configured to export prometheus metrics**
If needed, please [configure Clickhouse to expose prometheus metrics](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#prometheus).
- **If collecting query_log, ensure that there is a clickhouse user with the required permissions**
To create a monitoring user for clickhouse, you can run:
```SQL
CREATE USER monitoring IDENTIFIED BY 'monitoring_password';
GRANT SELECT ON system.query_log TO monitoring;
-- If monitoring a clustered deployment, also grant the privilege for executing remote queries
GRANT ON CLUSTER 'cluster_name' REMOTE ON *.* TO 'monitoring';
```
### Ensure OTEL Collector is running and has access to the Clickhouse server
- **Ensure that an OTEL collector is running in your deployment environment**
If needed, please [install SigNoz OTEL Collector](https://signoz.io/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/)
If already installed, ensure that the collector version is v0.88.0 or newer.
If collecting logs from system.query_log table, ensure that the collector version is v0.88.22 or newer.
Also ensure that you can provide config files to the collector and that you can set environment variables and command line flags used for running it.
- **Ensure that the OTEL collector can access the Clickhouse server**
In order to collect metrics, the collector must be able to reach the clickhouse server and access the port on which prometheus metrics are being exposed.
In order to collect server logs, the collector must be able to read the Clickhouse server log file.
In order to collect logs from the query_log table, the collector must be able to reach the server and connect to it as a clickhouse user with the required permissions. The sketch below shows quick reachability checks for these requirements.
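A minimal sketch of these access checks, assuming standard shell tools (`test`, `curl`, `nc`) are available on the collector host and using the hostnames, paths, and ports from the examples in this integration:
```bash
# Server log file must be readable by the collector process (server log collection).
test -r /var/log/clickhouse-server/clickhouse-server.log && echo "log file readable"

# Prometheus metrics port must be reachable (metrics collection).
curl -s -o /dev/null -w "%{http_code}\n" "http://clickhouse:9363/metrics"

# Native protocol port must be reachable (query_log collection).
nc -z clickhouse 9000 && echo "native port reachable"
```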

View File

@ -0,0 +1,33 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50.6 50.6" style="enable-background:new 0 0 50.6 50.6;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="24" width="24" x="0" y="0">
</sliceSourceBounds>
</sfw>
</metadata>
<g>
<g>
<path class="st0" d="M0.6,0H5c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6H0.6C0.3,50.6,0,50.4,0,50V0.6C0,0.3,0.3,0,0.6,0z">
</path>
<path class="st0" d="M11.8,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C11.3,0.3,11.5,0,11.8,0z">
</path>
<path class="st0" d="M23.1,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C22.5,0.3,22.8,0,23.1,0z">
</path>
<path class="st0" d="M34.3,0h4.4c0.3,0,0.6,0.3,0.6,0.6V50c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V0.6
C33.7,0.3,34,0,34.3,0z">
</path>
<path class="st0" d="M45.6,19.7H50c0.3,0,0.6,0.3,0.6,0.6v10.1c0,0.3-0.3,0.6-0.6,0.6h-4.4c-0.3,0-0.6-0.3-0.6-0.6V20.3
C45,20,45.3,19.7,45.6,19.7z">
</path>
</g>
</g>
</svg>


View File

@ -0,0 +1,59 @@
{
"id": "clickhouse",
"title": "Clickhouse",
"description": "Monitor Clickhouse with metrics and logs",
"author": {
"name": "SigNoz",
"email": "integrations@signoz.io",
"homepage": "https://signoz.io"
},
"icon": "file://icon.svg",
"categories": [
"Database"
],
"overview": "file://overview.md",
"configuration": [
{
"title": "Prerequisites",
"instructions": "file://config/prerequisites.md"
},
{
"title": "Collect Metrics",
"instructions": "file://config/collect-metrics.md"
},
{
"title": "Collect Server Logs",
"instructions": "file://config/collect-logs.md"
},
{
"title": "Collect Query Logs",
"instructions": "file://config/collect-query-logs.md"
}
],
"assets": {
"logs": {
"pipelines": []
},
"dashboards": [
"file://assets/dashboards/overview.json"
],
"alerts": []
},
"connection_tests": {
"logs": {
"op": "AND",
"items": [
{
"key": {
"type": "tag",
"key": "source",
"dataType": "string"
},
"op": "=",
"value": "clickhouse"
}
]
}
},
"data_collected": "file://data-collected.json"
}

View File

@ -0,0 +1,7 @@
### Monitor Clickhouse with SigNoz
Collect key Clickhouse metrics and view them with an out-of-the-box dashboard.
Collect and parse Clickhouse logs to populate timestamp, severity, and other log attributes for better querying and aggregation.
Collect clickhouse query logs from the system.query_log table and view them in SigNoz.

View File

@ -1,6 +1,7 @@
package app
import (
"math"
"sort"
"strings"
@ -39,16 +40,25 @@ func applyMetricLimit(results []*v3.Result, queryRangeParams *v3.QueryRangeParam
}
}
// For graph type queries, sort based on GroupingSetsPoint
if result.Series[i].GroupingSetsPoint == nil || result.Series[j].GroupingSetsPoint == nil {
// Handle nil GroupingSetsPoint, if needed
// Here, we assume non-nil values are always less than nil values
return result.Series[i].GroupingSetsPoint != nil
ithSum, jthSum := 0.0, 0.0
for _, point := range result.Series[i].Points {
if math.IsNaN(point.Value) || math.IsInf(point.Value, 0) {
continue
}
ithSum += point.Value
}
for _, point := range result.Series[j].Points {
if math.IsNaN(point.Value) || math.IsInf(point.Value, 0) {
continue
}
jthSum += point.Value
}
if orderBy.Order == "asc" {
return result.Series[i].GroupingSetsPoint.Value < result.Series[j].GroupingSetsPoint.Value
return ithSum < jthSum
} else if orderBy.Order == "desc" {
return result.Series[i].GroupingSetsPoint.Value > result.Series[j].GroupingSetsPoint.Value
return ithSum > jthSum
}
} else {
// Sort based on Labels map

View File

@ -145,12 +145,13 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu
// check if the field is present in the fields map
if existingField, ok := fields[field.Key]; ok {
if existingField.IsColumn {
// don't update if type is not the same
if (field.Type == "" && field.DataType == "") ||
(field.Type == existingField.Type && field.DataType == existingField.DataType) ||
(field.Type == "" && field.DataType == existingField.DataType) ||
(field.DataType == "" && field.Type == existingField.Type) {
return existingField
}
field.Type = existingField.Type
field.DataType = existingField.DataType
return field
}
// enrich with default values if metadata is not found

View File

@ -342,6 +342,57 @@ var testEnrichParamsData = []struct {
},
},
},
{
Name: "Don't enrich if other keys are non empty and not same",
Params: v3.QueryRangeParamsV3{
CompositeQuery: &v3.CompositeQuery{
BuilderQueries: map[string]*v3.BuilderQuery{
"test": {
QueryName: "test",
Expression: "test",
DataSource: v3.DataSourceLogs,
AggregateAttribute: v3.AttributeKey{
Key: "test",
Type: v3.AttributeKeyTypeResource,
DataType: v3.AttributeKeyDataTypeInt64,
},
Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "test", Type: v3.AttributeKeyTypeTag}, Value: "test", Operator: "="},
{Key: v3.AttributeKey{Key: "test", DataType: v3.AttributeKeyDataTypeString}, Value: "test1", Operator: "="},
}},
},
},
},
},
Fields: map[string]v3.AttributeKey{
"test": {
Key: "test",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
},
Result: v3.QueryRangeParamsV3{
CompositeQuery: &v3.CompositeQuery{
BuilderQueries: map[string]*v3.BuilderQuery{
"test": {
QueryName: "test",
Expression: "test",
DataSource: v3.DataSourceLogs,
AggregateAttribute: v3.AttributeKey{
Key: "test",
Type: v3.AttributeKeyTypeResource,
DataType: v3.AttributeKeyDataTypeInt64,
},
Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "test", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "test", Operator: "="},
{Key: v3.AttributeKey{Key: "test", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "test1", Operator: "="},
}},
},
},
},
},
},
}
func TestEnrichParams(t *testing.T) {

View File

@ -20,6 +20,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/metrics"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@ -1038,6 +1039,10 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
}
}
if minStep := common.MinAllowedStepInterval(queryRangeParams.Start, queryRangeParams.End); query.StepInterval < minStep {
query.StepInterval = minStep
}
var timeShiftBy int64
if len(query.Functions) > 0 {
for idx := range query.Functions {

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/common"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
@ -1174,3 +1175,105 @@ func TestQueryRangeFormula(t *testing.T) {
})
}
}
func TestParseQueryRangeParamsStepIntervalAdjustment(t *testing.T) {
reqCases := []struct {
desc string
start int64
end int64
step int64
}{
{
desc: "30 minutes and 60 seconds step",
start: time.Now().Add(-30 * time.Minute).UnixMilli(),
end: time.Now().UnixMilli(),
step: 60, // no update
},
{
desc: "1 hour and 1 second step",
start: time.Now().Add(-time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 1, // gets updated
},
{
desc: "1 week and 1 minute step",
start: time.Now().Add(-7 * 24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 60, // gets updated
},
{
desc: "1 day and 1 hour step",
start: time.Now().Add(-24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 3600, // no update
},
{
desc: "1 day and 1 minute step",
start: time.Now().Add(-24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 60, // gets updated
},
{
desc: "1 day and 2 minutes step",
start: time.Now().Add(-24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 120, // gets updated
},
{
desc: "1 day and 5 minutes step",
start: time.Now().Add(-24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 300, // no update
},
{
desc: "1 week and 10 minutes step",
start: time.Now().Add(-7 * 24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 600, // gets updated
},
{
desc: "1 week and 45 minutes step",
start: time.Now().Add(-7 * 24 * time.Hour).UnixMilli(),
end: time.Now().UnixMilli(),
step: 2700, // no update
},
}
for _, tc := range reqCases {
t.Run(tc.desc, func(t *testing.T) {
queryRangeParams := &v3.QueryRangeParamsV3{
Start: tc.start,
End: tc.end,
Step: tc.step,
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeGraph,
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
DataSource: v3.DataSourceMetrics,
AggregateOperator: v3.AggregateOperatorSum,
AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"},
GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}},
Expression: "A",
StepInterval: tc.step,
},
},
},
Variables: map[string]interface{}{},
}
body := &bytes.Buffer{}
err := json.NewEncoder(body).Encode(queryRangeParams)
require.NoError(t, err)
req := httptest.NewRequest(http.MethodPost, "/api/v3/query_range", body)
p, apiErr := ParseQueryRangeParams(req)
if apiErr != nil && apiErr.Err != nil {
t.Fatalf("unexpected error %s", apiErr.Err)
}
require.True(t, p.CompositeQuery.BuilderQueries["A"].StepInterval >= common.MinAllowedStepInterval(p.Start, p.End))
})
}
}

View File

@ -525,7 +525,7 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
// return error if the number of series is more than one for value type panel
if params.CompositeQuery.PanelType == v3.PanelTypeValue {
if len(results) > 1 {
if len(results) > 1 && params.CompositeQuery.EnabledQueries() > 1 {
err = fmt.Errorf("there can be only one active query for value type panel")
} else if len(results) == 1 && len(results[0].Series) > 1 {
err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))

View File

@ -518,7 +518,7 @@ func (q *querier) QueryRange(ctx context.Context, params *v3.QueryRangeParamsV3,
// return error if the number of series is more than one for value type panel
if params.CompositeQuery.PanelType == v3.PanelTypeValue {
if len(results) > 1 {
if len(results) > 1 && params.CompositeQuery.EnabledQueries() > 1 {
err = fmt.Errorf("there can be only one active query for value type panel")
} else if len(results) == 1 && len(results[0].Series) > 1 {
err = fmt.Errorf("there can be only one result series for value type panel but got %d", len(results[0].Series))

View File

@ -4,6 +4,7 @@ import (
"math"
"time"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
@ -23,3 +24,10 @@ func PastDayRoundOff() int64 {
now := time.Now().UnixMilli()
return int64(math.Floor(float64(now)/float64(time.Hour.Milliseconds()*24))) * time.Hour.Milliseconds() * 24
}
// start and end are in milliseconds
func MinAllowedStepInterval(start, end int64) int64 {
step := (end - start) / constants.MaxAllowedPointsInTimeSeries / 1000
// return the nearest lower multiple of 60 (e.g. a 24h range gives 86400000/300/1000 = 288s, rounded down to 240s)
return step - step%60
}

View File

@ -25,6 +25,8 @@ var ConfigSignozIo = "https://config.signoz.io/api/v1"
var DEFAULT_TELEMETRY_ANONYMOUS = false
const MaxAllowedPointsInTimeSeries = 300
func IsTelemetryEnabled() bool {
if testing.Testing() {
return false

View File

@ -402,6 +402,31 @@ type CompositeQuery struct {
Unit string `json:"unit,omitempty"`
}
func (c *CompositeQuery) EnabledQueries() int {
count := 0
switch c.QueryType {
case QueryTypeBuilder:
for _, query := range c.BuilderQueries {
if !query.Disabled {
count++
}
}
case QueryTypeClickHouseSQL:
for _, query := range c.ClickHouseQueries {
if !query.Disabled {
count++
}
}
case QueryTypePromQL:
for _, query := range c.PromQueries {
if !query.Disabled {
count++
}
}
}
return count
}
func (c *CompositeQuery) Validate() error {
if c == nil {
return fmt.Errorf("composite query is required")

View File

@ -19,6 +19,7 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/converter"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
@ -469,7 +470,7 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) *v3.QueryRangeParamsV3 {
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
q.StepInterval = 60
q.StepInterval = int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60))
}
}
@ -501,13 +502,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
}
columnTypes := rows.ColumnTypes()
if err != nil {
return nil, err
}
columnNames := rows.Columns()
if err != nil {
return nil, err
}
vars := make([]interface{}, len(columnTypes))
for i := range columnTypes {
@ -648,7 +643,8 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
resultMap[labelHash] = sample
}
case OnAverage:
sample.Point.V = (existing.Point.V + sample.Point.V) / 2
sample.Point.Vs = append(existing.Point.Vs, sample.Point.V)
sample.Point.V = (existing.Point.V + sample.Point.V)
resultMap[labelHash] = sample
case InTotal:
sample.Point.V = (existing.Point.V + sample.Point.V)
@ -678,6 +674,13 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
}
if r.matchType() == OnAverage {
for hash, s := range resultMap {
s.Point.V = s.Point.V / float64(len(s.Point.Vs))
resultMap[hash] = s
}
}
for hash, s := range resultMap {
if r.matchType() == AllTheTimes && r.compareOp() == ValueIsEq {
for _, v := range s.Point.Vs {

View File

@ -266,6 +266,45 @@ func TestThresholdRuleCombinations(t *testing.T) {
matchType: "1", // Once
target: 0.0,
},
{
values: [][]interface{}{
{int32(2), "endpoint"},
{int32(3), "endpoint"},
{int32(2), "endpoint"},
{int32(4), "endpoint"},
{int32(2), "endpoint"},
},
expectAlert: true,
compareOp: "2", // Below
matchType: "3", // On Average
target: 3.0,
},
{
values: [][]interface{}{
{int32(4), "endpoint"},
{int32(7), "endpoint"},
{int32(5), "endpoint"},
{int32(2), "endpoint"},
{int32(9), "endpoint"},
},
expectAlert: false,
compareOp: "2", // Below
matchType: "3", // On Average
target: 3.0,
},
{
values: [][]interface{}{
{int32(4), "endpoint"},
{int32(7), "endpoint"},
{int32(5), "endpoint"},
{int32(2), "endpoint"},
{int32(9), "endpoint"},
},
expectAlert: true,
compareOp: "2", // Below
matchType: "3", // On Average
target: 6.0,
},
}
for idx, c := range cases {

View File

@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.21}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.22}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:0.88.21
image: signoz/signoz-otel-collector:0.88.22
container_name: signoz-otel-collector
command:
[