Mirror of https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz
Synced 2025-08-06 06:06:53 +08:00
Commit 07d126c669

.github/pull_request_template.md (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
+### Summary
+
+<!-- ✍️ A clear and concise description...-->
+
+#### Related Issues / PR's
+
+<!-- ✍️ Add the issues being resolved here and related PR's where applicable -->
+
+#### Screenshots
+
+NA
+
+<!-- ✍️ Add screenshots of before and after changes where applicable-->
+
+#### Affected Areas and Manually Tested Areas
+
+<!-- ✍️ Add details of blast radius and dev testing areas where applicable-->
.gitignore (vendored, 3 changes)
@@ -60,4 +60,5 @@ e2e/node_modules/
 e2e/test-results/
 e2e/playwright-report/
 e2e/blob-report/
 e2e/playwright/.cache/
+e2e/.auth
@@ -146,7 +146,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.34.3
+    image: signoz/query-service:0.34.4
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:0.34.3
+    image: signoz/frontend:0.34.4
     deploy:
       restart_policy:
         condition: on-failure
@@ -164,7 +164,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.34.3}
+    image: signoz/query-service:${DOCKER_TAG:-0.34.4}
     container_name: signoz-query-service
     command:
       [
@@ -203,7 +203,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.34.3}
+    image: signoz/frontend:${DOCKER_TAG:-0.34.4}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -30,4 +30,17 @@ export default defineConfig({
     baseURL:
       process.env.PLAYWRIGHT_TEST_BASE_URL || "https://stagingapp.signoz.io/",
   },
+
+  projects: [
+    { name: "setup", testMatch: /.*\.setup\.ts/ },
+    {
+      name: "chromium",
+      use: {
+        ...devices["Desktop Chrome"],
+        // Use prepared auth state.
+        storageState: ".auth/user.json",
+      },
+      dependencies: ["setup"],
+    },
+  ],
 });
@@ -4,6 +4,8 @@ import dotenv from "dotenv";

 dotenv.config();

+const authFile = ".auth/user.json";
+
 test("E2E Login Test", async ({ page }) => {
   await Promise.all([page.goto("/"), page.waitForRequest("**/version")]);

@@ -30,4 +32,6 @@ test("E2E Login Test", async ({ page }) => {
   await page.locator('button[data-attr="signup"]').click();

   await expect(page).toHaveURL(ROUTES.APPLICATION);
+
+  await page.context().storageState({ path: authFile });
 });
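Together, the Playwright config above and this login test implement Playwright's standard auth-reuse pattern: the `setup` project logs in once and saves the session to `.auth/user.json`, and every test in the `chromium` project starts from that storage state. A minimal sketch of a spec that relies on it (file name and locator are illustrative, not part of this commit):

    // reuse-auth.example.spec.ts — hypothetical spec in the "chromium" project
    import { test, expect } from "@playwright/test";

    // No login steps needed: the project-level storageState (".auth/user.json")
    // restores cookies/localStorage before this test runs.
    test("lands on an authenticated page directly", async ({ page }) => {
      await page.goto("/");
      await expect(page).not.toHaveURL(/login/); // assumes unauthenticated users get redirected
    });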
e2e/tests/contants.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
+export const SERVICE_TABLE_HEADERS = {
+  APPLICATION: "Applicaton",
+  P99LATENCY: "P99 latency (in ms)",
+  ERROR_RATE: "Error Rate (% of total)",
+  OPS_PER_SECOND: "Operations Per Second",
+};
+
+export const DATA_TEST_IDS = {
+  NEW_DASHBOARD_BTN: "create-new-dashboard",
+};
e2e/tests/navigation.spec.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
+import { test, expect } from "@playwright/test";
+import ROUTES from "../../frontend/src/constants/routes";
+import { DATA_TEST_IDS, SERVICE_TABLE_HEADERS } from "./contants";
+
+test("Basic Navigation Check across different resources", async ({ page }) => {
+  // route to services page and check if the page renders fine with BE contract
+  await Promise.all([
+    page.goto(ROUTES.APPLICATION),
+    page.waitForRequest("**/v1/services"),
+  ]);
+
+  const p99Latency = page.locator(
+    `th:has-text("${SERVICE_TABLE_HEADERS.P99LATENCY}")`
+  );
+
+  await expect(p99Latency).toBeVisible();
+
+  // route to the new trace explorer page and check if the page renders fine
+  await page.goto(ROUTES.TRACES_EXPLORER);
+
+  await page.waitForLoadState("networkidle");
+
+  const listViewTable = await page
+    .locator('div[role="presentation"]')
+    .isVisible();
+
+  expect(listViewTable).toBeTruthy();
+
+  // route to the dashboards page and check if the page renders fine
+  await Promise.all([
+    page.goto(ROUTES.ALL_DASHBOARD),
+    page.waitForRequest("**/v1/dashboards"),
+  ]);
+
+  const newDashboardBtn = await page
+    .locator(`data-testid=${DATA_TEST_IDS.NEW_DASHBOARD_BTN}`)
+    .isVisible();
+
+  expect(newDashboardBtn).toBeTruthy();
+});
@@ -20,5 +20,6 @@
   "variable_updated_successfully": "Variable updated successfully",
   "error_while_updating_variable": "Error while updating variable",
   "dashboard_has_been_updated": "Dashboard has been updated",
-  "do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?"
+  "do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?",
+  "delete_dashboard_success": "{{name}} dashboard deleted successfully"
 }
@@ -22,6 +22,7 @@
   "error_while_updating_variable": "Error while updating variable",
   "dashboard_has_been_updated": "Dashboard has been updated",
   "do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?",
   "locked_dashboard_delete_tooltip_admin_author": "Dashboard is locked. Please unlock the dashboard to enable delete.",
-  "locked_dashboard_delete_tooltip_editor": "Dashboard is locked. Please contact admin to delete the dashboard."
+  "locked_dashboard_delete_tooltip_editor": "Dashboard is locked. Please contact admin to delete the dashboard.",
+  "delete_dashboard_success": "{{name}} dashboard deleted successfully"
 }
@@ -32,7 +32,7 @@
   "processor_name_placeholder": "Name",
   "processor_regex_placeholder": "Regex",
   "processor_parsefrom_placeholder": "Parse From",
-  "processor_parseto_placeholder": "Parse From",
+  "processor_parseto_placeholder": "Parse To",
   "processor_onerror_placeholder": "on Error",
   "processor_pattern_placeholder": "Pattern",
   "processor_field_placeholder": "Field",
@@ -14,7 +14,7 @@ export const optionsUpdateState = (
   if (lhsHeight !== rhsHeight || lhsWidth !== rhsWidth) {
     state = 'update';
   }
-  if (Object.keys(lhs).length !== Object.keys(rhs).length) {
+  if (Object.keys(lhs)?.length !== Object.keys(rhs)?.length) {
     return 'create';
   }
   // eslint-disable-next-line no-restricted-syntax
@@ -31,12 +31,12 @@ export const dataMatch = (
   lhs: uPlot.AlignedData,
   rhs: uPlot.AlignedData,
 ): boolean => {
-  if (lhs.length !== rhs.length) {
+  if (lhs?.length !== rhs?.length) {
     return false;
   }
   return lhs.every((lhsOneSeries, seriesIdx) => {
     const rhsOneSeries = rhs[seriesIdx];
-    if (lhsOneSeries.length !== rhsOneSeries.length) {
+    if (lhsOneSeries?.length !== rhsOneSeries?.length) {
       return false;
     }
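These two helpers decide how a chart reacts to new props: `optionsUpdateState` distinguishes a resize ('update') from a structural change ('create'), and `dataMatch` checks whether the aligned data arrays agree in shape and values. A hedged sketch of the typical render-loop wiring (variable names illustrative; this mirrors the common uplot-react pattern rather than code from this commit):

    // chart: uPlot | undefined; prevOptions/prevData hold the last-rendered inputs
    const state = optionsUpdateState(prevOptions, nextOptions);

    if (!chart || state === 'create') {
      chart?.destroy();
      chart = new uPlot(nextOptions, nextData, target); // rebuild on structural change
    } else {
      if (state === 'update') {
        chart.setSize({ width: nextOptions.width, height: nextOptions.height });
      }
      if (!dataMatch(prevData, nextData)) {
        chart.setData(nextData); // same shape, new values: cheap in-place update
      }
    }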
@@ -26,4 +26,5 @@ export enum QueryParams {
   linesPerRow = 'linesPerRow',
   viewName = 'viewName',
   viewKey = 'viewKey',
+  expandedWidgetId = 'expandedWidgetId',
 }
@@ -30,6 +30,52 @@ const themeColors = {
     hemlock: '#66664D',
     vidaLoca: '#4D8000',
     rust: '#B33300',
+    red: '#FF0000', // Adding more colors, we need to get better colors from design team
+    blue: '#0000FF',
+    green: '#00FF00',
+    yellow: '#FFFF00',
+    purple: '#800080',
+    cyan: '#00FFFF',
+    magenta: '#FF00FF',
+    orange: '#FFA500',
+    pink: '#FFC0CB',
+    brown: '#A52A2A',
+    teal: '#008080',
+    lime: '#00FF00',
+    maroon: '#800000',
+    navy: '#000080',
+    aquamarine: '#7FFFD4',
+    gold: '#FFD700',
+    gray: '#808080',
+    skyBlue: '#87CEEB',
+    indigo: '#4B0082',
+    slateGray: '#708090',
+    chocolate: '#D2691E',
+    tomato: '#FF6347',
+    steelBlue: '#4682B4',
+    peru: '#CD853F',
+    darkOliveGreen: '#556B2F',
+    indianRed: '#CD5C5C',
+    mediumSlateBlue: '#7B68EE',
+    rosyBrown: '#BC8F8F',
+    darkSlateGray: '#2F4F4F',
+    mediumAquamarine: '#66CDAA',
+    lavender: '#E6E6FA',
+    thistle: '#D8BFD8',
+    salmon: '#FA8072',
+    darkSalmon: '#E9967A',
+    paleVioletRed: '#DB7093',
+    mediumPurple: '#9370DB',
+    darkOrchid: '#9932CC',
+    lawnGreen: '#7CFC00',
+    mediumSeaGreen: '#3CB371',
+    lightCoral: '#F08080',
+    darkSeaGreen: '#8FBC8F',
+    sandyBrown: '#F4A460',
+    darkKhaki: '#BDB76B',
+    cornflowerBlue: '#6495ED',
+    mediumVioletRed: '#C71585',
+    paleGreen: '#98FB98',
   },
   errorColor: '#d32f2f',
   royalGrey: '#888888',
@@ -22,7 +22,7 @@ import {
 import FormAlertChannels from 'container/FormAlertChannels';
 import { useNotifications } from 'hooks/useNotifications';
 import history from 'lib/history';
-import { useCallback, useState } from 'react';
+import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useParams } from 'react-router-dom';

@@ -57,6 +57,12 @@ function EditAlertChannels({
     setType(value as ChannelType);
   }, []);

+  useEffect(() => {
+    formInstance.setFieldsValue({
+      ...initialValue,
+    });
+  }, [formInstance, initialValue]);
+
   const prepareSlackRequest = useCallback(
     () => ({
       api_url: selectedConfig?.api_url || '',
@@ -4,6 +4,7 @@ import { Button, Input } from 'antd';
 import { CheckboxChangeEvent } from 'antd/es/checkbox';
 import { ResizeTable } from 'components/ResizeTable';
 import { useNotifications } from 'hooks/useNotifications';
+import { useDashboard } from 'providers/Dashboard/Dashboard';
 import { memo, useCallback, useState } from 'react';

 import { getGraphManagerTableColumns } from './TableRender/GraphManagerColumns';
@@ -29,6 +30,7 @@ function GraphManager({
   );

   const { notifications } = useNotifications();
+  const { isDashboardLocked } = useDashboard();

   const checkBoxOnChangeHandler = useCallback(
     (e: CheckboxChangeEvent, index: number): void => {
@@ -66,6 +68,7 @@ function GraphManager({
     graphVisibilityState: graphsVisibilityStates,
     labelClickedHandler,
     yAxisUnit,
+    isGraphDisabled: isDashboardLocked,
   });

   const filterHandler = useCallback(
@@ -9,6 +9,7 @@ function CustomCheckBox({
   index,
   graphVisibilityState = [],
   checkBoxOnChangeHandler,
+  disabled = false,
 }: CheckBoxProps): JSX.Element {
   const onChangeHandler = (e: CheckboxChangeEvent): void => {
     checkBoxOnChangeHandler(e, index);
@@ -28,7 +29,11 @@ function CustomCheckBox({
       },
     }}
   >
-    <Checkbox onChange={onChangeHandler} checked={isChecked} />
+    <Checkbox
+      onChange={onChangeHandler}
+      checked={isChecked}
+      disabled={disabled}
+    />
   </ConfigProvider>
 );
 }
@@ -5,12 +5,14 @@ import Label from './Label';

 export const getLabel = (
   labelClickedHandler: (labelIndex: number) => void,
+  disabled?: boolean,
 ): ColumnType<DataSetProps> => ({
   render: (label: string, record): JSX.Element => (
     <Label
       label={label}
       labelIndex={record.index}
       labelClickedHandler={labelClickedHandler}
+      disabled={disabled}
     />
   ),
 });
@@ -13,6 +13,7 @@ export const getGraphManagerTableColumns = ({
   graphVisibilityState,
   labelClickedHandler,
   yAxisUnit,
+  isGraphDisabled,
 }: GetGraphManagerTableColumnsProps): ColumnType<DataSetProps>[] => [
   {
     title: '',
@@ -25,6 +26,7 @@ export const getGraphManagerTableColumns = ({
         index={record.index}
         checkBoxOnChangeHandler={checkBoxOnChangeHandler}
         graphVisibilityState={graphVisibilityState}
+        disabled={isGraphDisabled}
       />
     ),
   },
@@ -33,7 +35,7 @@ export const getGraphManagerTableColumns = ({
     width: 300,
     dataIndex: ColumnsKeyAndDataIndex.Label,
     key: ColumnsKeyAndDataIndex.Label,
-    ...getLabel(labelClickedHandler),
+    ...getLabel(labelClickedHandler, isGraphDisabled),
   },
   {
     title: getGraphManagerTableHeaderTitle(
@@ -79,4 +81,5 @@ interface GetGraphManagerTableColumnsProps {
   labelClickedHandler: (labelIndex: number) => void;
   graphVisibilityState: boolean[];
   yAxisUnit?: string;
+  isGraphDisabled?: boolean;
 }
@@ -8,6 +8,7 @@ function Label({
   labelClickedHandler,
   labelIndex,
   label,
+  disabled = false,
 }: LabelProps): JSX.Element {
   const isDarkMode = useIsDarkMode();

@@ -19,6 +20,7 @@ function Label({
     <LabelContainer
       isDarkMode={isDarkMode}
       type="button"
+      disabled={disabled}
       onClick={onClickHandler}
     >
       {getAbbreviatedLabel(label)}
@@ -18,6 +18,10 @@
   border-radius: 3px;
 }

+.disabled {
+  height: calc(100% - 65px);
+}
+
 .graph-manager-container {
   height: calc(40% - 40px);
@@ -52,7 +52,7 @@ function FullView({

   const [chartOptions, setChartOptions] = useState<uPlot.Options>();

-  const { selectedDashboard } = useDashboard();
+  const { selectedDashboard, isDashboardLocked } = useDashboard();

   const getSelectedTime = useCallback(
     () =>
@@ -155,7 +155,12 @@ function FullView({
       )}
     </div>

-    <div className="graph-container" ref={fullViewRef}>
+    <div
+      className={
+        isDashboardLocked ? 'graph-container disabled' : 'graph-container'
+      }
+      ref={fullViewRef}
+    >
       {chartOptions && (
         <GraphContainer
           style={{ height: '90%' }}
@@ -178,7 +183,7 @@ function FullView({
       )}
     </div>

-    {canModifyChart && chartOptions && (
+    {canModifyChart && chartOptions && !isDashboardLocked && (
       <GraphManager
         data={chartData}
         name={name}
@@ -31,9 +31,12 @@ export const GraphContainer = styled.div<GraphContainerProps>`
     isGraphLegendToggleAvailable ? '50%' : '100%'};
 `;

-export const LabelContainer = styled.button<{ isDarkMode?: boolean }>`
+export const LabelContainer = styled.button<{
+  isDarkMode?: boolean;
+  disabled?: boolean;
+}>`
   max-width: 18.75rem;
-  cursor: pointer;
+  cursor: ${(props): string => (props.disabled ? 'no-drop' : 'pointer')};
   border: none;
   background-color: transparent;
   color: ${(props): string =>
@@ -42,6 +42,7 @@ export interface LabelProps {
   labelClickedHandler: (labelIndex: number) => void;
   labelIndex: number;
   label: string;
+  disabled?: boolean;
 }

 export interface FullViewProps {
@@ -74,6 +75,7 @@ export interface CheckBoxProps {
   index: number;
   graphVisibilityState: boolean[];
   checkBoxOnChangeHandler: (e: CheckboxChangeEvent, index: number) => void;
+  disabled?: boolean;
 }

 export interface SaveLegendEntriesToLocalStoreProps {
@@ -1,9 +1,11 @@
 import { Skeleton, Typography } from 'antd';
 import { ToggleGraphProps } from 'components/Graph/types';
 import { SOMETHING_WENT_WRONG } from 'constants/api';
+import { QueryParams } from 'constants/query';
 import GridPanelSwitch from 'container/GridPanelSwitch';
 import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
 import { useNotifications } from 'hooks/useNotifications';
+import useUrlQuery from 'hooks/useUrlQuery';
 import createQueryParams from 'lib/createQueryParams';
 import history from 'lib/history';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
@@ -43,11 +45,14 @@ function WidgetGraphComponent({
   onDragSelect,
 }: WidgetGraphComponentProps): JSX.Element {
   const [deleteModal, setDeleteModal] = useState(false);
   const [modal, setModal] = useState<boolean>(false);
   const [hovered, setHovered] = useState(false);
   const { notifications } = useNotifications();
   const { pathname } = useLocation();

+  const params = useUrlQuery();
+
+  const isFullViewOpen = params.get(QueryParams.expandedWidgetId) === widget.id;

   const lineChartRef = useRef<ToggleGraphProps>();
   const graphRef = useRef<HTMLDivElement>(null);

@@ -175,7 +180,14 @@ function WidgetGraphComponent({
   };

   const handleOnView = (): void => {
     onToggleModal(setModal);
+    const queryParams = {
+      [QueryParams.expandedWidgetId]: widget.id,
+    };
+
+    history.push({
+      pathname,
+      search: createQueryParams(queryParams),
+    });
   };

   const handleOnDelete = (): void => {
@@ -187,7 +199,10 @@ function WidgetGraphComponent({
   };

   const onToggleModelHandler = (): void => {
     onToggleModal(setModal);
+    history.push({
+      pathname,
+      search: createQueryParams({}),
+    });
   };

   if (queryResponse.isLoading || queryResponse.status === 'idle') {
@@ -236,7 +251,7 @@ function WidgetGraphComponent({
     title={widget?.title || 'View'}
     footer={[]}
     centered
-    open={modal}
+    open={isFullViewOpen}
     onCancel={onToggleModelHandler}
     width="85%"
     destroyOnClose
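The net effect of this change is that the widget "View" modal becomes deep-linkable: its open state is derived from the `expandedWidgetId` query param rather than local component state, so reloading or sharing the URL re-opens the same widget. A small sketch of the round-trip (the widget id is illustrative):

    // Opening full view pushes ?expandedWidgetId=<widget.id> onto the current path,
    // and the modal's `open` prop is recomputed from the URL on every render.
    const params = new URLSearchParams(window.location.search);
    const isFullViewOpen = params.get('expandedWidgetId') === 'widget-42'; // hypothetical id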
@@ -9,7 +9,8 @@ import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
 import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
 import isEmpty from 'lodash-es/isEmpty';
 import _noop from 'lodash-es/noop';
-import { memo, useCallback, useMemo, useRef, useState } from 'react';
+import { useDashboard } from 'providers/Dashboard/Dashboard';
+import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useDispatch, useSelector } from 'react-redux';
 import { UpdateTimeInterval } from 'store/actions';
 import { AppState } from 'store/reducers';
@@ -28,10 +29,11 @@ function GridCardGraph({
   isQueryEnabled,
   threshold,
   variables,
-  filterNaN,
+  fillSpans = false,
 }: GridCardGraphProps): JSX.Element {
   const dispatch = useDispatch();
   const [errorMessage, setErrorMessage] = useState<string>();
+  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();

   const onDragSelect = useCallback(
     (start: number, end: number): void => {
@@ -49,6 +51,17 @@ function GridCardGraph({

   const isVisible = useIntersectionObserver(graphRef, undefined, true);

+  useEffect(() => {
+    if (toScrollWidgetId === widget.id) {
+      graphRef.current?.scrollIntoView({
+        behavior: 'smooth',
+        block: 'center',
+      });
+      graphRef.current?.focus();
+      setToScrollWidgetId('');
+    }
+  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
+
   const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
     AppState,
     GlobalReducer
@@ -90,11 +103,7 @@ function GridCardGraph({

   const containerDimensions = useResizeObserver(graphRef);

-  const chartData = getUPlotChartData(
-    queryResponse?.data?.payload,
-    undefined,
-    filterNaN,
-  );
+  const chartData = getUPlotChartData(queryResponse?.data?.payload, fillSpans);

   const isDarkMode = useIsDarkMode();
@@ -39,7 +39,7 @@ export interface GridCardGraphProps {
   headerMenuList?: WidgetGraphComponentProps['headerMenuList'];
   isQueryEnabled: boolean;
   variables?: Dashboard['data']['variables'];
-  filterNaN?: boolean;
+  fillSpans?: boolean;
 }

 export interface GetGraphVisibilityStateOnLegendClickProps {
@@ -109,39 +109,37 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {

   return (
     <>
-      {!isDashboardLocked && (
-        <ButtonContainer>
-          <Button
-            loading={updateDashboardMutation.isLoading}
-            onClick={handle.enter}
-            icon={<FullscreenIcon size={16} />}
-            disabled={updateDashboardMutation.isLoading}
-          >
-            {t('dashboard:full_view')}
-          </Button>
-
-          {saveLayoutPermission && (
-            <Button
-              loading={updateDashboardMutation.isLoading}
-              onClick={onSaveHandler}
-              icon={<SaveFilled />}
-              disabled={updateDashboardMutation.isLoading}
-            >
-              {t('dashboard:save_layout')}
-            </Button>
-          )}
-
-          {addPanelPermission && (
-            <Button
-              onClick={onAddPanelHandler}
-              icon={<PlusOutlined />}
-              data-testid="add-panel"
-            >
-              {t('dashboard:add_panel')}
-            </Button>
-          )}
-        </ButtonContainer>
-      )}
+      <ButtonContainer>
+        <Button
+          loading={updateDashboardMutation.isLoading}
+          onClick={handle.enter}
+          icon={<FullscreenIcon size={16} />}
+          disabled={updateDashboardMutation.isLoading}
+        >
+          {t('dashboard:full_view')}
+        </Button>
+
+        {!isDashboardLocked && saveLayoutPermission && (
+          <Button
+            loading={updateDashboardMutation.isLoading}
+            onClick={onSaveHandler}
+            icon={<SaveFilled />}
+            disabled={updateDashboardMutation.isLoading}
+          >
+            {t('dashboard:save_layout')}
+          </Button>
+        )}
+
+        {!isDashboardLocked && addPanelPermission && (
+          <Button
+            onClick={onAddPanelHandler}
+            icon={<PlusOutlined />}
+            data-testid="add-panel"
+          >
+            {t('dashboard:add_panel')}
+          </Button>
+        )}
+      </ButtonContainer>

       <FullScreen handle={handle} className="fullscreen-grid-container">
         <ReactGridLayout
@@ -167,7 +165,7 @@ function GraphLayout({ onAddPanelHandler }: GraphLayoutProps): JSX.Element {
           className={isDashboardLocked ? '' : 'enable-resize'}
           isDarkMode={isDarkMode}
           key={id}
-          data-grid={layout}
+          data-grid={JSON.stringify(currentWidget)}
         >
           <Card
             className="grid-item"
@@ -41,7 +41,7 @@ function GridTableComponent({
     const idx = thresholds.findIndex(
       (t) => t.thresholdTableOptions === e.title,
     );
-    if (idx !== -1) {
+    if (threshold && idx !== -1) {
       return (
         <div
           style={
@@ -2,6 +2,7 @@ import { DeleteOutlined, ExclamationCircleOutlined } from '@ant-design/icons';
 import { Modal, Tooltip, Typography } from 'antd';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import { useDeleteDashboard } from 'hooks/dashboard/useDeleteDashboard';
+import { useNotifications } from 'hooks/useNotifications';
 import { useCallback } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useQueryClient } from 'react-query';
@@ -32,6 +33,8 @@ function DeleteButton({

   const queryClient = useQueryClient();

+  const { notifications } = useNotifications();
+
   const { t } = useTranslation(['dashboard']);

   const deleteDashboardMutation = useDeleteDashboard(id);
@@ -49,6 +52,11 @@ function DeleteButton({
     onOk() {
       deleteDashboardMutation.mutateAsync(undefined, {
         onSuccess: () => {
+          notifications.success({
+            message: t('dashboard:delete_dashboard_success', {
+              name,
+            }),
+          });
           queryClient.invalidateQueries([REACT_QUERY_KEY.GET_ALL_DASHBOARDS]);
         },
       });
@@ -57,7 +65,7 @@ function DeleteButton({
     okButtonProps: { danger: true },
     centered: true,
   });
-  }, [modal, name, deleteDashboardMutation, queryClient]);
+  }, [modal, name, deleteDashboardMutation, notifications, t, queryClient]);

   const getDeleteTooltipContent = (): string => {
     if (isLocked) {
@@ -1,13 +1,5 @@
 import { PlusOutlined } from '@ant-design/icons';
-import {
-  Card,
-  Col,
-  Dropdown,
-  Input,
-  MenuProps,
-  Row,
-  TableColumnProps,
-} from 'antd';
+import { Card, Col, Dropdown, Input, Row, TableColumnProps } from 'antd';
 import { ItemType } from 'antd/es/menu/hooks/useItems';
 import createDashboard from 'api/dashboard/create';
 import { AxiosError } from 'axios';
@@ -30,7 +22,6 @@ import { generatePath } from 'react-router-dom';
 import { AppState } from 'store/reducers';
 import { Dashboard } from 'types/api/dashboard/getAll';
 import AppReducer from 'types/reducer/app';
-import { popupContainer } from 'utils/selectPopupContainer';

 import DateComponent from '../../components/ResizeTable/TableComponent/DateComponent';
 import ImportJSON from './ImportJSON';
@@ -38,9 +29,9 @@ import { ButtonContainer, NewDashboardButton, TableContainer } from './styles';
 import DeleteButton from './TableComponents/DeleteButton';
 import Name from './TableComponents/Name';

-function ListOfAllDashboard(): JSX.Element {
-  const { Search } = Input;
+const { Search } = Input;

+function ListOfAllDashboard(): JSX.Element {
   const {
     data: dashboardListResponse = [],
     isLoading: isDashboardListLoading,
@@ -49,8 +40,8 @@ function ListOfAllDashboard(): JSX.Element {

   const { role } = useSelector<AppState, AppReducer>((state) => state.app);

-  const [action, createNewDashboard, newDashboard] = useComponentPermission(
-    ['action', 'create_new_dashboards', 'new_dashboard'],
+  const [action, createNewDashboard] = useComponentPermission(
+    ['action', 'create_new_dashboards'],
     role,
   );

@@ -75,9 +66,7 @@ function ListOfAllDashboard(): JSX.Element {
   };

   useEffect(() => {
-    if (dashboardListResponse.length) {
-      sortDashboardsByCreatedAt(dashboardListResponse);
-    }
+    sortDashboardsByCreatedAt(dashboardListResponse);
   }, [dashboardListResponse]);

   const [newDashboardState, setNewDashboardState] = useState({
@@ -234,9 +223,22 @@ function ListOfAllDashboard(): JSX.Element {
   };

   const getMenuItems = useMemo(() => {
-    const menuItems: ItemType[] = [];
+    const menuItems: ItemType[] = [
+      {
+        key: t('import_json').toString(),
+        label: t('import_json'),
+        onClick: (): void => onModalHandler(false),
+      },
+      {
+        key: t('import_grafana_json').toString(),
+        label: t('import_grafana_json'),
+        onClick: (): void => onModalHandler(true),
+        disabled: true,
+      },
+    ];

     if (createNewDashboard) {
-      menuItems.push({
+      menuItems.unshift({
         key: t('create_dashboard').toString(),
         label: t('create_dashboard'),
         disabled: isDashboardListLoading,
@@ -244,29 +246,9 @@ function ListOfAllDashboard(): JSX.Element {
       });
     }

-    menuItems.push({
-      key: t('import_json').toString(),
-      label: t('import_json'),
-      onClick: (): void => onModalHandler(false),
-    });
-
-    menuItems.push({
-      key: t('import_grafana_json').toString(),
-      label: t('import_grafana_json'),
-      onClick: (): void => onModalHandler(true),
-      disabled: true,
-    });
-
     return menuItems;
   }, [createNewDashboard, isDashboardListLoading, onNewDashboardHandler, t]);

-  const menu: MenuProps = useMemo(
-    () => ({
-      items: getMenuItems,
-    }),
-    [getMenuItems],
-  );
-
   const searchArrayOfObjects = (searchValue: string): any[] => {
     // Convert the searchValue to lowercase for case-insensitive search
     const searchValueLowerCase = searchValue.toLowerCase();
@@ -318,35 +300,31 @@ function ListOfAllDashboard(): JSX.Element {
             url: 'https://signoz.io/docs/userguide/dashboards',
           }}
         />
-        {newDashboard && (
-          <Dropdown
-            getPopupContainer={popupContainer}
-            disabled={isDashboardListLoading}
-            trigger={['click']}
-            menu={menu}
-          >
-            <NewDashboardButton
-              icon={<PlusOutlined />}
-              type="primary"
-              data-testid="create-new-dashboard"
-              loading={newDashboardState.loading}
-              danger={newDashboardState.error}
-            >
-              {getText()}
-            </NewDashboardButton>
-          </Dropdown>
-        )}
       </ButtonContainer>
+
+      <Dropdown
+        menu={{ items: getMenuItems }}
+        disabled={isDashboardListLoading}
+        placement="bottomRight"
+      >
+        <NewDashboardButton
+          icon={<PlusOutlined />}
+          type="primary"
+          data-testid="create-new-dashboard"
+          loading={newDashboardState.loading}
+          danger={newDashboardState.error}
+        >
+          {getText()}
+        </NewDashboardButton>
+      </Dropdown>
     </Col>
   </Row>
 ),
 [
   Search,
   isDashboardListLoading,
   handleSearch,
   isFilteringDashboards,
-  newDashboard,
-  menu,
+  getMenuItems,
   newDashboardState.loading,
   newDashboardState.error,
   getText,
@@ -107,7 +107,7 @@ function DBCall(): JSX.Element {
   <Card data-testid="database_call_rps">
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans={false}
         name="database_call_rps"
         widget={databaseCallsRPSWidget}
         onClickHandler={(xValue, yValue, mouseX, mouseY): void => {
@@ -141,7 +141,7 @@ function DBCall(): JSX.Element {
   <Card data-testid="database_call_avg_duration">
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans
         name="database_call_avg_duration"
         widget={databaseCallsAverageDurationWidget}
         headerMenuList={MENU_ITEMS}
@@ -148,7 +148,7 @@ function External(): JSX.Element {
   <Card data-testid="external_call_error_percentage">
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans={false}
        headerMenuList={MENU_ITEMS}
        name="external_call_error_percentage"
        widget={externalCallErrorWidget}
@@ -184,7 +184,7 @@ function External(): JSX.Element {
   <Card data-testid="external_call_duration">
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans
        name="external_call_duration"
        headerMenuList={MENU_ITEMS}
        widget={externalCallDurationWidget}
@@ -221,7 +221,7 @@ function External(): JSX.Element {
   <Card data-testid="external_call_rps_by_address">
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans
        name="external_call_rps_by_address"
        widget={externalCallRPSWidget}
        headerMenuList={MENU_ITEMS}
@@ -260,7 +260,7 @@ function External(): JSX.Element {
        name="external_call_duration_by_address"
        widget={externalCallDurationAddressWidget}
        headerMenuList={MENU_ITEMS}
-        filterNaN
+        fillSpans
        onClickHandler={(xValue, yValue, mouseX, mouseY): void => {
          onGraphClickHandler(setSelectedTimeStamp)(
            xValue,
@@ -84,7 +84,7 @@ function ApDexMetrics({
   return (
     <Graph
       name="apdex"
-      filterNaN
+      fillSpans={false}
       widget={apDexMetricsWidget}
       onDragSelect={onDragSelect}
       onClickHandler={handleGraphClick('ApDex')}

@@ -88,7 +88,7 @@ function ServiceOverview({
       widget={latencyWidget}
       onClickHandler={handleGraphClick('Service')}
       isQueryEnabled={isQueryEnabled}
-      filterNaN
+      fillSpans={false}
     />
   </GraphContainer>
 </Card>

@@ -27,7 +27,7 @@ function TopLevelOperation({
   ) : (
     <GraphContainer>
       <Graph
-        filterNaN
+        fillSpans={false}
         name={name}
         widget={widget}
         onClickHandler={handleGraphClick(opName)}
@@ -21,7 +21,7 @@ import { useCallback, useMemo, useState } from 'react';
 import { useSelector } from 'react-redux';
 import { generatePath, useLocation, useParams } from 'react-router-dom';
 import { AppState } from 'store/reducers';
-import { Widgets } from 'types/api/dashboard/getAll';
+import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
 import { EQueryType } from 'types/common/dashboard';
 import { DataSource } from 'types/common/queryBuilder';
 import AppReducer from 'types/reducer/app';
@@ -41,7 +41,11 @@ import {
 import { NewWidgetProps } from './types';

 function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
-  const { selectedDashboard } = useDashboard();
+  const {
+    selectedDashboard,
+    setSelectedDashboard,
+    setToScrollWidgetId,
+  } = useDashboard();

   const { currentQuery } = useQueryBuilder();

@@ -104,8 +108,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
     enum: selectedWidget?.timePreferance || 'GLOBAL_TIME',
   });

-  const { notifications } = useNotifications();
-
   const updateDashboardMutation = useUpdateDashboard();

   const { afterWidgets, preWidgets } = useMemo(() => {
@@ -135,49 +137,54 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
     return { selectedWidget, preWidgets, afterWidgets };
   }, [selectedDashboard, query]);

+  const { notifications } = useNotifications();
+
   const onClickSaveHandler = useCallback(() => {
     if (!selectedDashboard) {
       return;
     }

-    updateDashboardMutation.mutateAsync(
-      {
-        uuid: selectedDashboard.uuid,
-        data: {
-          ...selectedDashboard.data,
-          widgets: [
-            ...preWidgets,
-            {
-              ...(selectedWidget || ({} as Widgets)),
-              description,
-              timePreferance: selectedTime.enum,
-              isStacked: stacked,
-              opacity,
-              nullZeroValues: selectedNullZeroValue,
-              title,
-              yAxisUnit,
-              panelTypes: graphType,
-              thresholds,
-            },
-            ...afterWidgets,
-          ],
-        },
-      },
-      {
-        onSuccess: () => {
-          featureResponse.refetch();
-          history.push(generatePath(ROUTES.DASHBOARD, { dashboardId }));
-        },
-        onError: () => {
-          notifications.error({
-            message: SOMETHING_WENT_WRONG,
-          });
-        },
-      },
-    );
+    const dashboard: Dashboard = {
+      ...selectedDashboard,
+      uuid: selectedDashboard.uuid,
+      data: {
+        ...selectedDashboard.data,
+        widgets: [
+          ...preWidgets,
+          {
+            ...(selectedWidget || ({} as Widgets)),
+            description,
+            timePreferance: selectedTime.enum,
+            isStacked: stacked,
+            opacity,
+            nullZeroValues: selectedNullZeroValue,
+            title,
+            yAxisUnit,
+            panelTypes: graphType,
+            thresholds,
+          },
+          ...afterWidgets,
+        ],
+      },
+    };
+
+    updateDashboardMutation.mutateAsync(dashboard, {
+      onSuccess: () => {
+        setSelectedDashboard(dashboard);
+        setToScrollWidgetId(selectedWidget?.id || '');
+        featureResponse.refetch();
+        history.push({
+          pathname: generatePath(ROUTES.DASHBOARD, { dashboardId }),
+        });
+      },
+      onError: () => {
+        notifications.error({
+          message: SOMETHING_WENT_WRONG,
+        });
+      },
+    });
   }, [
     selectedDashboard,
-    updateDashboardMutation,
     preWidgets,
     selectedWidget,
     description,
@@ -190,6 +197,9 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
     graphType,
     thresholds,
     afterWidgets,
+    updateDashboardMutation,
+    setSelectedDashboard,
+    setToScrollWidgetId,
     featureResponse,
     dashboardId,
     notifications,
@@ -21,7 +21,7 @@ function LogsList({ logs }: LogsListProps): JSX.Element {
   {logs.map((log) => (
     <div key={log.id} className="logs-preview-list-item">
       <div className="logs-preview-list-item-timestamp">
-        {dayjs(String(log.timestamp)).format('MMM DD HH:mm:ss.SSS')}
+        {dayjs(log.timestamp).format('MMM DD HH:mm:ss.SSS')}
       </div>
       <div className="logs-preview-list-item-body">{log.body}</div>
       <div
@@ -61,7 +61,7 @@ export function QueryTable({
   <ResizeTable
     columns={tableColumns}
     tableLayout="fixed"
-    dataSource={dataSource}
+    dataSource={newDataSource}
     scroll={{ x: true }}
     // eslint-disable-next-line react/jsx-props-no-spreading
     {...props}
@@ -37,7 +37,6 @@ if (container) {
         </QueryClientProvider>
       </ThemeProvider>
     </HelmetProvider>
-    ,
     </ErrorBoundary>,
   );
 }
@@ -1,4 +1,6 @@
-const createQueryParams = (params: { [x: string]: string | number }): string =>
+const createQueryParams = (params: {
+  [x: string]: string | number | undefined;
+}): string =>
   Object.keys(params)
     .map(
       (k) => `${encodeURIComponent(k)}=${encodeURIComponent(String(params[k]))}`,
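A quick usage sketch of the widened signature (values hypothetical). Note that `String(params[k])` serializes `undefined` to the literal string "undefined", so callers that care should drop such keys before calling:

    const search = createQueryParams({
      expandedWidgetId: 'widget-42', // string
      page: 3,                       // number
      viewKey: undefined,            // now accepted by the type...
    });
    // ...but produces "viewKey=undefined" in the output string.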
@@ -44,136 +44,142 @@ export const getUPlotChartOptions = ({
   setGraphsVisibilityStates,
   thresholds,
   fillSpans,
-}: GetUPlotChartOptions): uPlot.Options => ({
+}: GetUPlotChartOptions): uPlot.Options => {
+  // eslint-disable-next-line sonarjs/prefer-immediate-return
+  const chartOptions = {
   id,
   width: dimensions.width,
   height: dimensions.height - 45,
   // tzDate: (ts) => uPlot.tzDate(new Date(ts * 1e3), ''), // Pass timezone for 2nd param
   legend: {
     show: true,
     live: false,
   },
   focus: {
     alpha: 0.3,
   },
   cursor: {
+    lock: false,
+    focus: {
+      prox: 1e6,
+      bias: 1,
+    },
     points: {
       size: (u, seriesIdx): number => u.series[seriesIdx].points.size * 2.5,
       width: (u, seriesIdx, size): number => size / 4,
       stroke: (u, seriesIdx): string =>
         `${u.series[seriesIdx].points.stroke(u, seriesIdx)}90`,
       fill: (): string => '#fff',
     },
   },
   padding: [16, 16, 16, 16],
   scales: {
     x: {
       time: true,
       auto: true, // Automatically adjust scale range
     },
     y: {
       auto: true,
     },
   },
   plugins: [
     tooltipPlugin(apiResponse, yAxisUnit, fillSpans),
     onClickPlugin({
       onClick: onClickHandler,
     }),
   ],
   hooks: {
     draw: [
       (u): void => {
         thresholds?.forEach((threshold) => {
           if (threshold.thresholdValue !== undefined) {
             const { ctx } = u;
             ctx.save();

             const yPos = u.valToPos(
               convertValue(
                 threshold.thresholdValue,
                 threshold.thresholdUnit,
                 yAxisUnit,
               ),
               'y',
               true,
             );

             ctx.strokeStyle = threshold.thresholdColor || 'red';
             ctx.lineWidth = 2;
             ctx.setLineDash([10, 5]);

             ctx.beginPath();

             const plotLeft = u.bbox.left; // left edge of the plot area
             const plotRight = plotLeft + u.bbox.width; // right edge of the plot area

             ctx.moveTo(plotLeft, yPos);
             ctx.lineTo(plotRight, yPos);

             ctx.stroke();

             // Text configuration
             if (threshold.thresholdLabel) {
               const text = threshold.thresholdLabel;
               const textX = plotRight - ctx.measureText(text).width - 20;
               const textY = yPos - 15;
               ctx.fillStyle = threshold.thresholdColor || 'red';
               ctx.fillText(text, textX, textY);
             }

             ctx.restore();
           }
         });
       },
     ],
     setSelect: [
       (self): void => {
         const selection = self.select;
         if (selection) {
           const startTime = self.posToVal(selection.left, 'x');
           const endTime = self.posToVal(selection.left + selection.width, 'x');

           const diff = endTime - startTime;

           if (typeof onDragSelect === 'function' && diff > 0) {
             onDragSelect(startTime * 1000, endTime * 1000);
           }
         }
       },
     ],
     ready: [
       (self): void => {
         const legend = self.root.querySelector('.u-legend');
         if (legend) {
           const seriesEls = legend.querySelectorAll('.u-label');
           const seriesArray = Array.from(seriesEls);
           seriesArray.forEach((seriesEl, index) => {
             seriesEl.addEventListener('click', () => {
               if (graphsVisibilityStates) {
                 setGraphsVisibilityStates?.((prev) => {
                   const newGraphVisibilityStates = [...prev];
                   newGraphVisibilityStates[index + 1] = !newGraphVisibilityStates[
                     index + 1
                   ];
                   return newGraphVisibilityStates;
                 });
               }
             });
           });
         }
       },
     ],
   },
   series: getSeries(
     apiResponse,
     apiResponse?.data.result,
     graphsVisibilityStates,
     fillSpans,
   ),
   axes: getAxes(isDarkMode, yAxisUnit),
-});
+  };
+
+  return chartOptions;
+};
@@ -9,34 +9,88 @@ import { placement } from '../placement';

 dayjs.extend(customParseFormat);

-const createDivsFromArray = (
+interface UplotTooltipDataProps {
+  show: boolean;
+  color: string;
+  label: string;
+  focus: boolean;
+  value: number;
+  tooltipValue: string;
+  textContent: string;
+}
+
+const generateTooltipContent = (
   seriesList: any[],
   data: any[],
   idx: number,
   yAxisUnit?: string,
   series?: uPlot.Options['series'],
+  fillSpans?: boolean,
   // eslint-disable-next-line sonarjs/cognitive-complexity
 ): HTMLElement => {
   const container = document.createElement('div');
   container.classList.add('tooltip-container');

+  let tooltipTitle = '';
+  const formattedData: Record<string, UplotTooltipDataProps> = {};
+
+  function sortTooltipContentBasedOnValue(
+    tooltipDataObj: Record<string, UplotTooltipDataProps>,
+  ): Record<string, UplotTooltipDataProps> {
+    const entries = Object.entries(tooltipDataObj);
+    entries.sort((a, b) => b[1].value - a[1].value);
+    return Object.fromEntries(entries);
+  }
+
   if (Array.isArray(series) && series.length > 0) {
     series.forEach((item, index) => {
-      const div = document.createElement('div');
-      div.classList.add('tooltip-content-row');
-
       if (index === 0) {
-        const formattedDate = dayjs(data[0][idx] * 1000).format(
-          'MMM DD YYYY HH:mm:ss',
-        );
-
-        div.textContent = formattedDate;
-        div.classList.add('tooltip-content-header');
-      } else if (item.show) {
+        tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY HH:mm:ss');
+      } else if (fillSpans ? item.show : item.show && data[index][idx]) {
         const { metric = {}, queryName = '', legend = '' } =
           seriesList[index - 1] || {};

+        const label = getLabelName(metric, queryName || '', legend || '');
+
+        const value = data[index][idx] || 0;
+        const tooltipValue = getToolTipValue(value, yAxisUnit);
+
+        const dataObj = {
+          show: item.show || false,
+          color: colors[(index - 1) % colors.length],
+          label,
+          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+          // @ts-ignore
+          focus: item?._focus || false,
+          value,
+          tooltipValue,
+          textContent: `${label} : ${tooltipValue}`,
+        };
+
+        formattedData[label] = dataObj;
+      }
+    });
+  }
+
+  const sortedData: Record<
+    string,
+    UplotTooltipDataProps
+  > = sortTooltipContentBasedOnValue(formattedData);
+
+  const div = document.createElement('div');
+  div.classList.add('tooltip-content-row');
+  div.textContent = tooltipTitle;
+  div.classList.add('tooltip-content-header');
+  container.appendChild(div);
+
+  const sortedKeys = Object.keys(sortedData);
+
+  if (Array.isArray(sortedKeys) && sortedKeys.length > 0) {
+    sortedKeys.forEach((key) => {
+      if (sortedData[key]) {
+        const { textContent, color, focus } = sortedData[key];
+        const div = document.createElement('div');
+        div.classList.add('tooltip-content-row');
         div.classList.add('tooltip-content');
-        const color = colors[(index - 1) % colors.length];

         const squareBox = document.createElement('div');
         squareBox.classList.add('pointSquare');

@@ -45,37 +99,35 @@ const generateTooltipContent = (
         const text = document.createElement('div');
         text.classList.add('tooltip-data-point');

-        const { metric = {}, queryName = '', legend = '' } =
-          seriesList[index - 1] || {};
-
-        const label = getLabelName(
-          metric,
-          queryName || '', // query
-          legend || '',
-        );
-
-        const value = data[index][idx] || 0;
-
-        const tooltipValue = getToolTipValue(value, yAxisUnit);
-
-        text.textContent = `${label} : ${tooltipValue || 0}`;
+        text.textContent = textContent;
         text.style.color = color;

+        if (focus) {
+          text.classList.add('focus');
+        } else {
+          text.classList.remove('focus');
+        }
+
         div.appendChild(squareBox);
         div.appendChild(text);
-      }

-      container.appendChild(div);
+        container.appendChild(div);
+      }
     });
   }

+  const overlay = document.getElementById('overlay');
+
+  if (overlay && overlay.style.display === 'none') {
+    overlay.style.display = 'block';
+  }
+
   return container;
 };

 const tooltipPlugin = (
   apiResponse: MetricRangePayloadProps | undefined,
   yAxisUnit?: string,
+  fillSpans?: boolean,
 ): any => {
   let over: HTMLElement;
   let bound: HTMLElement;
@@ -127,16 +179,14 @@ const tooltipPlugin = (
   if (overlay) {
     overlay.textContent = '';
     const { left, top, idx } = u.cursor;

     if (idx) {
       const anchor = { left: left + bLeft, top: top + bTop };
-      const content = createDivsFromArray(
+      const content = generateTooltipContent(
         apiResult,
         u.data,
         idx,
         yAxisUnit,
         u.series,
+        fillSpans,
       );
       overlay.appendChild(content);
       placement(overlay, anchor, 'right', 'start', { bound });
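The new `sortTooltipContentBasedOnValue` helper orders tooltip rows by series value, descending, so the largest series appears first. A standalone sketch of that ordering (data hypothetical):

    const rows = {
      frontend: { value: 12 },
      'query-service': { value: 87 },
      alertmanager: { value: 3 },
    };

    const sorted = Object.fromEntries(
      Object.entries(rows).sort((a, b) => b[1].value - a[1].value),
    );

    // Object.keys(sorted) -> ['query-service', 'frontend', 'alertmanager']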
@@ -14,6 +14,10 @@

 .tooltip-data-point {
   font-size: 11px;
+
+  flex: 1;
+  overflow: hidden;
+  word-wrap: break-word;
 }

 .tooltip-content {
@@ -24,6 +28,12 @@

 .pointSquare,
 .tooltip-data-point {
-  font-size: 13px !important;
+  font-size: 12px !important;
   opacity: 0.9;
+
+  &.focus {
+    opacity: 1;
+    font-weight: 700;
+  }
 }
 }
@@ -1,20 +1,52 @@
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

-function filterIsNan(
-  value: [number, string],
-  isFilterNaNEnabled?: boolean,
-): boolean {
-  const val = value[1];
-  return isFilterNaNEnabled ? val !== 'NaN' : true;
-}
+// eslint-disable-next-line sonarjs/cognitive-complexity
+function fillMissingTimestamps(
+  sortedTimestamps: number[],
+  subsetArray: any[],
+  fillSpans: boolean | undefined,
+): any[] {
+  const filledArray = [];
+
+  let subsetIndex = 0;
+  // eslint-disable-next-line no-restricted-syntax
+  for (const timestamp of sortedTimestamps) {
+    if (
+      subsetIndex < subsetArray.length &&
+      timestamp === subsetArray[subsetIndex][0]
+    ) {
+      // Timestamp is present in subsetArray
+      const seriesPointData = subsetArray[subsetIndex];
+
+      if (
+        seriesPointData &&
+        Array.isArray(seriesPointData) &&
+        seriesPointData.length > 0 &&
+        seriesPointData[1] !== 'NaN'
+      ) {
+        filledArray.push(subsetArray[subsetIndex]);
+      } else {
+        const value = fillSpans ? 0 : null;
+        filledArray.push([seriesPointData[0], value]);
+      }
+
+      subsetIndex += 1;
+    } else {
+      // Timestamp is missing in subsetArray, fill with [timestamp, 0]
+      const value = fillSpans ? 0 : null;
+      filledArray.push([timestamp, value]);
+    }
+  }
+
+  return filledArray;
+}

 export const getUPlotChartData = (
   apiResponse?: MetricRangePayloadProps,
   fillSpans?: boolean,
-  filterNaN?: boolean,
-): uPlot.AlignedData => {
+): any[] => {
   const seriesList = apiResponse?.data?.result || [];
-  const uPlotData: uPlot.AlignedData = [];
+  const uPlotData = [];

   // this helps us identify the series with the max number of values and helps define the x axis - timestamps
   const xSeries = seriesList.reduce(
@@ -28,33 +60,28 @@ export const getUPlotChartData = (
     seriesList[index]?.values?.sort((a, b) => a[0] - b[0]);
   }

-  const timestampArr = xSeries?.values
-    ?.filter((response) => filterIsNan(response, filterNaN))
-    .map((v) => v[0]);
-
-  const uplotDataFormatArr = new Float64Array(timestampArr);
+  const timestampArr = xSeries?.values?.map((v) => v[0]);

   // timestamp
-  uPlotData.push(uplotDataFormatArr);
-
-  const numberOfTimestamps = uPlotData[0].length;
+  uPlotData.push(timestampArr);

   // for each series, push the values
   seriesList.forEach((series) => {
+    const updatedSeries = fillMissingTimestamps(
+      timestampArr,
+      series?.values || [],
+      fillSpans,
+    );
+
     const seriesData =
-      series?.values
-        ?.filter((response) => filterIsNan(response, filterNaN))
-        .map((v) => parseFloat(v[1])) || [];
+      updatedSeries?.map((v) => {
+        if (v[1] === null) {
+          return v[1];
+        }
+        return parseFloat(v[1]);
+      }) || [];

-    // fill rest of the value with zero
-    if (seriesData.length < numberOfTimestamps && fillSpans) {
-      const diff = numberOfTimestamps - seriesData.length;
-      for (let i = 0; i < diff; i += 1) {
-        seriesData.push(0);
-      }
-    }
-
-    uPlotData.push(new Float64Array(seriesData));
+    uPlotData.push(seriesData);
   });

   return uPlotData;
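To see what the new alignment step produces, consider a small worked example (data hypothetical): the x-axis has timestamps [1, 2, 3, 4] and one series only has points at 1 and 3, with the point at 3 being 'NaN':

    const timestamps = [1, 2, 3, 4];
    const sparseSeries: Array<[number, string]> = [
      [1, '10.5'],
      [3, 'NaN'],
    ];

    // fillMissingTimestamps(timestamps, sparseSeries, true)
    //   -> [[1, '10.5'], [2, 0], [3, 0], [4, 0]]
    //   (missing points and 'NaN' both become 0, drawing a continuous line)
    // fillMissingTimestamps(timestamps, sparseSeries, false)
    //   -> [[1, '10.5'], [2, null], [3, null], [4, null]]
    //   (nulls leave visible gaps in the rendered series)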
@@ -18,7 +18,7 @@ function ChannelsEdit(): JSX.Element {
   const { id } = useParams<Params>();
   const { t } = useTranslation();

-  const { isLoading, isError, data } = useQuery(['getChannel', id], {
+  const { isFetching, isError, data } = useQuery(['getChannel', id], {
     queryFn: () =>
       get({
         id,
@@ -29,7 +29,7 @@ function ChannelsEdit(): JSX.Element {
     return <Typography>{data?.error || t('something_went_wrong')}</Typography>;
   }

-  if (isLoading || !data?.payload) {
+  if (isFetching || !data?.payload) {
     return <Spinner tip="Loading Channels..." />;
   }
@@ -48,6 +48,8 @@ const DashboardContext = createContext<IDashboardContext>({
   setLayouts: () => {},
   setSelectedDashboard: () => {},
   updatedTimeRef: {} as React.MutableRefObject<Dayjs | null>,
+  toScrollWidgetId: '',
+  setToScrollWidgetId: () => {},
 });

 interface Props {
@@ -59,6 +61,8 @@ export function DashboardProvider({
 }: PropsWithChildren): JSX.Element {
   const [isDashboardSliderOpen, setIsDashboardSlider] = useState<boolean>(false);

+  const [toScrollWidgetId, setToScrollWidgetId] = useState<string>('');
+
   const [isDashboardLocked, setIsDashboardLocked] = useState<boolean>(false);

   const isDashboardPage = useRouteMatch<Props>({
@@ -185,7 +189,12 @@ export function DashboardProvider({
   );

   useEffect(() => {
-    if (isVisible && updatedTimeRef.current) {
+    // make the call on tab visibility only if the user is on dashboard / widget page
+    if (
+      isVisible &&
+      updatedTimeRef.current &&
+      (!!isDashboardPage || !!isDashboardWidgetPage)
+    ) {
       dashboardResponse.refetch();
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
@@ -230,6 +239,7 @@ export function DashboardProvider({

   const value: IDashboardContext = useMemo(
     () => ({
+      toScrollWidgetId,
       isDashboardSliderOpen,
       isDashboardLocked,
       handleToggleDashboardSlider,
@@ -241,6 +251,7 @@ export function DashboardProvider({
       setLayouts,
       setSelectedDashboard,
       updatedTimeRef,
+      setToScrollWidgetId,
     }),
     // eslint-disable-next-line react-hooks/exhaustive-deps
     [
@@ -250,6 +261,7 @@ export function DashboardProvider({
       selectedDashboard,
       dashboardId,
       layouts,
+      toScrollWidgetId,
     ],
   );
@ -17,4 +17,6 @@ export interface IDashboardContext {
		React.SetStateAction<Dashboard | undefined>
	>;
	updatedTimeRef: React.MutableRefObject<dayjs.Dayjs | null>;
	toScrollWidgetId: string;
	setToScrollWidgetId: React.Dispatch<React.SetStateAction<string>>;
}
@ -51,23 +51,23 @@ body {
}

#overlay {
	font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto,
		'Helvetica Neue', Arial, 'Noto Sans', sans-serif, 'Apple Color Emoji',
		'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
	font-family: 'Inter';
	font-size: 12px;
	position: absolute;
	margin: 0.5rem;
	background: rgba(0, 0, 0, 0.9);
	background: rgba(0, 0, 0);
	-webkit-font-smoothing: antialiased;
	color: #fff;
	z-index: 10000;
	pointer-events: none;
	// pointer-events: none;
	overflow: auto;
	max-height: 600px !important;
	max-height: 480px !important;
	max-width: 240px !important;
	border-radius: 5px;
	border: 1px solid rgba(255, 255, 255, 0.1);

	.tooltip-container {
		padding: 0.5rem;
		padding: 1rem;
	}

	&::-webkit-scrollbar {
@ -1249,13 +1249,13 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
		return
	}

	err = aH.ruleManager.CreateRule(r.Context(), string(body))
	rule, err := aH.ruleManager.CreateRule(r.Context(), string(body))
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
		return
	}

	aH.Respond(w, "rule successfully added")
	aH.Respond(w, rule)

}

@ -3042,6 +3042,17 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
	resp := v3.QueryRangeResponse{
		Result: result,
	}

	// This checks whether the time for the context to complete has been exceeded.
	// It adds a flag to notify the user of an incomplete response.
	select {
	case <-ctx.Done():
		resp.ContextTimeout = true
		resp.ContextTimeoutMessage = "result might contain incomplete data due to context timeout, for custom timeout set the timeout header eg:- timeout:120"
	default:
		break
	}

	aH.Respond(w, resp)
}
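The non-blocking select above is the standard Go idiom for peeking at a context without waiting on it. A minimal standalone sketch of the same pattern (the helper name is hypothetical, not part of this commit):

	func markIfDeadlineExceeded(ctx context.Context, resp *v3.QueryRangeResponse) {
		select {
		case <-ctx.Done():
			// The deadline passed (or the request was cancelled) while the
			// queries ran, so the result set may be partial.
			resp.ContextTimeout = true
			resp.ContextTimeoutMessage = "result might contain incomplete data due to context timeout"
		default:
			// Context is still alive; respond normally.
		}
	}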
@ -49,11 +49,10 @@ type PipelineOperator struct {
	Name string `json:"name,omitempty" yaml:"-"`

	// optional keys depending on the type
	ParseTo   string           `json:"parse_to,omitempty" yaml:"parse_to,omitempty"`
	Pattern   string           `json:"pattern,omitempty" yaml:"pattern,omitempty"`
	Regex     string           `json:"regex,omitempty" yaml:"regex,omitempty"`
	ParseFrom string           `json:"parse_from,omitempty" yaml:"parse_from,omitempty"`
	Timestamp *TimestampParser `json:"timestamp,omitempty" yaml:"timestamp,omitempty"`
	ParseTo   string `json:"parse_to,omitempty" yaml:"parse_to,omitempty"`
	Pattern   string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
	Regex     string `json:"regex,omitempty" yaml:"regex,omitempty"`
	ParseFrom string `json:"parse_from,omitempty" yaml:"parse_from,omitempty"`
	*TraceParser `yaml:",inline,omitempty"`
	Field string `json:"field,omitempty" yaml:"field,omitempty"`
	Value string `json:"value,omitempty" yaml:"value,omitempty"`
@ -63,6 +62,10 @@ type PipelineOperator struct {
	Routes  *[]Route `json:"routes,omitempty" yaml:"routes,omitempty"`
	Fields  []string `json:"fields,omitempty" yaml:"fields,omitempty"`
	Default string   `json:"default,omitempty" yaml:"default,omitempty"`

	// time_parser fields.
	Layout     string `json:"layout,omitempty" yaml:"layout,omitempty"`
	LayoutType string `json:"layout_type,omitempty" yaml:"layout_type,omitempty"`
}

type TimestampParser struct {
@ -25,7 +25,11 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
			continue
		}

		operators := getOperators(v.Config)
		operators, err := getOperators(v.Config)
		if err != nil {
			return nil, nil, errors.Wrap(err, "failed to prepare operators")
		}

		if len(operators) == 0 {
			continue
		}
@ -68,7 +72,7 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
	return processors, names, nil
}

func getOperators(ops []PipelineOperator) []PipelineOperator {
func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
	filteredOp := []PipelineOperator{}
	for i, operator := range ops {
		if operator.Enabled {
@ -106,6 +110,34 @@ func getOperators(ops []PipelineOperator) []PipelineOperator {

			} else if operator.Type == "trace_parser" {
				cleanTraceParser(&operator)

			} else if operator.Type == "time_parser" {
				parseFromParts := strings.Split(operator.ParseFrom, ".")
				parseFromPath := strings.Join(parseFromParts, "?.")

				operator.If = fmt.Sprintf(`%s != nil`, parseFromPath)

				if operator.LayoutType == "strptime" {
					regex, err := RegexForStrptimeLayout(operator.Layout)
					if err != nil {
						return nil, fmt.Errorf("could not generate time_parser processor: %w", err)
					}

					operator.If = fmt.Sprintf(
						`%s && %s matches "%s"`, operator.If, parseFromPath, regex,
					)
				} else if operator.LayoutType == "epoch" {
					valueRegex := `^\\s*[0-9]+\\s*$`
					if strings.Contains(operator.Layout, ".") {
						valueRegex = `^\\s*[0-9]+\\.[0-9]+\\s*$`
					}

					operator.If = fmt.Sprintf(
						`%s && string(%s) matches "%s"`, operator.If, parseFromPath, valueRegex,
					)

				}
				// TODO(Raj): Maybe add support for gotime too eventually
			}

			filteredOp = append(filteredOp, operator)
@ -113,7 +145,7 @@ func getOperators(ops []PipelineOperator) []PipelineOperator {
			filteredOp[len(filteredOp)-1].Output = ""
		}
	}
	return filteredOp
	return filteredOp, nil
}

func cleanTraceParser(operator *PipelineOperator) {
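To make the generated guard concrete: for a time_parser reading from attributes.test_timestamp with the strptime layout %d-%m-%Y, the code above produces an If expression of the form attributes?.test_timestamp != nil && attributes?.test_timestamp matches "[0-9]{2}-[0-9]{2}-[0-9]{4}". A minimal sketch (getOperators is unexported, so this assumes it runs inside the logparsingpipeline package):

	op := PipelineOperator{
		ID:         "time",
		Type:       "time_parser",
		Enabled:    true,
		Name:       "time parser",
		ParseFrom:  "attributes.test_timestamp",
		LayoutType: "strptime",
		Layout:     "%d-%m-%Y",
	}
	ops, err := getOperators([]PipelineOperator{op})
	if err != nil {
		panic(err)
	}
	// Prints the generated guard expression shown above.
	fmt.Println(ops[0].If)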
@ -2,6 +2,7 @@ package logparsingpipeline

import (
	"context"
	"fmt"
	"strings"
	"testing"
	"time"
@ -198,7 +199,8 @@ var prepareProcessorTestData = []struct {
func TestPreparePipelineProcessor(t *testing.T) {
	for _, test := range prepareProcessorTestData {
		Convey(test.Name, t, func() {
			res := getOperators(test.Operators)
			res, err := getOperators(test.Operators)
			So(err, ShouldBeNil)
			So(res, ShouldResemble, test.Output)
		})
	}
@ -256,11 +258,13 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
		}
	}

	testCases := []struct {
	type pipelineTestCase struct {
		Name           string
		Operator       PipelineOperator
		NonMatchingLog model.SignozLog
	}{
	}

	testCases := []pipelineTestCase{
		{
			"regex processor should ignore log with missing field",
			PipelineOperator{
@ -342,12 +346,82 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
				Field:   "attributes.test",
			},
			makeTestLog("mismatching log", map[string]string{}),
		}, {
			"time parser should ignore logs with missing field.",
			PipelineOperator{
				ID:         "time",
				Type:       "time_parser",
				Enabled:    true,
				Name:       "time parser",
				ParseFrom:  "attributes.test_timestamp",
				LayoutType: "strptime",
				Layout:     "%Y-%m-%dT%H:%M:%S.%f%z",
			},
			makeTestLog("mismatching log", map[string]string{}),
		}, {
			"time parser should ignore logs timestamp values that don't contain expected strptime layout.",
			PipelineOperator{
				ID:         "time",
				Type:       "time_parser",
				Enabled:    true,
				Name:       "time parser",
				ParseFrom:  "attributes.test_timestamp",
				LayoutType: "strptime",
				Layout:     "%Y-%m-%dT%H:%M:%S.%f%z",
			},
			makeTestLog("mismatching log", map[string]string{
				"test_timestamp": "2023-11-27T12:03:28A239907+0530",
			}),
		}, {
			"time parser should ignore logs timestamp values that don't contain an epoch",
			PipelineOperator{
				ID:         "time",
				Type:       "time_parser",
				Enabled:    true,
				Name:       "time parser",
				ParseFrom:  "attributes.test_timestamp",
				LayoutType: "epoch",
				Layout:     "s",
			},
			makeTestLog("mismatching log", map[string]string{
				"test_timestamp": "not-an-epoch",
			}),
		},
		// TODO(Raj): see if there is an error scenario for grok parser.
		// TODO(Raj): see if there is an error scenario for trace parser.
		// TODO(Raj): see if there is an error scenario for Add operator.
	}

	// Some more timeparser test cases
	epochLayouts := []string{"s", "ms", "us", "ns", "s.ms", "s.us", "s.ns"}
	epochTestValues := []string{
		"1136214245", "1136214245123", "1136214245123456",
		"1136214245123456789", "1136214245.123",
		"1136214245.123456", "1136214245.123456789",
	}
	for _, epochLayout := range epochLayouts {
		for _, testValue := range epochTestValues {
			testCases = append(testCases, pipelineTestCase{
				fmt.Sprintf(
					"time parser should ignore log with timestamp value %s that doesn't match layout type %s",
					testValue, epochLayout,
				),
				PipelineOperator{
					ID:         "time",
					Type:       "time_parser",
					Enabled:    true,
					Name:       "time parser",
					ParseFrom:  "attributes.test_timestamp",
					LayoutType: "epoch",
					Layout:     epochLayout,
				},
				makeTestLog("mismatching log", map[string]string{
					"test_timestamp": testValue,
				}),
			})
		}
	}

	for _, testCase := range testCases {
		testPipelines := []Pipeline{makeTestPipeline([]PipelineOperator{testCase.Operator})}

@ -420,3 +494,138 @@ func TestResourceFiltersWork(t *testing.T) {

	require.Equal(result[0].Attributes_string["test"], "test-value")
}

func TestPipelineFilterWithStringOpsShouldNotSpamWarningsIfAttributeIsMissing(t *testing.T) {
	require := require.New(t)

	for _, operator := range []v3.FilterOperator{
		v3.FilterOperatorContains,
		v3.FilterOperatorNotContains,
		v3.FilterOperatorRegex,
		v3.FilterOperatorNotRegex,
	} {
		testPipeline := Pipeline{
			OrderId: 1,
			Name:    "pipeline1",
			Alias:   "pipeline1",
			Enabled: true,
			Filter: &v3.FilterSet{
				Operator: "AND",
				Items: []v3.FilterItem{
					{
						Key: v3.AttributeKey{
							Key:      "service",
							DataType: v3.AttributeKeyDataTypeString,
							Type:     v3.AttributeKeyTypeResource,
						},
						Operator: operator,
						Value:    "nginx",
					},
				},
			},
			Config: []PipelineOperator{
				{
					ID:      "add",
					Type:    "add",
					Enabled: true,
					Name:    "add",
					Field:   "attributes.test",
					Value:   "test-value",
				},
			},
		}

		testLog := model.SignozLog{
			Timestamp:         uint64(time.Now().UnixNano()),
			Body:              "test log",
			Attributes_string: map[string]string{},
			Resources_string:  map[string]string{},
			SeverityText:      entry.Info.String(),
			SeverityNumber:    uint8(entry.Info),
			SpanID:            "",
			TraceID:           "",
		}

		result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
			context.Background(),
			[]Pipeline{testPipeline},
			[]model.SignozLog{testLog},
		)
		require.Nil(err)
		require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
		require.Equal(1, len(result))
	}
}

func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
	// TODO(Raj): Remove this after dots are supported

	require := require.New(t)

	testPipeline := Pipeline{
		OrderId: 1,
		Name:    "pipeline1",
		Alias:   "pipeline1",
		Enabled: true,
		Filter: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{
				{
					Key: v3.AttributeKey{
						Key:      "k8s_deployment_name",
						DataType: v3.AttributeKeyDataTypeString,
						Type:     v3.AttributeKeyTypeResource,
					},
					Operator: "=",
					Value:    "ingress",
				},
			},
		},
		Config: []PipelineOperator{
			{
				ID:      "add",
				Type:    "add",
				Enabled: true,
				Name:    "add",
				Field:   "attributes.test",
				Value:   "test-value",
			},
		},
	}

	testLogs := []model.SignozLog{{
		Timestamp:         uint64(time.Now().UnixNano()),
		Body:              "test log",
		Attributes_string: map[string]string{},
		Resources_string: map[string]string{
			"k8s_deployment_name": "ingress",
		},
		SeverityText:   entry.Info.String(),
		SeverityNumber: uint8(entry.Info),
		SpanID:         "",
		TraceID:        "",
	}, {
		Timestamp:         uint64(time.Now().UnixNano()),
		Body:              "test log",
		Attributes_string: map[string]string{},
		Resources_string: map[string]string{
			"k8s.deployment.name": "ingress",
		},
		SeverityText:   entry.Info.String(),
		SeverityNumber: uint8(entry.Info),
		SpanID:         "",
		TraceID:        "",
	}}

	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
		context.Background(),
		[]Pipeline{testPipeline},
		testLogs,
	)
	require.Nil(err)
	require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
	require.Equal(2, len(result))
	for _, processedLog := range result {
		require.Equal(processedLog.Attributes_string["test"], "test-value")
	}
}
@ -8,6 +8,7 @@ import (

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"go.signoz.io/signoz/pkg/query-service/queryBuilderToExpr"
	"golang.org/x/exp/slices"
)

// PostablePipelines are a list of user defined pipelines
@ -164,6 +165,39 @@ func isValidOperator(op PipelineOperator) error {
		if len(op.Fields) == 0 {
			return fmt.Errorf(fmt.Sprintf("fields of %s retain operator cannot be empty", op.ID))
		}

	case "time_parser":
		if op.ParseFrom == "" {
			return fmt.Errorf("parse from of time parsing processor %s cannot be empty", op.ID)
		}
		if op.LayoutType != "epoch" && op.LayoutType != "strptime" {
			// TODO(Raj): Maybe add support for gotime format
			return fmt.Errorf(
				"invalid format type '%s' of time parsing processor %s", op.LayoutType, op.ID,
			)
		}
		if op.Layout == "" {
			return fmt.Errorf(fmt.Sprintf("format can not be empty for time parsing processor %s", op.ID))
		}

		validEpochLayouts := []string{"s", "ms", "us", "ns", "s.ms", "s.us", "s.ns"}
		if op.LayoutType == "epoch" && !slices.Contains(validEpochLayouts, op.Layout) {
			return fmt.Errorf(
				"invalid epoch format '%s' of time parsing processor %s", op.LayoutType, op.ID,
			)
		}

		// TODO(Raj): Add validation for strptime layouts via
		// collector simulator maybe.
		if op.LayoutType == "strptime" {
			_, err := RegexForStrptimeLayout(op.Layout)
			if err != nil {
				return fmt.Errorf(
					"invalid strptime format '%s' of time parsing processor %s: %w", op.LayoutType, op.ID, err,
				)
			}
		}

	default:
		return fmt.Errorf(fmt.Sprintf("operator type %s not supported for %s, use one of (grok_parser, regex_parser, copy, move, add, remove, trace_parser, retain)", op.Type, op.ID))
	}
@ -275,6 +275,57 @@ var operatorTest = []struct {
			},
		},
		IsValid: false,
	}, {
		Name: "Timestamp Parser - valid",
		Operator: PipelineOperator{
			ID:         "time",
			Type:       "time_parser",
			ParseFrom:  "attributes.test_timestamp",
			LayoutType: "epoch",
			Layout:     "s",
		},
		IsValid: true,
	}, {
		Name: "Timestamp Parser - invalid - bad parsefrom attribute",
		Operator: PipelineOperator{
			ID:         "time",
			Type:       "time_parser",
			ParseFrom:  "timestamp",
			LayoutType: "epoch",
			Layout:     "s",
		},
		IsValid: false,
	}, {
		Name: "Timestamp Parser - unsupported layout_type",
		Operator: PipelineOperator{
			ID:        "time",
			Type:      "time_parser",
			ParseFrom: "attributes.test_timestamp",
			// TODO(Raj): Maybe add support for gotime format
			LayoutType: "gotime",
			Layout:     "Mon Jan 2 15:04:05 -0700 MST 2006",
		},
		IsValid: false,
	}, {
		Name: "Timestamp Parser - invalid epoch layout",
		Operator: PipelineOperator{
			ID:         "time",
			Type:       "time_parser",
			ParseFrom:  "attributes.test_timestamp",
			LayoutType: "epoch",
			Layout:     "%Y-%m-%d",
		},
		IsValid: false,
	}, {
		Name: "Timestamp Parser - invalid strptime layout",
		Operator: PipelineOperator{
			ID:         "time",
			Type:       "time_parser",
			ParseFrom:  "attributes.test_timestamp",
			LayoutType: "strptime",
			Layout:     "%U",
		},
		IsValid: false,
	},
}
@ -145,7 +145,7 @@ func TestPipelinePreview(t *testing.T) {

}

func TestGrokParsingPreview(t *testing.T) {
func TestGrokParsingProcessor(t *testing.T) {
	require := require.New(t)

	testPipelines := []Pipeline{
@ -207,7 +207,7 @@ func TestGrokParsingPreview(t *testing.T) {
	require.Equal("route/server.go:71", processed.Attributes_string["location"])
}

func TestTraceParsingPreview(t *testing.T) {
func TestTraceParsingProcessor(t *testing.T) {
	require := require.New(t)

	testPipelines := []Pipeline{
120
pkg/query-service/app/logparsingpipeline/time_parser.go
Normal file
@ -0,0 +1,120 @@
package logparsingpipeline

import (
	"errors"
	"fmt"
	"regexp"
	"strings"
)

// Regex for strptime format placeholders supported by the time parser.
// Used for defining if conditions on time parsing operators so they do not
// spam collector logs when encountering values that can't be parsed.
//
// Based on ctimeSubstitutes defined in https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/internal/coreinternal/timeutils/internal/ctimefmt/ctimefmt.go#L22
//
// TODO(Raj): Maybe make the expressions tighter.
var ctimeRegex = map[string]string{
	// %Y - Year, zero-padded (0001, 0002, ..., 2019, 2020, ..., 9999)
	"%Y": "[0-9]{4}",
	// %y - Year, last two digits, zero-padded (01, ..., 99)
	"%y": "[0-9]{2}",
	// %m - Month as a decimal number (01, 02, ..., 12)
	"%m": "[0-9]{2}",
	// %o - Month as a space-padded number ( 1, 2, ..., 12)
	"%o": "_[0-9]",
	// %q - Month as an unpadded number (1,2,...,12)
	"%q": "[0-9]",
	// %b, %h - Abbreviated month name (Jan, Feb, ...)
	"%b": "[a-zA-Z]*?",
	"%h": "[a-zA-Z]*?",
	// %B - Full month name (January, February, ...)
	"%B": "[a-zA-Z]*?",
	// %d - Day of the month, zero-padded (01, 02, ..., 31)
	"%d": "[0-9]{2}",
	// %e - Day of the month, space-padded ( 1, 2, ..., 31)
	"%e": "_[0-9]",
	// %g - Day of the month, unpadded (1,2,...,31)
	"%g": "[0-9]",
	// %a - Abbreviated weekday name (Sun, Mon, ...)
	"%a": "[a-zA-Z]*?",
	// %A - Full weekday name (Sunday, Monday, ...)
	"%A": "[a-zA-Z]*?",
	// %H - Hour (24-hour clock) as a zero-padded decimal number (00, ..., 24)
	"%H": "[0-9]{2}",
	// %l - Hour (12-hour clock: 0, ..., 12)
	"%l": "[0-9]{1-2}",
	// %I - Hour (12-hour clock) as a zero-padded decimal number (00, ..., 12)
	"%I": "[0-9]{2}",
	// %p - Locale's equivalent of either AM or PM
	"%p": "(AM|PM)",
	// %P - Locale's equivalent of either am or pm
	"%P": "(am|pm)",
	// %M - Minute, zero-padded (00, 01, ..., 59)
	"%M": "[0-9]{2}",
	// %S - Second as a zero-padded decimal number (00, 01, ..., 59)
	"%S": "[0-9]{2}",
	// %L - Millisecond as a decimal number, zero-padded on the left (000, 001, ..., 999)
	"%L": "[0-9]*?",
	// %f - Microsecond as a decimal number, zero-padded on the left (000000, ..., 999999)
	"%f": "[0-9]*?",
	// %s - Nanosecond as a decimal number, zero-padded on the left (000000, ..., 999999)
	"%s": "[0-9]*?",
	// %Z - Timezone name or abbreviation or empty (UTC, EST, CST)
	"%Z": "[a-zA-Z]*?",
	// %z - UTC offset in the form ±HHMM[SS[.ffffff]] or empty(+0000, -0400)
	"%z": "[-+][0-9]*?",
	// Weekday as a decimal number, where 0 is Sunday and 6 is Saturday.
	"%w": "[-+][0-9]*?",
	"%i": "[-+][0-9]*?",
	"%j": "[-+][0-9]{2}:[0-9]{2}",
	"%k": "[-+][0-9]{2}:[0-9]{2}:[0-9]{2}",
	// %D, %x - Short MM/DD/YY date, equivalent to %m/%d/%y
	"%D": "[0-9]{2}/[0-9]{2}/[0-9]{4}",
	// %D, %x - Short MM/DD/YY date, equivalent to %m/%d/%y
	"%x": "[0-9]{2}/[0-9]{2}/[0-9]{4}",
	// %F - Short YYYY-MM-DD date, equivalent to %Y-%m-%d
	"%F": "[0-9]{4}-[0-9]{2}-[0-9]{2}",
	// %T, %X - ISO 8601 time format (HH:MM:SS), equivalent to %H:%M:%S
	"%T": "[0-9]{2}:[0-9]{2}:[0-9]{2}",
	// %T, %X - ISO 8601 time format (HH:MM:SS), equivalent to %H:%M:%S
	"%X": "[0-9]{2}:[0-9]{2}:[0-9]{2}",
	// %r - 12-hour clock time (02:55:02 pm)
	"%r": "[0-9]{2}:[0-9]{2}:[0-9]{2} (am|pm)",
	// %R - 24-hour HH:MM time, equivalent to %H:%M
	"%R": "[0-9]{2}:[0-9]{2}",
	// %n - New-line character ('\n')
	"%n": "\n",
	// %t - Horizontal-tab character ('\t')
	"%t": "\t",
	// %% - A % sign
	"%%": "%",
	// %c - Date and time representation (Mon Jan 02 15:04:05 2006)
	"%c": "[a-zA-Z]{3} [a-zA-Z]{3} [0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} [0-9]{4}",
}

func RegexForStrptimeLayout(layout string) (string, error) {
	layoutRegex := layout
	for _, regexSpecialChar := range []string{
		".", "+", "*", "?", "^", "$", "(", ")", "[", "]", "{", "}", "|", `\`,
	} {
		layoutRegex = strings.ReplaceAll(layoutRegex, regexSpecialChar, `\`+regexSpecialChar)
	}

	var errs []error
	replaceStrptimeDirectiveWithRegex := func(directive string) string {
		if regex, ok := ctimeRegex[directive]; ok {
			return regex
		}
		errs = append(errs, errors.New("unsupported ctimefmt directive: "+directive))
		return ""
	}

	strptimeDirectiveRegexp := regexp.MustCompile(`%.`)
	layoutRegex = strptimeDirectiveRegexp.ReplaceAllStringFunc(layoutRegex, replaceStrptimeDirectiveWithRegex)
	if len(errs) != 0 {
		return "", fmt.Errorf("couldn't generate regex for ctime format: %v", errs)
	}

	return layoutRegex, nil
}
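A quick usage sketch of the helper above (the main wrapper is illustrative; RegexForStrptimeLayout itself is the function added by this commit, and the import path follows the module layout shown in this file's package):

	package main

	import (
		"fmt"

		"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
	)

	func main() {
		// "%d/%m/%Y" expands to [0-9]{2}/[0-9]{2}/[0-9]{4}; regex special
		// characters in the layout are escaped before directive substitution.
		regex, err := logparsingpipeline.RegexForStrptimeLayout("%d/%m/%Y")
		if err != nil {
			panic(err)
		}
		fmt.Println(regex) // [0-9]{2}/[0-9]{2}/[0-9]{4}
	}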
136
pkg/query-service/app/logparsingpipeline/time_parser_test.go
Normal file
@ -0,0 +1,136 @@
package logparsingpipeline

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"testing"
	"time"

	"github.com/antonmedv/expr"
	"github.com/stretchr/testify/require"
	"go.signoz.io/signoz/pkg/query-service/model"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestRegexForStrptimeLayout(t *testing.T) {
	require := require.New(t)

	var testCases = []struct {
		strptimeLayout string
		str            string
		shouldMatch    bool
	}{
		{
			strptimeLayout: "%Y-%m-%dT%H:%M:%S.%f%z",
			str:            "2023-11-26T12:03:28.239907+0530",
			shouldMatch:    true,
		}, {
			strptimeLayout: "%d-%m-%Y",
			str:            "26-11-2023",
			shouldMatch:    true,
		}, {
			strptimeLayout: "%d-%m-%Y",
			str:            "26-11-2023",
			shouldMatch:    true,
		}, {
			strptimeLayout: "%d/%m/%y",
			str:            "11/03/02",
			shouldMatch:    true,
		}, {
			strptimeLayout: "%A, %d. %B %Y %I:%M%p",
			str:            "Tuesday, 21. November 2006 04:30PM11/03/02",
			shouldMatch:    true,
		}, {
			strptimeLayout: "%A, %d. %B %Y %I:%M%p",
			str:            "some random text",
			shouldMatch:    false,
		},
	}

	for _, test := range testCases {
		regex, err := RegexForStrptimeLayout(test.strptimeLayout)
		require.Nil(err, test.strptimeLayout)

		code := fmt.Sprintf(`"%s" matches "%s"`, test.str, regex)
		program, err := expr.Compile(code)
		require.Nil(err, test.strptimeLayout)

		output, err := expr.Run(program, map[string]string{})
		require.Nil(err, test.strptimeLayout)
		require.Equal(output, test.shouldMatch, test.strptimeLayout)

	}
}

func TestTimestampParsingProcessor(t *testing.T) {
	require := require.New(t)

	testPipelines := []Pipeline{
		{
			OrderId: 1,
			Name:    "pipeline1",
			Alias:   "pipeline1",
			Enabled: true,
			Filter: &v3.FilterSet{
				Operator: "AND",
				Items: []v3.FilterItem{
					{
						Key: v3.AttributeKey{
							Key:      "method",
							DataType: v3.AttributeKeyDataTypeString,
							Type:     v3.AttributeKeyTypeTag,
						},
						Operator: "=",
						Value:    "GET",
					},
				},
			},
			Config: []PipelineOperator{},
		},
	}

	var timestampParserOp PipelineOperator
	err := json.Unmarshal([]byte(`
		{
			"orderId": 1,
			"enabled": true,
			"type": "time_parser",
			"name": "Test timestamp parser",
			"id": "test-timestamp-parser",
			"parse_from": "attributes.test_timestamp",
			"layout_type": "strptime",
			"layout": "%Y-%m-%dT%H:%M:%S.%f%z"
		}
	`), &timestampParserOp)
	require.Nil(err)
	testPipelines[0].Config = append(testPipelines[0].Config, timestampParserOp)

	testTimestampStr := "2023-11-27T12:03:28.239907+0530"
	testLog := makeTestLogEntry(
		"test log",
		map[string]string{
			"method":         "GET",
			"test_timestamp": testTimestampStr,
		},
	)

	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
		context.Background(),
		testPipelines,
		[]model.SignozLog{
			testLog,
		},
	)
	require.Nil(err)
	require.Equal(1, len(result))
	require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
	processed := result[0]

	expectedTimestamp, err := time.Parse("2006-01-02T15:04:05.999999-0700", testTimestampStr)
	require.Nil(err)

	require.Equal(uint64(expectedTimestamp.UnixNano()), processed.Timestamp)

}
@ -3,6 +3,7 @@ package v3
import (
	"fmt"
	"math"
	"strings"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@ -129,13 +130,18 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		rateOrderBy := "fingerprint, " + orderBy
		partitionBy := "fingerprint"
		if len(groupTags) != 0 {
			partitionBy += ", " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmplCounterInner, rateGroupTags, step, op, filterSubQuery, rateGroupBy, rateOrderBy,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query = fmt.Sprintf(`SELECT %s toStartOfHour(now()) as ts, %s(value)/%d as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, aggregateOperatorToSQLFunc[mq.AggregateOperator], points, query, groupBy, orderBy)
		query := `SELECT %s ts, ` + rateWithoutNegative + `as rate_value FROM(%s) WINDOW rate_window as (PARTITION BY %s ORDER BY %s ts)`
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, rateOrderBy)
		query = fmt.Sprintf(`SELECT %s toStartOfHour(now()) as ts, %s(rate_value)/%d as value FROM (%s) WHERE isNaN(rate_value) = 0 GROUP BY %s ORDER BY %s ts`, groupTags, aggregateOperatorToSQLFunc[mq.AggregateOperator], points, query, groupBy, orderBy)
		return query, nil
	case
		v3.AggregateOperatorRateSum,
@ -145,8 +151,13 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam
		step = ((end - start + 1) / 1000) / 2
		op := fmt.Sprintf("%s(value)", aggregateOperatorToSQLFunc[mq.AggregateOperator])
		subQuery := fmt.Sprintf(queryTmplCounterInner, groupTags, step, op, filterSubQuery, groupBy, orderBy)
		query := `SELECT %s toStartOfHour(now()) as ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		partitionBy := ""
		if len(groupTags) != 0 {
			partitionBy = "PARTITION BY " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		query := `SELECT %s toStartOfHour(now()) as ts, ` + rateWithoutNegative + `as value FROM(%s) WINDOW rate_window as (%s ORDER BY %s ts)`
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, groupTags)
		return query, nil
	case
		v3.AggregateOperatorP05,
@ -165,13 +176,18 @@ func buildMetricQueryForTable(start, end, _ int64, mq *v3.BuilderQuery, tableNam
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		rateOrderBy := "fingerprint, " + orderBy
		partitionBy := "fingerprint"
		if len(groupTags) != 0 {
			partitionBy += ", " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmplCounterInner, rateGroupTags, step, op, filterSubQuery, rateGroupBy, rateOrderBy,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query = fmt.Sprintf(`SELECT %s toStartOfHour(now()) as ts, sum(value)/%d as value FROM (%s) GROUP BY %s HAVING isNaN(value) = 0 ORDER BY %s ts`, groupTags, points, query, groupBy, orderBy)
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as rate_value FROM(%s) WINDOW rate_window as (PARTITION BY %s ORDER BY %s ts)`
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, rateOrderBy)
		query = fmt.Sprintf(`SELECT %s toStartOfHour(now()) as ts, sum(rate_value)/%d as value FROM (%s) WHERE isNaN(rate_value) = 0 GROUP BY %s ORDER BY %s ts`, groupTags, points, query, groupBy, orderBy)
		value := aggregateOperatorToPercentile[mq.AggregateOperator]

		query = fmt.Sprintf(`SELECT %s toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTagsWithoutLe, value, query, groupByWithoutLe, orderWithoutLe)
@ -38,7 +38,7 @@ func TestPanelTableForCumulative(t *testing.T) {
			},
			Expression: "A",
		},
		expected: "SELECT toStartOfHour(now()) as ts, sum(value)/29 as value FROM (SELECT ts, if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) as value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(value) = 0) GROUP BY ts ORDER BY ts",
		expected: "SELECT toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_count' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_count' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts",
	},
	{
		name: "latency p50",
@ -61,7 +61,7 @@ func TestPanelTableForCumulative(t *testing.T) {
			},
			Expression: "A",
		},
		expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(value)/29 as value FROM (SELECT le, ts, if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) as value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WHERE isNaN(value) = 0) GROUP BY le,ts HAVING isNaN(value) = 0 ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts",
		expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts",
	},
	{
		name: "latency p99 with group by",
@ -80,7 +80,7 @@ func TestPanelTableForCumulative(t *testing.T) {
			},
			Expression: "A",
		},
		expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(value)/29 as value FROM (SELECT service_name,le, ts, if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) as value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, service_name ASC,le ASC, ts) WHERE isNaN(value) = 0) GROUP BY service_name,le,ts HAVING isNaN(value) = 0 ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts",
		expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT service_name,le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1689255866000 AND timestamp_ms <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, service_name ASC,le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, service_name,le ORDER BY fingerprint, service_name ASC,le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts",
	},
}

@ -88,11 +88,11 @@ func TestPanelTableForCumulative(t *testing.T) {
	t.Run(c.name, func(t *testing.T) {
		query, err := buildMetricQueryForTable(1689255866000, 1689257640000, 1800, c.query, "distributed_time_series_v2")
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
			t.Fatalf("unexpected error: %v\n", err)
		}

		if query != c.expected {
			t.Fatalf("expected: %s, got: %s", c.expected, query)
			t.Fatalf("expected: %s, got: %s\n", c.expected, query)
		}
	})
}
@ -48,7 +48,7 @@ var aggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{
}

// See https://github.com/SigNoz/signoz/issues/2151#issuecomment-1467249056
var rateWithoutNegative = `if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) `
var rateWithoutNegative = `If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) `

// buildMetricsTimeSeriesFilterQuery builds the sub-query to be used for filtering
// timeseries based on search criteria
@ -219,35 +219,46 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str
		groupBy = "fingerprint, ts"
		orderBy = "fingerprint, "
		groupTags = "fingerprint,"
		partitionBy := "fingerprint"
		op := "max(value)" // max value should be the closest value for point in time
		subQuery := fmt.Sprintf(
			queryTmpl, "any(labels) as labels, "+groupTags, step, op, filterSubQuery, groupBy, orderBy,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WHERE isNaN(value) = 0`
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WINDOW rate_window as (PARTITION BY %s ORDER BY %s ts) `

		query = fmt.Sprintf(query, "labels as fullLabels,", subQuery)
		query = fmt.Sprintf(query, "labels as fullLabels,", subQuery, partitionBy, orderBy)
		return query, nil
	case v3.AggregateOperatorSumRate, v3.AggregateOperatorAvgRate, v3.AggregateOperatorMaxRate, v3.AggregateOperatorMinRate:
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		rateOrderBy := "fingerprint, " + orderBy
		partitionBy := "fingerprint"
		if len(groupTags) != 0 {
			partitionBy += ", " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmpl, rateGroupTags, step, op, filterSubQuery, rateGroupBy, rateOrderBy,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query = fmt.Sprintf(`SELECT %s ts, %s(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, aggregateOperatorToSQLFunc[mq.AggregateOperator], query, groupSets, orderBy)
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as rate_value FROM(%s) WINDOW rate_window as (PARTITION BY %s ORDER BY %s ts) `
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, rateOrderBy)
		query = fmt.Sprintf(`SELECT %s ts, %s(rate_value) as value FROM (%s) WHERE isNaN(rate_value) = 0 GROUP BY %s ORDER BY %s ts`, groupTags, aggregateOperatorToSQLFunc[mq.AggregateOperator], query, groupSets, orderBy)
		return query, nil
	case
		v3.AggregateOperatorRateSum,
		v3.AggregateOperatorRateMax,
		v3.AggregateOperatorRateAvg,
		v3.AggregateOperatorRateMin:
		partitionBy := ""
		if len(groupTags) != 0 {
			partitionBy = "PARTITION BY " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		op := fmt.Sprintf("%s(value)", aggregateOperatorToSQLFunc[mq.AggregateOperator])
		subQuery := fmt.Sprintf(queryTmpl, groupTags, step, op, filterSubQuery, groupSets, orderBy)
		query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WINDOW rate_window as (%s ORDER BY %s ts) `
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, groupTags)
		return query, nil
	case
		v3.AggregateOperatorP05,
@ -266,13 +277,18 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str
		rateGroupBy := "fingerprint, " + groupBy
		rateGroupTags := "fingerprint, " + groupTags
		rateOrderBy := "fingerprint, " + orderBy
		partitionBy := "fingerprint"
		if len(groupTags) != 0 {
			partitionBy += ", " + groupTags
			partitionBy = strings.Trim(partitionBy, ", ")
		}
		op := "max(value)"
		subQuery := fmt.Sprintf(
			queryTmpl, rateGroupTags, step, op, filterSubQuery, rateGroupBy, rateOrderBy,
		) // labels will be same so any should be fine
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as value FROM(%s) WHERE isNaN(value) = 0`
		query = fmt.Sprintf(query, groupTags, subQuery)
		query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s HAVING isNaN(value) = 0 ORDER BY %s ts`, groupTags, query, groupSets, orderBy)
		query := `SELECT %s ts, ` + rateWithoutNegative + ` as rate_value FROM(%s) WINDOW rate_window as (PARTITION BY %s ORDER BY %s ts) `
		query = fmt.Sprintf(query, groupTags, subQuery, partitionBy, rateOrderBy)
		query = fmt.Sprintf(`SELECT %s ts, sum(rate_value) as value FROM (%s) WHERE isNaN(rate_value) = 0 GROUP BY %s ORDER BY %s ts`, groupTags, query, groupSets, orderBy)
		value := aggregateOperatorToPercentile[mq.AggregateOperator]

		query = fmt.Sprintf(`SELECT %s ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTagsWithoutLe, value, query, groupByWithoutLe, orderWithoutLe)
@ -245,7 +245,7 @@ func TestBuildQueryOperators(t *testing.T) {
func TestBuildQueryXRate(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {

		tmpl := `SELECT ts, %s(value) as value FROM (SELECT ts, if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) as value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(value) = 0) GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts`
		tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts`

		cases := []struct {
			aggregateOperator v3.AggregateOperator
@ -298,7 +298,7 @@ func TestBuildQueryXRate(t *testing.T) {
func TestBuildQueryRPM(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {

		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(value) as value FROM (SELECT ts, if(runningDifference(ts) <= 0, nan, if(runningDifference(value) < 0, (value) / runningDifference(ts), runningDifference(value) / runningDifference(ts))) as value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(value) = 0) GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts)`
		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts)`

		cases := []struct {
			aggregateOperator v3.AggregateOperator
@ -55,7 +55,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) {

		require.Contains(t, queries["C"], "SELECT A.ts as ts, A.value / B.value")
		require.Contains(t, queries["C"], "WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified'] AND JSONExtractString(labels, 'in') IN ['a','b','c']")
		require.Contains(t, queries["C"], "runningDifference(value) / runningDifference(ts)")
		require.Contains(t, queries["C"], "(value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))")
	})
}
@ -428,6 +428,22 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
	})
}

func getRouteContextTimeout(overrideTimeout string) time.Duration {
	var timeout time.Duration
	var err error
	if overrideTimeout != "" {
		timeout, err = time.ParseDuration(overrideTimeout + "s")
		if err != nil {
			timeout = constants.ContextTimeout
		}
		if timeout > constants.ContextTimeoutMaxAllowed {
			timeout = constants.ContextTimeoutMaxAllowed
		}
		return timeout
	}
	return constants.ContextTimeout
}

func setTimeoutMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
@ -435,7 +451,7 @@ func setTimeoutMiddleware(next http.Handler) http.Handler {
		// check if route is not excluded
		url := r.URL.Path
		if _, ok := constants.TimeoutExcludedRoutes[url]; !ok {
			ctx, cancel = context.WithTimeout(r.Context(), constants.ContextTimeout)
			ctx, cancel = context.WithTimeout(r.Context(), getRouteContextTimeout(r.Header.Get("timeout")))
			defer cancel()
		}
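For reference, a hypothetical client-side sketch of the new override: the middleware reads a plain "timeout" header holding a value in seconds and clamps it to ContextTimeoutMaxAllowed (600s by default). baseURL, body, and the route path here are placeholders, not part of this commit:

	req, err := http.NewRequest(http.MethodPost, baseURL+"/api/v3/query_range", body)
	if err != nil {
		log.Fatal(err)
	}
	// Ask the query-service for a 120s budget instead of the default 60s.
	req.Header.Set("timeout", "120")
	resp, err := http.DefaultClient.Do(req)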
41
pkg/query-service/app/server_test.go
Normal file
@ -0,0 +1,41 @@
package app

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestGetRouteContextTimeout(t *testing.T) {
	var testGetRouteContextTimeoutData = []struct {
		Name          string
		OverrideValue string
		timeout       time.Duration
	}{
		{
			Name:          "default",
			OverrideValue: "",
			timeout:       60 * time.Second,
		},
		{
			Name:          "override",
			OverrideValue: "180",
			timeout:       180 * time.Second,
		},
		{
			Name:          "override more than max",
			OverrideValue: "610",
			timeout:       600 * time.Second,
		},
	}

	t.Parallel()

	for _, test := range testGetRouteContextTimeoutData {
		t.Run(test.Name, func(t *testing.T) {
			res := getRouteContextTimeout(test.OverrideValue)
			assert.Equal(t, test.timeout, res)
		})
	}
}
@ -135,6 +135,17 @@ func GetContextTimeout() time.Duration {

var ContextTimeout = GetContextTimeout()

func GetContextTimeoutMaxAllowed() time.Duration {
	contextTimeoutStr := GetOrDefaultEnv("CONTEXT_TIMEOUT_MAX_ALLOWED", "600")
	contextTimeoutDuration, err := time.ParseDuration(contextTimeoutStr + "s")
	if err != nil {
		return time.Minute
	}
	return contextTimeoutDuration
}

var ContextTimeoutMaxAllowed = GetContextTimeoutMaxAllowed()

const (
	TraceID     = "traceID"
	ServiceName = "serviceName"
@ -613,8 +613,10 @@ func (h *Having) CacheKey() string {
}

type QueryRangeResponse struct {
	ResultType string    `json:"resultType"`
	Result     []*Result `json:"result"`
	ContextTimeout        bool      `json:"contextTimeout,omitempty"`
	ContextTimeoutMessage string    `json:"contextTimeoutMessage,omitempty"`
	ResultType            string    `json:"resultType"`
	Result                []*Result `json:"result"`
}

type Result struct {
@ -30,9 +30,9 @@ var logOperatorsToExpr = map[v3.FilterOperator]string{
|
||||
|
||||
func getName(v v3.AttributeKey) string {
|
||||
if v.Type == v3.AttributeKeyTypeTag {
|
||||
return "attributes?." + v.Key
|
||||
return fmt.Sprintf(`attributes["%s"]`, v.Key)
|
||||
} else if v.Type == v3.AttributeKeyTypeResource {
|
||||
return "resource?." + v.Key
|
||||
return fmt.Sprintf(`resource["%s"]`, v.Key)
|
||||
}
|
||||
return v.Key
|
||||
}
|
||||
@ -53,19 +53,40 @@ func Parse(filters *v3.FilterSet) (string, error) {
 			return "", fmt.Errorf("operator not supported")
 		}

-		name := getName(v.Key)
-		var filter string
-		switch v.Operator {
-		// uncomment following lines when new version of expr is used
-		// case v3.FilterOperatorIn, v3.FilterOperatorNotIn:
-		// 	filter = fmt.Sprintf("%s %s list%s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
-
-		case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
-			filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
-		default:
-			filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
+		// TODO(Raj): Remove the use of dot replaced alternative when key
+		// contains underscore after dots are supported in keys
+		names := []string{getName(v.Key)}
+		if strings.Contains(v.Key.Key, "_") {
+			dotKey := v.Key
+			dotKey.Key = strings.Replace(v.Key.Key, "_", ".", -1)
+			names = append(names, getName(dotKey))
+		}
+
+		filterParts := []string{}
+		for _, name := range names {
+			var filter string
+
+			switch v.Operator {
+			// uncomment following lines when new version of expr is used
+			// case v3.FilterOperatorIn, v3.FilterOperatorNotIn:
+			// 	filter = fmt.Sprintf("%s %s list%s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
+
+			case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
+				filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
+			default:
+				filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
+
+				// Avoid running operators on nil values
+				if v.Operator != v3.FilterOperatorEqual && v.Operator != v3.FilterOperatorNotEqual {
+					filter = fmt.Sprintf("%s != nil && %s", name, filter)
+				}
+			}
+
+			filterParts = append(filterParts, filter)
+		}
+
+		filter := strings.Join(filterParts, " || ")

 		// check if the filter is a correct expression language
 		_, err := expr.Compile(filter)
 		if err != nil {
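For a key containing an underscore, the loop emits one guarded filter per candidate name and ORs them together. A standalone walk-through for a hypothetical tag key http_status with a contains-style operator and value "500" (the exact operator token and value quoting come from logOperatorsToExpr and exprFormattedValue, which are not fully shown in this diff):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// The two candidate names getName would produce for "http_status".
	names := []string{`attributes["http_status"]`, `attributes["http.status"]`}
	filterParts := []string{}
	for _, name := range names {
		filter := fmt.Sprintf(`%s contains "500"`, name)
		// Non-equality operators get a nil guard prepended.
		filter = fmt.Sprintf("%s != nil && %s", name, filter)
		filterParts = append(filterParts, filter)
	}
	fmt.Println(strings.Join(filterParts, " || "))
	// attributes["http_status"] != nil && attributes["http_status"] contains "500"
	//   || attributes["http.status"] != nil && attributes["http.status"] contains "500"
}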
@ -14,7 +14,7 @@ import (

 // Data store to capture user alert rule settings
 type RuleDB interface {
 	// CreateRuleTx stores rule in the db and returns tx and group name (on success)
-	CreateRuleTx(ctx context.Context, rule string) (string, Tx, error)
+	CreateRuleTx(ctx context.Context, rule string) (int64, Tx, error)

 	// EditRuleTx updates the given rule in the db and returns tx and group name (on success)
 	EditRuleTx(ctx context.Context, rule string, id string) (string, Tx, error)
@ -57,9 +57,7 @@ func newRuleDB(db *sqlx.DB) RuleDB {

 // CreateRuleTx stores a given rule in db and returns task name,
 // sql tx and error (if any)
-func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (string, Tx, error) {
-
-	var groupName string
+func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (int64, Tx, error) {
+	var lastInsertId int64

 	var userEmail string
@ -70,14 +68,14 @@ func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (string, Tx, err
 	updatedAt := time.Now()
 	tx, err := r.Begin()
 	if err != nil {
-		return groupName, nil, err
+		return lastInsertId, nil, err
 	}

 	stmt, err := tx.Prepare(`INSERT into rules (created_at, created_by, updated_at, updated_by, data) VALUES($1,$2,$3,$4,$5);`)
 	if err != nil {
 		zap.S().Errorf("Error in preparing statement for INSERT to rules\n", err)
 		tx.Rollback()
-		return groupName, nil, err
+		return lastInsertId, nil, err
 	}

 	defer stmt.Close()
@ -86,15 +84,17 @@ func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (string, Tx, err
 	if err != nil {
 		zap.S().Errorf("Error in Executing prepared statement for INSERT to rules\n", err)
 		tx.Rollback() // return an error too, we may want to wrap them
-		return groupName, nil, err
+		return lastInsertId, nil, err
 	}

-	lastInsertId, _ = result.LastInsertId()
-
-	groupName = prepareTaskName(lastInsertId)
-
-	return groupName, tx, nil
+	lastInsertId, err = result.LastInsertId()
+	if err != nil {
+		zap.S().Errorf("Error in getting last insert id for INSERT to rules\n", err)
+		tx.Rollback() // return an error too, we may want to wrap them
+		return lastInsertId, nil, err
+	}
+
+	return lastInsertId, tx, nil
 }

 // EditRuleTx stores a given rule string in database and returns
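Two effects of this hunk: the error from result.LastInsertId, whose support varies by SQL driver, is now surfaced instead of discarded, and deriving the task name from the insert id moves out of the DB layer into the rule manager (next hunk). The helper itself is outside this diff; a hypothetical shape, assuming it only embeds the row id:

// Hypothetical; the real prepareTaskName lives in the rules package and is
// not shown in this commit.
func prepareTaskName(ruleId int64) string {
	return fmt.Sprintf("%d-groupname", ruleId)
}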
@ -366,34 +366,35 @@ func (m *Manager) deleteTask(taskName string) {

 // CreateRule stores rule def into db and also
 // starts an executor for the rule
-func (m *Manager) CreateRule(ctx context.Context, ruleStr string) error {
+func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule, error) {
 	parsedRule, errs := ParsePostableRule([]byte(ruleStr))

 	// check if the rule uses any feature that is not enabled
 	err := m.checkFeatureUsage(parsedRule)
 	if err != nil {
-		return err
+		return nil, err
 	}

 	if len(errs) > 0 {
 		zap.S().Errorf("failed to parse rules:", errs)
 		// just one rule is being parsed so expect just one error
-		return errs[0]
+		return nil, errs[0]
 	}

-	taskName, tx, err := m.ruleDB.CreateRuleTx(ctx, ruleStr)
+	lastInsertId, tx, err := m.ruleDB.CreateRuleTx(ctx, ruleStr)
+	taskName := prepareTaskName(lastInsertId)
 	if err != nil {
-		return err
+		return nil, err
 	}
 	if !m.opts.DisableRules {
 		if err := m.addTask(parsedRule, taskName); err != nil {
 			tx.Rollback()
-			return err
+			return nil, err
 		}
 	}
 	err = tx.Commit()
 	if err != nil {
-		return err
+		return nil, err
 	}

 	// update feature usage
@ -401,7 +402,11 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) error {
 	if err != nil {
 		zap.S().Errorf("error updating feature usage: %v", err)
 	}
-	return nil
+	gettableRule := &GettableRule{
+		Id:           fmt.Sprintf("%d", lastInsertId),
+		PostableRule: *parsedRule,
+	}
+	return gettableRule, nil
 }

 func (m *Manager) updateFeatureUsage(parsedRule *PostableRule, usage int64) error {
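A hypothetical sketch of how an HTTP handler could use the new return value; APIHandler, ruleManager, and the stub types below are illustrative, not from this commit. Returning *GettableRule lets the create endpoint echo the DB-assigned rule id to the client instead of an empty body:

package app

import (
	"context"
	"encoding/json"
	"io"
	"net/http"
)

// Minimal stand-ins so the sketch compiles; the real types live in the
// rules package.
type GettableRule struct {
	Id string `json:"id"`
}

type ruleManager interface {
	CreateRule(ctx context.Context, ruleStr string) (*GettableRule, error)
}

type APIHandler struct {
	ruleManager ruleManager
}

func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
	defer r.Body.Close()
	body, err := io.ReadAll(r.Body)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	rule, err := aH.ruleManager.CreateRule(r.Context(), string(body))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(rule) // payload now includes the new rule's id
}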