SigNoz/signoz commit 545d46c39c
@@ -35,7 +35,6 @@ x-clickhouse-depend: &clickhouse-depend
 services:
   zookeeper-1:
     image: bitnami/zookeeper:3.7.0
     container_name: zookeeper-1
     hostname: zookeeper-1
     user: root
     ports:
@@ -52,7 +51,6 @@ services:

   # zookeeper-2:
   #   image: bitnami/zookeeper:3.7.0
   #   container_name: zookeeper-2
   #   hostname: zookeeper-2
   #   user: root
   #   ports:
@@ -69,7 +67,6 @@ services:

   # zookeeper-3:
   #   image: bitnami/zookeeper:3.7.0
   #   container_name: zookeeper-3
   #   hostname: zookeeper-3
   #   user: root
   #   ports:
@@ -86,7 +83,6 @@ services:

   clickhouse:
     <<: *clickhouse-defaults
     container_name: clickhouse
     hostname: clickhouse
     # ports:
     #   - "9000:9000"
@@ -101,7 +97,6 @@ services:

   # clickhouse-2:
   #   <<: *clickhouse-defaults
   #   container_name: clickhouse-2
   #   hostname: clickhouse-2
   #   ports:
   #     - "9001:9000"
@@ -116,7 +111,6 @@ services:

   # clickhouse-3:
   #   <<: *clickhouse-defaults
   #   container_name: clickhouse-3
   #   hostname: clickhouse-3
   #   ports:
   #     - "9002:9000"
@@ -143,7 +137,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.12.0
+    image: signoz/query-service:0.13.0
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -172,7 +166,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.12.0
+    image: signoz/frontend:0.13.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -185,7 +179,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -213,7 +207,7 @@ services:
     <<: *clickhouse-depend

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -78,7 +78,7 @@ processors:
   signozspanmetrics/prometheus:
     metrics_exporter: prometheus
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
-    dimensions_cache_size: 10000
+    dimensions_cache_size: 100000
     dimensions:
       - name: service.namespace
         default: default
@@ -30,6 +30,8 @@ server {

       location /api {
         proxy_pass http://query-service:8080/api;
+        # connection will be closed if no data is read for 600s between successive read operations
+        proxy_read_timeout 600s;
       }

       # redirect server error pages to the static page /50x.html
@@ -41,7 +41,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: otel-collector
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-config.yaml"]
     # user: root # required for reading docker container logs
     volumes:
@@ -67,7 +67,7 @@ services:

   otel-collector-metrics:
     container_name: otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -146,7 +146,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:0.12.0
+    image: signoz/query-service:0.13.0
     container_name: query-service
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
@@ -174,7 +174,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.12.0
+    image: signoz/frontend:0.13.0
     container_name: frontend
     restart: on-failure
     depends_on:
@@ -186,7 +186,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-config.yaml"]
     user: root # required for reading docker container logs
     volumes:
@@ -211,7 +211,7 @@ services:
     <<: *clickhouse-depend

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.66.0
+    image: signoz/signoz-otel-collector:0.66.1
     command: ["--config=/etc/otel-collector-metrics-config.yaml"]
     volumes:
       - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml
@@ -74,7 +74,7 @@ processors:
   signozspanmetrics/prometheus:
     metrics_exporter: prometheus
     latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
-    dimensions_cache_size: 10000
+    dimensions_cache_size: 100000
     dimensions:
       - name: service.namespace
         default: default
@@ -30,6 +30,8 @@ server {

       location /api {
         proxy_pass http://query-service:8080/api;
+        # connection will be closed if no data is read for 600s between successive read operations
+        proxy_read_timeout 600s;
       }

       # redirect server error pages to the static page /50x.html
@@ -6,7 +6,7 @@
   "release_notes": "Release Notes",
   "read_how_to_upgrade": "Read instructions on how to upgrade",
   "latest_version_signoz": "You are running the latest version of SigNoz.",
-  "stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version",
+  "stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend to upgrade to the latest version",
   "oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information",
   "n_a": "N/A",
   "routes": {
@@ -4,14 +4,16 @@ import { ENVIRONMENT } from 'constants/env';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import { EventSourcePolyfill } from 'event-source-polyfill';

-export const LiveTail = (queryParams: string): EventSourcePolyfill => {
-  const dict = {
-    headers: {
-      Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
-    },
-  };
-  return new EventSourcePolyfill(
+// 10 min in ms
+const TIMEOUT_IN_MS = 10 * 60 * 1000;
+
+export const LiveTail = (queryParams: string): EventSourcePolyfill =>
+  new EventSourcePolyfill(
     `${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
-    dict,
+    {
+      headers: {
+        Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
+      },
+      heartbeatTimeout: TIMEOUT_IN_MS,
+    },
   );
-};
@@ -1,46 +1,21 @@
 import { Button, Popover } from 'antd';
-import getStep from 'lib/getStep';
 import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
 import React, { memo, useCallback, useMemo } from 'react';
-import { connect, useDispatch, useSelector } from 'react-redux';
-import { bindActionCreators, Dispatch } from 'redux';
-import { ThunkDispatch } from 'redux-thunk';
-import { getLogs } from 'store/actions/logs/getLogs';
-import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
+import { useDispatch, useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
-import AppActions from 'types/actions';
-import { SET_SEARCH_QUERY_STRING, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
-import { GlobalReducer } from 'types/reducer/globalTime';
+import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
 import { ILogsReducer } from 'types/reducer/logs';

-interface AddToQueryHOCProps {
-  fieldKey: string;
-  fieldValue: string;
-  children: React.ReactNode;
-  getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
-  getLogsAggregate: (
-    props: Parameters<typeof getLogsAggregate>[0],
-  ) => ReturnType<typeof getLogsAggregate>;
-}
 function AddToQueryHOC({
   fieldKey,
   fieldValue,
   children,
-  getLogs,
-  getLogsAggregate,
 }: AddToQueryHOCProps): JSX.Element {
   const {
     searchFilter: { queryString },
-    logLinesPerPage,
-    idStart,
-    idEnd,
-    liveTail,
   } = useSelector<AppState, ILogsReducer>((store) => store.logs);
   const dispatch = useDispatch();

-  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
-    (state) => state.globalTime,
-  );
   const generatedQuery = useMemo(
     () => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
     [fieldKey, fieldValue],
@@ -58,69 +33,14 @@ function AddToQueryHOC({
       type: SET_SEARCH_QUERY_STRING,
       payload: updatedQueryString,
     });
-    if (liveTail === 'STOPPED') {
-      getLogs({
-        q: updatedQueryString,
-        limit: logLinesPerPage,
-        orderBy: 'timestamp',
-        order: 'desc',
-        timestampStart: minTime,
-        timestampEnd: maxTime,
-        ...(idStart ? { idGt: idStart } : {}),
-        ...(idEnd ? { idLt: idEnd } : {}),
-      });
-      getLogsAggregate({
-        timestampStart: minTime,
-        timestampEnd: maxTime,
-        step: getStep({
-          start: minTime,
-          end: maxTime,
-          inputFormat: 'ns',
-        }),
-        q: updatedQueryString,
-        ...(idStart ? { idGt: idStart } : {}),
-        ...(idEnd ? { idLt: idEnd } : {}),
-      });
-    } else if (liveTail === 'PLAYING') {
-      dispatch({
-        type: TOGGLE_LIVE_TAIL,
-        payload: 'PAUSED',
-      });
-      setTimeout(
-        () =>
-          dispatch({
-            type: TOGGLE_LIVE_TAIL,
-            payload: liveTail,
-          }),
-        0,
-      );
-    }
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [
-    dispatch,
-    generatedQuery,
-    getLogs,
-    idEnd,
-    idStart,
-    logLinesPerPage,
-    maxTime,
-    minTime,
-    queryString,
-  ]);
+  }, [dispatch, generatedQuery, queryString]);

-  const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
-    fieldKey,
-  ]);
+  const popOverContent = (
+    <span style={{ fontSize: '0.9rem' }}>Add to query: {fieldKey}</span>
+  );
   return (
-    <Button
-      size="small"
-      type="text"
-      style={{
-        margin: 0,
-        padding: 0,
-      }}
-      onClick={handleQueryAdd}
-    >
+    <Button size="small" type="text" onClick={handleQueryAdd}>
       <Popover placement="top" content={popOverContent}>
         {children}
       </Popover>
@@ -128,20 +48,10 @@ function AddToQueryHOC({
   );
 }

-interface DispatchProps {
-  getLogs: (
-    props: Parameters<typeof getLogs>[0],
-  ) => (dispatch: Dispatch<AppActions>) => void;
-  getLogsAggregate: (
-    props: Parameters<typeof getLogsAggregate>[0],
-  ) => (dispatch: Dispatch<AppActions>) => void;
+interface AddToQueryHOCProps {
+  fieldKey: string;
+  fieldValue: string;
+  children: React.ReactNode;
 }

-const mapDispatchToProps = (
-  dispatch: ThunkDispatch<unknown, unknown, AppActions>,
-): DispatchProps => ({
-  getLogs: bindActionCreators(getLogs, dispatch),
-  getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
-});
-
-export default connect(null, mapDispatchToProps)(memo(AddToQueryHOC));
+export default memo(AddToQueryHOC);
frontend/src/components/NotFound/constant.ts (new file)
@@ -0,0 +1 @@
+export const defaultText = 'Ah, seems like we reached a dead end!';
@@ -2,45 +2,52 @@ import getLocalStorageKey from 'api/browser/localstorage/get';
 import NotFoundImage from 'assets/NotFound';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import ROUTES from 'constants/routes';
-import React from 'react';
+import React, { useCallback } from 'react';
 import { useDispatch } from 'react-redux';
 import { Dispatch } from 'redux';
 import AppActions from 'types/actions';
 import { LOGGED_IN } from 'types/actions/app';

+import { defaultText } from './constant';
 import { Button, Container, Text, TextContainer } from './styles';

-function NotFound(): JSX.Element {
+function NotFound({ text = defaultText }: Props): JSX.Element {
   const dispatch = useDispatch<Dispatch<AppActions>>();
   const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);

+  const onClickHandler = useCallback(() => {
+    if (isLoggedIn) {
+      dispatch({
+        type: LOGGED_IN,
+        payload: {
+          isLoggedIn: true,
+        },
+      });
+    }
+  }, [dispatch, isLoggedIn]);
+
   return (
     <Container>
       <NotFoundImage />

       <TextContainer>
-        <Text>Ah, seems like we reached a dead end!</Text>
+        <Text>{text}</Text>
         <Text>Page Not Found</Text>
       </TextContainer>

-      <Button
-        onClick={(): void => {
-          if (isLoggedIn) {
-            dispatch({
-              type: LOGGED_IN,
-              payload: {
-                isLoggedIn: true,
-              },
-            });
-          }
-        }}
-        to={ROUTES.APPLICATION}
-        tabIndex={0}
-      >
+      <Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
         Return To Services Page
       </Button>
     </Container>
   );
 }

+interface Props {
+  text?: string;
+}
+
+NotFound.defaultProps = {
+  text: defaultText,
+};
+
 export default NotFound;
frontend/src/container/AllError/constant.ts (new file)
@@ -0,0 +1,9 @@
+const DEFAULT_FILTER_VALUE = '';
+const EXCEPTION_TYPE_FILTER_NAME = 'exceptionType';
+const SERVICE_NAME_FILTER_NAME = 'serviceName';
+
+export {
+  DEFAULT_FILTER_VALUE,
+  EXCEPTION_TYPE_FILTER_NAME,
+  SERVICE_NAME_FILTER_NAME,
+};
@@ -17,6 +17,7 @@ import getAll from 'api/errors/getAll';
 import getErrorCounts from 'api/errors/getErrorCounts';
 import ROUTES from 'constants/routes';
 import dayjs from 'dayjs';
+import useUrlQuery from 'hooks/useUrlQuery';
 import createQueryParams from 'lib/createQueryParams';
 import history from 'lib/history';
 import React, { useCallback, useEffect, useMemo } from 'react';
@@ -30,7 +31,11 @@ import { Exception, PayloadProps } from 'types/api/errors/getAll';
 import { GlobalReducer } from 'types/reducer/globalTime';

 import {
+  extractFilterValues,
+  getDefaultFilterValue,
   getDefaultOrder,
+  getFilterString,
+  getFilterValues,
   getNanoSeconds,
   getOffSet,
   getOrder,
@@ -43,15 +48,27 @@ function AllErrors(): JSX.Element {
   const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>(
     (state) => state.globalTime,
   );
-  const { search, pathname } = useLocation();
-  const params = useMemo(() => new URLSearchParams(search), [search]);
-
+  const { pathname } = useLocation();
+  const params = useUrlQuery();
   const { t } = useTranslation(['common']);

-  const updatedOrder = getOrder(params.get(urlKey.order));
-  const getUpdatedOffset = getOffSet(params.get(urlKey.offset));
-  const getUpdatedParams = getOrderParams(params.get(urlKey.orderParam));
-  const getUpdatedPageSize = getUpdatePageSize(params.get(urlKey.pageSize));
+  const {
+    updatedOrder,
+    getUpdatedOffset,
+    getUpdatedParams,
+    getUpdatedPageSize,
+    getUpdatedExceptionType,
+    getUpdatedServiceName,
+  } = useMemo(
+    () => ({
+      updatedOrder: getOrder(params.get(urlKey.order)),
+      getUpdatedOffset: getOffSet(params.get(urlKey.offset)),
+      getUpdatedParams: getOrderParams(params.get(urlKey.orderParam)),
+      getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)),
+      getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)),
+      getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)),
+    }),
+    [params],
+  );

   const updatedPath = useMemo(
     () =>
@@ -60,6 +77,8 @@ function AllErrors(): JSX.Element {
         offset: getUpdatedOffset,
         orderParam: getUpdatedParams,
         pageSize: getUpdatedPageSize,
+        exceptionType: getUpdatedExceptionType,
+        serviceName: getUpdatedServiceName,
       })}`,
     [
       pathname,
@@ -67,6 +86,8 @@ function AllErrors(): JSX.Element {
       getUpdatedOffset,
       getUpdatedParams,
       getUpdatedPageSize,
+      getUpdatedExceptionType,
+      getUpdatedServiceName,
     ],
   );

@@ -81,6 +102,8 @@ function AllErrors(): JSX.Element {
         limit: getUpdatedPageSize,
         offset: getUpdatedOffset,
         orderParam: getUpdatedParams,
+        exceptionType: getUpdatedExceptionType,
+        serviceName: getUpdatedServiceName,
       }),
       enabled: !loading,
     },
@@ -108,14 +131,43 @@ function AllErrors(): JSX.Element {

   const filterIcon = useCallback(() => <SearchOutlined />, []);

-  const handleSearch = (
-    confirm: (param?: FilterConfirmProps) => void,
-  ): VoidFunction => (): void => {
-    confirm();
-  };
+  const handleSearch = useCallback(
+    (
+      confirm: (param?: FilterConfirmProps) => void,
+      filterValue: string,
+      filterKey: string,
+    ): VoidFunction => (): void => {
+      const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
+        getUpdatedServiceName || '',
+        getUpdatedExceptionType || '',
+        filterKey,
+        filterValue || '',
+      );
+      history.replace(
+        `${pathname}?${createQueryParams({
+          order: updatedOrder,
+          offset: getUpdatedOffset,
+          orderParam: getUpdatedParams,
+          pageSize: getUpdatedPageSize,
+          exceptionType: exceptionFilterValue,
+          serviceName: serviceFilterValue,
+        })}`,
+      );
+      confirm();
+    },
+    [
+      getUpdatedExceptionType,
+      getUpdatedOffset,
+      getUpdatedPageSize,
+      getUpdatedParams,
+      getUpdatedServiceName,
+      pathname,
+      updatedOrder,
+    ],
+  );

   const filterDropdownWrapper = useCallback(
-    ({ setSelectedKeys, selectedKeys, confirm, placeholder }) => {
+    ({ setSelectedKeys, selectedKeys, confirm, placeholder, filterKey }) => {
       return (
         <Card size="small">
           <Space align="start" direction="vertical">
@@ -126,11 +178,16 @@ function AllErrors(): JSX.Element {
                 setSelectedKeys(e.target.value ? [e.target.value] : [])
               }
               allowClear
-              onPressEnter={handleSearch(confirm)}
+              defaultValue={getDefaultFilterValue(
+                filterKey,
+                getUpdatedServiceName,
+                getUpdatedExceptionType,
+              )}
+              onPressEnter={handleSearch(confirm, selectedKeys[0], filterKey)}
             />
             <Button
               type="primary"
-              onClick={handleSearch(confirm)}
+              onClick={handleSearch(confirm, selectedKeys[0], filterKey)}
               icon={<SearchOutlined />}
               size="small"
             >
@@ -140,7 +197,7 @@ function AllErrors(): JSX.Element {
         </Card>
       );
     },
-    [],
+    [getUpdatedExceptionType, getUpdatedServiceName, handleSearch],
   );

   const onExceptionTypeFilter = useCallback(
@@ -167,6 +224,7 @@ function AllErrors(): JSX.Element {
     (
       onFilter: ColumnType<Exception>['onFilter'],
       placeholder: string,
+      filterKey: string,
     ): ColumnType<Exception> => ({
       onFilter,
       filterIcon,
@@ -176,6 +234,7 @@ function AllErrors(): JSX.Element {
         selectedKeys,
         confirm,
         placeholder,
+        filterKey,
       }),
     }),
     [filterIcon, filterDropdownWrapper],
@@ -186,7 +245,7 @@ function AllErrors(): JSX.Element {
       title: 'Exception Type',
       dataIndex: 'exceptionType',
       key: 'exceptionType',
-      ...getFilter(onExceptionTypeFilter, 'Search By Exception'),
+      ...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'),
       render: (value, record): JSX.Element => (
         <Tooltip overlay={(): JSX.Element => value}>
           <Link
@@ -266,30 +325,39 @@ function AllErrors(): JSX.Element {
         updatedOrder,
         'serviceName',
       ),
-      ...getFilter(onApplicationTypeFilter, 'Search By Application'),
+      ...getFilter(
+        onApplicationTypeFilter,
+        'Search By Application',
+        'serviceName',
+      ),
     },
   ];

-  const onChangeHandler: TableProps<Exception>['onChange'] = (
-    paginations,
-    _,
-    sorter,
-  ) => {
-    if (!Array.isArray(sorter)) {
-      const { pageSize = 0, current = 0 } = paginations;
-      const { columnKey = '', order } = sorter;
-      const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
-
-      history.replace(
-        `${pathname}?${createQueryParams({
-          order: updatedOrder,
-          offset: (current - 1) * pageSize,
-          orderParam: columnKey,
-          pageSize,
-        })}`,
-      );
-    }
-  };
+  const onChangeHandler: TableProps<Exception>['onChange'] = useCallback(
+    (paginations, filters, sorter) => {
+      if (!Array.isArray(sorter)) {
+        const { pageSize = 0, current = 0 } = paginations;
+        const { columnKey = '', order } = sorter;
+        const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
+        const params = new URLSearchParams(window.location.search);
+        const { exceptionType, serviceName } = extractFilterValues(filters, {
+          serviceName: getFilterString(params.get(urlKey.serviceName)),
+          exceptionType: getFilterString(params.get(urlKey.exceptionType)),
+        });
+        history.replace(
+          `${pathname}?${createQueryParams({
+            order: updatedOrder,
+            offset: (current - 1) * pageSize,
+            orderParam: columnKey,
+            pageSize,
+            exceptionType,
+            serviceName,
+          })}`,
+        );
+      }
+    },
+    [pathname],
+  );

   return (
     <Table
@@ -1,7 +1,13 @@
-import { SortOrder } from 'antd/lib/table/interface';
+import { FilterValue, SortOrder } from 'antd/lib/table/interface';
 import Timestamp from 'timestamp-nano';
 import { Order, OrderBy } from 'types/api/errors/getAll';

+import {
+  DEFAULT_FILTER_VALUE,
+  EXCEPTION_TYPE_FILTER_NAME,
+  SERVICE_NAME_FILTER_NAME,
+} from './constant';
+
 export const isOrder = (order: string | null): order is Order =>
   !!(order === 'ascending' || order === 'descending');

@@ -10,6 +16,8 @@ export const urlKey = {
   offset: 'offset',
   orderParam: 'orderParam',
   pageSize: 'pageSize',
+  exceptionType: 'exceptionType',
+  serviceName: 'serviceName',
 };

 export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => {
@@ -87,3 +95,94 @@ export const getUpdatePageSize = (pageSize: string | null): number => {
   }
   return 10;
 };
+
+export const getFilterString = (filter: string | null): string => {
+  if (filter) {
+    return filter;
+  }
+  return '';
+};
+
+export const getDefaultFilterValue = (
+  filterKey: string | null,
+  serviceName: string,
+  exceptionType: string,
+): string | undefined => {
+  let defaultValue: string | undefined;
+  switch (filterKey) {
+    case SERVICE_NAME_FILTER_NAME:
+      defaultValue = serviceName;
+      break;
+    case EXCEPTION_TYPE_FILTER_NAME:
+      defaultValue = exceptionType;
+      break;
+    default:
+      break;
+  }
+  return defaultValue;
+};
+
+export const getFilterValues = (
+  serviceName: string,
+  exceptionType: string,
+  filterKey: string,
+  filterValue: string,
+): { exceptionFilterValue: string; serviceFilterValue: string } => {
+  let serviceFilterValue = serviceName;
+  let exceptionFilterValue = exceptionType;
+  switch (filterKey) {
+    case EXCEPTION_TYPE_FILTER_NAME:
+      exceptionFilterValue = filterValue;
+      break;
+    case SERVICE_NAME_FILTER_NAME:
+      serviceFilterValue = filterValue;
+      break;
+    default:
+      break;
+  }
+  return { exceptionFilterValue, serviceFilterValue };
+};
+
+type FilterValues = { exceptionType: string; serviceName: string };
+
+const extractSingleFilterValue = (
+  filterName: string,
+  filters: Filter,
+): string => {
+  const filterValues = filters[filterName];
+
+  if (
+    !filterValues ||
+    !Array.isArray(filterValues) ||
+    filterValues.length === 0
+  ) {
+    return DEFAULT_FILTER_VALUE;
+  }
+
+  return String(filterValues[0]);
+};
+
+type Filter = Record<string, FilterValue | null>;
+
+export const extractFilterValues = (
+  filters: Filter,
+  prefilledFilters: FilterValues,
+): FilterValues => {
+  const filterValues: FilterValues = {
+    exceptionType: prefilledFilters.exceptionType,
+    serviceName: prefilledFilters.serviceName,
+  };
+  if (filters[EXCEPTION_TYPE_FILTER_NAME]) {
+    filterValues.exceptionType = extractSingleFilterValue(
+      EXCEPTION_TYPE_FILTER_NAME,
+      filters,
+    );
+  }
+  if (filters[SERVICE_NAME_FILTER_NAME]) {
+    filterValues.serviceName = extractSingleFilterValue(
+      SERVICE_NAME_FILTER_NAME,
+      filters,
+    );
+  }
+  return filterValues;
+};
@@ -5,19 +5,14 @@ import {
 } from '@ant-design/icons';
 import { Button, Divider, Select } from 'antd';
 import React, { memo } from 'react';
-import { connect, useDispatch, useSelector } from 'react-redux';
-import { bindActionCreators, Dispatch } from 'redux';
-import { ThunkDispatch } from 'redux-thunk';
-import { getLogs } from 'store/actions/logs/getLogs';
+import { useDispatch, useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
-import AppActions from 'types/actions';
 import {
   GET_NEXT_LOG_LINES,
   GET_PREVIOUS_LOG_LINES,
   RESET_ID_START_AND_END,
   SET_LOG_LINES_PER_PAGE,
 } from 'types/actions/logs';
-import { GlobalReducer } from 'types/reducer/globalTime';
 import { ILogsReducer } from 'types/reducer/logs';

 import { Container } from './styles';
@@ -26,20 +21,10 @@ const { Option } = Select;

 const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];

-interface LogControlsProps {
-  getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
-}
-function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
-  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
-    (state) => state.globalTime,
+function LogControls(): JSX.Element | null {
+  const { logLinesPerPage, liveTail } = useSelector<AppState, ILogsReducer>(
+    (state) => state.logs,
   );
-  const {
-    logLinesPerPage,
-    idStart,
-    idEnd,
-    liveTail,
-    searchFilter: { queryString },
-  } = useSelector<AppState, ILogsReducer>((state) => state.logs);
   const dispatch = useDispatch();

   const handleLogLinesPerPageChange = (e: number): void => {
@@ -53,18 +38,6 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
     dispatch({
       type: RESET_ID_START_AND_END,
     });
-
-    if (liveTail === 'STOPPED')
-      getLogs({
-        q: queryString,
-        limit: logLinesPerPage,
-        orderBy: 'timestamp',
-        order: 'desc',
-        timestampStart: minTime,
-        timestampEnd: maxTime,
-        ...(idStart ? { idGt: idStart } : {}),
-        ...(idEnd ? { idLt: idEnd } : {}),
-      });
   };

   const handleNavigatePrevious = (): void => {
@@ -106,16 +79,4 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
   );
 }

-interface DispatchProps {
-  getLogs: (
-    props: Parameters<typeof getLogs>[0],
-  ) => (dispatch: Dispatch<AppActions>) => void;
-}
-
-const mapDispatchToProps = (
-  dispatch: ThunkDispatch<unknown, unknown, AppActions>,
-): DispatchProps => ({
-  getLogs: bindActionCreators(getLogs, dispatch),
-});
-
-export default connect(null, mapDispatchToProps)(memo(LogControls));
+export default memo(LogControls);
@@ -0,0 +1,36 @@
+import { Button, Row } from 'antd';
+import React from 'react';
+
+import { QueryFields } from './utils';
+
+interface SearchFieldsActionBarProps {
+  fieldsQuery: QueryFields[][];
+  applyUpdate: () => void;
+  clearFilters: () => void;
+}
+
+export function SearchFieldsActionBar({
+  fieldsQuery,
+  applyUpdate,
+  clearFilters,
+}: SearchFieldsActionBarProps): JSX.Element | null {
+  if (fieldsQuery.length === 0) {
+    return null;
+  }
+
+  return (
+    <Row style={{ justifyContent: 'flex-end', paddingRight: '2.4rem' }}>
+      <Button
+        type="default"
+        onClick={clearFilters}
+        style={{ marginRight: '1rem' }}
+      >
+        Clear Filter
+      </Button>
+      <Button type="primary" onClick={applyUpdate}>
+        Apply
+      </Button>
+    </Row>
+  );
+}
+export default SearchFieldsActionBar;
@@ -12,19 +12,15 @@ import {
   QueryOperatorsMultiVal,
   QueryOperatorsSingleVal,
 } from 'lib/logql/tokens';
-import { flatten } from 'lodash-es';
-import React, { useEffect, useMemo, useRef, useState } from 'react';
+import React, { useMemo } from 'react';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
 import { ILogsReducer } from 'types/reducer/logs';
-import { v4 } from 'uuid';

-import { SearchFieldsProps } from '..';
 import FieldKey from '../FieldKey';
 import { QueryFieldContainer } from '../styles';
-import { createParsedQueryStructure } from '../utils';
+import { QueryFields } from '../utils';
 import { Container, QueryWrapper } from './styles';
-import { hashCode, parseQuery } from './utils';

 const { Option } = Select;

@@ -68,7 +64,6 @@ function QueryField({
   const {
     fields: { selected },
   } = useSelector<AppState, ILogsReducer>((store) => store.logs);
-
   const getFieldType = (inputKey: string): string => {
     // eslint-disable-next-line no-restricted-syntax
     for (const selectedField of selected) {
@@ -147,9 +142,12 @@ function QueryField({
           />
         ) : (
           <Input
-            onChange={(e): void => handleChange(2, e.target.value)}
+            onChange={(e): void => {
+              handleChange(2, e.target.value);
+            }}
             style={{ width: '100%' }}
-            defaultValue={query[2] && query[2].value}
+            value={query[2] && query[2].value}
           />
         )}
       </div>
@@ -165,85 +163,78 @@ function QueryField({
 }

 interface QueryConditionFieldProps {
-  query: { value: string | string[]; type: string }[];
+  query: QueryFields;
   queryIndex: number;
   onUpdate: (arg0: unknown, arg1: number) => void;
 }

 export type Query = { value: string | string[]; type: string }[];

+export interface QueryBuilderProps {
+  keyPrefix: string;
+  onDropDownToggleHandler: (value: boolean) => VoidFunction;
+  fieldsQuery: QueryFields[][];
+  setFieldsQuery: (q: QueryFields[][]) => void;
+}
+
 function QueryBuilder({
-  updateParsedQuery,
+  keyPrefix,
+  fieldsQuery,
+  setFieldsQuery,
   onDropDownToggleHandler,
-}: SearchFieldsProps): JSX.Element {
-  const {
-    searchFilter: { parsedQuery },
-  } = useSelector<AppState, ILogsReducer>((store) => store.logs);
-
-  const keyPrefixRef = useRef(hashCode(JSON.stringify(parsedQuery)));
-  const [keyPrefix, setKeyPrefix] = useState(keyPrefixRef.current);
-  const generatedQueryStructure = createParsedQueryStructure(
-    parsedQuery as never[],
-  );
-
-  useEffect(() => {
-    const incomingHashCode = hashCode(JSON.stringify(parsedQuery));
-    if (incomingHashCode !== keyPrefixRef.current) {
-      keyPrefixRef.current = incomingHashCode;
-      setKeyPrefix(incomingHashCode);
-    }
-  }, [parsedQuery]);
-
+}: QueryBuilderProps): JSX.Element {
   const handleUpdate = (query: Query, queryIndex: number): void => {
-    const updatedParsedQuery = generatedQueryStructure;
-    updatedParsedQuery[queryIndex] = parseQuery(query) as never;
-
-    const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
-    keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
-    updateParsedQuery(flatParsedQuery);
+    const updated = [...fieldsQuery];
+    updated[queryIndex] = query as never; // parseQuery(query) as never;
+    setFieldsQuery(updated);
   };

   const handleDelete = (queryIndex: number): void => {
-    const updatedParsedQuery = generatedQueryStructure;
-    updatedParsedQuery.splice(queryIndex - 1, 2);
+    const updated = [...fieldsQuery];
+    if (queryIndex !== 0) updated.splice(queryIndex - 1, 2);
+    else updated.splice(queryIndex, 2);

-    const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
-    keyPrefixRef.current = v4();
-    updateParsedQuery(flatParsedQuery);
+    setFieldsQuery(updated);
   };

-  const QueryUI = (): JSX.Element | JSX.Element[] =>
-    generatedQueryStructure.map((query, idx) => {
-      if (Array.isArray(query))
-        return (
+  const QueryUI = (
+    fieldsQuery: QueryFields[][],
+  ): JSX.Element | JSX.Element[] => {
+    const result: JSX.Element[] = [];
+    fieldsQuery.forEach((query, idx) => {
+      if (Array.isArray(query) && query.length > 1) {
+        result.push(
           <QueryField
             key={keyPrefix + idx}
             query={query as never}
             queryIndex={idx}
             onUpdate={handleUpdate as never}
             onDelete={handleDelete}
-          />
+          />,
         );
-
-      return (
-        <div key={keyPrefix + idx}>
-          <QueryConditionField
-            query={query}
-            queryIndex={idx}
-            onUpdate={handleUpdate as never}
-          />
-        </div>
-      );
+      } else {
+        result.push(
+          <div key={keyPrefix + idx}>
+            <QueryConditionField
+              query={Array.isArray(query) ? query[0] : query}
+              queryIndex={idx}
+              onUpdate={handleUpdate as never}
+            />
+          </div>,
+        );
+      }
     });
+    return result;
+  };

   return (
     <>
-      <Container isMargin={generatedQueryStructure.length === 0}>
+      <Container isMargin={fieldsQuery.length === 0}>
         <CategoryHeading>LOG QUERY BUILDER</CategoryHeading>
         <CloseSquareOutlined onClick={onDropDownToggleHandler(false)} />
       </Container>

-      <QueryWrapper>{QueryUI()}</QueryWrapper>
+      <QueryWrapper key={keyPrefix}>{QueryUI(fieldsQuery)}</QueryWrapper>
     </>
   );
 }
@@ -21,17 +21,3 @@ export const parseQuery = (queries: Query): Query => {
   }
   return queries;
 };
-
-export const hashCode = (s: string): string => {
-  if (!s) {
-    return '0';
-  }
-  return `${Math.abs(
-    s.split('').reduce((a, b) => {
-      // eslint-disable-next-line no-bitwise, no-param-reassign
-      a = (a << 5) - a + b.charCodeAt(0);
-      // eslint-disable-next-line no-bitwise
-      return a & a;
-    }, 0),
-  )}`;
-};
@@ -2,9 +2,9 @@ import { Button } from 'antd';
 import CategoryHeading from 'components/Logs/CategoryHeading';
 import map from 'lodash-es/map';
 import React from 'react';
-import { useDispatch, useSelector } from 'react-redux';
+import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
-import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
+// import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
 import { ILogsReducer } from 'types/reducer/logs';

 import FieldKey from './FieldKey';
@@ -12,15 +12,15 @@ import FieldKey from './FieldKey';
 interface SuggestedItemProps {
   name: string;
   type: string;
+  applySuggestion: (name: string) => void;
 }
-function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
-  const dispatch = useDispatch();
-
+function SuggestedItem({
+  name,
+  type,
+  applySuggestion,
+}: SuggestedItemProps): JSX.Element {
   const addSuggestedField = (): void => {
-    dispatch({
-      type: ADD_SEARCH_FIELD_QUERY_STRING,
-      payload: name,
-    });
+    applySuggestion(name);
   };
   return (
     <Button
@@ -33,7 +33,11 @@ function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
   );
 }

-function Suggestions(): JSX.Element {
+interface SuggestionsProps {
+  applySuggestion: (name: string) => void;
+}
+
+function Suggestions({ applySuggestion }: SuggestionsProps): JSX.Element {
   const {
     fields: { selected },
   } = useSelector<AppState, ILogsReducer>((store) => store.logs);
@@ -47,6 +51,7 @@ function Suggestions(): JSX.Element {
           key={JSON.stringify(field)}
           name={field.name}
           type={field.type}
+          applySuggestion={applySuggestion}
         />
       ))}
     </div>
@@ -1,8 +1,21 @@
-import React from 'react';
+import { notification } from 'antd';
+import { flatten } from 'lodash-es';
+import React, { useCallback, useEffect, useRef, useState } from 'react';
+import { useSelector } from 'react-redux';
+import { AppState } from 'store/reducers';
+import { ILogsReducer } from 'types/reducer/logs';

+import { SearchFieldsActionBar } from './ActionBar';
 import QueryBuilder from './QueryBuilder/QueryBuilder';
 import Suggestions from './Suggestions';
-import { QueryFields } from './utils';
+import {
+  createParsedQueryStructure,
+  fieldsQueryIsvalid,
+  hashCode,
+  initQueryKOVPair,
+  prepareConditionOperator,
+  QueryFields,
+} from './utils';

 export interface SearchFieldsProps {
   updateParsedQuery: (query: QueryFields[]) => void;
@@ -13,13 +26,85 @@ function SearchFields({
   updateParsedQuery,
   onDropDownToggleHandler,
 }: SearchFieldsProps): JSX.Element {
+  const {
+    searchFilter: { parsedQuery },
+  } = useSelector<AppState, ILogsReducer>((store) => store.logs);
+
+  const [fieldsQuery, setFieldsQuery] = useState(
+    createParsedQueryStructure([...parsedQuery] as never[]),
+  );
+
+  const keyPrefixRef = useRef(hashCode(JSON.stringify(fieldsQuery)));
+
+  useEffect(() => {
+    const updatedFieldsQuery = createParsedQueryStructure([
+      ...parsedQuery,
+    ] as never[]);
+    setFieldsQuery(updatedFieldsQuery);
+    const incomingHashCode = hashCode(JSON.stringify(updatedFieldsQuery));
+    if (incomingHashCode !== keyPrefixRef.current) {
+      keyPrefixRef.current = incomingHashCode;
+    }
+  }, [parsedQuery]);
+
+  const addSuggestedField = useCallback(
+    (name: string): void => {
+      if (!name) {
+        return;
+      }
+
+      const query = [...fieldsQuery];
+
+      if (fieldsQuery.length > 0) {
+        query.push([prepareConditionOperator()]);
+      }
+
+      const newField: QueryFields[] = [];
+      initQueryKOVPair(name).forEach((q) => newField.push(q));
+
+      query.push(newField);
+      keyPrefixRef.current = hashCode(JSON.stringify(query));
+      setFieldsQuery(query);
+    },
+    [fieldsQuery, setFieldsQuery],
+  );
+
+  const applyUpdate = useCallback((): void => {
+    const flatParsedQuery = flatten(fieldsQuery);
+
+    if (!fieldsQueryIsvalid(flatParsedQuery)) {
+      notification.error({
+        message: 'Please enter a valid criteria for each of the selected fields',
+      });
+      return;
+    }
+
+    keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
+    updateParsedQuery(flatParsedQuery);
+    onDropDownToggleHandler(false)();
+  }, [onDropDownToggleHandler, fieldsQuery, updateParsedQuery]);
+
+  const clearFilters = useCallback((): void => {
+    keyPrefixRef.current = hashCode(JSON.stringify([]));
+    updateParsedQuery([]);
+    onDropDownToggleHandler(false)();
+  }, [onDropDownToggleHandler, updateParsedQuery]);
+
   return (
     <>
       <QueryBuilder
+        key={keyPrefixRef.current}
+        keyPrefix={keyPrefixRef.current}
         onDropDownToggleHandler={onDropDownToggleHandler}
-        updateParsedQuery={updateParsedQuery}
+        fieldsQuery={fieldsQuery}
+        setFieldsQuery={setFieldsQuery}
       />
-      <Suggestions />
+      <SearchFieldsActionBar
+        applyUpdate={applyUpdate}
+        clearFilters={clearFilters}
+        fieldsQuery={fieldsQuery}
+      />
+      <Suggestions applySuggestion={addSuggestedField} />
     </>
   );
 }
@@ -2,11 +2,30 @@
 // @ts-ignore
 // @ts-nocheck

-import { QueryTypes, QueryOperatorsSingleVal } from 'lib/logql/tokens';
+import {
+  ConditionalOperators,
+  QueryTypes,
+  ValidTypeSequence,
+  ValidTypeValue,
+} from 'lib/logql/tokens';

 export interface QueryFields {
   type: keyof typeof QueryTypes;
-  value: string;
+  value: string | string[];
 }

+export function fieldsQueryIsvalid(queryFields: QueryFields[]): boolean {
+  let lastOp: QueryFields | undefined;
+  let result = true;
+  queryFields.forEach((q, idx) => {
+    if (!q.value || q.value === '') result = false;
+
+    if (Array.isArray(q.value) && q.value.length === 0) result = false;
+
+    const nextOp = idx + 1 < queryFields.length ? queryFields[idx + 1] : undefined;
+    if (!ValidTypeSequence(lastOp?.type, q?.type, nextOp?.type)) result = false;
+
+    if (!ValidTypeValue(lastOp?.value, q.value)) result = false;
+    lastOp = q;
+  });
+  return result;
+}
+
 export const queryKOVPair = (): QueryFields[] => [
@@ -23,6 +42,29 @@ export const queryKOVPair = (): QueryFields[] => [
     value: null,
   },
 ];

+export const initQueryKOVPair = (
+  name: string | null = null,
+  op: string | null = null,
+  value: string | string[] | null = null,
+): QueryFields[] => [
+  {
+    type: QueryTypes.QUERY_KEY,
+    value: name,
+  },
+  {
+    type: QueryTypes.QUERY_OPERATOR,
+    value: op,
+  },
+  {
+    type: QueryTypes.QUERY_VALUE,
+    value,
+  },
+];
+
+export const prepareConditionOperator = (
+  op: string = ConditionalOperators.AND,
+): QueryFields => {
+  return {
+    type: QueryTypes.CONDITIONAL_OPERATOR,
+    value: op,
+  };
+};
+
 export const createParsedQueryStructure = (parsedQuery = []) => {
   if (!parsedQuery.length) {
     return parsedQuery;
@@ -64,3 +106,17 @@ export const createParsedQueryStructure = (parsedQuery = []) => {
   });
   return structuredArray;
 };
+
+export const hashCode = (s: string): string => {
+  if (!s) {
+    return '0';
+  }
+  return `${Math.abs(
+    s.split('').reduce((a, b) => {
+      // eslint-disable-next-line no-bitwise, no-param-reassign
+      a = (a << 5) - a + b.charCodeAt(0);
+      // eslint-disable-next-line no-bitwise
+      return a & a;
+    }, 0),
+  )}`;
+};
@@ -1,7 +1,14 @@
 import { Input, InputRef, Popover } from 'antd';
 import useUrlQuery from 'hooks/useUrlQuery';
 import getStep from 'lib/getStep';
-import React, { useCallback, useEffect, useRef, useState } from 'react';
+import { debounce } from 'lodash-es';
+import React, {
+  useCallback,
+  useEffect,
+  useMemo,
+  useRef,
+  useState,
+} from 'react';
 import { connect, useDispatch, useSelector } from 'react-redux';
 import { bindActionCreators, Dispatch } from 'redux';
 import { ThunkDispatch } from 'redux-thunk';
@@ -9,7 +16,7 @@ import { getLogs } from 'store/actions/logs/getLogs';
 import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
 import { AppState } from 'store/reducers';
 import AppActions from 'types/actions';
-import { TOGGLE_LIVE_TAIL } from 'types/actions/logs';
+import { FLUSH_LOGS, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
 import { GlobalReducer } from 'types/reducer/globalTime';
 import { ILogsReducer } from 'types/reducer/logs';

@@ -22,12 +29,31 @@ function SearchFilter({
   getLogsAggregate,
 }: SearchFilterProps): JSX.Element {
   const {
-    queryString,
     updateParsedQuery,
     updateQueryString,
+    queryString,
   } = useSearchParser();
+  const [searchText, setSearchText] = useState(queryString);
   const [showDropDown, setShowDropDown] = useState(false);
   const searchRef = useRef<InputRef>(null);
+  const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
+    AppState,
+    ILogsReducer
+  >((state) => state.logs);
+  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
+    (state) => state.globalTime,
+  );
+  const dispatch = useDispatch<Dispatch<AppActions>>();
+
+  // keep sync with url queryString
+  useEffect(() => {
+    setSearchText(queryString);
+  }, [queryString]);
+
+  const debouncedupdateQueryString = useMemo(
+    () => debounce(updateQueryString, 300),
+    [updateQueryString],
+  );

   const onDropDownToggleHandler = useCallback(
     (value: boolean) => (): void => {
@@ -36,17 +62,6 @@ function SearchFilter({
     [],
   );

-  const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
-    AppState,
-    ILogsReducer
-  >((state) => state.logs);
-
-  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
-    (state) => state.globalTime,
-  );
-
-  const dispatch = useDispatch<Dispatch<AppActions>>();
-
   const handleSearch = useCallback(
     (customQuery) => {
       if (liveTail === 'PLAYING') {
@@ -54,6 +69,9 @@ function SearchFilter({
           type: TOGGLE_LIVE_TAIL,
           payload: 'PAUSED',
         });
+        dispatch({
+          type: FLUSH_LOGS,
+        });
         setTimeout(
           () =>
             dispatch({
@@ -102,10 +120,14 @@ function SearchFilter({
   const urlQuery = useUrlQuery();
   const urlQueryString = urlQuery.get('q');

+  const debouncedHandleSearch = useMemo(() => debounce(handleSearch, 600), [
+    handleSearch,
+  ]);
+
   useEffect(() => {
-    handleSearch(urlQueryString || '');
+    debouncedHandleSearch(urlQueryString || '');
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [urlQueryString, maxTime, minTime]);
+  }, [urlQueryString, maxTime, minTime, idEnd, idStart, logLinesPerPage]);

   return (
     <Container>
@@ -132,9 +154,11 @@ function SearchFilter({
       <Input.Search
         ref={searchRef}
         placeholder="Search Filter"
-        value={queryString}
+        value={searchText}
         onChange={(e): void => {
-          updateQueryString(e.target.value);
+          const { value } = e.target;
+          setSearchText(value);
+          debouncedupdateQueryString(value);
         }}
         allowClear
         onSearch={handleSearch}
@@ -23,12 +23,10 @@ export function useSearchParser(): {

   const updateQueryString = useCallback(
     (updatedQueryString) => {
-      if (updatedQueryString) {
-        history.push({
-          pathname: history.location.pathname,
-          search: updatedQueryString ? `?q=${updatedQueryString}` : '',
-        });
-      }
+      history.replace({
+        pathname: history.location.pathname,
+        search: updatedQueryString ? `?q=${updatedQueryString}` : '',
+      });

       dispatch({
         type: SET_SEARCH_QUERY_STRING,
@@ -31,7 +31,7 @@ export const externalCallErrorPercent = ({

   const legendFormula = 'External Call Error Percentage';
   const expression = 'A*100/B';
-  const disabled = false;
+  const disabled = true;
   return getQueryBuilderQuerieswithAdditionalItems({
     metricNameA,
     metricNameB,
@@ -102,7 +102,7 @@ export const externalCallDurationByAddress = ({
   const metricNameB = 'signoz_external_call_latency_count';
   const expression = 'A/B';
   const legendFormula = legend;
-  const disabled = false;
+  const disabled = true;
   return getQueryBuilderQuerieswithFormula({
     servicename,
     legend,
@@ -1,6 +1,4 @@
-import { InfoCircleOutlined } from '@ant-design/icons';
-import { Collapse, Popover, Space } from 'antd';
-import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
+import { Collapse } from 'antd';
 import useThemeMode from 'hooks/useThemeMode';
 import keys from 'lodash-es/keys';
 import map from 'lodash-es/map';
@@ -9,6 +7,8 @@ import { ITraceTree } from 'types/api/trace/getTraceItem';

 import EllipsedButton from '../EllipsedButton';
 import { CustomSubText, CustomSubTitle } from '../styles';
+import EventStartTime from './EventStartTime';
+import RelativeStartTime from './RelativeStartTime';

 const { Panel } = Collapse;

@@ -25,10 +25,6 @@ function ErrorTag({
       {map(event, ({ attributeMap, name, timeUnixNano }) => {
         const attributes = keys(attributeMap);

-        const { time, timeUnitName } = convertTimeToRelevantUnit(
-          timeUnixNano / 1e6 - firstSpanStartTime,
-        );
-
         return (
           <Collapse
             key={`${name}${JSON.stringify(attributeMap)}`}
@@ -39,18 +35,14 @@ function ErrorTag({
               header={name || attributeMap?.event}
               key={name || attributeMap.event}
             >
-              <Space direction="horizontal" align="center">
-                <CustomSubTitle style={{ margin: 0 }} ellipsis>
-                  Event Start Time
-                </CustomSubTitle>
-                <Popover content="Relative to start of the full trace">
-                  <InfoCircleOutlined />
-                </Popover>
-              </Space>
-
-              <CustomSubText isDarkMode={isDarkMode}>
-                {`${time.toFixed(2)} ${timeUnitName}`}
-              </CustomSubText>
+              {firstSpanStartTime ? (
+                <RelativeStartTime
+                  firstSpanStartTime={firstSpanStartTime}
+                  timeUnixNano={timeUnixNano}
+                />
+              ) : (
+                <EventStartTime timeUnixNano={timeUnixNano} />
+              )}

               {map(attributes, (event) => {
                 const value = attributeMap[event];
@@ -93,7 +85,11 @@ interface ErrorTagProps {
   event: ITraceTree['event'];
   onToggleHandler: (isOpen: boolean) => void;
   setText: (text: { subText: string; text: string }) => void;
-  firstSpanStartTime: number;
+  firstSpanStartTime?: number;
 }

+ErrorTag.defaultProps = {
+  firstSpanStartTime: undefined,
+};
+
 export default ErrorTag;
@@ -0,0 +1,31 @@
+import { Popover } from 'antd';
+import dayjs from 'dayjs';
+import useThemeMode from 'hooks/useThemeMode';
+import React from 'react';
+
+import { CustomSubText, CustomSubTitle } from '../styles';
+
+function EventStartTime({ timeUnixNano }: EventStartTimeProps): JSX.Element {
+  const { isDarkMode } = useThemeMode();
+
+  const humanReadableTimeInDayJs = dayjs(timeUnixNano / 1e6).format(
+    'YYYY-MM-DD hh:mm:ss.SSS A',
+  );
+
+  return (
+    <>
+      <CustomSubTitle style={{ margin: 0 }}>Event Time</CustomSubTitle>
+      <CustomSubText ellipsis isDarkMode={isDarkMode}>
+        <Popover content={humanReadableTimeInDayJs}>
+          {humanReadableTimeInDayJs}
+        </Popover>
+      </CustomSubText>
+    </>
+  );
+}
+
+interface EventStartTimeProps {
+  timeUnixNano: number;
+}
+
+export default EventStartTime;
@@ -0,0 +1,42 @@
+import { InfoCircleOutlined } from '@ant-design/icons';
+import { Popover, Space } from 'antd';
+import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
+import useThemeMode from 'hooks/useThemeMode';
+import React from 'react';
+
+import { CustomSubText, CustomSubTitle } from '../styles';
+
+function StartTime({
+  firstSpanStartTime,
+  timeUnixNano,
+}: StartTimeProps): JSX.Element {
+  const { isDarkMode } = useThemeMode();
+
+  const { time, timeUnitName } = convertTimeToRelevantUnit(
+    timeUnixNano / 1e6 - (firstSpanStartTime || 0),
+  );
+
+  return (
+    <>
+      <Space direction="horizontal" align="center">
+        <CustomSubTitle style={{ margin: 0 }} ellipsis>
+          Event Start Time
+        </CustomSubTitle>
+        <Popover content="Relative to start of the full trace">
+          <InfoCircleOutlined />
+        </Popover>
+      </Space>
+
+      <CustomSubText isDarkMode={isDarkMode}>
+        {`${time.toFixed(2)} ${timeUnitName}`}
+      </CustomSubText>
+    </>
+  );
+}
+
+interface StartTimeProps {
+  timeUnixNano: number;
+  firstSpanStartTime: number;
+}
+
+export default StartTime;
@@ -76,7 +76,7 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
     /* eslint-enable */
   }, [treesData, spanServiceColors]);

-  const firstSpanStartTime = tree.spanTree[0].startTime;
+  const firstSpanStartTime = tree.spanTree[0]?.startTime;

   const [globalTraceMetadata] = useState<ITraceMetaData>({
     ...traceMetaData,
@@ -34,9 +34,10 @@ export const traceDateAndTimelineContainer = css`

 export const traceDateTimeContainer = css`
   display: flex;
-  aligh-items: center;
+  align-items: center;
+  justify-content: center;
 `;

 export const timelineContainer = css`
   overflow: visible;
   margin: 0 1rem 0 0;

@@ -48,7 +49,7 @@ export const ganttChartContainer = css`
   position: relative;
   flex: 1;
   overflow-y: auto;
-  overflow-x: hidden;
+  overflow-x: scroll;
 `;

 export const selectedSpanDetailContainer = css`
@ -1,6 +1,6 @@
import { WarningFilled } from '@ant-design/icons';
import { Button, Card, Form, Space, Typography } from 'antd';
import React, { useCallback } from 'react';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';

@ -14,10 +14,6 @@ function Version(): JSX.Element {
const [form] = Form.useForm();
const { t } = useTranslation();

const onClickUpgradeHandler = useCallback((link: string) => {
window.open(link, '_blank');
}, []);

const {
currentVersion,
latestVersion,

@ -60,9 +56,8 @@ function Version(): JSX.Element {
placeholder={t('latest_version')}
/>
<Button
onClick={(): void =>
onClickUpgradeHandler('https://github.com/SigNoz/signoz/releases')
}
href="https://github.com/SigNoz/signoz/releases"
target="_blank"
type="link"
>
{t('release_notes')}

@ -94,11 +89,8 @@ function Version(): JSX.Element {

{!isError && !isLatestVersion && (
<Button
onClick={(): void =>
onClickUpgradeHandler(
'https://signoz.io/docs/operate/docker-standalone/#upgrade',
)
}
href="https://signoz.io/docs/operate/docker-standalone/#upgrade"
target="_blank"
>
{t('read_how_to_upgrade')}
</Button>

@ -2,20 +2,34 @@
// @ts-ignore
// @ts-nocheck

import { QueryTypes, StringTypeQueryOperators } from "./tokens";

export const reverseParser = (
parserQueryArr: { type: string; value: any }[] = [],
) => {
let queryString = '';
let lastToken: { type: string; value: any };
parserQueryArr.forEach((query) => {
if (queryString) {
queryString += ' ';
}

if (Array.isArray(query.value) && query.value.length > 0) {
// if the values are array type, here we spread them in
// ('a', 'b') format
queryString += `(${query.value.map((val) => `'${val}'`).join(',')})`;
} else {
queryString += query.value;
if (query.type === QueryTypes.QUERY_VALUE
&& lastToken.type === QueryTypes.QUERY_OPERATOR
&& Object.values(StringTypeQueryOperators).includes(lastToken.value) ) {
// for operators that need string type value, here we append single
// quotes. if the content has single quote they would be removed
queryString += `'${query.value?.replace(/'/g, '')}'`;
} else {
queryString += query.value;
}
}
lastToken = query;
});

// console.log(queryString);

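The new branch above only quotes a value when the preceding token is a string-type operator, and strips embedded single quotes so the rebuilt query still tokenizes. The quoting rule on its own, mirrored in Go purely for illustration (the helper name is hypothetical):

    package main

    import (
        "fmt"
        "strings"
    )

    // quoteIfStringOperator mirrors the reverseParser branch: values that follow
    // CONTAINS/NCONTAINS are wrapped in single quotes, with embedded quotes removed.
    func quoteIfStringOperator(prevOperator, value string) string {
        switch strings.ToUpper(prevOperator) {
        case "CONTAINS", "NCONTAINS":
            return "'" + strings.ReplaceAll(value, "'", "") + "'"
        default:
            return value
        }
    }

    func main() {
        fmt.Println(quoteIfStringOperator("CONTAINS", "user's query")) // 'users query'
        fmt.Println(quoteIfStringOperator("GTE", "100"))               // 100
    }
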
@ -7,6 +7,21 @@ export const QueryOperatorsSingleVal = {
NCONTAINS: 'NCONTAINS',
};

// list of operators that support only number values
export const NumTypeQueryOperators = {
GTE: 'GTE',
GT: 'GT',
LTE: 'LTE',
LT: 'LT',
};

// list of operators that support only string values
export const StringTypeQueryOperators = {
CONTAINS: 'CONTAINS',
NCONTAINS: 'NCONTAINS',
};

// list of operators that support array values
export const QueryOperatorsMultiVal = {
IN: 'IN',
NIN: 'NIN',

@ -23,3 +38,46 @@ export const QueryTypes = {
QUERY_VALUE: 'QUERY_VALUE',
CONDITIONAL_OPERATOR: 'CONDITIONAL_OPERATOR',
};

export const ValidTypeValue = (
op: string,
value: string | string[],
): boolean => {
if (!op) return true;
if (Object.values(NumTypeQueryOperators).includes(op)) {
if (Array.isArray(value)) return false;
return !Number.isNaN(Number(value));
}
return true;
};

// ValidTypeSequence takes the prior, current and next token types to confirm
// the proper sequence. For example, QUERY_VALUE needs to be
// in between QUERY_OPERATOR and (empty or CONDITIONAL_OPERATOR).
export const ValidTypeSequence = (
prior: string | undefined,
current: string | undefined,
next: string | undefined,
): boolean => {
switch (current) {
case QueryTypes.QUERY_KEY:
// query key can have an empty prior
if (!prior) return true;
return [QueryTypes.CONDITIONAL_OPERATOR].includes(prior);
case QueryTypes.QUERY_OPERATOR:
// empty prior is not allowed
if (!prior || ![QueryTypes.QUERY_KEY].includes(prior)) return false;
if (!next || ![QueryTypes.QUERY_VALUE].includes(next)) return false;
return true;
case QueryTypes.QUERY_VALUE:
// empty prior is not allowed
if (!prior) return false;
return [QueryTypes.QUERY_OPERATOR].includes(prior);
case QueryTypes.CONDITIONAL_OPERATOR:
// empty next is not allowed
if (!next) return false;
return [QueryTypes.QUERY_KEY].includes(next);
default:
return false;
}
};

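ValidTypeSequence encodes the grammar key → operator → value, with conditional operators joining clauses. For illustration, here is the same neighbor check applied to a whole token list, sketched in Go (the walk and names are hypothetical; the per-token rules mirror the TypeScript above):

    package main

    import "fmt"

    // Token types, mirroring the QueryTypes constants above.
    const (
        QueryKey            = "QUERY_KEY"
        QueryOperator       = "QUERY_OPERATOR"
        QueryValue          = "QUERY_VALUE"
        ConditionalOperator = "CONDITIONAL_OPERATOR"
    )

    // validSequence walks a token-type list and applies the same neighbor rules:
    // a key follows nothing or a conditional, an operator sits between a key and
    // a value, a value follows an operator, a conditional must be followed by a key.
    func validSequence(types []string) bool {
        for i, cur := range types {
            var prior, next string
            if i > 0 {
                prior = types[i-1]
            }
            if i < len(types)-1 {
                next = types[i+1]
            }
            switch cur {
            case QueryKey:
                if prior != "" && prior != ConditionalOperator {
                    return false
                }
            case QueryOperator:
                if prior != QueryKey || next != QueryValue {
                    return false
                }
            case QueryValue:
                if prior != QueryOperator {
                    return false
                }
            case ConditionalOperator:
                if next != QueryKey {
                    return false
                }
            default:
                return false
            }
        }
        return true
    }

    func main() {
        ok := []string{QueryKey, QueryOperator, QueryValue, ConditionalOperator, QueryKey, QueryOperator, QueryValue}
        bad := []string{QueryKey, QueryValue}
        fmt.Println(validSequence(ok), validSequence(bad)) // true false
    }
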
@ -6,10 +6,8 @@
*/
export const isPasswordValid = (value: string): boolean => {
// eslint-disable-next-line prefer-regex-literals
const pattern = new RegExp(
'^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[#?!@$%^&*-]).{8,}$',
);
const pattern = new RegExp('^.{8,}$');
return pattern.test(value);
};

export const isPasswordNotValidMessage = `Password must a have minimum of 8 characters with at least one lower case, one number ,one upper case and one special character`;
export const isPasswordNotValidMessage = `Password must have a minimum of 8 characters`;

@ -1 +1,4 @@
export const SPAN_DETAILS_LEFT_COL_WIDTH = 350;

export const noEventMessage =
'The requested trace id was not found. Sometimes this happens because of insertion delay in trace data. Please try again after some time';

@ -1,5 +1,6 @@
import { Typography } from 'antd';
import getTraceItem from 'api/trace/getTraceItem';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import TraceDetailContainer from 'container/TraceDetail';
import useUrlQuery from 'hooks/useUrlQuery';

@ -8,6 +9,8 @@ import { useQuery } from 'react-query';
import { useParams } from 'react-router-dom';
import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem';

import { noEventMessage } from './constants';

function TraceDetail(): JSX.Element {
const { id } = useParams<TraceDetailProps>();
const urlQuery = useUrlQuery();

@ -19,6 +22,7 @@ function TraceDetail(): JSX.Element {
}),
[urlQuery],
);

const { data: traceDetailResponse, error, isLoading, isError } = useQuery(
`getTraceItem/${id}`,
() => getTraceItem({ id, spanId, levelUp, levelDown }),

@ -39,6 +43,10 @@ function TraceDetail(): JSX.Element {
return <Spinner tip="Loading.." />;
}

if (traceDetailResponse.payload[0].events.length === 0) {
return <NotFound text={noEventMessage} />;
}

return <TraceDetailContainer response={traceDetailResponse.payload} />;
}

@ -15,6 +15,8 @@ export interface Props {
orderParam?: OrderBy;
limit?: number;
offset?: number;
exceptionType?: string;
serviceName?: string;
}

export interface Exception {

@ -45,6 +45,7 @@ import (
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)

@ -1177,33 +1178,54 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
}
case constants.Duration:
finalQuery := fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
err := r.featureFlags.CheckFeature(constants.DurationSort)
durationSortEnabled := err == nil
finalQuery := ""
if !durationSortEnabled {
// if duration sort is not enabled, we need to get the min and max duration from the index table
finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
var dBResponse []model.DBResponseMinMax
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration = map[string]uint64{"minDuration": dBResponse[0].Min, "maxDuration": dBResponse[0].Max}
}
} else {
// when duration sort is enabled, we need to get the min and max duration from the duration table
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)

if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}

if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.S().Info(finalQuery)

if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
}
}
case constants.RPCMethod:
finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)

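The two inline comments above describe the strategy switch: when the DurationSort feature is off, min and max come from one aggregate query over the index table; when it is on, the duration table is already sorted by durationNano, so two ORDER BY ... LIMIT 1 probes each read a single row. A condensed sketch of that branching under invented types (the querier interface and table names are illustrative, not the repo's API):

    package main

    import "fmt"

    // querier stands in for r.db.Select in the sketch.
    type querier interface {
        selectUint64(query string) (uint64, error)
    }

    // durationBounds mirrors the branch above: one min/max aggregate when
    // duration sort is disabled, two sorted LIMIT 1 probes when it is enabled.
    func durationBounds(db querier, durationSortEnabled bool) (min, max uint64, err error) {
        if !durationSortEnabled {
            // A single scan over the index table computes both bounds.
            if min, err = db.selectUint64("SELECT min(durationNano) FROM index_table"); err != nil {
                return
            }
            max, err = db.selectUint64("SELECT max(durationNano) FROM index_table")
            return
        }
        // The duration table is ordered by durationNano, so the first and last
        // rows are the bounds; each probe touches a single row.
        if min, err = db.selectUint64("SELECT durationNano FROM duration_table ORDER BY durationNano LIMIT 1"); err != nil {
            return
        }
        max, err = db.selectUint64("SELECT durationNano FROM duration_table ORDER BY durationNano DESC LIMIT 1")
        return
    }

    type fakeDB map[string]uint64

    func (f fakeDB) selectUint64(q string) (uint64, error) { return f[q], nil }

    func main() {
        db := fakeDB{
            "SELECT min(durationNano) FROM index_table": 120,
            "SELECT max(durationNano) FROM index_table": 9800,
        }
        fmt.Println(durationBounds(db, false)) // 120 9800 <nil>
    }
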
@ -2506,8 +2528,35 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li

var getErrorResponses []model.Error

query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.TraceDB, r.errorTable)
query := "SELECT any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
} else {
query = query + ", any(serviceName) as serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
} else {
query = query + ", any(exceptionType) as exceptionType"
}
query += fmt.Sprintf(" FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}

if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName ilike @serviceName"
args = append(args, clickhouse.Named("serviceName", "%"+queryParams.ServiceName+"%"))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType ilike @exceptionType"
args = append(args, clickhouse.Named("exceptionType", "%"+queryParams.ExceptionType+"%"))
}
query = query + " GROUP BY groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
}
if len(queryParams.OrderParam) != 0 {
if queryParams.Order == constants.Descending {
query = query + " ORDER BY " + queryParams.OrderParam + " DESC"

@ -2542,7 +2591,14 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C

query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}

if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName = @serviceName"
args = append(args, clickhouse.Named("serviceName", queryParams.ServiceName))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType = @exceptionType"
args = append(args, clickhouse.Named("exceptionType", queryParams.ExceptionType))
}
err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount)
zap.S().Info(query)

@ -3067,6 +3123,20 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con

return totalSamples, nil
}

func (r *ClickHouseReader) GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error) {

clusterInfo := []model.ClusterInfo{}

queryStr := `SELECT shard_num, shard_weight, replica_num, errors_count, slowdowns_count, estimated_recovery_time FROM system.clusters where cluster='cluster';`
r.db.Select(ctx, &clusterInfo, queryStr)
if len(clusterInfo) == 1 {
return clusterInfo[0].GetMapFromStruct(), nil
}

return nil, nil
}

func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) {

var totalLogLines uint64

@ -3197,7 +3267,8 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
// remove index
query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, cluster, field.Name)
err := r.db.Exec(ctx, query)
if err != nil {
// we are ignoring errors with code 341 as it is an error with updating old part https://github.com/SigNoz/engineering-pod/issues/919#issuecomment-1366344346
if err != nil && !strings.HasPrefix(err.Error(), "code: 341") {
return &model.ApiError{Err: err, Typ: model.ErrorInternal}
}
}

@ -3212,11 +3283,18 @@ func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilter
}

isPaginatePrev := logs.CheckIfPrevousPaginateAndModifyOrder(params)
filterSql, err := logs.GenerateSQLWhere(fields, params)
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, params)
if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
}

data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}

query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable)

if filterSql != "" {

@ -3246,10 +3324,17 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC
return
}

filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: client.Filter.Query,
})

data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}

if err != nil {
client.Error <- err
return

@ -3326,13 +3411,20 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs
return nil, apiErr
}

filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: params.Query,
})
if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
}

data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}

query := ""
if params.GroupBy != "" {
query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+

@ -1159,6 +1159,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
RespondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil)
}
RespondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil)
return
}

response_data := &model.QueryData{

@ -1332,6 +1333,9 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
}

telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data)
if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) {
telemetry.GetInstance().AddActiveTracesUser()
}

aH.WriteJSON(w, r, result)
}

@ -2190,6 +2194,8 @@ func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
RespondError(w, &err, "streaming is not supported")
return
}
// flush the headers
flusher.Flush()

for {
select {

@ -36,7 +36,7 @@ const (
DESC = "desc"
)

var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+ (in|nin) \([^(]+\))|([\w.]+ (gt|lt|gte|lte) (')?[\S]+(')?)|([\w.]+ (contains|ncontains)) [^\\]?'(.*?[^\\])')`)
var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+( )+(in|nin)( )+\([^(]+\))|([\w.]+( )+(gt|lt|gte|lte)( )+(')?[\S]+(')?)|([\w.]+( )+(contains|ncontains))( )+[^\\]?'(.*?[^\\])')`)
var operatorRegex, _ = regexp.Compile(`(?i)(?: )(in|nin|gt|lt|gte|lte|contains|ncontains)(?: )`)

func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {

@ -152,6 +152,7 @@ func ParseLogAggregateParams(r *http.Request) (*model.LogsAggregateParams, error

func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens := []string{}

filterTokens := tokenRegex.FindAllString(query, -1)

if len(filterTokens) == 0 {

@ -190,7 +191,13 @@ func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens = append(sqlQueryTokens, f)
} else {
symbol := operatorMapping[strings.ToLower(op)]
sqlQueryTokens = append(sqlQueryTokens, strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)+" ")
sqlExpr := strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)
splittedExpr := strings.Split(sqlExpr, symbol)
if len(splittedExpr) != 2 {
return nil, fmt.Errorf("error while splitting expression: %s", sqlExpr)
}
trimmedSqlExpr := fmt.Sprintf("%s %s %s ", strings.Join(strings.Fields(splittedExpr[0]), " "), symbol, strings.TrimSpace(splittedExpr[1]))
sqlQueryTokens = append(sqlQueryTokens, trimmedSqlExpr)
}
}

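The replacement above fixes tokenization when a filter contains stray whitespace: the key side of the operator is collapsed with strings.Fields and strings.Join, and the value side trimmed. The normalization step in isolation (wrapped in a hypothetical helper):

    package main

    import (
        "fmt"
        "strings"
    )

    // normalizeExpr collapses repeated whitespace on the key side of an operator
    // and trims the value side, the same strings.Fields/Join trick used above.
    func normalizeExpr(key, symbol, value string) string {
        return fmt.Sprintf("%s %s %s ", strings.Join(strings.Fields(key), " "), symbol, strings.TrimSpace(value))
    }

    func main() {
        // "  service   name " with doubled spaces becomes "service name".
        fmt.Printf("%q\n", normalizeExpr("  service   name ", "ILIKE", "  '%auth%'  "))
        // → "service name ILIKE '%auth%' "
    }
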
@ -272,20 +279,23 @@ func CheckIfPrevousPaginateAndModifyOrder(params *model.LogsFilterParams) (isPag
return
}

func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, error) {
func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, int, error) {
var tokens []string
var err error
var sqlWhere string
var lenTokens = 0
if params.Query != "" {
tokens, err = parseLogQuery(params.Query)

if err != nil {
return sqlWhere, err
return sqlWhere, -1, err
}
lenTokens = len(tokens)
}

tokens, err = replaceInterestingFields(allFields, tokens)
if err != nil {
return sqlWhere, err
return sqlWhere, -1, err
}

filterTokens := []string{}

@ -335,5 +345,5 @@ func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilt

sqlWhere = strings.Join(tokens, "")

return sqlWhere, nil
return sqlWhere, lenTokens, nil
}

@ -80,7 +80,17 @@ var correctQueriesTest = []struct {
{
`filters with extra spaces`,
`service IN ('name > 100') AND length gt 100`,
[]string{`service IN ('name > 100') `, `AND length > 100 `},
[]string{`service IN ('name > 100') `, `AND length > 100 `},
},
{
`Extra space within a filter expression`,
`service IN ('name > 100')`,
[]string{`service IN ('name > 100') `},
},
{
`Extra space between a query filter`,
`data contains 'hello world .'`,
[]string{`data ILIKE '%hello world .%' `},
},
{
`filters with special characters in key name`,

@ -480,14 +480,18 @@ func parseListErrorsRequest(r *http.Request) (*model.ListErrorsParams, error) {
if err != nil {
return nil, errors.New("offset param is not in correct format")
}
serviceName := r.URL.Query().Get("serviceName")
exceptionType := r.URL.Query().Get("exceptionType")

params := &model.ListErrorsParams{
Start: startTime,
End: endTime,
OrderParam: orderParam,
Order: order,
Limit: int64(limitInt),
Offset: int64(offsetInt),
Start:         startTime,
End:           endTime,
OrderParam:    orderParam,
Order:         order,
Limit:         int64(limitInt),
Offset:        int64(offsetInt),
ServiceName:   serviceName,
ExceptionType: exceptionType,
}

return params, nil

@ -1,8 +1,11 @@
package app

import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
_ "net/http/pprof" // http profiler

@ -235,15 +238,84 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}

func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v2/metrics/query_range"
var requestBody map[string]interface{}
data := map[string]interface{}{}

if path == pathToExtractBodyFrom && (r.Method == "POST") {
bodyBytes, _ := ioutil.ReadAll(r.Body)
r.Body.Close() // must close
r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))

json.Unmarshal(bodyBytes, &requestBody)

} else {
return nil, false
}

compositeMetricQuery, compositeMetricQueryExists := requestBody["compositeMetricQuery"]
compositeMetricQueryMap := compositeMetricQuery.(map[string]interface{})
signozMetricFound := false

if compositeMetricQueryExists {
signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(compositeMetricQueryMap)
queryType, queryTypeExists := compositeMetricQueryMap["queryType"]
if queryTypeExists {
data["queryType"] = queryType

}
panelType, panelTypeExists := compositeMetricQueryMap["panelType"]
if panelTypeExists {
data["panelType"] = panelType
}
}

datasource, datasourceExists := requestBody["dataSource"]
if datasourceExists {
data["datasource"] = datasource
}

if !signozMetricFound {
telemetry.GetInstance().AddActiveMetricsUser()
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, false)
}

return data, true
}

func getActiveLogs(path string, r *http.Request) {
// if path == "/api/v1/dashboards/{uuid}" {
// telemetry.GetInstance().AddActiveMetricsUser()
// }
if path == "/api/v1/logs" {
hasFilters := len(r.URL.Query().Get("q"))
if hasFilters > 0 {
telemetry.GetInstance().AddActiveLogsUser()
}

}

}

func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()

dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
getActiveLogs(path, r)

lrw := NewLoggingResponseWriter(w)
next.ServeHTTP(lrw, r)

data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range dashboardMetadata {
data[key] = value
}
}

if telemetry.GetInstance().IsSampled() {
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)

@ -2,9 +2,7 @@ package auth

import (
"context"
"fmt"
"net/http"
"regexp"

"github.com/pkg/errors"
"go.signoz.io/signoz/pkg/query-service/constants"

@ -74,21 +72,21 @@ func ValidatePassword(password string) error {
return errors.Errorf("Password should be atleast %d characters.", minimumPasswordLength)
}

num := `[0-9]{1}`
lower := `[a-z]{1}`
upper := `[A-Z]{1}`
symbol := `[!@#$&*]{1}`
if b, err := regexp.MatchString(num, password); !b || err != nil {
return fmt.Errorf("password should have atleast one number")
}
if b, err := regexp.MatchString(lower, password); !b || err != nil {
return fmt.Errorf("password should have atleast one lower case letter")
}
if b, err := regexp.MatchString(upper, password); !b || err != nil {
return fmt.Errorf("password should have atleast one upper case letter")
}
if b, err := regexp.MatchString(symbol, password); !b || err != nil {
return fmt.Errorf("password should have atleast one special character from !@#$&* ")
}
// num := `[0-9]{1}`
// lower := `[a-z]{1}`
// upper := `[A-Z]{1}`
// symbol := `[!@#$&*]{1}`
// if b, err := regexp.MatchString(num, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one number")
// }
// if b, err := regexp.MatchString(lower, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one lower case letter")
// }
// if b, err := regexp.MatchString(upper, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one upper case letter")
// }
// if b, err := regexp.MatchString(symbol, password); !b || err != nil {
// return fmt.Errorf("password should have atleast one special character from !@#$&* ")
// }
return nil
}

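With the character-class checks commented out, only the length rule remains, matching the relaxed frontend message earlier in this commit. A sketch of the resulting behavior (minimumPasswordLength assumed to be 8, as the frontend copy suggests; the error text reuses the repo's wording):

    package main

    import "fmt"

    const minimumPasswordLength = 8 // assumed; mirrors the "minimum of 8 characters" message

    // validatePassword reflects the relaxed rule: length is now the only requirement.
    func validatePassword(password string) error {
        if len(password) < minimumPasswordLength {
            return fmt.Errorf("Password should be atleast %d characters.", minimumPasswordLength)
        }
        return nil
    }

    func main() {
        fmt.Println(validatePassword("short"))         // Password should be atleast 8 characters.
        fmt.Println(validatePassword("longenough123")) // <nil>
    }
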
@ -125,6 +125,13 @@ func (mds *ModelDaoSqlite) initializeOrgPreferences(ctx context.Context) error {
// set telemetry fields from userPreferences
telemetry.GetInstance().SetDistinctId(org.Id)

users, _ := mds.GetUsers(ctx)
countUsers := len(users)
telemetry.GetInstance().SetCountUsers(int8(countUsers))
if countUsers > 0 {
telemetry.GetInstance().SetCompanyDomain(users[countUsers-1].Email)
}

return nil
}

@ -63,6 +63,7 @@ type Reader interface {
GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error)
GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error)
// Logs
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError

@ -296,17 +296,21 @@ type GetTTLParams struct {
}

type ListErrorsParams struct {
Start *time.Time
End *time.Time
Limit int64
OrderParam string
Order string
Offset int64
Start         *time.Time
End           *time.Time
Limit         int64
OrderParam    string
Order         string
Offset        int64
ServiceName   string
ExceptionType string
}

type CountErrorsParams struct {
Start *time.Time
End *time.Time
Start         *time.Time
End           *time.Time
ServiceName   string
ExceptionType string
}

type GetErrorParams struct {

@ -399,6 +399,11 @@ type DBResponseTotal struct {
NumTotal uint64 `ch:"numTotal"`
}

type DBResponseMinMax struct {
Min uint64 `ch:"min"`
Max uint64 `ch:"max"`
}

type SpanFiltersResponse struct {
ServiceName map[string]uint64 `json:"serviceName"`
Status map[string]uint64 `json:"status"`

@ -564,3 +569,19 @@ type TagTelemetryData struct {
Env string `json:"env" ch:"env"`
Language string `json:"language" ch:"language"`
}

type ClusterInfo struct {
ShardNum uint32 `json:"shard_num" ch:"shard_num"`
ShardWeight uint32 `json:"shard_weight" ch:"shard_weight"`
ReplicaNum uint32 `json:"replica_num" ch:"replica_num"`
ErrorsCount uint32 `json:"errors_count" ch:"errors_count"`
SlowdownsCount uint32 `json:"slowdowns_count" ch:"slowdowns_count"`
EstimatedRecoveryTime uint32 `json:"estimated_recovery_time" ch:"estimated_recovery_time"`
}

func (ci *ClusterInfo) GetMapFromStruct() map[string]interface{} {
var clusterInfoMap map[string]interface{}
data, _ := json.Marshal(*ci)
json.Unmarshal(data, &clusterInfoMap)
return clusterInfoMap
}

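GetMapFromStruct converts the struct into a generic map by marshalling to JSON and unmarshalling back, so the telemetry payload keys follow the json tags rather than the Go field names. The same round-trip in isolation:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type clusterInfo struct {
        ShardNum   uint32 `json:"shard_num"`
        ReplicaNum uint32 `json:"replica_num"`
    }

    // toMap round-trips a struct through JSON; map keys come from the json tags,
    // matching what GetMapFromStruct produces for telemetry.
    func toMap(ci clusterInfo) map[string]interface{} {
        var out map[string]interface{}
        data, _ := json.Marshal(ci)
        json.Unmarshal(data, &out)
        return out
    }

    func main() {
        fmt.Println(toMap(clusterInfo{ShardNum: 1, ReplicaNum: 2}))
        // map[replica_num:2 shard_num:1] — numbers arrive as float64 after the round-trip
    }
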
@ -2,6 +2,7 @@ package telemetry

import (
"context"
"encoding/json"
"io/ioutil"
"math/rand"
"net/http"

@ -32,19 +33,24 @@ const (
TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
TELEMETRY_EVENT_ENVIRONMENT = "Environment"
TELEMETRY_EVENT_LANGUAGE = "Language"
TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters"
TELEMETRY_EVENT_DISTRIBUTED = "Distributed"
TELEMETRY_EVENT_DASHBOARDS_METADATA = "Dashboards Metadata"
TELEMETRY_EVENT_ACTIVE_USER = "Active User"
)

const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"

const IP_NOT_FOUND_PLACEHOLDER = "NA"
const DEFAULT_NUMBER_OF_SERVICES = 6

const HEART_BEAT_DURATION = 6 * time.Hour

// const HEART_BEAT_DURATION = 10 * time.Second

const RATE_LIMIT_CHECK_DURATION = 1 * time.Minute
const RATE_LIMIT_VALUE = 10
const RATE_LIMIT_VALUE = 2

// const RATE_LIMIT_CHECK_DURATION = 20 * time.Second
// const RATE_LIMIT_VALUE = 5

@ -64,6 +70,33 @@ func (a *Telemetry) IsSampled() bool {

}

func (telemetry *Telemetry) CheckSigNozMetrics(compositeMetricQueryMap map[string]interface{}) bool {

builderQueries, builderQueriesExists := compositeMetricQueryMap["builderQueries"]
if builderQueriesExists {
builderQueriesStr, _ := json.Marshal(builderQueries)
return strings.Contains(string(builderQueriesStr), "signoz_")
}

promQueries, promQueriesExists := compositeMetricQueryMap["promQueries"]
if promQueriesExists {
promQueriesStr, _ := json.Marshal(promQueries)
return strings.Contains(string(promQueriesStr), "signoz_")
}

return false
}

func (telemetry *Telemetry) AddActiveTracesUser() {
telemetry.activeUser["traces"] = 1
}
func (telemetry *Telemetry) AddActiveMetricsUser() {
telemetry.activeUser["metrics"] = 1
}
func (telemetry *Telemetry) AddActiveLogsUser() {
telemetry.activeUser["logs"] = 1
}

type Telemetry struct {
operator analytics.Client
phOperator ph.Client

@ -76,6 +109,8 @@ type Telemetry struct {
minRandInt int
maxRandInt int
rateLimits map[string]int8
activeUser map[string]int8
countUsers int8
}

func createTelemetry() {

@ -85,6 +120,7 @@ func createTelemetry() {
phOperator: ph.New(ph_api_key),
ipAddress: getOutboundIP(),
rateLimits: make(map[string]int8),
activeUser: make(map[string]int8),
}
telemetry.minRandInt = 0
telemetry.maxRandInt = int(1 / DEFAULT_SAMPLING)

@ -111,6 +147,13 @@ func createTelemetry() {
for {
select {
case <-ticker.C:

if (telemetry.activeUser["traces"] != 0) || (telemetry.activeUser["metrics"] != 0) || (telemetry.activeUser["logs"] != 0) {
telemetry.activeUser["any"] = 1
}
telemetry.SendEvent(TELEMETRY_EVENT_ACTIVE_USER, map[string]interface{}{"traces": telemetry.activeUser["traces"], "metrics": telemetry.activeUser["metrics"], "logs": telemetry.activeUser["logs"], "any": telemetry.activeUser["any"]})
telemetry.activeUser = map[string]int8{"traces": 0, "metrics": 0, "logs": 0, "any": 0}

tagsInfo, _ := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background())

if len(tagsInfo.Env) != 0 {

@ -128,16 +171,28 @@ func createTelemetry() {

getLogsInfoInLastHeartBeatInterval, _ := telemetry.reader.GetLogsInfoInLastHeartBeatInterval(context.Background())

traceTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.TraceTTL})
metricsTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.MetricsTTL})
logsTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.LogsTTL})

data := map[string]interface{}{
"totalSpans": totalSpans,
"spansInLastHeartBeatInterval": spansInLastHeartBeatInterval,
"getSamplesInfoInLastHeartBeatInterval": getSamplesInfoInLastHeartBeatInterval,
"getLogsInfoInLastHeartBeatInterval": getLogsInfoInLastHeartBeatInterval,
"countUsers": telemetry.countUsers,
"metricsTTLStatus": metricsTTL.Status,
"tracesTTLStatus": traceTTL.Status,
"logsTTLStatus": logsTTL.Status,
}
for key, value := range tsInfo {
data[key] = value
}
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data)

getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background())
telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval)

}
}
}()

@ -169,7 +224,7 @@ func (a *Telemetry) IdentifyUser(user *model.User) {
if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() {
return
}
a.setCompanyDomain(user.Email)
a.SetCompanyDomain(user.Email)

a.operator.Enqueue(analytics.Identify{
UserId: a.ipAddress,

@ -185,7 +240,11 @@ func (a *Telemetry) IdentifyUser(user *model.User) {

}

func (a *Telemetry) setCompanyDomain(email string) {
func (a *Telemetry) SetCountUsers(countUsers int8) {
a.countUsers = countUsers
}

func (a *Telemetry) SetCompanyDomain(email string) {

email_split := strings.Split(email, "@")
if len(email_split) != 2 {

@ -207,7 +266,12 @@ func (a *Telemetry) checkEvents(event string) bool {
return sendEvent
}

func (a *Telemetry) SendEvent(event string, data map[string]interface{}) {
func (a *Telemetry) SendEvent(event string, data map[string]interface{}, opts ...bool) {

rateLimitFlag := true
if len(opts) > 0 {
rateLimitFlag = opts[0]
}

if !a.isTelemetryEnabled() {
return

@ -218,10 +282,12 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}) {
return
}

if a.rateLimits[event] < RATE_LIMIT_VALUE {
a.rateLimits[event] += 1
} else {
return
if rateLimitFlag {
if a.rateLimits[event] < RATE_LIMIT_VALUE {
a.rateLimits[event] += 1
} else {
return
}
}

// zap.S().Info(data)

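The new opts ...bool parameter is the common Go idiom for adding an optional flag without touching existing call sites: callers that pass nothing keep rate limiting, while extractDashboardMetaData passes false to bypass it. The idiom on its own (names simplified):

    package main

    import "fmt"

    // sendEvent treats opts as an optional boolean: absent means rate-limited,
    // an explicit false (as the dashboards-metadata call passes) skips the limiter.
    func sendEvent(event string, opts ...bool) {
        rateLimit := true
        if len(opts) > 0 {
            rateLimit = opts[0]
        }
        fmt.Printf("event=%s rateLimited=%v\n", event, rateLimit)
    }

    func main() {
        sendEvent("Heart Beat")                 // old call sites compile unchanged
        sendEvent("Dashboards Metadata", false) // opts out of rate limiting
    }
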
@ -169,7 +169,7 @@ services:
<<: *clickhouse-depends

otel-collector:
image: signoz/signoz-otel-collector:0.66.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs
volumes:

@ -194,7 +194,7 @@ services:
<<: *clickhouse-depends

otel-collector-metrics:
image: signoz/signoz-otel-collector:0.66.0
image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

@ -74,7 +74,7 @@ processors:
signozspanmetrics/prometheus:
metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000
dimensions_cache_size: 100000
dimensions:
- name: service.namespace
default: default