Merge pull request #1943 from SigNoz/release/v0.13.0

Release/v0.13.0
This commit is contained in:
Ankit Nayan 2022-12-29 17:32:15 +05:30 committed by GitHub
commit 545d46c39c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 1138 additions and 466 deletions

View File

@ -35,7 +35,6 @@ x-clickhouse-depend: &clickhouse-depend
services: services:
zookeeper-1: zookeeper-1:
image: bitnami/zookeeper:3.7.0 image: bitnami/zookeeper:3.7.0
container_name: zookeeper-1
hostname: zookeeper-1 hostname: zookeeper-1
user: root user: root
ports: ports:
@ -52,7 +51,6 @@ services:
# zookeeper-2: # zookeeper-2:
# image: bitnami/zookeeper:3.7.0 # image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-2
# hostname: zookeeper-2 # hostname: zookeeper-2
# user: root # user: root
# ports: # ports:
@ -69,7 +67,6 @@ services:
# zookeeper-3: # zookeeper-3:
# image: bitnami/zookeeper:3.7.0 # image: bitnami/zookeeper:3.7.0
# container_name: zookeeper-3
# hostname: zookeeper-3 # hostname: zookeeper-3
# user: root # user: root
# ports: # ports:
@ -86,7 +83,6 @@ services:
clickhouse: clickhouse:
<<: *clickhouse-defaults <<: *clickhouse-defaults
container_name: clickhouse
hostname: clickhouse hostname: clickhouse
# ports: # ports:
# - "9000:9000" # - "9000:9000"
@ -101,7 +97,6 @@ services:
# clickhouse-2: # clickhouse-2:
# <<: *clickhouse-defaults # <<: *clickhouse-defaults
# container_name: clickhouse-2
# hostname: clickhouse-2 # hostname: clickhouse-2
# ports: # ports:
# - "9001:9000" # - "9001:9000"
@ -116,7 +111,6 @@ services:
# clickhouse-3: # clickhouse-3:
# <<: *clickhouse-defaults # <<: *clickhouse-defaults
# container_name: clickhouse-3
# hostname: clickhouse-3 # hostname: clickhouse-3
# ports: # ports:
# - "9002:9000" # - "9002:9000"
@ -143,7 +137,7 @@ services:
condition: on-failure condition: on-failure
query-service: query-service:
image: signoz/query-service:0.12.0 image: signoz/query-service:0.13.0
command: ["-config=/root/config/prometheus.yml"] command: ["-config=/root/config/prometheus.yml"]
# ports: # ports:
# - "6060:6060" # pprof port # - "6060:6060" # pprof port
@ -172,7 +166,7 @@ services:
<<: *clickhouse-depend <<: *clickhouse-depend
frontend: frontend:
image: signoz/frontend:0.12.0 image: signoz/frontend:0.13.0
deploy: deploy:
restart_policy: restart_policy:
condition: on-failure condition: on-failure
@ -185,7 +179,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector: otel-collector:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"] command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs user: root # required for reading docker container logs
volumes: volumes:
@ -213,7 +207,7 @@ services:
<<: *clickhouse-depend <<: *clickhouse-depend
otel-collector-metrics: otel-collector-metrics:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"] command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes: volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

View File

@ -78,7 +78,7 @@ processors:
signozspanmetrics/prometheus: signozspanmetrics/prometheus:
metrics_exporter: prometheus metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000 dimensions_cache_size: 100000
dimensions: dimensions:
- name: service.namespace - name: service.namespace
default: default default: default

View File

@ -30,6 +30,8 @@ server {
location /api { location /api {
proxy_pass http://query-service:8080/api; proxy_pass http://query-service:8080/api;
# connection will be closed if no data is read for 600s between successive read operations
proxy_read_timeout 600s;
} }
# redirect server error pages to the static page /50x.html # redirect server error pages to the static page /50x.html

View File

@ -41,7 +41,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector: otel-collector:
container_name: otel-collector container_name: otel-collector
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"] command: ["--config=/etc/otel-collector-config.yaml"]
# user: root # required for reading docker container logs # user: root # required for reading docker container logs
volumes: volumes:
@ -67,7 +67,7 @@ services:
otel-collector-metrics: otel-collector-metrics:
container_name: otel-collector-metrics container_name: otel-collector-metrics
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"] command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes: volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

View File

@ -146,7 +146,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service: query-service:
image: signoz/query-service:0.12.0 image: signoz/query-service:0.13.0
container_name: query-service container_name: query-service
command: ["-config=/root/config/prometheus.yml"] command: ["-config=/root/config/prometheus.yml"]
# ports: # ports:
@ -174,7 +174,7 @@ services:
<<: *clickhouse-depend <<: *clickhouse-depend
frontend: frontend:
image: signoz/frontend:0.12.0 image: signoz/frontend:0.13.0
container_name: frontend container_name: frontend
restart: on-failure restart: on-failure
depends_on: depends_on:
@ -186,7 +186,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector: otel-collector:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"] command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs user: root # required for reading docker container logs
volumes: volumes:
@ -211,7 +211,7 @@ services:
<<: *clickhouse-depend <<: *clickhouse-depend
otel-collector-metrics: otel-collector-metrics:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"] command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes: volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

View File

@ -74,7 +74,7 @@ processors:
signozspanmetrics/prometheus: signozspanmetrics/prometheus:
metrics_exporter: prometheus metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000 dimensions_cache_size: 100000
dimensions: dimensions:
- name: service.namespace - name: service.namespace
default: default default: default

View File

@ -30,6 +30,8 @@ server {
location /api { location /api {
proxy_pass http://query-service:8080/api; proxy_pass http://query-service:8080/api;
# connection will be closed if no data is read for 600s between successive read operations
proxy_read_timeout 600s;
} }
# redirect server error pages to the static page /50x.html # redirect server error pages to the static page /50x.html

View File

@ -6,7 +6,7 @@
"release_notes": "Release Notes", "release_notes": "Release Notes",
"read_how_to_upgrade": "Read instructions on how to upgrade", "read_how_to_upgrade": "Read instructions on how to upgrade",
"latest_version_signoz": "You are running the latest version of SigNoz.", "latest_version_signoz": "You are running the latest version of SigNoz.",
"stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version", "stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend to upgrade to the latest version",
"oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information", "oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information",
"n_a": "N/A", "n_a": "N/A",
"routes": { "routes": {

View File

@ -4,14 +4,16 @@ import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage'; import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill'; import { EventSourcePolyfill } from 'event-source-polyfill';
export const LiveTail = (queryParams: string): EventSourcePolyfill => { // 10 min in ms
const dict = { const TIMEOUT_IN_MS = 10 * 60 * 1000;
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`, export const LiveTail = (queryParams: string): EventSourcePolyfill =>
}, new EventSourcePolyfill(
};
return new EventSourcePolyfill(
`${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`, `${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`,
dict, {
headers: {
Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
},
heartbeatTimeout: TIMEOUT_IN_MS,
},
); );
};

View File

@ -1,46 +1,21 @@
import { Button, Popover } from 'antd'; import { Button, Popover } from 'antd';
import getStep from 'lib/getStep';
import { generateFilterQuery } from 'lib/logs/generateFilterQuery'; import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
import React, { memo, useCallback, useMemo } from 'react'; import React, { memo, useCallback, useMemo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux'; import { useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import AppActions from 'types/actions'; import { SET_SEARCH_QUERY_STRING } from 'types/actions/logs';
import { SET_SEARCH_QUERY_STRING, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
children: React.ReactNode;
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => ReturnType<typeof getLogsAggregate>;
}
function AddToQueryHOC({ function AddToQueryHOC({
fieldKey, fieldKey,
fieldValue, fieldValue,
children, children,
getLogs,
getLogsAggregate,
}: AddToQueryHOCProps): JSX.Element { }: AddToQueryHOCProps): JSX.Element {
const { const {
searchFilter: { queryString }, searchFilter: { queryString },
logLinesPerPage,
idStart,
idEnd,
liveTail,
} = useSelector<AppState, ILogsReducer>((store) => store.logs); } = useSelector<AppState, ILogsReducer>((store) => store.logs);
const dispatch = useDispatch(); const dispatch = useDispatch();
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const generatedQuery = useMemo( const generatedQuery = useMemo(
() => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }), () => generateFilterQuery({ fieldKey, fieldValue, type: 'IN' }),
[fieldKey, fieldValue], [fieldKey, fieldValue],
@ -58,69 +33,14 @@ function AddToQueryHOC({
type: SET_SEARCH_QUERY_STRING, type: SET_SEARCH_QUERY_STRING,
payload: updatedQueryString, payload: updatedQueryString,
}); });
if (liveTail === 'STOPPED') { }, [dispatch, generatedQuery, queryString]);
getLogs({
q: updatedQueryString, const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
limit: logLinesPerPage, fieldKey,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
getLogsAggregate({
timestampStart: minTime,
timestampEnd: maxTime,
step: getStep({
start: minTime,
end: maxTime,
inputFormat: 'ns',
}),
q: updatedQueryString,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
} else if (liveTail === 'PLAYING') {
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED',
});
setTimeout(
() =>
dispatch({
type: TOGGLE_LIVE_TAIL,
payload: liveTail,
}),
0,
);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
dispatch,
generatedQuery,
getLogs,
idEnd,
idStart,
logLinesPerPage,
maxTime,
minTime,
queryString,
]); ]);
const popOverContent = (
<span style={{ fontSize: '0.9rem' }}>Add to query: {fieldKey}</span>
);
return ( return (
<Button <Button size="small" type="text" onClick={handleQueryAdd}>
size="small"
type="text"
style={{
margin: 0,
padding: 0,
}}
onClick={handleQueryAdd}
>
<Popover placement="top" content={popOverContent}> <Popover placement="top" content={popOverContent}>
{children} {children}
</Popover> </Popover>
@ -128,20 +48,10 @@ function AddToQueryHOC({
); );
} }
interface DispatchProps { interface AddToQueryHOCProps {
getLogs: ( fieldKey: string;
props: Parameters<typeof getLogs>[0], fieldValue: string;
) => (dispatch: Dispatch<AppActions>) => void; children: React.ReactNode;
getLogsAggregate: (
props: Parameters<typeof getLogsAggregate>[0],
) => (dispatch: Dispatch<AppActions>) => void;
} }
const mapDispatchToProps = ( export default memo(AddToQueryHOC);
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
getLogsAggregate: bindActionCreators(getLogsAggregate, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(AddToQueryHOC));

View File

@ -0,0 +1 @@
export const defaultText = 'Ah, seems like we reached a dead end!';

View File

@ -2,45 +2,52 @@ import getLocalStorageKey from 'api/browser/localstorage/get';
import NotFoundImage from 'assets/NotFound'; import NotFoundImage from 'assets/NotFound';
import { LOCALSTORAGE } from 'constants/localStorage'; import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes'; import ROUTES from 'constants/routes';
import React from 'react'; import React, { useCallback } from 'react';
import { useDispatch } from 'react-redux'; import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux'; import { Dispatch } from 'redux';
import AppActions from 'types/actions'; import AppActions from 'types/actions';
import { LOGGED_IN } from 'types/actions/app'; import { LOGGED_IN } from 'types/actions/app';
import { defaultText } from './constant';
import { Button, Container, Text, TextContainer } from './styles'; import { Button, Container, Text, TextContainer } from './styles';
function NotFound(): JSX.Element { function NotFound({ text = defaultText }: Props): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>(); const dispatch = useDispatch<Dispatch<AppActions>>();
const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN); const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);
const onClickHandler = useCallback(() => {
if (isLoggedIn) {
dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
}
}, [dispatch, isLoggedIn]);
return ( return (
<Container> <Container>
<NotFoundImage /> <NotFoundImage />
<TextContainer> <TextContainer>
<Text>Ah, seems like we reached a dead end!</Text> <Text>{text}</Text>
<Text>Page Not Found</Text> <Text>Page Not Found</Text>
</TextContainer> </TextContainer>
<Button <Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
onClick={(): void => {
if (isLoggedIn) {
dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
}
}}
to={ROUTES.APPLICATION}
tabIndex={0}
>
Return To Services Page Return To Services Page
</Button> </Button>
</Container> </Container>
); );
} }
interface Props {
text?: string;
}
NotFound.defaultProps = {
text: defaultText,
};
export default NotFound; export default NotFound;

View File

@ -0,0 +1,9 @@
const DEFAULT_FILTER_VALUE = '';
const EXCEPTION_TYPE_FILTER_NAME = 'exceptionType';
const SERVICE_NAME_FILTER_NAME = 'serviceName';
export {
DEFAULT_FILTER_VALUE,
EXCEPTION_TYPE_FILTER_NAME,
SERVICE_NAME_FILTER_NAME,
};

View File

@ -17,6 +17,7 @@ import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts'; import getErrorCounts from 'api/errors/getErrorCounts';
import ROUTES from 'constants/routes'; import ROUTES from 'constants/routes';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams'; import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history'; import history from 'lib/history';
import React, { useCallback, useEffect, useMemo } from 'react'; import React, { useCallback, useEffect, useMemo } from 'react';
@ -30,7 +31,11 @@ import { Exception, PayloadProps } from 'types/api/errors/getAll';
import { GlobalReducer } from 'types/reducer/globalTime'; import { GlobalReducer } from 'types/reducer/globalTime';
import { import {
extractFilterValues,
getDefaultFilterValue,
getDefaultOrder, getDefaultOrder,
getFilterString,
getFilterValues,
getNanoSeconds, getNanoSeconds,
getOffSet, getOffSet,
getOrder, getOrder,
@ -43,15 +48,27 @@ function AllErrors(): JSX.Element {
const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>( const { maxTime, minTime, loading } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime, (state) => state.globalTime,
); );
const { search, pathname } = useLocation(); const { pathname } = useLocation();
const params = useMemo(() => new URLSearchParams(search), [search]); const params = useUrlQuery();
const { t } = useTranslation(['common']); const { t } = useTranslation(['common']);
const {
const updatedOrder = getOrder(params.get(urlKey.order)); updatedOrder,
const getUpdatedOffset = getOffSet(params.get(urlKey.offset)); getUpdatedOffset,
const getUpdatedParams = getOrderParams(params.get(urlKey.orderParam)); getUpdatedParams,
const getUpdatedPageSize = getUpdatePageSize(params.get(urlKey.pageSize)); getUpdatedPageSize,
getUpdatedExceptionType,
getUpdatedServiceName,
} = useMemo(
() => ({
updatedOrder: getOrder(params.get(urlKey.order)),
getUpdatedOffset: getOffSet(params.get(urlKey.offset)),
getUpdatedParams: getOrderParams(params.get(urlKey.orderParam)),
getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)),
getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)),
getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)),
}),
[params],
);
const updatedPath = useMemo( const updatedPath = useMemo(
() => () =>
@ -60,6 +77,8 @@ function AllErrors(): JSX.Element {
offset: getUpdatedOffset, offset: getUpdatedOffset,
orderParam: getUpdatedParams, orderParam: getUpdatedParams,
pageSize: getUpdatedPageSize, pageSize: getUpdatedPageSize,
exceptionType: getUpdatedExceptionType,
serviceName: getUpdatedServiceName,
})}`, })}`,
[ [
pathname, pathname,
@ -67,6 +86,8 @@ function AllErrors(): JSX.Element {
getUpdatedOffset, getUpdatedOffset,
getUpdatedParams, getUpdatedParams,
getUpdatedPageSize, getUpdatedPageSize,
getUpdatedExceptionType,
getUpdatedServiceName,
], ],
); );
@ -81,6 +102,8 @@ function AllErrors(): JSX.Element {
limit: getUpdatedPageSize, limit: getUpdatedPageSize,
offset: getUpdatedOffset, offset: getUpdatedOffset,
orderParam: getUpdatedParams, orderParam: getUpdatedParams,
exceptionType: getUpdatedExceptionType,
serviceName: getUpdatedServiceName,
}), }),
enabled: !loading, enabled: !loading,
}, },
@ -108,14 +131,43 @@ function AllErrors(): JSX.Element {
const filterIcon = useCallback(() => <SearchOutlined />, []); const filterIcon = useCallback(() => <SearchOutlined />, []);
const handleSearch = ( const handleSearch = useCallback(
confirm: (param?: FilterConfirmProps) => void, (
): VoidFunction => (): void => { confirm: (param?: FilterConfirmProps) => void,
confirm(); filterValue: string,
}; filterKey: string,
): VoidFunction => (): void => {
const { exceptionFilterValue, serviceFilterValue } = getFilterValues(
getUpdatedServiceName || '',
getUpdatedExceptionType || '',
filterKey,
filterValue || '',
);
history.replace(
`${pathname}?${createQueryParams({
order: updatedOrder,
offset: getUpdatedOffset,
orderParam: getUpdatedParams,
pageSize: getUpdatedPageSize,
exceptionType: exceptionFilterValue,
serviceName: serviceFilterValue,
})}`,
);
confirm();
},
[
getUpdatedExceptionType,
getUpdatedOffset,
getUpdatedPageSize,
getUpdatedParams,
getUpdatedServiceName,
pathname,
updatedOrder,
],
);
const filterDropdownWrapper = useCallback( const filterDropdownWrapper = useCallback(
({ setSelectedKeys, selectedKeys, confirm, placeholder }) => { ({ setSelectedKeys, selectedKeys, confirm, placeholder, filterKey }) => {
return ( return (
<Card size="small"> <Card size="small">
<Space align="start" direction="vertical"> <Space align="start" direction="vertical">
@ -126,11 +178,16 @@ function AllErrors(): JSX.Element {
setSelectedKeys(e.target.value ? [e.target.value] : []) setSelectedKeys(e.target.value ? [e.target.value] : [])
} }
allowClear allowClear
onPressEnter={handleSearch(confirm)} defaultValue={getDefaultFilterValue(
filterKey,
getUpdatedServiceName,
getUpdatedExceptionType,
)}
onPressEnter={handleSearch(confirm, selectedKeys[0], filterKey)}
/> />
<Button <Button
type="primary" type="primary"
onClick={handleSearch(confirm)} onClick={handleSearch(confirm, selectedKeys[0], filterKey)}
icon={<SearchOutlined />} icon={<SearchOutlined />}
size="small" size="small"
> >
@ -140,7 +197,7 @@ function AllErrors(): JSX.Element {
</Card> </Card>
); );
}, },
[], [getUpdatedExceptionType, getUpdatedServiceName, handleSearch],
); );
const onExceptionTypeFilter = useCallback( const onExceptionTypeFilter = useCallback(
@ -167,6 +224,7 @@ function AllErrors(): JSX.Element {
( (
onFilter: ColumnType<Exception>['onFilter'], onFilter: ColumnType<Exception>['onFilter'],
placeholder: string, placeholder: string,
filterKey: string,
): ColumnType<Exception> => ({ ): ColumnType<Exception> => ({
onFilter, onFilter,
filterIcon, filterIcon,
@ -176,6 +234,7 @@ function AllErrors(): JSX.Element {
selectedKeys, selectedKeys,
confirm, confirm,
placeholder, placeholder,
filterKey,
}), }),
}), }),
[filterIcon, filterDropdownWrapper], [filterIcon, filterDropdownWrapper],
@ -186,7 +245,7 @@ function AllErrors(): JSX.Element {
title: 'Exception Type', title: 'Exception Type',
dataIndex: 'exceptionType', dataIndex: 'exceptionType',
key: 'exceptionType', key: 'exceptionType',
...getFilter(onExceptionTypeFilter, 'Search By Exception'), ...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'),
render: (value, record): JSX.Element => ( render: (value, record): JSX.Element => (
<Tooltip overlay={(): JSX.Element => value}> <Tooltip overlay={(): JSX.Element => value}>
<Link <Link
@ -266,30 +325,39 @@ function AllErrors(): JSX.Element {
updatedOrder, updatedOrder,
'serviceName', 'serviceName',
), ),
...getFilter(onApplicationTypeFilter, 'Search By Application'), ...getFilter(
onApplicationTypeFilter,
'Search By Application',
'serviceName',
),
}, },
]; ];
const onChangeHandler: TableProps<Exception>['onChange'] = ( const onChangeHandler: TableProps<Exception>['onChange'] = useCallback(
paginations, (paginations, filters, sorter) => {
_, if (!Array.isArray(sorter)) {
sorter, const { pageSize = 0, current = 0 } = paginations;
) => { const { columnKey = '', order } = sorter;
if (!Array.isArray(sorter)) { const updatedOrder = order === 'ascend' ? 'ascending' : 'descending';
const { pageSize = 0, current = 0 } = paginations; const params = new URLSearchParams(window.location.search);
const { columnKey = '', order } = sorter; const { exceptionType, serviceName } = extractFilterValues(filters, {
const updatedOrder = order === 'ascend' ? 'ascending' : 'descending'; serviceName: getFilterString(params.get(urlKey.serviceName)),
exceptionType: getFilterString(params.get(urlKey.exceptionType)),
history.replace( });
`${pathname}?${createQueryParams({ history.replace(
order: updatedOrder, `${pathname}?${createQueryParams({
offset: (current - 1) * pageSize, order: updatedOrder,
orderParam: columnKey, offset: (current - 1) * pageSize,
pageSize, orderParam: columnKey,
})}`, pageSize,
); exceptionType,
} serviceName,
}; })}`,
);
}
},
[pathname],
);
return ( return (
<Table <Table

View File

@ -1,7 +1,13 @@
import { SortOrder } from 'antd/lib/table/interface'; import { FilterValue, SortOrder } from 'antd/lib/table/interface';
import Timestamp from 'timestamp-nano'; import Timestamp from 'timestamp-nano';
import { Order, OrderBy } from 'types/api/errors/getAll'; import { Order, OrderBy } from 'types/api/errors/getAll';
import {
DEFAULT_FILTER_VALUE,
EXCEPTION_TYPE_FILTER_NAME,
SERVICE_NAME_FILTER_NAME,
} from './constant';
export const isOrder = (order: string | null): order is Order => export const isOrder = (order: string | null): order is Order =>
!!(order === 'ascending' || order === 'descending'); !!(order === 'ascending' || order === 'descending');
@ -10,6 +16,8 @@ export const urlKey = {
offset: 'offset', offset: 'offset',
orderParam: 'orderParam', orderParam: 'orderParam',
pageSize: 'pageSize', pageSize: 'pageSize',
exceptionType: 'exceptionType',
serviceName: 'serviceName',
}; };
export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => { export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => {
@ -87,3 +95,94 @@ export const getUpdatePageSize = (pageSize: string | null): number => {
} }
return 10; return 10;
}; };
export const getFilterString = (filter: string | null): string => {
if (filter) {
return filter;
}
return '';
};
export const getDefaultFilterValue = (
filterKey: string | null,
serviceName: string,
exceptionType: string,
): string | undefined => {
let defaultValue: string | undefined;
switch (filterKey) {
case SERVICE_NAME_FILTER_NAME:
defaultValue = serviceName;
break;
case EXCEPTION_TYPE_FILTER_NAME:
defaultValue = exceptionType;
break;
default:
break;
}
return defaultValue;
};
export const getFilterValues = (
serviceName: string,
exceptionType: string,
filterKey: string,
filterValue: string,
): { exceptionFilterValue: string; serviceFilterValue: string } => {
let serviceFilterValue = serviceName;
let exceptionFilterValue = exceptionType;
switch (filterKey) {
case EXCEPTION_TYPE_FILTER_NAME:
exceptionFilterValue = filterValue;
break;
case SERVICE_NAME_FILTER_NAME:
serviceFilterValue = filterValue;
break;
default:
break;
}
return { exceptionFilterValue, serviceFilterValue };
};
type FilterValues = { exceptionType: string; serviceName: string };
const extractSingleFilterValue = (
filterName: string,
filters: Filter,
): string => {
const filterValues = filters[filterName];
if (
!filterValues ||
!Array.isArray(filterValues) ||
filterValues.length === 0
) {
return DEFAULT_FILTER_VALUE;
}
return String(filterValues[0]);
};
type Filter = Record<string, FilterValue | null>;
export const extractFilterValues = (
filters: Filter,
prefilledFilters: FilterValues,
): FilterValues => {
const filterValues: FilterValues = {
exceptionType: prefilledFilters.exceptionType,
serviceName: prefilledFilters.serviceName,
};
if (filters[EXCEPTION_TYPE_FILTER_NAME]) {
filterValues.exceptionType = extractSingleFilterValue(
EXCEPTION_TYPE_FILTER_NAME,
filters,
);
}
if (filters[SERVICE_NAME_FILTER_NAME]) {
filterValues.serviceName = extractSingleFilterValue(
SERVICE_NAME_FILTER_NAME,
filters,
);
}
return filterValues;
};

View File

@ -5,19 +5,14 @@ import {
} from '@ant-design/icons'; } from '@ant-design/icons';
import { Button, Divider, Select } from 'antd'; import { Button, Divider, Select } from 'antd';
import React, { memo } from 'react'; import React, { memo } from 'react';
import { connect, useDispatch, useSelector } from 'react-redux'; import { useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { getLogs } from 'store/actions/logs/getLogs';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { import {
GET_NEXT_LOG_LINES, GET_NEXT_LOG_LINES,
GET_PREVIOUS_LOG_LINES, GET_PREVIOUS_LOG_LINES,
RESET_ID_START_AND_END, RESET_ID_START_AND_END,
SET_LOG_LINES_PER_PAGE, SET_LOG_LINES_PER_PAGE,
} from 'types/actions/logs'; } from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import { Container } from './styles'; import { Container } from './styles';
@ -26,20 +21,10 @@ const { Option } = Select;
const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200]; const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200];
interface LogControlsProps { function LogControls(): JSX.Element | null {
getLogs: (props: Parameters<typeof getLogs>[0]) => ReturnType<typeof getLogs>; const { logLinesPerPage, liveTail } = useSelector<AppState, ILogsReducer>(
} (state) => state.logs,
function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
); );
const {
logLinesPerPage,
idStart,
idEnd,
liveTail,
searchFilter: { queryString },
} = useSelector<AppState, ILogsReducer>((state) => state.logs);
const dispatch = useDispatch(); const dispatch = useDispatch();
const handleLogLinesPerPageChange = (e: number): void => { const handleLogLinesPerPageChange = (e: number): void => {
@ -53,18 +38,6 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
dispatch({ dispatch({
type: RESET_ID_START_AND_END, type: RESET_ID_START_AND_END,
}); });
if (liveTail === 'STOPPED')
getLogs({
q: queryString,
limit: logLinesPerPage,
orderBy: 'timestamp',
order: 'desc',
timestampStart: minTime,
timestampEnd: maxTime,
...(idStart ? { idGt: idStart } : {}),
...(idEnd ? { idLt: idEnd } : {}),
});
}; };
const handleNavigatePrevious = (): void => { const handleNavigatePrevious = (): void => {
@ -106,16 +79,4 @@ function LogControls({ getLogs }: LogControlsProps): JSX.Element | null {
); );
} }
interface DispatchProps { export default memo(LogControls);
getLogs: (
props: Parameters<typeof getLogs>[0],
) => (dispatch: Dispatch<AppActions>) => void;
}
const mapDispatchToProps = (
dispatch: ThunkDispatch<unknown, unknown, AppActions>,
): DispatchProps => ({
getLogs: bindActionCreators(getLogs, dispatch),
});
export default connect(null, mapDispatchToProps)(memo(LogControls));

View File

@ -0,0 +1,36 @@
import { Button, Row } from 'antd';
import React from 'react';
import { QueryFields } from './utils';
interface SearchFieldsActionBarProps {
fieldsQuery: QueryFields[][];
applyUpdate: () => void;
clearFilters: () => void;
}
export function SearchFieldsActionBar({
fieldsQuery,
applyUpdate,
clearFilters,
}: SearchFieldsActionBarProps): JSX.Element | null {
if (fieldsQuery.length === 0) {
return null;
}
return (
<Row style={{ justifyContent: 'flex-end', paddingRight: '2.4rem' }}>
<Button
type="default"
onClick={clearFilters}
style={{ marginRight: '1rem' }}
>
Clear Filter
</Button>
<Button type="primary" onClick={applyUpdate}>
Apply
</Button>
</Row>
);
}
export default SearchFieldsActionBar;

View File

@ -12,19 +12,15 @@ import {
QueryOperatorsMultiVal, QueryOperatorsMultiVal,
QueryOperatorsSingleVal, QueryOperatorsSingleVal,
} from 'lib/logql/tokens'; } from 'lib/logql/tokens';
import { flatten } from 'lodash-es'; import React, { useMemo } from 'react';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import { v4 } from 'uuid';
import { SearchFieldsProps } from '..';
import FieldKey from '../FieldKey'; import FieldKey from '../FieldKey';
import { QueryFieldContainer } from '../styles'; import { QueryFieldContainer } from '../styles';
import { createParsedQueryStructure } from '../utils'; import { QueryFields } from '../utils';
import { Container, QueryWrapper } from './styles'; import { Container, QueryWrapper } from './styles';
import { hashCode, parseQuery } from './utils';
const { Option } = Select; const { Option } = Select;
@ -68,7 +64,6 @@ function QueryField({
const { const {
fields: { selected }, fields: { selected },
} = useSelector<AppState, ILogsReducer>((store) => store.logs); } = useSelector<AppState, ILogsReducer>((store) => store.logs);
const getFieldType = (inputKey: string): string => { const getFieldType = (inputKey: string): string => {
// eslint-disable-next-line no-restricted-syntax // eslint-disable-next-line no-restricted-syntax
for (const selectedField of selected) { for (const selectedField of selected) {
@ -147,9 +142,12 @@ function QueryField({
/> />
) : ( ) : (
<Input <Input
onChange={(e): void => handleChange(2, e.target.value)} onChange={(e): void => {
handleChange(2, e.target.value);
}}
style={{ width: '100%' }} style={{ width: '100%' }}
defaultValue={query[2] && query[2].value} defaultValue={query[2] && query[2].value}
value={query[2] && query[2].value}
/> />
)} )}
</div> </div>
@ -165,85 +163,78 @@ function QueryField({
} }
interface QueryConditionFieldProps { interface QueryConditionFieldProps {
query: { value: string | string[]; type: string }[]; query: QueryFields;
queryIndex: number; queryIndex: number;
onUpdate: (arg0: unknown, arg1: number) => void; onUpdate: (arg0: unknown, arg1: number) => void;
} }
export type Query = { value: string | string[]; type: string }[]; export type Query = { value: string | string[]; type: string }[];
export interface QueryBuilderProps {
keyPrefix: string;
onDropDownToggleHandler: (value: boolean) => VoidFunction;
fieldsQuery: QueryFields[][];
setFieldsQuery: (q: QueryFields[][]) => void;
}
function QueryBuilder({ function QueryBuilder({
updateParsedQuery, keyPrefix,
fieldsQuery,
setFieldsQuery,
onDropDownToggleHandler, onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element { }: QueryBuilderProps): JSX.Element {
const {
searchFilter: { parsedQuery },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const keyPrefixRef = useRef(hashCode(JSON.stringify(parsedQuery)));
const [keyPrefix, setKeyPrefix] = useState(keyPrefixRef.current);
const generatedQueryStructure = createParsedQueryStructure(
parsedQuery as never[],
);
useEffect(() => {
const incomingHashCode = hashCode(JSON.stringify(parsedQuery));
if (incomingHashCode !== keyPrefixRef.current) {
keyPrefixRef.current = incomingHashCode;
setKeyPrefix(incomingHashCode);
}
}, [parsedQuery]);
const handleUpdate = (query: Query, queryIndex: number): void => { const handleUpdate = (query: Query, queryIndex: number): void => {
const updatedParsedQuery = generatedQueryStructure; const updated = [...fieldsQuery];
updatedParsedQuery[queryIndex] = parseQuery(query) as never; updated[queryIndex] = query as never; // parseQuery(query) as never;
setFieldsQuery(updated);
const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value);
keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
updateParsedQuery(flatParsedQuery);
}; };
const handleDelete = (queryIndex: number): void => { const handleDelete = (queryIndex: number): void => {
const updatedParsedQuery = generatedQueryStructure; const updated = [...fieldsQuery];
updatedParsedQuery.splice(queryIndex - 1, 2); if (queryIndex !== 0) updated.splice(queryIndex - 1, 2);
else updated.splice(queryIndex, 2);
const flatParsedQuery = flatten(updatedParsedQuery).filter((q) => q.value); setFieldsQuery(updated);
keyPrefixRef.current = v4();
updateParsedQuery(flatParsedQuery);
}; };
const QueryUI = (): JSX.Element | JSX.Element[] => const QueryUI = (
generatedQueryStructure.map((query, idx) => { fieldsQuery: QueryFields[][],
if (Array.isArray(query)) ): JSX.Element | JSX.Element[] => {
return ( const result: JSX.Element[] = [];
fieldsQuery.forEach((query, idx) => {
if (Array.isArray(query) && query.length > 1) {
result.push(
<QueryField <QueryField
key={keyPrefix + idx} key={keyPrefix + idx}
query={query as never} query={query as never}
queryIndex={idx} queryIndex={idx}
onUpdate={handleUpdate as never} onUpdate={handleUpdate as never}
onDelete={handleDelete} onDelete={handleDelete}
/> />,
); );
} else {
return ( result.push(
<div key={keyPrefix + idx}> <div key={keyPrefix + idx}>
<QueryConditionField <QueryConditionField
query={query} query={Array.isArray(query) ? query[0] : query}
queryIndex={idx} queryIndex={idx}
onUpdate={handleUpdate as never} onUpdate={handleUpdate as never}
/> />
</div> </div>,
); );
}
}); });
return result;
};
return ( return (
<> <>
<Container isMargin={generatedQueryStructure.length === 0}> <Container isMargin={fieldsQuery.length === 0}>
<CategoryHeading>LOG QUERY BUILDER</CategoryHeading> <CategoryHeading>LOG QUERY BUILDER</CategoryHeading>
<CloseSquareOutlined onClick={onDropDownToggleHandler(false)} /> <CloseSquareOutlined onClick={onDropDownToggleHandler(false)} />
</Container> </Container>
<QueryWrapper>{QueryUI()}</QueryWrapper> <QueryWrapper key={keyPrefix}>{QueryUI(fieldsQuery)}</QueryWrapper>
</> </>
); );
} }

View File

@ -21,17 +21,3 @@ export const parseQuery = (queries: Query): Query => {
} }
return queries; return queries;
}; };
export const hashCode = (s: string): string => {
if (!s) {
return '0';
}
return `${Math.abs(
s.split('').reduce((a, b) => {
// eslint-disable-next-line no-bitwise, no-param-reassign
a = (a << 5) - a + b.charCodeAt(0);
// eslint-disable-next-line no-bitwise
return a & a;
}, 0),
)}`;
};

View File

@ -2,9 +2,9 @@ import { Button } from 'antd';
import CategoryHeading from 'components/Logs/CategoryHeading'; import CategoryHeading from 'components/Logs/CategoryHeading';
import map from 'lodash-es/map'; import map from 'lodash-es/map';
import React from 'react'; import React from 'react';
import { useDispatch, useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs'; // import { ADD_SEARCH_FIELD_QUERY_STRING } from 'types/actions/logs';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
import FieldKey from './FieldKey'; import FieldKey from './FieldKey';
@ -12,15 +12,15 @@ import FieldKey from './FieldKey';
interface SuggestedItemProps { interface SuggestedItemProps {
name: string; name: string;
type: string; type: string;
applySuggestion: (name: string) => void;
} }
function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element { function SuggestedItem({
const dispatch = useDispatch(); name,
type,
applySuggestion,
}: SuggestedItemProps): JSX.Element {
const addSuggestedField = (): void => { const addSuggestedField = (): void => {
dispatch({ applySuggestion(name);
type: ADD_SEARCH_FIELD_QUERY_STRING,
payload: name,
});
}; };
return ( return (
<Button <Button
@ -33,7 +33,11 @@ function SuggestedItem({ name, type }: SuggestedItemProps): JSX.Element {
); );
} }
function Suggestions(): JSX.Element { interface SuggestionsProps {
applySuggestion: (name: string) => void;
}
function Suggestions({ applySuggestion }: SuggestionsProps): JSX.Element {
const { const {
fields: { selected }, fields: { selected },
} = useSelector<AppState, ILogsReducer>((store) => store.logs); } = useSelector<AppState, ILogsReducer>((store) => store.logs);
@ -47,6 +51,7 @@ function Suggestions(): JSX.Element {
key={JSON.stringify(field)} key={JSON.stringify(field)}
name={field.name} name={field.name}
type={field.type} type={field.type}
applySuggestion={applySuggestion}
/> />
))} ))}
</div> </div>

View File

@ -1,8 +1,21 @@
import React from 'react'; import { notification } from 'antd';
import { flatten } from 'lodash-es';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { ILogsReducer } from 'types/reducer/logs';
import { SearchFieldsActionBar } from './ActionBar';
import QueryBuilder from './QueryBuilder/QueryBuilder'; import QueryBuilder from './QueryBuilder/QueryBuilder';
import Suggestions from './Suggestions'; import Suggestions from './Suggestions';
import { QueryFields } from './utils'; import {
createParsedQueryStructure,
fieldsQueryIsvalid,
hashCode,
initQueryKOVPair,
prepareConditionOperator,
QueryFields,
} from './utils';
export interface SearchFieldsProps { export interface SearchFieldsProps {
updateParsedQuery: (query: QueryFields[]) => void; updateParsedQuery: (query: QueryFields[]) => void;
@ -13,13 +26,85 @@ function SearchFields({
updateParsedQuery, updateParsedQuery,
onDropDownToggleHandler, onDropDownToggleHandler,
}: SearchFieldsProps): JSX.Element { }: SearchFieldsProps): JSX.Element {
const {
searchFilter: { parsedQuery },
} = useSelector<AppState, ILogsReducer>((store) => store.logs);
const [fieldsQuery, setFieldsQuery] = useState(
createParsedQueryStructure([...parsedQuery] as never[]),
);
const keyPrefixRef = useRef(hashCode(JSON.stringify(fieldsQuery)));
useEffect(() => {
const updatedFieldsQuery = createParsedQueryStructure([
...parsedQuery,
] as never[]);
setFieldsQuery(updatedFieldsQuery);
const incomingHashCode = hashCode(JSON.stringify(updatedFieldsQuery));
if (incomingHashCode !== keyPrefixRef.current) {
keyPrefixRef.current = incomingHashCode;
}
}, [parsedQuery]);
const addSuggestedField = useCallback(
(name: string): void => {
if (!name) {
return;
}
const query = [...fieldsQuery];
if (fieldsQuery.length > 0) {
query.push([prepareConditionOperator()]);
}
const newField: QueryFields[] = [];
initQueryKOVPair(name).forEach((q) => newField.push(q));
query.push(newField);
keyPrefixRef.current = hashCode(JSON.stringify(query));
setFieldsQuery(query);
},
[fieldsQuery, setFieldsQuery],
);
const applyUpdate = useCallback((): void => {
const flatParsedQuery = flatten(fieldsQuery);
if (!fieldsQueryIsvalid(flatParsedQuery)) {
notification.error({
message: 'Please enter a valid criteria for each of the selected fields',
});
return;
}
keyPrefixRef.current = hashCode(JSON.stringify(flatParsedQuery));
updateParsedQuery(flatParsedQuery);
onDropDownToggleHandler(false)();
}, [onDropDownToggleHandler, fieldsQuery, updateParsedQuery]);
const clearFilters = useCallback((): void => {
keyPrefixRef.current = hashCode(JSON.stringify([]));
updateParsedQuery([]);
onDropDownToggleHandler(false)();
}, [onDropDownToggleHandler, updateParsedQuery]);
return ( return (
<> <>
<QueryBuilder <QueryBuilder
key={keyPrefixRef.current}
keyPrefix={keyPrefixRef.current}
onDropDownToggleHandler={onDropDownToggleHandler} onDropDownToggleHandler={onDropDownToggleHandler}
updateParsedQuery={updateParsedQuery} fieldsQuery={fieldsQuery}
setFieldsQuery={setFieldsQuery}
/> />
<Suggestions /> <SearchFieldsActionBar
applyUpdate={applyUpdate}
clearFilters={clearFilters}
fieldsQuery={fieldsQuery}
/>
<Suggestions applySuggestion={addSuggestedField} />
</> </>
); );
} }

View File

@ -2,11 +2,30 @@
// @ts-ignore // @ts-ignore
// @ts-nocheck // @ts-nocheck
import { QueryTypes, QueryOperatorsSingleVal } from 'lib/logql/tokens'; import { QueryTypes, ConditionalOperators, ValidTypeSequence, ValidTypeValue } from 'lib/logql/tokens';
export interface QueryFields { export interface QueryFields {
type: keyof typeof QueryTypes; type: keyof typeof QueryTypes;
value: string; value: string | string[];
}
export function fieldsQueryIsvalid(queryFields: QueryFields[]): boolean {
let lastOp: string;
let result = true;
queryFields.forEach((q, idx)=> {
if (!q.value || q.value === null || q.value === '') result = false;
if (Array.isArray(q.value) && q.value.length === 0 ) result = false;
const nextOp = idx < queryFields.length ? queryFields[idx+1]: undefined;
if (!ValidTypeSequence(lastOp?.type, q?.type, nextOp?.type)) result = false
if (!ValidTypeValue(lastOp?.value, q.value)) result = false;
lastOp = q;
});
return result
} }
export const queryKOVPair = (): QueryFields[] => [ export const queryKOVPair = (): QueryFields[] => [
@ -23,6 +42,29 @@ export const queryKOVPair = (): QueryFields[] => [
value: null, value: null,
}, },
]; ];
export const initQueryKOVPair = (name?: string = null, op?: string = null , value?: string | string[] = null ): QueryFields[] => [
{
type: QueryTypes.QUERY_KEY,
value: name,
},
{
type: QueryTypes.QUERY_OPERATOR,
value: op,
},
{
type: QueryTypes.QUERY_VALUE,
value: value,
},
];
export const prepareConditionOperator = (op?: string = ConditionalOperators.AND): QueryFields => {
return {
type: QueryTypes.CONDITIONAL_OPERATOR,
value: op,
}
}
export const createParsedQueryStructure = (parsedQuery = []) => { export const createParsedQueryStructure = (parsedQuery = []) => {
if (!parsedQuery.length) { if (!parsedQuery.length) {
return parsedQuery; return parsedQuery;
@ -64,3 +106,17 @@ export const createParsedQueryStructure = (parsedQuery = []) => {
}); });
return structuredArray; return structuredArray;
}; };
export const hashCode = (s: string): string => {
if (!s) {
return '0';
}
return `${Math.abs(
s.split('').reduce((a, b) => {
// eslint-disable-next-line no-bitwise, no-param-reassign
a = (a << 5) - a + b.charCodeAt(0);
// eslint-disable-next-line no-bitwise
return a & a;
}, 0),
)}`;
};

View File

@ -1,7 +1,14 @@
import { Input, InputRef, Popover } from 'antd'; import { Input, InputRef, Popover } from 'antd';
import useUrlQuery from 'hooks/useUrlQuery'; import useUrlQuery from 'hooks/useUrlQuery';
import getStep from 'lib/getStep'; import getStep from 'lib/getStep';
import React, { useCallback, useEffect, useRef, useState } from 'react'; import { debounce } from 'lodash-es';
import React, {
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import { connect, useDispatch, useSelector } from 'react-redux'; import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux'; import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk'; import { ThunkDispatch } from 'redux-thunk';
@ -9,7 +16,7 @@ import { getLogs } from 'store/actions/logs/getLogs';
import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate'; import { getLogsAggregate } from 'store/actions/logs/getLogsAggregate';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
import AppActions from 'types/actions'; import AppActions from 'types/actions';
import { TOGGLE_LIVE_TAIL } from 'types/actions/logs'; import { FLUSH_LOGS, TOGGLE_LIVE_TAIL } from 'types/actions/logs';
import { GlobalReducer } from 'types/reducer/globalTime'; import { GlobalReducer } from 'types/reducer/globalTime';
import { ILogsReducer } from 'types/reducer/logs'; import { ILogsReducer } from 'types/reducer/logs';
@ -22,12 +29,31 @@ function SearchFilter({
getLogsAggregate, getLogsAggregate,
}: SearchFilterProps): JSX.Element { }: SearchFilterProps): JSX.Element {
const { const {
queryString,
updateParsedQuery, updateParsedQuery,
updateQueryString, updateQueryString,
queryString,
} = useSearchParser(); } = useSearchParser();
const [searchText, setSearchText] = useState(queryString);
const [showDropDown, setShowDropDown] = useState(false); const [showDropDown, setShowDropDown] = useState(false);
const searchRef = useRef<InputRef>(null); const searchRef = useRef<InputRef>(null);
const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
AppState,
ILogsReducer
>((state) => state.logs);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const dispatch = useDispatch<Dispatch<AppActions>>();
// keep sync with url queryString
useEffect(() => {
setSearchText(queryString);
}, [queryString]);
const debouncedupdateQueryString = useMemo(
() => debounce(updateQueryString, 300),
[updateQueryString],
);
const onDropDownToggleHandler = useCallback( const onDropDownToggleHandler = useCallback(
(value: boolean) => (): void => { (value: boolean) => (): void => {
@ -36,17 +62,6 @@ function SearchFilter({
[], [],
); );
const { logLinesPerPage, idEnd, idStart, liveTail } = useSelector<
AppState,
ILogsReducer
>((state) => state.logs);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const dispatch = useDispatch<Dispatch<AppActions>>();
const handleSearch = useCallback( const handleSearch = useCallback(
(customQuery) => { (customQuery) => {
if (liveTail === 'PLAYING') { if (liveTail === 'PLAYING') {
@ -54,6 +69,9 @@ function SearchFilter({
type: TOGGLE_LIVE_TAIL, type: TOGGLE_LIVE_TAIL,
payload: 'PAUSED', payload: 'PAUSED',
}); });
dispatch({
type: FLUSH_LOGS,
});
setTimeout( setTimeout(
() => () =>
dispatch({ dispatch({
@ -102,10 +120,14 @@ function SearchFilter({
const urlQuery = useUrlQuery(); const urlQuery = useUrlQuery();
const urlQueryString = urlQuery.get('q'); const urlQueryString = urlQuery.get('q');
const debouncedHandleSearch = useMemo(() => debounce(handleSearch, 600), [
handleSearch,
]);
useEffect(() => { useEffect(() => {
handleSearch(urlQueryString || ''); debouncedHandleSearch(urlQueryString || '');
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [urlQueryString, maxTime, minTime]); }, [urlQueryString, maxTime, minTime, idEnd, idStart, logLinesPerPage]);
return ( return (
<Container> <Container>
@ -132,9 +154,11 @@ function SearchFilter({
<Input.Search <Input.Search
ref={searchRef} ref={searchRef}
placeholder="Search Filter" placeholder="Search Filter"
value={queryString} value={searchText}
onChange={(e): void => { onChange={(e): void => {
updateQueryString(e.target.value); const { value } = e.target;
setSearchText(value);
debouncedupdateQueryString(value);
}} }}
allowClear allowClear
onSearch={handleSearch} onSearch={handleSearch}

View File

@ -23,12 +23,10 @@ export function useSearchParser(): {
const updateQueryString = useCallback( const updateQueryString = useCallback(
(updatedQueryString) => { (updatedQueryString) => {
if (updatedQueryString) { history.replace({
history.push({ pathname: history.location.pathname,
pathname: history.location.pathname, search: updatedQueryString ? `?q=${updatedQueryString}` : '',
search: updatedQueryString ? `?q=${updatedQueryString}` : '', });
});
}
dispatch({ dispatch({
type: SET_SEARCH_QUERY_STRING, type: SET_SEARCH_QUERY_STRING,

View File

@ -31,7 +31,7 @@ export const externalCallErrorPercent = ({
const legendFormula = 'External Call Error Percentage'; const legendFormula = 'External Call Error Percentage';
const expression = 'A*100/B'; const expression = 'A*100/B';
const disabled = false; const disabled = true;
return getQueryBuilderQuerieswithAdditionalItems({ return getQueryBuilderQuerieswithAdditionalItems({
metricNameA, metricNameA,
metricNameB, metricNameB,
@ -102,7 +102,7 @@ export const externalCallDurationByAddress = ({
const metricNameB = 'signoz_external_call_latency_count'; const metricNameB = 'signoz_external_call_latency_count';
const expression = 'A/B'; const expression = 'A/B';
const legendFormula = legend; const legendFormula = legend;
const disabled = false; const disabled = true;
return getQueryBuilderQuerieswithFormula({ return getQueryBuilderQuerieswithFormula({
servicename, servicename,
legend, legend,

View File

@ -1,6 +1,4 @@
import { InfoCircleOutlined } from '@ant-design/icons'; import { Collapse } from 'antd';
import { Collapse, Popover, Space } from 'antd';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import useThemeMode from 'hooks/useThemeMode'; import useThemeMode from 'hooks/useThemeMode';
import keys from 'lodash-es/keys'; import keys from 'lodash-es/keys';
import map from 'lodash-es/map'; import map from 'lodash-es/map';
@ -9,6 +7,8 @@ import { ITraceTree } from 'types/api/trace/getTraceItem';
import EllipsedButton from '../EllipsedButton'; import EllipsedButton from '../EllipsedButton';
import { CustomSubText, CustomSubTitle } from '../styles'; import { CustomSubText, CustomSubTitle } from '../styles';
import EventStartTime from './EventStartTime';
import RelativeStartTime from './RelativeStartTime';
const { Panel } = Collapse; const { Panel } = Collapse;
@ -25,10 +25,6 @@ function ErrorTag({
{map(event, ({ attributeMap, name, timeUnixNano }) => { {map(event, ({ attributeMap, name, timeUnixNano }) => {
const attributes = keys(attributeMap); const attributes = keys(attributeMap);
const { time, timeUnitName } = convertTimeToRelevantUnit(
timeUnixNano / 1e6 - firstSpanStartTime,
);
return ( return (
<Collapse <Collapse
key={`${name}${JSON.stringify(attributeMap)}`} key={`${name}${JSON.stringify(attributeMap)}`}
@ -39,18 +35,14 @@ function ErrorTag({
header={name || attributeMap?.event} header={name || attributeMap?.event}
key={name || attributeMap.event} key={name || attributeMap.event}
> >
<Space direction="horizontal" align="center"> {firstSpanStartTime ? (
<CustomSubTitle style={{ margin: 0 }} ellipsis> <RelativeStartTime
Event Start Time firstSpanStartTime={firstSpanStartTime}
</CustomSubTitle> timeUnixNano={timeUnixNano}
<Popover content="Relative to start of the full trace"> />
<InfoCircleOutlined /> ) : (
</Popover> <EventStartTime timeUnixNano={timeUnixNano} />
</Space> )}
<CustomSubText isDarkMode={isDarkMode}>
{`${time.toFixed(2)} ${timeUnitName}`}
</CustomSubText>
{map(attributes, (event) => { {map(attributes, (event) => {
const value = attributeMap[event]; const value = attributeMap[event];
@ -93,7 +85,11 @@ interface ErrorTagProps {
event: ITraceTree['event']; event: ITraceTree['event'];
onToggleHandler: (isOpen: boolean) => void; onToggleHandler: (isOpen: boolean) => void;
setText: (text: { subText: string; text: string }) => void; setText: (text: { subText: string; text: string }) => void;
firstSpanStartTime: number; firstSpanStartTime?: number;
} }
ErrorTag.defaultProps = {
firstSpanStartTime: undefined,
};
export default ErrorTag; export default ErrorTag;

View File

@ -0,0 +1,31 @@
import { Popover } from 'antd';
import dayjs from 'dayjs';
import useThemeMode from 'hooks/useThemeMode';
import React from 'react';
import { CustomSubText, CustomSubTitle } from '../styles';
function EventStartTime({ timeUnixNano }: EventStartTimeProps): JSX.Element {
const { isDarkMode } = useThemeMode();
const humanReadableTimeInDayJs = dayjs(timeUnixNano / 1e6).format(
'YYYY-MM-DD hh:mm:ss.SSS A',
);
return (
<>
<CustomSubTitle style={{ margin: 0 }}>Event Time</CustomSubTitle>
<CustomSubText ellipsis isDarkMode={isDarkMode}>
<Popover content={humanReadableTimeInDayJs}>
{humanReadableTimeInDayJs}
</Popover>
</CustomSubText>
</>
);
}
interface EventStartTimeProps {
timeUnixNano: number;
}
export default EventStartTime;

View File

@ -0,0 +1,42 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import { Popover, Space } from 'antd';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import useThemeMode from 'hooks/useThemeMode';
import React from 'react';
import { CustomSubText, CustomSubTitle } from '../styles';
function StartTime({
firstSpanStartTime,
timeUnixNano,
}: StartTimeProps): JSX.Element {
const { isDarkMode } = useThemeMode();
const { time, timeUnitName } = convertTimeToRelevantUnit(
timeUnixNano / 1e6 - (firstSpanStartTime || 0),
);
return (
<>
<Space direction="horizontal" align="center">
<CustomSubTitle style={{ margin: 0 }} ellipsis>
Event Start Time
</CustomSubTitle>
<Popover content="Relative to start of the full trace">
<InfoCircleOutlined />
</Popover>
</Space>
<CustomSubText isDarkMode={isDarkMode}>
{`${time.toFixed(2)} ${timeUnitName}`}
</CustomSubText>
</>
);
}
interface StartTimeProps {
timeUnixNano: number;
firstSpanStartTime: number;
}
export default StartTime;

View File

@ -76,7 +76,7 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
/* eslint-enable */ /* eslint-enable */
}, [treesData, spanServiceColors]); }, [treesData, spanServiceColors]);
const firstSpanStartTime = tree.spanTree[0].startTime; const firstSpanStartTime = tree.spanTree[0]?.startTime;
const [globalTraceMetadata] = useState<ITraceMetaData>({ const [globalTraceMetadata] = useState<ITraceMetaData>({
...traceMetaData, ...traceMetaData,

View File

@ -34,9 +34,10 @@ export const traceDateAndTimelineContainer = css`
export const traceDateTimeContainer = css` export const traceDateTimeContainer = css`
display: flex; display: flex;
aligh-items: center; align-items: center;
justify-content: center; justify-content: center;
`; `;
export const timelineContainer = css` export const timelineContainer = css`
overflow: visible; overflow: visible;
margin: 0 1rem 0 0; margin: 0 1rem 0 0;
@ -48,7 +49,7 @@ export const ganttChartContainer = css`
position: relative; position: relative;
flex: 1; flex: 1;
overflow-y: auto; overflow-y: auto;
overflow-x: hidden; overflow-x: scroll;
`; `;
export const selectedSpanDetailContainer = css` export const selectedSpanDetailContainer = css`

View File

@ -1,6 +1,6 @@
import { WarningFilled } from '@ant-design/icons'; import { WarningFilled } from '@ant-design/icons';
import { Button, Card, Form, Space, Typography } from 'antd'; import { Button, Card, Form, Space, Typography } from 'antd';
import React, { useCallback } from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers'; import { AppState } from 'store/reducers';
@ -14,10 +14,6 @@ function Version(): JSX.Element {
const [form] = Form.useForm(); const [form] = Form.useForm();
const { t } = useTranslation(); const { t } = useTranslation();
const onClickUpgradeHandler = useCallback((link: string) => {
window.open(link, '_blank');
}, []);
const { const {
currentVersion, currentVersion,
latestVersion, latestVersion,
@ -60,9 +56,8 @@ function Version(): JSX.Element {
placeholder={t('latest_version')} placeholder={t('latest_version')}
/> />
<Button <Button
onClick={(): void => href="https://github.com/SigNoz/signoz/releases"
onClickUpgradeHandler('https://github.com/SigNoz/signoz/releases') target="_blank"
}
type="link" type="link"
> >
{t('release_notes')} {t('release_notes')}
@ -94,11 +89,8 @@ function Version(): JSX.Element {
{!isError && !isLatestVersion && ( {!isError && !isLatestVersion && (
<Button <Button
onClick={(): void => href="https://signoz.io/docs/operate/docker-standalone/#upgrade"
onClickUpgradeHandler( target="_blank"
'https://signoz.io/docs/operate/docker-standalone/#upgrade',
)
}
> >
{t('read_how_to_upgrade')} {t('read_how_to_upgrade')}
</Button> </Button>

View File

@ -2,20 +2,34 @@
// @ts-ignore // @ts-ignore
// @ts-nocheck // @ts-nocheck
import { QueryTypes, StringTypeQueryOperators } from "./tokens";
export const reverseParser = ( export const reverseParser = (
parserQueryArr: { type: string; value: any }[] = [], parserQueryArr: { type: string; value: any }[] = [],
) => { ) => {
let queryString = ''; let queryString = '';
let lastToken: { type: string; value: any };
parserQueryArr.forEach((query) => { parserQueryArr.forEach((query) => {
if (queryString) { if (queryString) {
queryString += ' '; queryString += ' ';
} }
if (Array.isArray(query.value) && query.value.length > 0) { if (Array.isArray(query.value) && query.value.length > 0) {
// if the values are array type, here we spread them in
// ('a', 'b') format
queryString += `(${query.value.map((val) => `'${val}'`).join(',')})`; queryString += `(${query.value.map((val) => `'${val}'`).join(',')})`;
} else { } else {
queryString += query.value; if (query.type === QueryTypes.QUERY_VALUE
&& lastToken.type === QueryTypes.QUERY_OPERATOR
&& Object.values(StringTypeQueryOperators).includes(lastToken.value) ) {
// for operators that need string type value, here we append single
// quotes. if the content has single quote they would be removed
queryString += `'${query.value?.replace(/'/g, '')}'`;
} else {
queryString += query.value;
}
} }
lastToken = query;
}); });
// console.log(queryString); // console.log(queryString);

View File

@ -7,6 +7,21 @@ export const QueryOperatorsSingleVal = {
NCONTAINS: 'NCONTAINS', NCONTAINS: 'NCONTAINS',
}; };
// list of operators that support only number values
export const NumTypeQueryOperators = {
GTE: 'GTE',
GT: 'GT',
LTE: 'LTE',
LT: 'LT',
};
// list of operators that support only string values
export const StringTypeQueryOperators = {
CONTAINS: 'CONTAINS',
NCONTAINS: 'NCONTAINS',
};
// list of operators that support array values
export const QueryOperatorsMultiVal = { export const QueryOperatorsMultiVal = {
IN: 'IN', IN: 'IN',
NIN: 'NIN', NIN: 'NIN',
@ -23,3 +38,46 @@ export const QueryTypes = {
QUERY_VALUE: 'QUERY_VALUE', QUERY_VALUE: 'QUERY_VALUE',
CONDITIONAL_OPERATOR: 'CONDITIONAL_OPERATOR', CONDITIONAL_OPERATOR: 'CONDITIONAL_OPERATOR',
}; };
export const ValidTypeValue = (
op: string,
value: string | string[],
): boolean => {
if (!op) return true;
if (Object.values(NumTypeQueryOperators).includes(op)) {
if (Array.isArray(value)) return false;
return !Number.isNaN(Number(value));
}
return true;
};
// ValidTypeSequence takes prior, current and next op to confirm
// the proper sequence. For example, if QUERY_VALUE needs to be
// in between QUERY_OPERATOR and (empty or CONDITIONAL_OPERATOR).
export const ValidTypeSequence = (
prior: string | undefined,
current: string | undefined,
next: string | undefined,
): boolean => {
switch (current) {
case QueryTypes.QUERY_KEY:
// query key can have an empty prior
if (!prior) return true;
return [QueryTypes.CONDITIONAL_OPERATOR].includes(prior);
case QueryTypes.QUERY_OPERATOR:
// empty prior is not allowed
if (!prior || ![QueryTypes.QUERY_KEY].includes(prior)) return false;
if (!next || ![QueryTypes.QUERY_VALUE].includes(next)) return false;
return true;
case QueryTypes.QUERY_VALUE:
// empty prior is not allowed
if (!prior) return false;
return [QueryTypes.QUERY_OPERATOR].includes(prior);
case QueryTypes.CONDITIONAL_OPERATOR:
// empty prior is not allowed
if (!next) return false;
return [QueryTypes.QUERY_KEY].includes(next);
default:
return false;
}
};

View File

@ -6,10 +6,8 @@
*/ */
export const isPasswordValid = (value: string): boolean => { export const isPasswordValid = (value: string): boolean => {
// eslint-disable-next-line prefer-regex-literals // eslint-disable-next-line prefer-regex-literals
const pattern = new RegExp( const pattern = new RegExp('^.{8,}$');
'^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[#?!@$%^&*-]).{8,}$',
);
return pattern.test(value); return pattern.test(value);
}; };
export const isPasswordNotValidMessage = `Password must a have minimum of 8 characters with at least one lower case, one number ,one upper case and one special character`; export const isPasswordNotValidMessage = `Password must a have minimum of 8 characters`;

View File

@ -1 +1,4 @@
export const SPAN_DETAILS_LEFT_COL_WIDTH = 350; export const SPAN_DETAILS_LEFT_COL_WIDTH = 350;
export const noEventMessage =
'The requested trace id was not found. Sometimes this happens because of insertion delay in trace data. Please try again after some time';

View File

@ -1,5 +1,6 @@
import { Typography } from 'antd'; import { Typography } from 'antd';
import getTraceItem from 'api/trace/getTraceItem'; import getTraceItem from 'api/trace/getTraceItem';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner'; import Spinner from 'components/Spinner';
import TraceDetailContainer from 'container/TraceDetail'; import TraceDetailContainer from 'container/TraceDetail';
import useUrlQuery from 'hooks/useUrlQuery'; import useUrlQuery from 'hooks/useUrlQuery';
@ -8,6 +9,8 @@ import { useQuery } from 'react-query';
import { useParams } from 'react-router-dom'; import { useParams } from 'react-router-dom';
import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem'; import { Props as TraceDetailProps } from 'types/api/trace/getTraceItem';
import { noEventMessage } from './constants';
function TraceDetail(): JSX.Element { function TraceDetail(): JSX.Element {
const { id } = useParams<TraceDetailProps>(); const { id } = useParams<TraceDetailProps>();
const urlQuery = useUrlQuery(); const urlQuery = useUrlQuery();
@ -19,6 +22,7 @@ function TraceDetail(): JSX.Element {
}), }),
[urlQuery], [urlQuery],
); );
const { data: traceDetailResponse, error, isLoading, isError } = useQuery( const { data: traceDetailResponse, error, isLoading, isError } = useQuery(
`getTraceItem/${id}`, `getTraceItem/${id}`,
() => getTraceItem({ id, spanId, levelUp, levelDown }), () => getTraceItem({ id, spanId, levelUp, levelDown }),
@ -39,6 +43,10 @@ function TraceDetail(): JSX.Element {
return <Spinner tip="Loading.." />; return <Spinner tip="Loading.." />;
} }
if (traceDetailResponse.payload[0].events.length === 0) {
return <NotFound text={noEventMessage} />;
}
return <TraceDetailContainer response={traceDetailResponse.payload} />; return <TraceDetailContainer response={traceDetailResponse.payload} />;
} }

View File

@ -15,6 +15,8 @@ export interface Props {
orderParam?: OrderBy; orderParam?: OrderBy;
limit?: number; limit?: number;
offset?: number; offset?: number;
exceptionType?: string;
serviceName?: string;
} }
export interface Exception { export interface Exception {

View File

@ -45,6 +45,7 @@ import (
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces" "go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model" "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils" "go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap" "go.uber.org/zap"
) )
@ -1177,33 +1178,54 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0} traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
} }
case constants.Duration: case constants.Duration:
finalQuery := fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable) err := r.featureFlags.CheckFeature(constants.DurationSort)
finalQuery += query durationSortEnabled := err == nil
finalQuery += " ORDER BY durationNano LIMIT 1" finalQuery := ""
var dBResponse []model.DBResponseTotal if !durationSortEnabled {
err := r.db.Select(ctx, &dBResponse, finalQuery, args...) // if duration sort is not enabled, we need to get the min and max duration from the index table
zap.S().Info(finalQuery) finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
var dBResponse []model.DBResponseMinMax
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration = map[string]uint64{"minDuration": dBResponse[0].Min, "maxDuration": dBResponse[0].Max}
}
} else {
// when duration sort is enabled, we need to get the min and max duration from the duration table
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil { if err != nil {
zap.S().Debug("Error in processing sql query: ", err) zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
} }
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.S().Info(finalQuery)
if err != nil { finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
zap.S().Debug("Error in processing sql query: ", err) finalQuery += query
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)} finalQuery += " ORDER BY durationNano DESC LIMIT 1"
} var dBResponse2 []model.DBResponseTotal
if len(dBResponse) > 0 { err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal zap.S().Info(finalQuery)
}
if len(dBResponse2) > 0 { if err != nil {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal zap.S().Debug("Error in processing sql query: ", err)
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("Error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
}
} }
case constants.RPCMethod: case constants.RPCMethod:
finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable) finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
@ -2506,8 +2528,35 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li
var getErrorResponses []model.Error var getErrorResponses []model.Error
query := fmt.Sprintf("SELECT any(exceptionType) as exceptionType, any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, any(serviceName) as serviceName, groupID FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU GROUP BY groupID", r.TraceDB, r.errorTable) query := "SELECT any(exceptionMessage) as exceptionMessage, count() AS exceptionCount, min(timestamp) as firstSeen, max(timestamp) as lastSeen, groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
} else {
query = query + ", any(serviceName) as serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
} else {
query = query + ", any(exceptionType) as exceptionType"
}
query += fmt.Sprintf(" FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))} args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName ilike @serviceName"
args = append(args, clickhouse.Named("serviceName", "%"+queryParams.ServiceName+"%"))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType ilike @exceptionType"
args = append(args, clickhouse.Named("exceptionType", "%"+queryParams.ExceptionType+"%"))
}
query = query + " GROUP BY groupID"
if len(queryParams.ServiceName) != 0 {
query = query + ", serviceName"
}
if len(queryParams.ExceptionType) != 0 {
query = query + ", exceptionType"
}
if len(queryParams.OrderParam) != 0 { if len(queryParams.OrderParam) != 0 {
if queryParams.Order == constants.Descending { if queryParams.Order == constants.Descending {
query = query + " ORDER BY " + queryParams.OrderParam + " DESC" query = query + " ORDER BY " + queryParams.OrderParam + " DESC"
@ -2542,7 +2591,14 @@ func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.C
query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable) query := fmt.Sprintf("SELECT count(distinct(groupID)) FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.errorTable)
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))} args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.ServiceName) != 0 {
query = query + " AND serviceName = @serviceName"
args = append(args, clickhouse.Named("serviceName", queryParams.ServiceName))
}
if len(queryParams.ExceptionType) != 0 {
query = query + " AND exceptionType = @exceptionType"
args = append(args, clickhouse.Named("exceptionType", queryParams.ExceptionType))
}
err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount) err := r.db.QueryRow(ctx, query, args...).Scan(&errorCount)
zap.S().Info(query) zap.S().Info(query)
@ -3067,6 +3123,20 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con
return totalSamples, nil return totalSamples, nil
} }
// GetDistributedInfoInLastHeartBeatInterval reports shard/replica health
// for the ClickHouse cluster named 'cluster' as a generic map (keys follow
// ClusterInfo's json tags) for telemetry. Returns (nil, nil) when the
// query does not yield exactly one row.
func (r *ClickHouseReader) GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error) {

	clusterInfo := []model.ClusterInfo{}

	queryStr := `SELECT shard_num, shard_weight, replica_num, errors_count, slowdowns_count, estimated_recovery_time FROM system.clusters where cluster='cluster';`
	// previously the Select error was silently discarded; surface it so
	// callers can distinguish "no cluster info" from "query failed"
	if err := r.db.Select(ctx, &clusterInfo, queryStr); err != nil {
		return nil, err
	}
	if len(clusterInfo) == 1 {
		return clusterInfo[0].GetMapFromStruct(), nil
	}

	return nil, nil
}
func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) { func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) {
var totalLogLines uint64 var totalLogLines uint64
@ -3197,7 +3267,8 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
// remove index // remove index
query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, cluster, field.Name) query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, cluster, field.Name)
err := r.db.Exec(ctx, query) err := r.db.Exec(ctx, query)
if err != nil { // we are ignoring errors with code 341 as it is an error with updating old part https://github.com/SigNoz/engineering-pod/issues/919#issuecomment-1366344346
if err != nil && !strings.HasPrefix(err.Error(), "code: 341") {
return &model.ApiError{Err: err, Typ: model.ErrorInternal} return &model.ApiError{Err: err, Typ: model.ErrorInternal}
} }
} }
@ -3212,11 +3283,18 @@ func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilter
} }
isPaginatePrev := logs.CheckIfPrevousPaginateAndModifyOrder(params) isPaginatePrev := logs.CheckIfPrevousPaginateAndModifyOrder(params)
filterSql, err := logs.GenerateSQLWhere(fields, params) filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, params)
if err != nil { if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData} return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
} }
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable) query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable)
if filterSql != "" { if filterSql != "" {
@ -3246,10 +3324,17 @@ func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailC
return return
} }
filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{ filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: client.Filter.Query, Query: client.Filter.Query,
}) })
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
if err != nil { if err != nil {
client.Error <- err client.Error <- err
return return
@ -3326,13 +3411,20 @@ func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.Logs
return nil, apiErr return nil, apiErr
} }
filterSql, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{ filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
Query: params.Query, Query: params.Query,
}) })
if err != nil { if err != nil {
return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData} return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
} }
data := map[string]interface{}{
"lenFilters": lenFilters,
}
if lenFilters != 0 {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data)
}
query := "" query := ""
if params.GroupBy != "" { if params.GroupBy != "" {
query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+ query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+

View File

@ -1159,6 +1159,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
RespondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil) RespondError(w, &model.ApiError{model.ErrorTimeout, res.Err}, nil)
} }
RespondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil) RespondError(w, &model.ApiError{model.ErrorExec, res.Err}, nil)
return
} }
response_data := &model.QueryData{ response_data := &model.QueryData{
@ -1332,6 +1333,9 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
} }
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data) telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data)
if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) {
telemetry.GetInstance().AddActiveTracesUser()
}
aH.WriteJSON(w, r, result) aH.WriteJSON(w, r, result)
} }
@ -2190,6 +2194,8 @@ func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
RespondError(w, &err, "streaming is not supported") RespondError(w, &err, "streaming is not supported")
return return
} }
// flush the headers
flusher.Flush()
for { for {
select { select {

View File

@ -36,7 +36,7 @@ const (
DESC = "desc" DESC = "desc"
) )
var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+ (in|nin) \([^(]+\))|([\w.]+ (gt|lt|gte|lte) (')?[\S]+(')?)|([\w.]+ (contains|ncontains)) [^\\]?'(.*?[^\\])')`) var tokenRegex, _ = regexp.Compile(`(?i)(and( )*?|or( )*?)?(([\w.-]+( )+(in|nin)( )+\([^(]+\))|([\w.]+( )+(gt|lt|gte|lte)( )+(')?[\S]+(')?)|([\w.]+( )+(contains|ncontains))( )+[^\\]?'(.*?[^\\])')`)
var operatorRegex, _ = regexp.Compile(`(?i)(?: )(in|nin|gt|lt|gte|lte|contains|ncontains)(?: )`) var operatorRegex, _ = regexp.Compile(`(?i)(?: )(in|nin|gt|lt|gte|lte|contains|ncontains)(?: )`)
func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) { func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {
@ -152,6 +152,7 @@ func ParseLogAggregateParams(r *http.Request) (*model.LogsAggregateParams, error
func parseLogQuery(query string) ([]string, error) { func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens := []string{} sqlQueryTokens := []string{}
filterTokens := tokenRegex.FindAllString(query, -1) filterTokens := tokenRegex.FindAllString(query, -1)
if len(filterTokens) == 0 { if len(filterTokens) == 0 {
@ -190,7 +191,13 @@ func parseLogQuery(query string) ([]string, error) {
sqlQueryTokens = append(sqlQueryTokens, f) sqlQueryTokens = append(sqlQueryTokens, f)
} else { } else {
symbol := operatorMapping[strings.ToLower(op)] symbol := operatorMapping[strings.ToLower(op)]
sqlQueryTokens = append(sqlQueryTokens, strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)+" ") sqlExpr := strings.Replace(v, " "+op+" ", " "+symbol+" ", 1)
splittedExpr := strings.Split(sqlExpr, symbol)
if len(splittedExpr) != 2 {
return nil, fmt.Errorf("error while splitting expression: %s", sqlExpr)
}
trimmedSqlExpr := fmt.Sprintf("%s %s %s ", strings.Join(strings.Fields(splittedExpr[0]), " "), symbol, strings.TrimSpace(splittedExpr[1]))
sqlQueryTokens = append(sqlQueryTokens, trimmedSqlExpr)
} }
} }
@ -272,20 +279,23 @@ func CheckIfPrevousPaginateAndModifyOrder(params *model.LogsFilterParams) (isPag
return return
} }
func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, error) { func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilterParams) (string, int, error) {
var tokens []string var tokens []string
var err error var err error
var sqlWhere string var sqlWhere string
var lenTokens = 0
if params.Query != "" { if params.Query != "" {
tokens, err = parseLogQuery(params.Query) tokens, err = parseLogQuery(params.Query)
if err != nil { if err != nil {
return sqlWhere, err return sqlWhere, -1, err
} }
lenTokens = len(tokens)
} }
tokens, err = replaceInterestingFields(allFields, tokens) tokens, err = replaceInterestingFields(allFields, tokens)
if err != nil { if err != nil {
return sqlWhere, err return sqlWhere, -1, err
} }
filterTokens := []string{} filterTokens := []string{}
@ -335,5 +345,5 @@ func GenerateSQLWhere(allFields *model.GetFieldsResponse, params *model.LogsFilt
sqlWhere = strings.Join(tokens, "") sqlWhere = strings.Join(tokens, "")
return sqlWhere, nil return sqlWhere, lenTokens, nil
} }

View File

@ -80,7 +80,17 @@ var correctQueriesTest = []struct {
{ {
`filters with extra spaces`, `filters with extra spaces`,
`service IN ('name > 100') AND length gt 100`, `service IN ('name > 100') AND length gt 100`,
[]string{`service IN ('name > 100') `, `AND length > 100 `}, []string{`service IN ('name > 100') `, `AND length > 100 `},
},
{
`Extra space within a filter expression`,
`service IN ('name > 100')`,
[]string{`service IN ('name > 100') `},
},
{
`Extra space between a query filter`,
`data contains 'hello world .'`,
[]string{`data ILIKE '%hello world .%' `},
}, },
{ {
`filters with special characters in key name`, `filters with special characters in key name`,

View File

@ -480,14 +480,18 @@ func parseListErrorsRequest(r *http.Request) (*model.ListErrorsParams, error) {
if err != nil { if err != nil {
return nil, errors.New("offset param is not in correct format") return nil, errors.New("offset param is not in correct format")
} }
serviceName := r.URL.Query().Get("serviceName")
exceptionType := r.URL.Query().Get("exceptionType")
params := &model.ListErrorsParams{ params := &model.ListErrorsParams{
Start: startTime, Start: startTime,
End: endTime, End: endTime,
OrderParam: orderParam, OrderParam: orderParam,
Order: order, Order: order,
Limit: int64(limitInt), Limit: int64(limitInt),
Offset: int64(offsetInt), Offset: int64(offsetInt),
ServiceName: serviceName,
ExceptionType: exceptionType,
} }
return params, nil return params, nil

View File

@ -1,8 +1,11 @@
package app package app
import ( import (
"bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"io/ioutil"
"net" "net"
"net/http" "net/http"
_ "net/http/pprof" // http profiler _ "net/http/pprof" // http profiler
@ -235,15 +238,84 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush() lrw.ResponseWriter.(http.Flusher).Flush()
} }
// extractDashboardMetaData inspects POST requests to the v2 metrics
// query-range endpoint and collects telemetry metadata (queryType,
// panelType, datasource) from the request body. The body is re-wrapped
// on the request so downstream handlers can still read it. For any
// other path/method it returns (nil, false).
func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) {
	pathToExtractBodyFrom := "/api/v2/metrics/query_range"
	if path != pathToExtractBodyFrom || r.Method != "POST" {
		return nil, false
	}

	var requestBody map[string]interface{}
	data := map[string]interface{}{}

	bodyBytes, _ := ioutil.ReadAll(r.Body)
	r.Body.Close() // must close before replacing the body
	r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
	// best-effort parse: an unparseable body just yields empty metadata
	json.Unmarshal(bodyBytes, &requestBody)

	signozMetricFound := false
	// guard the type assertion: the previous code asserted
	// compositeMetricQuery.(map[string]interface{}) unconditionally,
	// which panics when the key is absent (nil interface) or not a map.
	if compositeMetricQuery, exists := requestBody["compositeMetricQuery"]; exists {
		if compositeMetricQueryMap, ok := compositeMetricQuery.(map[string]interface{}); ok {
			signozMetricFound = telemetry.GetInstance().CheckSigNozMetrics(compositeMetricQueryMap)
			if queryType, queryTypeExists := compositeMetricQueryMap["queryType"]; queryTypeExists {
				data["queryType"] = queryType
			}
			if panelType, panelTypeExists := compositeMetricQueryMap["panelType"]; panelTypeExists {
				data["panelType"] = panelType
			}
		}
	}

	if datasource, datasourceExists := requestBody["dataSource"]; datasourceExists {
		data["datasource"] = datasource
	}

	// only count/report dashboards that are not SigNoz's own metrics
	if !signozMetricFound {
		telemetry.GetInstance().AddActiveMetricsUser()
		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, false)
	}
	return data, true
}
// getActiveLogs flags the logs feature as actively used whenever the
// logs list endpoint is hit with a non-empty "q" filter.
func getActiveLogs(path string, r *http.Request) {
	if path != "/api/v1/logs" {
		return
	}
	if r.URL.Query().Get("q") != "" {
		telemetry.GetInstance().AddActiveLogsUser()
	}
}
func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
route := mux.CurrentRoute(r) route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate() path, _ := route.GetPathTemplate()
dashboardMetadata, metadataExists := extractDashboardMetaData(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w) lrw := NewLoggingResponseWriter(w)
next.ServeHTTP(lrw, r) next.ServeHTTP(lrw, r)
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode} data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range dashboardMetadata {
data[key] = value
}
}
if telemetry.GetInstance().IsSampled() { if telemetry.GetInstance().IsSampled() {
if _, ok := telemetry.IgnoredPaths()[path]; !ok { if _, ok := telemetry.IgnoredPaths()[path]; !ok {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data) telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data)

View File

@ -2,9 +2,7 @@ package auth
import ( import (
"context" "context"
"fmt"
"net/http" "net/http"
"regexp"
"github.com/pkg/errors" "github.com/pkg/errors"
"go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/constants"
@ -74,21 +72,21 @@ func ValidatePassword(password string) error {
return errors.Errorf("Password should be atleast %d characters.", minimumPasswordLength) return errors.Errorf("Password should be atleast %d characters.", minimumPasswordLength)
} }
num := `[0-9]{1}` // num := `[0-9]{1}`
lower := `[a-z]{1}` // lower := `[a-z]{1}`
upper := `[A-Z]{1}` // upper := `[A-Z]{1}`
symbol := `[!@#$&*]{1}` // symbol := `[!@#$&*]{1}`
if b, err := regexp.MatchString(num, password); !b || err != nil { // if b, err := regexp.MatchString(num, password); !b || err != nil {
return fmt.Errorf("password should have atleast one number") // return fmt.Errorf("password should have atleast one number")
} // }
if b, err := regexp.MatchString(lower, password); !b || err != nil { // if b, err := regexp.MatchString(lower, password); !b || err != nil {
return fmt.Errorf("password should have atleast one lower case letter") // return fmt.Errorf("password should have atleast one lower case letter")
} // }
if b, err := regexp.MatchString(upper, password); !b || err != nil { // if b, err := regexp.MatchString(upper, password); !b || err != nil {
return fmt.Errorf("password should have atleast one upper case letter") // return fmt.Errorf("password should have atleast one upper case letter")
} // }
if b, err := regexp.MatchString(symbol, password); !b || err != nil { // if b, err := regexp.MatchString(symbol, password); !b || err != nil {
return fmt.Errorf("password should have atleast one special character from !@#$&* ") // return fmt.Errorf("password should have atleast one special character from !@#$&* ")
} // }
return nil return nil
} }

View File

@ -125,6 +125,13 @@ func (mds *ModelDaoSqlite) initializeOrgPreferences(ctx context.Context) error {
// set telemetry fields from userPreferences // set telemetry fields from userPreferences
telemetry.GetInstance().SetDistinctId(org.Id) telemetry.GetInstance().SetDistinctId(org.Id)
users, _ := mds.GetUsers(ctx)
countUsers := len(users)
telemetry.GetInstance().SetCountUsers(int8(countUsers))
if countUsers > 0 {
telemetry.GetInstance().SetCompanyDomain(users[countUsers-1].Email)
}
return nil return nil
} }

View File

@ -63,6 +63,7 @@ type Reader interface {
GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) GetSamplesInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error) GetLogsInfoInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error) GetTagsInfoInLastHeartBeatInterval(ctx context.Context) (*model.TagsInfo, error)
GetDistributedInfoInLastHeartBeatInterval(ctx context.Context) (map[string]interface{}, error)
// Logs // Logs
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError

View File

@ -296,17 +296,21 @@ type GetTTLParams struct {
} }
type ListErrorsParams struct { type ListErrorsParams struct {
Start *time.Time Start *time.Time
End *time.Time End *time.Time
Limit int64 Limit int64
OrderParam string OrderParam string
Order string Order string
Offset int64 Offset int64
ServiceName string
ExceptionType string
} }
type CountErrorsParams struct { type CountErrorsParams struct {
Start *time.Time Start *time.Time
End *time.Time End *time.Time
ServiceName string
ExceptionType string
} }
type GetErrorParams struct { type GetErrorParams struct {

View File

@ -399,6 +399,11 @@ type DBResponseTotal struct {
NumTotal uint64 `ch:"numTotal"` NumTotal uint64 `ch:"numTotal"`
} }
// DBResponseMinMax holds min/max durationNano aggregates scanned from
// ClickHouse (used by the span-filter duration query).
type DBResponseMinMax struct {
	Min uint64 `ch:"min"`
	Max uint64 `ch:"max"`
}
type SpanFiltersResponse struct { type SpanFiltersResponse struct {
ServiceName map[string]uint64 `json:"serviceName"` ServiceName map[string]uint64 `json:"serviceName"`
Status map[string]uint64 `json:"status"` Status map[string]uint64 `json:"status"`
@ -564,3 +569,19 @@ type TagTelemetryData struct {
Env string `json:"env" ch:"env"` Env string `json:"env" ch:"env"`
Language string `json:"language" ch:"language"` Language string `json:"language" ch:"language"`
} }
// ClusterInfo mirrors one row of ClickHouse's system.clusters table.
// The ch tags map the scanned columns; the json tags determine the
// map keys produced by GetMapFromStruct.
type ClusterInfo struct {
	ShardNum              uint32 `json:"shard_num" ch:"shard_num"`
	ShardWeight           uint32 `json:"shard_weight" ch:"shard_weight"`
	ReplicaNum            uint32 `json:"replica_num" ch:"replica_num"`
	ErrorsCount           uint32 `json:"errors_count" ch:"errors_count"`
	SlowdownsCount        uint32 `json:"slowdowns_count" ch:"slowdowns_count"`
	EstimatedRecoveryTime uint32 `json:"estimated_recovery_time" ch:"estimated_recovery_time"`
}
// GetMapFromStruct converts the ClusterInfo into a generic map by
// round-tripping through JSON, so keys follow the struct's json tags.
func (ci *ClusterInfo) GetMapFromStruct() map[string]interface{} {
	var asMap map[string]interface{}
	encoded, _ := json.Marshal(*ci)
	json.Unmarshal(encoded, &asMap)
	return asMap
}

View File

@ -2,6 +2,7 @@ package telemetry
import ( import (
"context" "context"
"encoding/json"
"io/ioutil" "io/ioutil"
"math/rand" "math/rand"
"net/http" "net/http"
@ -32,19 +33,24 @@ const (
TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed" TELEMETRY_LICENSE_ACT_FAILED = "License Activation Failed"
TELEMETRY_EVENT_ENVIRONMENT = "Environment" TELEMETRY_EVENT_ENVIRONMENT = "Environment"
TELEMETRY_EVENT_LANGUAGE = "Language" TELEMETRY_EVENT_LANGUAGE = "Language"
TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters"
TELEMETRY_EVENT_DISTRIBUTED = "Distributed"
TELEMETRY_EVENT_DASHBOARDS_METADATA = "Dashboards Metadata"
TELEMETRY_EVENT_ACTIVE_USER = "Active User"
) )
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz" const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w" const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"
const IP_NOT_FOUND_PLACEHOLDER = "NA" const IP_NOT_FOUND_PLACEHOLDER = "NA"
const DEFAULT_NUMBER_OF_SERVICES = 6
const HEART_BEAT_DURATION = 6 * time.Hour const HEART_BEAT_DURATION = 6 * time.Hour
// const HEART_BEAT_DURATION = 10 * time.Second // const HEART_BEAT_DURATION = 10 * time.Second
const RATE_LIMIT_CHECK_DURATION = 1 * time.Minute const RATE_LIMIT_CHECK_DURATION = 1 * time.Minute
const RATE_LIMIT_VALUE = 10 const RATE_LIMIT_VALUE = 2
// const RATE_LIMIT_CHECK_DURATION = 20 * time.Second // const RATE_LIMIT_CHECK_DURATION = 20 * time.Second
// const RATE_LIMIT_VALUE = 5 // const RATE_LIMIT_VALUE = 5
@ -64,6 +70,33 @@ func (a *Telemetry) IsSampled() bool {
} }
// CheckSigNozMetrics reports whether the composite metric query refers
// to SigNoz's own internal metrics (any "signoz_"-prefixed metric name)
// by serializing the builder or prom queries and substring-matching.
func (telemetry *Telemetry) CheckSigNozMetrics(compositeMetricQueryMap map[string]interface{}) bool {
	if builderQueries, ok := compositeMetricQueryMap["builderQueries"]; ok {
		serialized, _ := json.Marshal(builderQueries)
		return strings.Contains(string(serialized), "signoz_")
	}

	if promQueries, ok := compositeMetricQueryMap["promQueries"]; ok {
		serialized, _ := json.Marshal(promQueries)
		return strings.Contains(string(serialized), "signoz_")
	}

	return false
}
func (telemetry *Telemetry) AddActiveTracesUser() {
telemetry.activeUser["traces"] = 1
}
func (telemetry *Telemetry) AddActiveMetricsUser() {
telemetry.activeUser["metrics"] = 1
}
func (telemetry *Telemetry) AddActiveLogsUser() {
telemetry.activeUser["logs"] = 1
}
type Telemetry struct { type Telemetry struct {
operator analytics.Client operator analytics.Client
phOperator ph.Client phOperator ph.Client
@ -76,6 +109,8 @@ type Telemetry struct {
minRandInt int minRandInt int
maxRandInt int maxRandInt int
rateLimits map[string]int8 rateLimits map[string]int8
activeUser map[string]int8
countUsers int8
} }
func createTelemetry() { func createTelemetry() {
@ -85,6 +120,7 @@ func createTelemetry() {
phOperator: ph.New(ph_api_key), phOperator: ph.New(ph_api_key),
ipAddress: getOutboundIP(), ipAddress: getOutboundIP(),
rateLimits: make(map[string]int8), rateLimits: make(map[string]int8),
activeUser: make(map[string]int8),
} }
telemetry.minRandInt = 0 telemetry.minRandInt = 0
telemetry.maxRandInt = int(1 / DEFAULT_SAMPLING) telemetry.maxRandInt = int(1 / DEFAULT_SAMPLING)
@ -111,6 +147,13 @@ func createTelemetry() {
for { for {
select { select {
case <-ticker.C: case <-ticker.C:
if (telemetry.activeUser["traces"] != 0) || (telemetry.activeUser["metrics"] != 0) || (telemetry.activeUser["logs"] != 0) {
telemetry.activeUser["any"] = 1
}
telemetry.SendEvent(TELEMETRY_EVENT_ACTIVE_USER, map[string]interface{}{"traces": telemetry.activeUser["traces"], "metrics": telemetry.activeUser["metrics"], "logs": telemetry.activeUser["logs"], "any": telemetry.activeUser["any"]})
telemetry.activeUser = map[string]int8{"traces": 0, "metrics": 0, "logs": 0, "any": 0}
tagsInfo, _ := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background()) tagsInfo, _ := telemetry.reader.GetTagsInfoInLastHeartBeatInterval(context.Background())
if len(tagsInfo.Env) != 0 { if len(tagsInfo.Env) != 0 {
@ -128,16 +171,28 @@ func createTelemetry() {
getLogsInfoInLastHeartBeatInterval, _ := telemetry.reader.GetLogsInfoInLastHeartBeatInterval(context.Background()) getLogsInfoInLastHeartBeatInterval, _ := telemetry.reader.GetLogsInfoInLastHeartBeatInterval(context.Background())
traceTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.TraceTTL})
metricsTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.MetricsTTL})
logsTTL, _ := telemetry.reader.GetTTL(context.Background(), &model.GetTTLParams{Type: constants.LogsTTL})
data := map[string]interface{}{ data := map[string]interface{}{
"totalSpans": totalSpans, "totalSpans": totalSpans,
"spansInLastHeartBeatInterval": spansInLastHeartBeatInterval, "spansInLastHeartBeatInterval": spansInLastHeartBeatInterval,
"getSamplesInfoInLastHeartBeatInterval": getSamplesInfoInLastHeartBeatInterval, "getSamplesInfoInLastHeartBeatInterval": getSamplesInfoInLastHeartBeatInterval,
"getLogsInfoInLastHeartBeatInterval": getLogsInfoInLastHeartBeatInterval, "getLogsInfoInLastHeartBeatInterval": getLogsInfoInLastHeartBeatInterval,
"countUsers": telemetry.countUsers,
"metricsTTLStatus": metricsTTL.Status,
"tracesTTLStatus": traceTTL.Status,
"logsTTLStatus": logsTTL.Status,
} }
for key, value := range tsInfo { for key, value := range tsInfo {
data[key] = value data[key] = value
} }
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data) telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data)
getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background())
telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval)
} }
} }
}() }()
@ -169,7 +224,7 @@ func (a *Telemetry) IdentifyUser(user *model.User) {
if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() { if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() {
return return
} }
a.setCompanyDomain(user.Email) a.SetCompanyDomain(user.Email)
a.operator.Enqueue(analytics.Identify{ a.operator.Enqueue(analytics.Identify{
UserId: a.ipAddress, UserId: a.ipAddress,
@ -185,7 +240,11 @@ func (a *Telemetry) IdentifyUser(user *model.User) {
} }
func (a *Telemetry) setCompanyDomain(email string) { func (a *Telemetry) SetCountUsers(countUsers int8) {
a.countUsers = countUsers
}
func (a *Telemetry) SetCompanyDomain(email string) {
email_split := strings.Split(email, "@") email_split := strings.Split(email, "@")
if len(email_split) != 2 { if len(email_split) != 2 {
@ -207,7 +266,12 @@ func (a *Telemetry) checkEvents(event string) bool {
return sendEvent return sendEvent
} }
func (a *Telemetry) SendEvent(event string, data map[string]interface{}) { func (a *Telemetry) SendEvent(event string, data map[string]interface{}, opts ...bool) {
rateLimitFlag := true
if len(opts) > 0 {
rateLimitFlag = opts[0]
}
if !a.isTelemetryEnabled() { if !a.isTelemetryEnabled() {
return return
@ -218,10 +282,12 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}) {
return return
} }
if a.rateLimits[event] < RATE_LIMIT_VALUE { if rateLimitFlag {
a.rateLimits[event] += 1 if a.rateLimits[event] < RATE_LIMIT_VALUE {
} else { a.rateLimits[event] += 1
return } else {
return
}
} }
// zap.S().Info(data) // zap.S().Info(data)

View File

@ -169,7 +169,7 @@ services:
<<: *clickhouse-depends <<: *clickhouse-depends
otel-collector: otel-collector:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-config.yaml"] command: ["--config=/etc/otel-collector-config.yaml"]
user: root # required for reading docker container logs user: root # required for reading docker container logs
volumes: volumes:
@ -194,7 +194,7 @@ services:
<<: *clickhouse-depends <<: *clickhouse-depends
otel-collector-metrics: otel-collector-metrics:
image: signoz/signoz-otel-collector:0.66.0 image: signoz/signoz-otel-collector:0.66.1
command: ["--config=/etc/otel-collector-metrics-config.yaml"] command: ["--config=/etc/otel-collector-metrics-config.yaml"]
volumes: volumes:
- ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml

View File

@ -74,7 +74,7 @@ processors:
signozspanmetrics/prometheus: signozspanmetrics/prometheus:
metrics_exporter: prometheus metrics_exporter: prometheus
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 10000 dimensions_cache_size: 100000
dimensions: dimensions:
- name: service.namespace - name: service.namespace
default: default default: default