diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index fec809c500..34c639ae43 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -137,7 +137,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.21.0 + image: signoz/query-service:0.22.0 command: ["-config=/root/config/prometheus.yml"] # ports: # - "6060:6060" # pprof port @@ -166,7 +166,7 @@ services: <<: *clickhouse-depend frontend: - image: signoz/frontend:0.21.0 + image: signoz/frontend:0.22.0 deploy: restart_policy: condition: on-failure @@ -179,7 +179,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.79.1 + image: signoz/signoz-otel-collector:0.79.2 command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] user: root # required for reading docker container logs volumes: @@ -208,7 +208,7 @@ services: <<: *clickhouse-depend otel-collector-metrics: - image: signoz/signoz-otel-collector:0.79.1 + image: signoz/signoz-otel-collector:0.79.2 command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] volumes: - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml @@ -233,7 +233,7 @@ services: max-file: "3" load-hotrod: - image: "grubykarol/locust:1.2.3-python3.9-alpine3.12" + image: "signoz/locust:1.2.3" hostname: load-hotrod environment: ATTACKED_HOST: http://hotrod:8080 diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 84d2bdcd93..79ae0c765e 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -41,7 +41,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: otel-collector - image: signoz/signoz-otel-collector:0.79.1 + image: signoz/signoz-otel-collector:0.79.2 command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] # user: root # required for reading docker container logs volumes: @@ -67,7 +67,7 @@ services: otel-collector-metrics: container_name: otel-collector-metrics - image: signoz/signoz-otel-collector:0.79.1 + image: signoz/signoz-otel-collector:0.79.2 command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] volumes: - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml @@ -93,7 +93,7 @@ services: - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces load-hotrod: - image: "grubykarol/locust:1.2.3-python3.9-alpine3.12" + image: "signoz/locust:1.2.3" container_name: load-hotrod hostname: load-hotrod environment: diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 70441af123..5d05f15528 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -153,7 +153,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.21.0} + image: signoz/query-service:${DOCKER_TAG:-0.22.0} container_name: query-service command: ["-config=/root/config/prometheus.yml"] # ports: @@ -181,7 +181,7 @@ services: <<: *clickhouse-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.21.0} + image: signoz/frontend:${DOCKER_TAG:-0.22.0} container_name: frontend restart: on-failure depends_on: @@ -193,7 +193,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.1} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.2} command: ["--config=/etc/otel-collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] user: root # required for reading docker container logs volumes: @@ -219,7 +219,7 @@ services: <<: *clickhouse-depend otel-collector-metrics: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.1} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.2} command: ["--config=/etc/otel-collector-metrics-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"] volumes: - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml @@ -243,7 +243,7 @@ services: - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces load-hotrod: - image: "grubykarol/locust:1.2.3-python3.9-alpine3.12" + image: "signoz/locust:1.2.3" container_name: load-hotrod hostname: load-hotrod environment: diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index 42410b65e7..df1220d80c 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -16,6 +16,7 @@ import ( type APIHandlerOptions struct { DataConnector interfaces.DataConnector + SkipConfig *basemodel.SkipConfig AppDao dao.ModelDao RulesManager *rules.Manager FeatureFlags baseint.FeatureLookup @@ -32,6 +33,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) { baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{ Reader: opts.DataConnector, + SkipConfig: opts.SkipConfig, AppDao: opts.AppDao, RuleManager: opts.RulesManager, FeatureFlags: opts.FeatureFlags}) diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 942ed24ced..ec2895acd8 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -49,9 +49,10 @@ import ( const AppDbEngine = "sqlite" type ServerOptions struct { - PromConfigPath string - HTTPHostPort string - PrivateHostPort string + PromConfigPath string + SkipTopLvlOpsPath string + HTTPHostPort string + PrivateHostPort string // alert specific params DisableRules bool RuleRepoURL string @@ -119,7 +120,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { go qb.Start(readerReady) reader = qb } else { - return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage) + return nil, fmt.Errorf("storage type: %s is not supported in query service", storage) + } + skipConfig := &basemodel.SkipConfig{} + if serverOptions.SkipTopLvlOpsPath != "" { + // read skip config + skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath) + if err != nil { + return nil, err + } } <-readerReady @@ -160,6 +169,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { apiOpts := api.APIHandlerOptions{ DataConnector: reader, + SkipConfig: skipConfig, AppDao: modelDao, RulesManager: rm, FeatureFlags: lm, 
diff --git a/ee/query-service/main.go b/ee/query-service/main.go index 6d38fb9f65..67cbde2151 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -74,7 +74,7 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { } func main() { - var promConfigPath string + var promConfigPath, skipTopLvlOpsPath string // disables rule execution but allows change to the rule definition var disableRules bool @@ -85,6 +85,7 @@ func main() { var enableQueryServiceLogOTLPExport bool flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") + flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)") flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)") @@ -98,11 +99,12 @@ func main() { version.PrintVersion() serverOptions := &app.ServerOptions{ - HTTPHostPort: baseconst.HTTPHostPort, - PromConfigPath: promConfigPath, - PrivateHostPort: baseconst.PrivateHostPort, - DisableRules: disableRules, - RuleRepoURL: ruleRepoURL, + HTTPHostPort: baseconst.HTTPHostPort, + PromConfigPath: promConfigPath, + SkipTopLvlOpsPath: skipTopLvlOpsPath, + PrivateHostPort: baseconst.PrivateHostPort, + DisableRules: disableRules, + RuleRepoURL: ruleRepoURL, } // Read the jwt secret key diff --git a/frontend/src/components/Graph/index.tsx b/frontend/src/components/Graph/index.tsx index f4ea8b14b2..6f0f475bac 100644 --- a/frontend/src/components/Graph/index.tsx +++ b/frontend/src/components/Graph/index.tsx @@ -181,6 +181,9 @@ function Graph({ }, }, position: 'custom', + itemSort(item1, item2) { + return item2.parsed.y - item1.parsed.y; + }, }, [dragSelectPluginId]: createDragSelectPluginOptions( !!onDragSelect, diff --git a/frontend/src/components/ResizeTable/ResizeTable.tsx b/frontend/src/components/ResizeTable/ResizeTable.tsx index 681d8b8670..d6898d0815 100644 --- a/frontend/src/components/ResizeTable/ResizeTable.tsx +++ b/frontend/src/components/ResizeTable/ResizeTable.tsx @@ -1,14 +1,20 @@ import { Table } from 'antd'; import type { TableProps } from 'antd/es/table'; import { ColumnsType } from 'antd/lib/table'; -import { SyntheticEvent, useCallback, useMemo, useState } from 'react'; +import { + SyntheticEvent, + useCallback, + useEffect, + useMemo, + useState, +} from 'react'; import { ResizeCallbackData } from 'react-resizable'; import ResizableHeader from './ResizableHeader'; // eslint-disable-next-line @typescript-eslint/no-explicit-any function ResizeTable({ columns, ...restprops }: TableProps): JSX.Element { - const [columnsData, setColumns] = useState(columns || []); + const [columnsData, setColumns] = useState([]); const handleResize = useCallback( (index: number) => ( @@ -37,6 +43,12 @@ function ResizeTable({ columns, ...restprops }: TableProps): JSX.Element { [columnsData, handleResize], ); + useEffect(() => { + if (columns) { + setColumns(columns); + } + }, [columns]); + return ( = { VALUE: 'value', TABLE: 'table', LIST: 'list', + TRACE: 'trace', EMPTY_WIDGET: 'EMPTY_WIDGET', }; diff --git a/frontend/src/container/Controls/index.tsx b/frontend/src/container/Controls/index.tsx index a9a656bfc8..8971214a05 100644 --- a/frontend/src/container/Controls/index.tsx +++ 
b/frontend/src/container/Controls/index.tsx @@ -1,33 +1,30 @@ import { LeftOutlined, RightOutlined } from '@ant-design/icons'; import { Button, Select } from 'antd'; +import { DEFAULT_PER_PAGE_OPTIONS, Pagination } from 'hooks/queryPagination'; import { memo, useMemo } from 'react'; -import { defaultSelectStyle, ITEMS_PER_PAGE_OPTIONS } from './config'; +import { defaultSelectStyle } from './config'; import { Container } from './styles'; -interface ControlsProps { - count: number; - countPerPage: number; - isLoading: boolean; - handleNavigatePrevious: () => void; - handleNavigateNext: () => void; - handleCountItemsPerPageChange: (e: number) => void; -} - -function Controls(props: ControlsProps): JSX.Element | null { - const { - count, - isLoading, - countPerPage, - handleNavigatePrevious, - handleNavigateNext, - handleCountItemsPerPageChange, - } = props; - +function Controls({ + offset = 0, + perPageOptions = DEFAULT_PER_PAGE_OPTIONS, + isLoading, + totalCount, + countPerPage, + handleNavigatePrevious, + handleNavigateNext, + handleCountItemsPerPageChange, +}: ControlsProps): JSX.Element | null { const isNextAndPreviousDisabled = useMemo( - () => isLoading || countPerPage === 0 || count === 0 || count < countPerPage, - [isLoading, countPerPage, count], + () => isLoading || countPerPage < 0 || totalCount === 0, + [isLoading, countPerPage, totalCount], ); + const isPreviousDisabled = useMemo(() => offset <= 0, [offset]); + const isNextDisabled = useMemo(() => totalCount < countPerPage, [ + countPerPage, + totalCount, + ]); return ( @@ -35,7 +32,7 @@ function Controls(props: ControlsProps): JSX.Element | null { loading={isLoading} size="small" type="link" - disabled={isNextAndPreviousDisabled} + disabled={isPreviousDisabled || isNextAndPreviousDisabled} onClick={handleNavigatePrevious} > Previous @@ -44,18 +41,18 @@ function Controls(props: ControlsProps): JSX.Element | null { loading={isLoading} size="small" type="link" - disabled={isNextAndPreviousDisabled} + disabled={isNextDisabled || isNextAndPreviousDisabled} onClick={handleNavigateNext} > Next - - + {!isTracePanelType && ( + + + + {renderAdditionalFilters()} + + + + )} + {panelType !== PANEL_TYPES.LIST && panelType !== PANEL_TYPES.TRACE && ( + + + + )} ); }); diff --git a/frontend/src/container/QueryBuilder/filters/AggregateEveryFilter/index.tsx b/frontend/src/container/QueryBuilder/filters/AggregateEveryFilter/index.tsx index c787039dc7..c68dad2cff 100644 --- a/frontend/src/container/QueryBuilder/filters/AggregateEveryFilter/index.tsx +++ b/frontend/src/container/QueryBuilder/filters/AggregateEveryFilter/index.tsx @@ -1,11 +1,7 @@ -import { Input } from 'antd'; -import getStep from 'lib/getStep'; +import { InputNumber, InputNumberProps } from 'antd'; import { useMemo } from 'react'; -import { useSelector } from 'react-redux'; -import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { DataSource } from 'types/common/queryBuilder'; -import { GlobalReducer } from 'types/reducer/globalTime'; import { selectStyle } from '../QueryBuilderSearch/config'; @@ -13,49 +9,27 @@ function AggregateEveryFilter({ onChange, query, }: AggregateEveryFilterProps): JSX.Element { - const { maxTime, minTime } = useSelector( - (state) => state.globalTime, - ); - - const stepInterval = useMemo( - () => - getStep({ - start: minTime, - end: maxTime, - inputFormat: 'ns', - }), - [maxTime, minTime], - ); - - const handleKeyDown = (event: { - keyCode: number; - which: number; - preventDefault: () 
=> void; - }): void => { - const keyCode = event.keyCode || event.which; - const isBackspace = keyCode === 8; - const isNumeric = - (keyCode >= 48 && keyCode <= 57) || (keyCode >= 96 && keyCode <= 105); - - if (!isNumeric && !isBackspace) { - event.preventDefault(); - } - }; - const isMetricsDataSource = useMemo( () => query.dataSource === DataSource.METRICS, [query.dataSource], ); + const onChangeHandler: InputNumberProps['onChange'] = (event) => { + if (event && event >= 0) { + onChange(event); + } + }; + + const isDisabled = isMetricsDataSource && !query.aggregateAttribute.key; + return ( - onChange(Number(event.target.value))} - onKeyDown={handleKeyDown} + value={query.stepInterval} + onChange={onChangeHandler} + min={0} /> ); } diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.intefaces.ts b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.intefaces.ts index cff5c295df..35c5f0178a 100644 --- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.intefaces.ts +++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.intefaces.ts @@ -1,7 +1,8 @@ +import { AutoCompleteProps } from 'antd'; import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; -export type AgregatorFilterProps = { - onChange: (value: BaseAutocompleteData) => void; +export type AgregatorFilterProps = Pick & { query: IBuilderQuery; + onChange: (value: BaseAutocompleteData) => void; }; diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx index a168d32d0c..2f8cf1ea90 100644 --- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx @@ -28,8 +28,9 @@ import { selectStyle } from '../QueryBuilderSearch/config'; import { AgregatorFilterProps } from './AggregatorFilter.intefaces'; export const AggregatorFilter = memo(function AggregatorFilter({ - onChange, query, + disabled, + onChange, }: AgregatorFilterProps): JSX.Element { const [optionsData, setOptionsData] = useState([]); const debouncedValue = useDebounce(query.aggregateAttribute.key, 300); @@ -119,6 +120,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({ options={optionsData} value={value} onChange={handleChangeAttribute} + disabled={disabled} /> ); }); diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx index 111ff2ea5c..62bd165ffb 100644 --- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx @@ -102,10 +102,12 @@ export const GroupByFilter = memo(function GroupByFilter({ return { id, - key, - dataType: dataType as DataType, - type: type as AutocompleteType, - isColumn: isColumn === 'true', + key: key || currentValue, + dataType: (dataType as DataType) || initialAutocompleteData.dataType, + type: (type as AutocompleteType) || initialAutocompleteData.type, + isColumn: isColumn + ? 
isColumn === 'true' + : initialAutocompleteData.isColumn, }; } diff --git a/frontend/src/container/QueryBuilder/filters/OrderByFilter/OrderByFilter.tsx b/frontend/src/container/QueryBuilder/filters/OrderByFilter/OrderByFilter.tsx index 13a8baad33..f85c72dc34 100644 --- a/frontend/src/container/QueryBuilder/filters/OrderByFilter/OrderByFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/OrderByFilter/OrderByFilter.tsx @@ -1,6 +1,8 @@ import { Select, Spin } from 'antd'; import { getAggregateKeys } from 'api/queryBuilder/getAttributeKeys'; import { QueryBuilderKeys } from 'constants/queryBuilder'; +import { IOption } from 'hooks/useResourceAttribute/types'; +import { uniqWith } from 'lodash-es'; import * as Papa from 'papaparse'; import { useCallback, useMemo, useState } from 'react'; import { useQuery } from 'react-query'; @@ -9,12 +11,14 @@ import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder'; import { selectStyle } from '../QueryBuilderSearch/config'; import { getRemoveOrderFromValue } from '../QueryBuilderSearch/utils'; +import { FILTERS } from './config'; import { OrderByFilterProps } from './OrderByFilter.interfaces'; import { checkIfKeyPresent, getLabelFromValue, mapLabelValuePairs, orderByValueDelimiter, + transformToOrderByStringValues, } from './utils'; export function OrderByFilter({ @@ -22,7 +26,9 @@ export function OrderByFilter({ onChange, }: OrderByFilterProps): JSX.Element { const [searchText, setSearchText] = useState(''); - const [selectedValue, setSelectedValue] = useState([]); + const [selectedValue, setSelectedValue] = useState( + transformToOrderByStringValues(query.orderBy) || [], + ); const { data, isFetching } = useQuery( [QueryBuilderKeys.GET_AGGREGATE_KEYS, searchText], @@ -55,23 +61,41 @@ export function OrderByFilter({ .flat() .concat([ { - label: `${query.aggregateOperator}(${query.aggregateAttribute.key}) asc`, - value: `${query.aggregateOperator}(${query.aggregateAttribute.key})${orderByValueDelimiter}asc`, + label: `${query.aggregateOperator}(${query.aggregateAttribute.key}) ${FILTERS.ASC}`, + value: `${query.aggregateOperator}(${query.aggregateAttribute.key})${orderByValueDelimiter}${FILTERS.ASC}`, }, { - label: `${query.aggregateOperator}(${query.aggregateAttribute.key}) desc`, - value: `${query.aggregateOperator}(${query.aggregateAttribute.key})${orderByValueDelimiter}desc`, + label: `${query.aggregateOperator}(${query.aggregateAttribute.key}) ${FILTERS.DESC}`, + value: `${query.aggregateOperator}(${query.aggregateAttribute.key})${orderByValueDelimiter}${FILTERS.DESC}`, }, ]), [query.aggregateAttribute.key, query.aggregateOperator, query.groupBy], ); + const customValue: IOption[] = useMemo(() => { + if (!searchText) return []; + + return [ + { + label: `${searchText} ${FILTERS.ASC}`, + value: `${searchText}${orderByValueDelimiter}${FILTERS.ASC}`, + }, + { + label: `${searchText} ${FILTERS.DESC}`, + value: `${searchText}${orderByValueDelimiter}${FILTERS.DESC}`, + }, + ]; + }, [searchText]); + const optionsData = useMemo(() => { const options = query.aggregateOperator === MetricAggregateOperator.NOOP ? 
noAggregationOptions : aggregationOptions; - return options.filter( + + const resultOption = [...customValue, ...options]; + + return resultOption.filter( (option) => !getLabelFromValue(selectedValue).includes( getRemoveOrderFromValue(option.value), @@ -79,30 +103,58 @@ export function OrderByFilter({ ); }, [ aggregationOptions, + customValue, noAggregationOptions, query.aggregateOperator, selectedValue, ]); - const handleChange = (values: string[]): void => { - setSelectedValue(values); - const orderByValues: OrderByPayload[] = values.map((item) => { - const match = Papa.parse(item, { delimiter: '|' }); + const getUniqValues = useCallback((values: IOption[]): IOption[] => { + const modifiedValues = values.map((item) => { + const match = Papa.parse(item.value, { delimiter: orderByValueDelimiter }); + if (!match) return { label: item.label, value: item.value }; + // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-unused-vars + const [_, order] = match.data.flat() as string[]; + if (order) return { label: item.label, value: item.value }; + + return { + label: `${item.value} ${FILTERS.ASC}`, + value: `${item.value}${orderByValueDelimiter}${FILTERS.ASC}`, + }; + }); + + return uniqWith( + modifiedValues, + (current, next) => + getRemoveOrderFromValue(current.value) === + getRemoveOrderFromValue(next.value), + ); + }, []); + + const handleChange = (values: IOption[]): void => { + const result = getUniqValues(values); + + setSelectedValue(result); + const orderByValues: OrderByPayload[] = result.map((item) => { + const match = Papa.parse(item.value, { delimiter: orderByValueDelimiter }); + if (match) { const [columnName, order] = match.data.flat() as string[]; return { columnName: checkIfKeyPresent(columnName, query.aggregateAttribute.key) ? '#SIGNOZ_VALUE' : columnName, - order, + order: order ?? 'asc', }; } return { - columnName: item, - order: '', + columnName: item.value, + order: 'asc', }; }); + + setSearchText(''); onChange(orderByValues); }; @@ -126,6 +178,8 @@ export function OrderByFilter({ showSearch disabled={isMetricsDataSource && isDisabledSelect} showArrow={false} + value={selectedValue} + labelInValue filterOption={false} options={optionsData} notFoundContent={isFetching ? 
: null} diff --git a/frontend/src/container/QueryBuilder/filters/OrderByFilter/config.ts b/frontend/src/container/QueryBuilder/filters/OrderByFilter/config.ts new file mode 100644 index 0000000000..9e4b0c9a1b --- /dev/null +++ b/frontend/src/container/QueryBuilder/filters/OrderByFilter/config.ts @@ -0,0 +1,4 @@ +export const FILTERS = { + ASC: 'asc', + DESC: 'desc', +}; diff --git a/frontend/src/container/QueryBuilder/filters/OrderByFilter/utils.ts b/frontend/src/container/QueryBuilder/filters/OrderByFilter/utils.ts index 9907e5b88f..540674dec2 100644 --- a/frontend/src/container/QueryBuilder/filters/OrderByFilter/utils.ts +++ b/frontend/src/container/QueryBuilder/filters/OrderByFilter/utils.ts @@ -2,9 +2,18 @@ import { IOption } from 'hooks/useResourceAttribute/types'; import { transformStringWithPrefix } from 'lib/query/transformStringWithPrefix'; import * as Papa from 'papaparse'; import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { OrderByPayload } from 'types/api/queryBuilder/queryBuilderData'; export const orderByValueDelimiter = '|'; +export const transformToOrderByStringValues = ( + orderBy: OrderByPayload[], +): IOption[] => + orderBy.map((item) => ({ + label: `${item.columnName} ${item.order}`, + value: `${item.columnName}${orderByValueDelimiter}${item.order}`, + })); + export function mapLabelValuePairs( arr: BaseAutocompleteData[], ): Array[] { @@ -28,14 +37,15 @@ export function mapLabelValuePairs( }); } -export function getLabelFromValue(arr: string[]): string[] { +export function getLabelFromValue(arr: IOption[]): string[] { return arr.flat().map((item) => { - const match = Papa.parse(item, { delimiter: orderByValueDelimiter }); + const match = Papa.parse(item.value, { delimiter: orderByValueDelimiter }); if (match) { const [key] = match.data as string[]; return key[0]; } - return item; + + return item.value; }); } diff --git a/frontend/src/container/QueryTable/QueryTable.intefaces.ts b/frontend/src/container/QueryTable/QueryTable.intefaces.ts new file mode 100644 index 0000000000..f76aba08b3 --- /dev/null +++ b/frontend/src/container/QueryTable/QueryTable.intefaces.ts @@ -0,0 +1,16 @@ +import { TableProps } from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import { RowData } from 'lib/query/createTableColumnsFromQuery'; +import { ReactNode } from 'react'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { QueryDataV3 } from 'types/api/widgets/getQuery'; + +export type QueryTableProps = Omit< + TableProps, + 'columns' | 'dataSource' +> & { + queryTableData: QueryDataV3[]; + query: Query; + renderActionCell?: (record: RowData) => ReactNode; + modifyColumns?: (columns: ColumnsType) => ColumnsType; +}; diff --git a/frontend/src/container/QueryTable/QueryTable.tsx b/frontend/src/container/QueryTable/QueryTable.tsx new file mode 100644 index 0000000000..d08abbd592 --- /dev/null +++ b/frontend/src/container/QueryTable/QueryTable.tsx @@ -0,0 +1,57 @@ +import type { ColumnsType } from 'antd/es/table'; +import { ResizeTable } from 'components/ResizeTable'; +import dayjs from 'dayjs'; +import { + createTableColumnsFromQuery, + RowData, +} from 'lib/query/createTableColumnsFromQuery'; +import { useMemo } from 'react'; + +import { QueryTableProps } from './QueryTable.intefaces'; + +export function QueryTable({ + queryTableData, + query, + renderActionCell, + modifyColumns, + ...props +}: QueryTableProps): JSX.Element { + const { columns, dataSource } = useMemo( + () => + createTableColumnsFromQuery({ + 
query, + queryTableData, + renderActionCell, + }), + [query, queryTableData, renderActionCell], + ); + + const modifiedColumns = useMemo(() => { + const currentColumns: ColumnsType = columns.map((column) => + column.key === 'timestamp' + ? { + ...column, + render: (_, record): string => + dayjs(new Date(record.timestamp)).format('MMM DD, YYYY, HH:mm:ss'), + } + : column, + ); + + return currentColumns; + }, [columns]); + + const tableColumns = modifyColumns + ? modifyColumns(modifiedColumns) + : modifiedColumns; + + return ( + + ); +} diff --git a/frontend/src/container/QueryTable/index.ts b/frontend/src/container/QueryTable/index.ts new file mode 100644 index 0000000000..bb785ff437 --- /dev/null +++ b/frontend/src/container/QueryTable/index.ts @@ -0,0 +1 @@ +export { QueryTable } from './QueryTable'; diff --git a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx new file mode 100644 index 0000000000..ca01a0156c --- /dev/null +++ b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx @@ -0,0 +1,53 @@ +import Graph from 'components/Graph'; +import Spinner from 'components/Spinner'; +import getChartData from 'lib/getChartData'; +import { useMemo } from 'react'; +import { SuccessResponse } from 'types/api'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; + +import { Container, ErrorText } from './styles'; + +function TimeSeriesView({ + data, + isLoading, + isError, +}: TimeSeriesViewProps): JSX.Element { + const chartData = useMemo( + () => + getChartData({ + queryData: [ + { + queryData: data?.payload?.data?.result || [], + }, + ], + }), + [data?.payload?.data?.result], + ); + + return ( + + {isLoading && } + {isError && {data?.error || 'Something went wrong'}} + {!isLoading && !isError && ( + + )} + + ); +} + +interface TimeSeriesViewProps { + data?: SuccessResponse; + isLoading: boolean; + isError: boolean; +} + +TimeSeriesView.defaultProps = { + data: undefined, +}; + +export default TimeSeriesView; diff --git a/frontend/src/container/TimeSeriesView/index.tsx b/frontend/src/container/TimeSeriesView/index.tsx new file mode 100644 index 0000000000..5212334a4d --- /dev/null +++ b/frontend/src/container/TimeSeriesView/index.tsx @@ -0,0 +1,55 @@ +import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { DataSource } from 'types/common/queryBuilder'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import TimeSeriesView from './TimeSeriesView'; + +function TimeSeriesViewContainer({ + dataSource = DataSource.TRACES, +}: TimeSeriesViewProps): JSX.Element { + const { stagedQuery, panelType } = useQueryBuilder(); + + const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const { data, isLoading, isError } = useGetQueryRange( + { + query: stagedQuery || initialQueriesMap[dataSource], + graphType: panelType || PANEL_TYPES.TIME_SERIES, + selectedTime: 'GLOBAL_TIME', + globalSelectedInterval: globalSelectedTime, + params: { + dataSource, + }, + }, + { + queryKey: [ + REACT_QUERY_KEY.GET_QUERY_RANGE, + globalSelectedTime, + maxTime, + minTime, + stagedQuery, + ], + enabled: !!stagedQuery && 
panelType === PANEL_TYPES.TIME_SERIES, + }, + ); + + return ; +} + +interface TimeSeriesViewProps { + dataSource?: DataSource; +} + +TimeSeriesViewContainer.defaultProps = { + dataSource: DataSource.TRACES, +}; + +export default TimeSeriesViewContainer; diff --git a/frontend/src/container/TracesExplorer/TimeSeriesView/styles.ts b/frontend/src/container/TimeSeriesView/styles.ts similarity index 100% rename from frontend/src/container/TracesExplorer/TimeSeriesView/styles.ts rename to frontend/src/container/TimeSeriesView/styles.ts diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx index db4a0d8c42..2fc05ecca4 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx +++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx @@ -4,6 +4,9 @@ import getLocalStorageKey from 'api/browser/localstorage/get'; import setLocalStorageKey from 'api/browser/localstorage/set'; import { LOCALSTORAGE } from 'constants/localStorage'; import dayjs, { Dayjs } from 'dayjs'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; +import GetMinMax from 'lib/getMinMax'; import getTimeString from 'lib/getTimeString'; import { useCallback, useEffect, useState } from 'react'; import { connect, useSelector } from 'react-redux'; @@ -66,6 +69,8 @@ function DateTimeSelection({ false, ); + const { stagedQuery, initQueryBuilderData } = useQueryBuilder(); + const { maxTime, minTime, selectedTime } = useSelector< AppState, GlobalReducer @@ -174,6 +179,14 @@ function DateTimeSelection({ setRefreshButtonHidden(true); setCustomDTPickerVisible(true); } + + if (!stagedQuery) { + return; + } + + const { maxTime, minTime } = GetMinMax(value, getTime()); + + initQueryBuilderData(updateStepInterval(stagedQuery, maxTime, minTime)); }; const onRefreshHandler = (): void => { diff --git a/frontend/src/container/TracesExplorer/Controls/index.tsx b/frontend/src/container/TracesExplorer/Controls/index.tsx index 515fe12c67..5e793f9268 100644 --- a/frontend/src/container/TracesExplorer/Controls/index.tsx +++ b/frontend/src/container/TracesExplorer/Controls/index.tsx @@ -1,25 +1,51 @@ -import Controls from 'container/Controls'; +import Controls, { ControlsProps } from 'container/Controls'; +import OptionsMenu from 'container/OptionsMenu'; +import { OptionsMenuConfig } from 'container/OptionsMenu/types'; +import useQueryPagination from 'hooks/queryPagination/useQueryPagination'; import { memo } from 'react'; import { Container } from './styles'; -function TraceExplorerControls(): JSX.Element | null { - const handleCountItemsPerPageChange = (): void => {}; - const handleNavigatePrevious = (): void => {}; - const handleNavigateNext = (): void => {}; +function TraceExplorerControls({ + isLoading, + totalCount, + perPageOptions, + config, +}: TraceExplorerControlsProps): JSX.Element | null { + const { + pagination, + handleCountItemsPerPageChange, + handleNavigateNext, + handleNavigatePrevious, + } = useQueryPagination(totalCount, perPageOptions); return ( + {config && } + ); } +TraceExplorerControls.defaultProps = { + config: null, +}; + +type TraceExplorerControlsProps = Pick< + ControlsProps, + 'isLoading' | 'totalCount' | 'perPageOptions' +> & { + config?: OptionsMenuConfig | null; +}; + export default memo(TraceExplorerControls); diff --git a/frontend/src/container/TracesExplorer/ListView/configs.tsx 
b/frontend/src/container/TracesExplorer/ListView/configs.tsx new file mode 100644 index 0000000000..3b05ed8169 --- /dev/null +++ b/frontend/src/container/TracesExplorer/ListView/configs.tsx @@ -0,0 +1,11 @@ +import { DEFAULT_PER_PAGE_OPTIONS } from 'hooks/queryPagination'; + +export const defaultSelectedColumns: string[] = [ + 'name', + 'serviceName', + 'responseStatusCode', + 'httpMethod', + 'durationNano', +]; + +export const PER_PAGE_OPTIONS: number[] = [10, ...DEFAULT_PER_PAGE_OPTIONS]; diff --git a/frontend/src/container/TracesExplorer/ListView/index.tsx b/frontend/src/container/TracesExplorer/ListView/index.tsx new file mode 100644 index 0000000000..e93b11b1f4 --- /dev/null +++ b/frontend/src/container/TracesExplorer/ListView/index.tsx @@ -0,0 +1,134 @@ +import { ColumnsType } from 'antd/es/table'; +import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import { useOptionsMenu } from 'container/OptionsMenu'; +import { QueryTable } from 'container/QueryTable'; +import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { Pagination, URL_PAGINATION } from 'hooks/queryPagination'; +import useUrlQueryData from 'hooks/useUrlQueryData'; +import history from 'lib/history'; +import { RowData } from 'lib/query/createTableColumnsFromQuery'; +import { HTMLAttributes, memo, useCallback, useMemo } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { DataSource } from 'types/common/queryBuilder'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import TraceExplorerControls from '../Controls'; +import { defaultSelectedColumns, PER_PAGE_OPTIONS } from './configs'; +import { Container, ErrorText, tableStyles } from './styles'; +import { getTraceLink, modifyColumns, transformDataWithDate } from './utils'; + +function ListView(): JSX.Element { + const { stagedQuery, panelType } = useQueryBuilder(); + + const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const { options, config } = useOptionsMenu({ + dataSource: DataSource.TRACES, + aggregateOperator: 'count', + initialOptions: { + selectColumns: defaultSelectedColumns, + }, + }); + + const { queryData: paginationQueryData } = useUrlQueryData( + URL_PAGINATION, + ); + + const { data, isFetching, isError } = useGetQueryRange( + { + query: stagedQuery || initialQueriesMap.traces, + graphType: panelType || PANEL_TYPES.LIST, + selectedTime: 'GLOBAL_TIME', + globalSelectedInterval: globalSelectedTime, + params: { + dataSource: 'traces', + }, + tableParams: { + pagination: paginationQueryData, + selectColumns: options?.selectColumns, + }, + }, + { + queryKey: [ + REACT_QUERY_KEY.GET_QUERY_RANGE, + globalSelectedTime, + maxTime, + minTime, + stagedQuery, + panelType, + paginationQueryData, + options?.selectColumns, + ], + enabled: + !!stagedQuery && panelType === PANEL_TYPES.LIST && !!options?.selectColumns, + }, + ); + + const dataLength = + data?.payload?.data?.newResult?.data?.result[0]?.list?.length; + const totalCount = useMemo(() => dataLength || 0, [dataLength]); + + const queryTableDataResult = data?.payload.data.newResult.data.result; + const queryTableData = useMemo(() => queryTableDataResult || [], [ + queryTableDataResult, + ]); + + const transformedQueryTableData = useMemo( + () => transformDataWithDate(queryTableData), 
+ [queryTableData], + ); + + const handleModifyColumns = useCallback( + (columns: ColumnsType) => + modifyColumns(columns, options?.selectColumns || []), + [options?.selectColumns], + ); + + const handleRow = useCallback( + (record: RowData): HTMLAttributes => ({ + onClick: (event): void => { + event.preventDefault(); + event.stopPropagation(); + if (event.metaKey || event.ctrlKey) { + window.open(getTraceLink(record), '_blank'); + } else { + history.push(getTraceLink(record)); + } + }, + }), + [], + ); + + return ( + + + + {isError && {data?.error || 'Something went wrong'}} + + {!isError && ( + + )} + + ); +} + +export default memo(ListView); diff --git a/frontend/src/container/TracesExplorer/ListView/styles.ts b/frontend/src/container/TracesExplorer/ListView/styles.ts new file mode 100644 index 0000000000..292b04b1f9 --- /dev/null +++ b/frontend/src/container/TracesExplorer/ListView/styles.ts @@ -0,0 +1,21 @@ +import { Typography } from 'antd'; +import { CSSProperties } from 'react'; +import styled from 'styled-components'; + +export const tableStyles: CSSProperties = { + cursor: 'pointer', +}; + +export const Container = styled.div` + display: flex; + flex-direction: column; + gap: 15px; +`; + +export const ErrorText = styled(Typography)` + text-align: center; +`; + +export const DateText = styled(Typography)` + min-width: 145px; +`; diff --git a/frontend/src/container/TracesExplorer/ListView/utils.tsx b/frontend/src/container/TracesExplorer/ListView/utils.tsx new file mode 100644 index 0000000000..eb6be9a90d --- /dev/null +++ b/frontend/src/container/TracesExplorer/ListView/utils.tsx @@ -0,0 +1,100 @@ +import { Tag } from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import Typography from 'antd/es/typography/Typography'; +import ROUTES from 'constants/routes'; +import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util'; +import { formUrlParams } from 'container/TraceDetail/utils'; +import dayjs from 'dayjs'; +import { RowData } from 'lib/query/createTableColumnsFromQuery'; +import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { QueryDataV3 } from 'types/api/widgets/getQuery'; + +import { DateText } from './styles'; + +export const transformDataWithDate = (data: QueryDataV3[]): QueryDataV3[] => + data.map((query) => ({ + ...query, + list: + query?.list?.map((listItem) => ({ + ...listItem, + data: { + ...listItem?.data, + date: listItem?.timestamp, + }, + })) || null, + })); + +export const modifyColumns = ( + columns: ColumnsType, + selectedColumns: BaseAutocompleteData[], +): ColumnsType => { + const initialColumns = columns.filter(({ key }) => { + let isValidColumn = true; + + const checkIsExistColumnByKey = (attributeKey: string): boolean => + !selectedColumns.find(({ key }) => key === attributeKey) && + attributeKey === key; + + const isSelectedSpanId = checkIsExistColumnByKey('spanID'); + const isSelectedTraceId = checkIsExistColumnByKey('traceID'); + + if (isSelectedSpanId || isSelectedTraceId || key === 'date') + isValidColumn = false; + + return isValidColumn; + }); + + const dateColumn = columns.find(({ key }) => key === 'date'); + + if (dateColumn) { + initialColumns.unshift(dateColumn); + } + + return initialColumns.map((column) => { + const key = column.key as string; + + const getHttpMethodOrStatus = (value: string): JSX.Element => { + if (value === 'N/A') { + return {value}; + } + + return {value}; + }; + + if (key === 'durationNano') { + return { + ...column, + render: (duration: string): 
JSX.Element => ( + {getMs(duration)}ms + ), + }; + } + + if (key === 'httpMethod' || key === 'responseStatusCode') { + return { + ...column, + render: getHttpMethodOrStatus, + }; + } + + if (key === 'date') { + return { + ...column, + width: 145, + render: (date: string): JSX.Element => { + const day = dayjs(date); + return {day.format('YYYY/MM/DD HH:mm:ss')}; + }, + }; + } + + return column; + }); +}; + +export const getTraceLink = (record: RowData): string => + `${ROUTES.TRACE}/${record.traceID}${formUrlParams({ + spanId: record.spanID, + levelUp: 0, + levelDown: 0, + })}`; diff --git a/frontend/src/container/TracesExplorer/QuerySection/index.tsx b/frontend/src/container/TracesExplorer/QuerySection/index.tsx index 169fa7ba79..dbb922c139 100644 --- a/frontend/src/container/TracesExplorer/QuerySection/index.tsx +++ b/frontend/src/container/TracesExplorer/QuerySection/index.tsx @@ -1,7 +1,9 @@ import { Button } from 'antd'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { QueryBuilder } from 'container/QueryBuilder'; +import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { memo } from 'react'; import { DataSource } from 'types/common/queryBuilder'; import { ButtonWrapper, Container } from './styles'; @@ -9,10 +11,12 @@ import { ButtonWrapper, Container } from './styles'; function QuerySection(): JSX.Element { const { handleRunQuery } = useQueryBuilder(); + const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.LIST); + return ( ((state) => state.globalTime); - - const { data, isLoading, isError } = useGetQueryRange( - { - query: stagedQuery || initialQueriesMap.traces, - graphType: 'graph', - selectedTime: 'GLOBAL_TIME', - globalSelectedInterval: globalSelectedTime, - params: { - dataSource: 'traces', - }, - }, - { - queryKey: [ - REACT_QUERY_KEY.GET_QUERY_RANGE, - globalSelectedTime, - stagedQuery, - maxTime, - minTime, - ], - enabled: !!stagedQuery, - }, - ); - - const chartData = useMemo( - () => - getChartData({ - queryData: [ - { - queryData: data?.payload?.data?.result || [], - }, - ], - }), - [data], - ); - - return ( - - {isLoading && } - {isError && {data?.error || 'Something went wrong'}} - {!isLoading && !isError && ( - - )} - - ); -} - -export default TimeSeriesView; diff --git a/frontend/src/container/TracesExplorer/TracesView/configs.tsx b/frontend/src/container/TracesExplorer/TracesView/configs.tsx new file mode 100644 index 0000000000..3bbf528eee --- /dev/null +++ b/frontend/src/container/TracesExplorer/TracesView/configs.tsx @@ -0,0 +1,49 @@ +import { Typography } from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import ROUTES from 'constants/routes'; +import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util'; +import { DEFAULT_PER_PAGE_OPTIONS } from 'hooks/queryPagination'; +import { generatePath } from 'react-router-dom'; +import { ListItem } from 'types/api/widgets/getQuery'; + +export const PER_PAGE_OPTIONS: number[] = [10, ...DEFAULT_PER_PAGE_OPTIONS]; + +export const columns: ColumnsType = [ + { + title: 'Root Service Name', + dataIndex: 'subQuery.serviceName', + key: 'serviceName', + }, + { + title: 'Root Operation Name', + dataIndex: 'subQuery.name', + key: 'name', + }, + { + title: 'Root Duration (in ms)', + dataIndex: 'subQuery.durationNano', + key: 'durationNano', + render: (duration: number): JSX.Element => ( + {getMs(String(duration))}ms + ), + }, + { + title: 'No of Spans', + dataIndex: 
'span_count',
+ key: 'span_count',
+ },
+ {
+ title: 'TraceID',
+ dataIndex: 'traceID',
+ key: 'traceID',
+ render: (traceID: string): JSX.Element => (
+
+ {traceID}
+
+ ),
+ },
+];
diff --git a/frontend/src/container/TracesExplorer/TracesView/index.tsx b/frontend/src/container/TracesExplorer/TracesView/index.tsx
new file mode 100644
index 0000000000..0d1eae9d02
--- /dev/null
+++ b/frontend/src/container/TracesExplorer/TracesView/index.tsx
@@ -0,0 +1,87 @@
+import Typography from 'antd/es/typography/Typography';
+import { ResizeTable } from 'components/ResizeTable';
+import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
+import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
+import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
+import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
+import { Pagination, URL_PAGINATION } from 'hooks/queryPagination';
+import useUrlQueryData from 'hooks/useUrlQueryData';
+import { memo, useMemo } from 'react';
+import { useSelector } from 'react-redux';
+import { AppState } from 'store/reducers';
+import { GlobalReducer } from 'types/reducer/globalTime';
+
+import TraceExplorerControls from '../Controls';
+import { columns, PER_PAGE_OPTIONS } from './configs';
+import { ActionsContainer, Container } from './styles';
+
+function TracesView(): JSX.Element {
+ const { stagedQuery, panelType } = useQueryBuilder();
+
+ const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector<
+ AppState,
+ GlobalReducer
+ >((state) => state.globalTime);
+
+ const { queryData: paginationQueryData } = useUrlQueryData(
+ URL_PAGINATION,
+ );
+
+ const { data, isLoading } = useGetQueryRange(
+ {
+ query: stagedQuery || initialQueriesMap.traces,
+ graphType: panelType || PANEL_TYPES.TRACE,
+ selectedTime: 'GLOBAL_TIME',
+ globalSelectedInterval: globalSelectedTime,
+ params: {
+ dataSource: 'traces',
+ },
+ tableParams: {
+ pagination: paginationQueryData,
+ },
+ },
+ {
+ queryKey: [
+ REACT_QUERY_KEY.GET_QUERY_RANGE,
+ globalSelectedTime,
+ maxTime,
+ minTime,
+ stagedQuery,
+ panelType,
+ paginationQueryData,
+ ],
+ enabled: !!stagedQuery && panelType === PANEL_TYPES.TRACE,
+ },
+ );
+
+ const responseData = data?.payload?.data?.newResult?.data?.result[0]?.list;
+ const tableData = useMemo(
+ () => responseData?.map((listItem) => listItem.data),
+ [responseData],
+ );
+
+ return (
+
+
+ Showing up to X of the slowest traces from the selected time range
+
+
+
+
+
+ );
+}
+
+export default memo(TracesView);
diff --git a/frontend/src/container/TracesExplorer/TracesView/styles.ts b/frontend/src/container/TracesExplorer/TracesView/styles.ts
new file mode 100644
index 0000000000..f9c9a7c8ea
--- /dev/null
+++ b/frontend/src/container/TracesExplorer/TracesView/styles.ts
@@ -0,0 +1,13 @@
+import styled from 'styled-components';
+
+export const Container = styled.div`
+ display: flex;
+ flex-direction: column;
+ gap: 15px;
+`;
+
+export const ActionsContainer = styled.div`
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+`;
diff --git a/frontend/src/hooks/dashboard/useGetAllDashboard.tsx b/frontend/src/hooks/dashboard/useGetAllDashboard.tsx
new file mode 100644
index 0000000000..aced8e6e2e
--- /dev/null
+++ b/frontend/src/hooks/dashboard/useGetAllDashboard.tsx
@@ -0,0 +1,16 @@
+import getAll from 'api/dashboard/getAll';
+import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
+import { useQuery, UseQueryResult } from 'react-query';
+import { ErrorResponse, SuccessResponse } from
'types/api'; +import { PayloadProps } from 'types/api/dashboard/getAll'; + +export const useGetAllDashboard = (): DashboardProps => + useQuery({ + queryFn: getAll, + queryKey: REACT_QUERY_KEY.GET_ALL_DASHBOARDS, + }); + +type DashboardProps = UseQueryResult< + SuccessResponse | ErrorResponse, + unknown +>; diff --git a/frontend/src/hooks/dashboard/useUpdateDashboard.tsx b/frontend/src/hooks/dashboard/useUpdateDashboard.tsx new file mode 100644 index 0000000000..b4c34974dc --- /dev/null +++ b/frontend/src/hooks/dashboard/useUpdateDashboard.tsx @@ -0,0 +1,14 @@ +import update from 'api/dashboard/update'; +import { useMutation, UseMutationResult } from 'react-query'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { Dashboard } from 'types/api/dashboard/getAll'; +import { Props } from 'types/api/dashboard/update'; + +export const useUpdateDashboard = (): UseUpdateDashboard => useMutation(update); + +type UseUpdateDashboard = UseMutationResult< + SuccessResponse | ErrorResponse, + unknown, + Props, + unknown +>; diff --git a/frontend/src/hooks/dashboard/utils.ts b/frontend/src/hooks/dashboard/utils.ts new file mode 100644 index 0000000000..ea2457daa8 --- /dev/null +++ b/frontend/src/hooks/dashboard/utils.ts @@ -0,0 +1,37 @@ +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { Dashboard } from 'types/api/dashboard/getAll'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; + +export const addEmptyWidgetInDashboardJSONWithQuery = ( + dashboard: Dashboard, + query: Query, +): Dashboard => ({ + ...dashboard, + data: { + ...dashboard.data, + layout: [ + { + i: 'empty', + w: 6, + x: 0, + h: 2, + y: 0, + }, + ...(dashboard?.data?.layout || []), + ], + widgets: [ + ...(dashboard?.data?.widgets || []), + { + id: 'empty', + query, + description: '', + isStacked: false, + nullZeroValues: '', + opacity: '', + title: '', + timePreferance: 'GLOBAL_TIME', + panelTypes: PANEL_TYPES.TIME_SERIES, + }, + ], + }, +}); diff --git a/frontend/src/hooks/queryBuilder/useGetCompositeQueryParam.ts b/frontend/src/hooks/queryBuilder/useGetCompositeQueryParam.ts index 4477a9fbf7..894167815b 100644 --- a/frontend/src/hooks/queryBuilder/useGetCompositeQueryParam.ts +++ b/frontend/src/hooks/queryBuilder/useGetCompositeQueryParam.ts @@ -8,7 +8,16 @@ export const useGetCompositeQueryParam = (): Query | null => { return useMemo(() => { const compositeQuery = urlQuery.get(COMPOSITE_QUERY); + let parsedCompositeQuery: Query | null = null; - return compositeQuery ? 
JSON.parse(compositeQuery) : null; + try { + if (!compositeQuery) return null; + + parsedCompositeQuery = JSON.parse(decodeURIComponent(compositeQuery)); + } catch (e) { + parsedCompositeQuery = null; + } + + return parsedCompositeQuery; }, [urlQuery]); }; diff --git a/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts index 289e6c9f1a..28bc07566f 100644 --- a/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts +++ b/frontend/src/hooks/queryBuilder/useGetWidgetQueryRange.ts @@ -1,6 +1,5 @@ -import { COMPOSITE_QUERY } from 'constants/queryBuilderQueryNames'; +import { initialQueriesMap } from 'constants/queryBuilder'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; -import useUrlQuery from 'hooks/useUrlQuery'; import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables'; import { UseQueryOptions, UseQueryResult } from 'react-query'; import { useSelector } from 'react-redux'; @@ -11,6 +10,7 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { GlobalReducer } from 'types/reducer/globalTime'; import { useGetQueryRange } from './useGetQueryRange'; +import { useQueryBuilder } from './useQueryBuilder'; export const useGetWidgetQueryRange = ( { @@ -19,31 +19,29 @@ export const useGetWidgetQueryRange = ( }: Pick, options?: UseQueryOptions, Error>, ): UseQueryResult, Error> => { - const urlQuery = useUrlQuery(); - const { selectedTime: globalSelectedInterval } = useSelector< AppState, GlobalReducer >((state) => state.globalTime); - const compositeQuery = urlQuery.get(COMPOSITE_QUERY); + const { stagedQuery } = useQueryBuilder(); return useGetQueryRange( { graphType, selectedTime, globalSelectedInterval, - query: JSON.parse(compositeQuery || '{}'), + query: stagedQuery || initialQueriesMap.metrics, variables: getDashboardVariables(), }, { - enabled: !!compositeQuery, + enabled: !!stagedQuery, retry: false, queryKey: [ REACT_QUERY_KEY.GET_QUERY_RANGE, selectedTime, globalSelectedInterval, - compositeQuery, + stagedQuery, ], ...options, }, diff --git a/frontend/src/hooks/queryBuilder/useQueryOperations.ts b/frontend/src/hooks/queryBuilder/useQueryOperations.ts index b1d2f35c18..02af0118d0 100644 --- a/frontend/src/hooks/queryBuilder/useQueryOperations.ts +++ b/frontend/src/hooks/queryBuilder/useQueryOperations.ts @@ -2,6 +2,7 @@ import { initialAutocompleteData, initialQueryBuilderFormValuesMap, mapOfFilters, + PANEL_TYPES, } from 'constants/queryBuilder'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { getOperatorsBySourceAndPanelType } from 'lib/newQueryBuilder/getOperatorsBySourceAndPanelType'; @@ -56,9 +57,16 @@ export const useQueryOperations: UseQueryOperations = ({ query, index }) => { ); const getNewListOfAdditionalFilters = useCallback( - (dataSource: DataSource): string[] => - mapOfFilters[dataSource].map((item) => item.text), - [], + (dataSource: DataSource): string[] => { + const listOfFilters = mapOfFilters[dataSource].map((item) => item.text); + + if (panelType === PANEL_TYPES.LIST) { + return listOfFilters.filter((filter) => filter !== 'Aggregation interval'); + } + + return listOfFilters; + }, + [panelType], ); const handleChangeAggregatorAttribute = useCallback( @@ -78,7 +86,7 @@ export const useQueryOperations: UseQueryOperations = ({ query, index }) => { (nextSource: DataSource): void => { const newOperators = getOperatorsBySourceAndPanelType({ dataSource: nextSource, - panelType, + panelType: panelType || 
PANEL_TYPES.TIME_SERIES, }); const entries = Object.entries( @@ -121,33 +129,22 @@ export const useQueryOperations: UseQueryOperations = ({ query, index }) => { [query.dataSource], ); + const isTracePanelType = useMemo(() => panelType === PANEL_TYPES.TRACE, [ + panelType, + ]); + useEffect(() => { if (initialDataSource && dataSource !== initialDataSource) return; const initialOperators = getOperatorsBySourceAndPanelType({ dataSource, - panelType, + panelType: panelType || PANEL_TYPES.TIME_SERIES, }); if (JSON.stringify(operators) === JSON.stringify(initialOperators)) return; setOperators(initialOperators); - - const isCurrentOperatorAvailableInList = initialOperators - .map((operator) => operator.value) - .includes(aggregateOperator); - - if (!isCurrentOperatorAvailableInList) { - handleChangeOperator(initialOperators[0].value); - } - }, [ - dataSource, - initialDataSource, - panelType, - operators, - aggregateOperator, - handleChangeOperator, - ]); + }, [dataSource, initialDataSource, panelType, operators]); useEffect(() => { const additionalFilters = getNewListOfAdditionalFilters(dataSource); @@ -156,6 +153,7 @@ export const useQueryOperations: UseQueryOperations = ({ query, index }) => { }, [dataSource, aggregateOperator, getNewListOfAdditionalFilters]); return { + isTracePanelType, isMetricsDataSource, operators, listOfAdditionalFilters, diff --git a/frontend/src/hooks/queryBuilder/useShareBuilderUrl.ts b/frontend/src/hooks/queryBuilder/useShareBuilderUrl.ts index 168e5af77a..b3ccc230e9 100644 --- a/frontend/src/hooks/queryBuilder/useShareBuilderUrl.ts +++ b/frontend/src/hooks/queryBuilder/useShareBuilderUrl.ts @@ -5,11 +5,9 @@ import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { useGetCompositeQueryParam } from './useGetCompositeQueryParam'; import { useQueryBuilder } from './useQueryBuilder'; -type UseShareBuilderUrlParams = { defaultValue: Query }; +export type UseShareBuilderUrlParams = { defaultValue: Query }; -export const useShareBuilderUrl = ({ - defaultValue, -}: UseShareBuilderUrlParams): void => { +export const useShareBuilderUrl = (defaultQuery: Query): void => { const { redirectWithQueryBuilderData, resetStagedQuery } = useQueryBuilder(); const urlQuery = useUrlQuery(); @@ -17,9 +15,9 @@ export const useShareBuilderUrl = ({ useEffect(() => { if (!compositeQuery) { - redirectWithQueryBuilderData(defaultValue); + redirectWithQueryBuilderData(defaultQuery); } - }, [defaultValue, urlQuery, redirectWithQueryBuilderData, compositeQuery]); + }, [defaultQuery, urlQuery, redirectWithQueryBuilderData, compositeQuery]); useEffect( () => (): void => { diff --git a/frontend/src/hooks/queryBuilder/useStepInterval.ts b/frontend/src/hooks/queryBuilder/useStepInterval.ts new file mode 100644 index 0000000000..55dad54664 --- /dev/null +++ b/frontend/src/hooks/queryBuilder/useStepInterval.ts @@ -0,0 +1,37 @@ +import getStep from 'lib/getStep'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { Widgets } from 'types/api/dashboard/getAll'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +export const updateStepInterval = ( + query: Widgets['query'], + maxTime: number, + minTime: number, +): Widgets['query'] => { + const stepInterval = getStep({ + start: minTime, + end: maxTime, + inputFormat: 'ns', + }); + + return { + ...query, + builder: { + ...query?.builder, + queryData: + query?.builder?.queryData?.map((item) => ({ + ...item, + stepInterval, + })) || [], + }, + }; +}; + +export const useStepInterval = (query: 
Widgets['query']): Widgets['query'] => { + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); + + return updateStepInterval(query, maxTime, minTime); +}; diff --git a/frontend/src/hooks/queryPagination/config.ts b/frontend/src/hooks/queryPagination/config.ts new file mode 100644 index 0000000000..72dc032051 --- /dev/null +++ b/frontend/src/hooks/queryPagination/config.ts @@ -0,0 +1,3 @@ +export const URL_PAGINATION = 'pagination'; + +export const DEFAULT_PER_PAGE_OPTIONS: number[] = [25, 50, 100, 200]; diff --git a/frontend/src/hooks/queryPagination/index.ts b/frontend/src/hooks/queryPagination/index.ts new file mode 100644 index 0000000000..2acd167c81 --- /dev/null +++ b/frontend/src/hooks/queryPagination/index.ts @@ -0,0 +1,2 @@ +export * from './config'; +export * from './types'; diff --git a/frontend/src/hooks/queryPagination/types.ts b/frontend/src/hooks/queryPagination/types.ts new file mode 100644 index 0000000000..8bea38eef9 --- /dev/null +++ b/frontend/src/hooks/queryPagination/types.ts @@ -0,0 +1,4 @@ +export interface Pagination { + offset: number; + limit: number; +} diff --git a/frontend/src/hooks/queryPagination/useQueryPagination.ts b/frontend/src/hooks/queryPagination/useQueryPagination.ts new file mode 100644 index 0000000000..29cee3ecb8 --- /dev/null +++ b/frontend/src/hooks/queryPagination/useQueryPagination.ts @@ -0,0 +1,88 @@ +import { ControlsProps } from 'container/Controls'; +import useUrlQueryData from 'hooks/useUrlQueryData'; +import { useCallback, useEffect, useMemo } from 'react'; + +import { DEFAULT_PER_PAGE_OPTIONS, URL_PAGINATION } from './config'; +import { Pagination } from './types'; +import { + checkIsValidPaginationData, + getDefaultPaginationConfig, +} from './utils'; + +const useQueryPagination = ( + totalCount: number, + perPageOptions: number[] = DEFAULT_PER_PAGE_OPTIONS, +): UseQueryPagination => { + const defaultPaginationConfig = useMemo( + () => getDefaultPaginationConfig(perPageOptions), + [perPageOptions], + ); + + const { + query: paginationQuery, + queryData: paginationQueryData, + redirectWithQuery: redirectWithCurrentPagination, + } = useUrlQueryData(URL_PAGINATION); + + const handleCountItemsPerPageChange = useCallback( + (newLimit: Pagination['limit']) => { + redirectWithCurrentPagination({ + ...paginationQueryData, + limit: newLimit, + }); + }, + [paginationQueryData, redirectWithCurrentPagination], + ); + + const handleNavigatePrevious = useCallback(() => { + const previousOffset = paginationQueryData.offset - paginationQueryData.limit; + + redirectWithCurrentPagination({ + ...paginationQueryData, + offset: previousOffset > 0 ? previousOffset : 0, + }); + }, [paginationQueryData, redirectWithCurrentPagination]); + + const handleNavigateNext = useCallback(() => { + redirectWithCurrentPagination({ + ...paginationQueryData, + offset: + paginationQueryData.limit === totalCount + ? 
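For orientation, a minimal sketch of how the new useStepInterval hook can be consumed; the wrapper hook name and the widget argument are assumptions, not code from this patch.

import { useStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { Widgets } from 'types/api/dashboard/getAll';

// Hypothetical wrapper: recomputes stepInterval on every builder queryData
// item from the globally selected min/max time range.
function useWidgetQueryWithStep(widget: Widgets): Widgets['query'] {
  return useStepInterval(widget.query);
}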
paginationQueryData.offset + paginationQueryData.limit + : paginationQueryData.offset, + }); + }, [totalCount, paginationQueryData, redirectWithCurrentPagination]); + + useEffect(() => { + const isValidPaginationData = checkIsValidPaginationData( + paginationQueryData || defaultPaginationConfig, + perPageOptions, + ); + + if (paginationQuery && isValidPaginationData) return; + + redirectWithCurrentPagination(defaultPaginationConfig); + }, [ + defaultPaginationConfig, + perPageOptions, + paginationQuery, + paginationQueryData, + redirectWithCurrentPagination, + ]); + + return { + pagination: paginationQueryData || defaultPaginationConfig, + handleCountItemsPerPageChange, + handleNavigatePrevious, + handleNavigateNext, + }; +}; + +type UseQueryPagination = Pick< + ControlsProps, + | 'handleCountItemsPerPageChange' + | 'handleNavigateNext' + | 'handleNavigatePrevious' +> & { pagination: Pagination }; + +export default useQueryPagination; diff --git a/frontend/src/hooks/queryPagination/utils.ts b/frontend/src/hooks/queryPagination/utils.ts new file mode 100644 index 0000000000..217a19afe1 --- /dev/null +++ b/frontend/src/hooks/queryPagination/utils.ts @@ -0,0 +1,20 @@ +import { DEFAULT_PER_PAGE_OPTIONS } from './config'; +import { Pagination } from './types'; + +export const checkIsValidPaginationData = ( + { limit, offset }: Pagination, + perPageOptions: number[], +): boolean => + Boolean( + Number.isInteger(limit) && + limit > 0 && + offset >= 0 && + perPageOptions.find((option) => option === limit), + ); + +export const getDefaultPaginationConfig = ( + perPageOptions = DEFAULT_PER_PAGE_OPTIONS, +): Pagination => ({ + offset: 0, + limit: perPageOptions[0], +}); diff --git a/frontend/src/hooks/useUrlQueryData.ts b/frontend/src/hooks/useUrlQueryData.ts new file mode 100644 index 0000000000..1a0e556afd --- /dev/null +++ b/frontend/src/hooks/useUrlQueryData.ts @@ -0,0 +1,44 @@ +import useUrlQuery from 'hooks/useUrlQuery'; +import { useCallback, useMemo } from 'react'; +import { useHistory, useLocation } from 'react-router-dom'; + +const useUrlQueryData = ( + queryKey: string, + defaultData?: T, +): UseUrlQueryData => { + const history = useHistory(); + const location = useLocation(); + const urlQuery = useUrlQuery(); + + const query = useMemo(() => urlQuery.get(queryKey), [queryKey, urlQuery]); + + const queryData: T = useMemo(() => (query ? 
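A minimal sketch of wiring the new pagination hook into a list view; the wrapper hook and the source of totalCount are assumptions rather than code from this patch.

import useQueryPagination from 'hooks/queryPagination/useQueryPagination';
import { DEFAULT_PER_PAGE_OPTIONS } from 'hooks/queryPagination';

function useListPagination(totalCount: number) {
  const {
    pagination, // { offset, limit }, persisted under the "pagination" URL key
    handleCountItemsPerPageChange,
    handleNavigatePrevious,
    handleNavigateNext,
  } = useQueryPagination(totalCount, DEFAULT_PER_PAGE_OPTIONS);

  // pagination can be forwarded as tableParams.pagination to the query-range
  // call, and the three handlers bound to the pager controls.
  return {
    pagination,
    handleCountItemsPerPageChange,
    handleNavigatePrevious,
    handleNavigateNext,
  };
}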
JSON.parse(query) : defaultData), [ + query, + defaultData, + ]); + + const redirectWithQuery = useCallback( + (newQueryData: T): void => { + const newQuery = JSON.stringify(newQueryData); + + urlQuery.set(queryKey, newQuery); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + }, + [history, location, urlQuery, queryKey], + ); + + return { + query, + queryData, + redirectWithQuery, + }; +}; + +interface UseUrlQueryData { + query: string | null; + queryData: T; + redirectWithQuery: (newQueryData: T) => void; +} + +export default useUrlQueryData; diff --git a/frontend/src/lib/__tests__/getStep.test.ts b/frontend/src/lib/__tests__/getStep.test.ts index ad31573ebf..c799bfa9e0 100644 --- a/frontend/src/lib/__tests__/getStep.test.ts +++ b/frontend/src/lib/__tests__/getStep.test.ts @@ -39,7 +39,12 @@ describe('lib/getStep', () => { const startUnix = start.valueOf(); const endUnix = end.valueOf(); - const expectedStepSize = Math.floor(end.diff(start, 's') / MaxDataPoints); + let expectedStepSize = Math.max( + Math.floor(end.diff(start, 's') / MaxDataPoints), + DefaultStepSize, + ); + + expectedStepSize -= expectedStepSize % 60; expect( getStep({ diff --git a/frontend/src/lib/dashbaordVariables/getDashboardVariables.ts b/frontend/src/lib/dashbaordVariables/getDashboardVariables.ts index e9f4f1c6e1..aa870f9bbb 100644 --- a/frontend/src/lib/dashbaordVariables/getDashboardVariables.ts +++ b/frontend/src/lib/dashbaordVariables/getDashboardVariables.ts @@ -7,7 +7,7 @@ export const getDashboardVariables = (): Record => { globalTime, dashboards: { dashboards }, } = store.getState(); - const [selectedDashboard] = dashboards; + const [selectedDashboard] = dashboards || []; const { data: { variables = {} }, } = selectedDashboard; diff --git a/frontend/src/lib/getStep.test.ts b/frontend/src/lib/getStep.test.ts new file mode 100644 index 0000000000..a70057245f --- /dev/null +++ b/frontend/src/lib/getStep.test.ts @@ -0,0 +1,137 @@ +import dayjs from 'dayjs'; + +import getStep, { DefaultStepSize, MaxDataPoints } from './getStep'; + +describe('get dynamic step size', () => { + test('should return default step size if diffSec is less than MaxDataPoints', () => { + const start = dayjs().subtract(1, 'minute').valueOf(); + const end = dayjs().valueOf(); + + const step = getStep({ + start, + end, + inputFormat: 'ms', + }); + + expect(step).toBe(DefaultStepSize); + }); + + test('should return appropriate step size if diffSec is more than MaxDataPoints', () => { + const start = dayjs().subtract(4, 'hour').valueOf(); + const end = dayjs().valueOf(); + + const step = getStep({ + start, + end, + inputFormat: 'ms', + }); + + // the expected step size should be no less than DefaultStepSize + const diffSec = Math.abs(dayjs(end).diff(dayjs(start), 's')); + const expectedStep = Math.max( + Math.floor(diffSec / MaxDataPoints), + DefaultStepSize, + ); + + expect(step).toBe(expectedStep); + }); + + test('should correctly handle different input formats', () => { + const endSec = dayjs().unix(); + const startSec = endSec - 4 * 3600; // 4 hours earlier + + const stepSec = getStep({ + start: startSec, + end: endSec, + inputFormat: 's', + }); + + const diffSec = Math.abs(dayjs.unix(endSec).diff(dayjs.unix(startSec), 's')); + const expectedStep = Math.max( + Math.floor(diffSec / MaxDataPoints), + DefaultStepSize, + ); + + expect(stepSec).toBe(expectedStep); + + const startNs = startSec * 1e9; // convert to nanoseconds + const endNs = endSec * 1e9; // convert to nanoseconds + + const 
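The new useUrlQueryData hook keeps an arbitrary JSON-serialisable value in a URL parameter. Below is a small sketch; the 'selectedColumns' key and the ColumnsState shape are invented for illustration, and the hook's generic type parameter is written out explicitly.

import useUrlQueryData from 'hooks/useUrlQueryData';

interface ColumnsState {
  columns: string[];
}

function useSelectedColumns() {
  const { queryData, redirectWithQuery } = useUrlQueryData<ColumnsState>(
    'selectedColumns',
    { columns: [] },
  );

  // redirectWithQuery({ columns: ['serviceName'] }) JSON-stringifies the value,
  // writes it under ?selectedColumns=... and pushes the resulting URL.
  return { selectedColumns: queryData, setSelectedColumns: redirectWithQuery };
}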
stepNs = getStep({ + start: startNs, + end: endNs, + inputFormat: 'ns', + }); + + expect(stepNs).toBe(expectedStep); // Expect the same result as 's' inputFormat + }); + + test('should throw an error for invalid input format', () => { + const start = dayjs().valueOf(); + const end = dayjs().valueOf(); + + expect(() => { + getStep({ + start, + end, + inputFormat: 'invalid' as never, + }); + }).toThrow('invalid format'); + }); + + test('should return DefaultStepSize when start and end are the same', () => { + const start = dayjs().valueOf(); + const end = start; // same as start + + const step = getStep({ + start, + end, + inputFormat: 'ms', + }); + + expect(step).toBe(DefaultStepSize); + }); + + test('should return DefaultStepSize if diffSec is exactly MaxDataPoints', () => { + const endMs = dayjs().valueOf(); + const startMs = endMs - MaxDataPoints * 1000; // exactly MaxDataPoints seconds earlier + + const step = getStep({ + start: startMs, + end: endMs, + inputFormat: 'ms', + }); + + expect(step).toBe(DefaultStepSize); // since calculated step size is less than DefaultStepSize, it should return DefaultStepSize + }); + + test('should return DefaultStepSize for future dates less than (MaxDataPoints * DefaultStepSize) seconds ahead', () => { + const start = dayjs().valueOf(); + const end = start + MaxDataPoints * DefaultStepSize * 1000 - 1; // just one millisecond less than (MaxDataPoints * DefaultStepSize) seconds ahead + + const step = getStep({ + start, + end, + inputFormat: 'ms', + }); + + expect(step).toBe(DefaultStepSize); + }); + + test('should handle string inputs correctly for a time range greater than (MaxDataPoints * DefaultStepSize) seconds', () => { + const endMs = dayjs().valueOf(); + const startMs = endMs - (MaxDataPoints * DefaultStepSize * 1000 + 1); // one millisecond more than (MaxDataPoints * DefaultStepSize) seconds earlier + + const step = getStep({ + start: startMs.toString(), + end: endMs.toString(), + inputFormat: 'ms', + }); + + const diffSec = Math.abs( + dayjs(Number(endMs)).diff(dayjs(Number(startMs)), 's'), + ); + + expect(step).toBe(Math.floor(diffSec / MaxDataPoints)); + }); +}); diff --git a/frontend/src/lib/getStep.ts b/frontend/src/lib/getStep.ts index 347f9dc332..0a7f4b043d 100644 --- a/frontend/src/lib/getStep.ts +++ b/frontend/src/lib/getStep.ts @@ -30,7 +30,7 @@ const convertToMs = ( }; export const DefaultStepSize = 60; -export const MaxDataPoints = 200; +export const MaxDataPoints = 300; /** * Returns relevant step size based on given start and end date. 
@@ -40,7 +40,13 @@ const getStep = ({ start, end, inputFormat = 'ms' }: GetStepInput): number => { const endDate = dayjs(convertToMs(Number(end), inputFormat)); const diffSec = Math.abs(endDate.diff(startDate, 's')); - return Math.max(Math.floor(diffSec / MaxDataPoints), DefaultStepSize); + let result = + Math.max(Math.floor(diffSec / MaxDataPoints), DefaultStepSize) || + DefaultStepSize; + + result -= result % 60; + + return result; }; export default getStep; diff --git a/frontend/src/lib/newQueryBuilder/convertNewDataToOld.ts b/frontend/src/lib/newQueryBuilder/convertNewDataToOld.ts index dcb103d6cb..eed4f83da3 100644 --- a/frontend/src/lib/newQueryBuilder/convertNewDataToOld.ts +++ b/frontend/src/lib/newQueryBuilder/convertNewDataToOld.ts @@ -35,5 +35,9 @@ export const convertNewDataToOld = ( }); const oldResultType = resultType; - return { data: { result: oldResult, resultType: oldResultType } }; + // TODO: fix it later for using only v3 version of api + + return { + data: { result: oldResult, resultType: oldResultType, newResult: newData }, + }; }; diff --git a/frontend/src/lib/newQueryBuilder/getOperatorsBySourceAndPanelType.ts b/frontend/src/lib/newQueryBuilder/getOperatorsBySourceAndPanelType.ts index 21b3bc7498..dd6869847b 100644 --- a/frontend/src/lib/newQueryBuilder/getOperatorsBySourceAndPanelType.ts +++ b/frontend/src/lib/newQueryBuilder/getOperatorsBySourceAndPanelType.ts @@ -15,12 +15,16 @@ export const getOperatorsBySourceAndPanelType = ({ }: GetQueryOperatorsParams): SelectOption[] => { let operatorsByDataSource = mapOfOperators[dataSource]; - if (panelType === PANEL_TYPES.LIST) { + if (panelType === PANEL_TYPES.LIST || panelType === PANEL_TYPES.TRACE) { operatorsByDataSource = operatorsByDataSource.filter( (operator) => operator.value === StringOperators.NOOP, ); } - if (dataSource !== DataSource.METRICS && panelType !== PANEL_TYPES.LIST) { + if ( + dataSource !== DataSource.METRICS && + panelType !== PANEL_TYPES.LIST && + panelType !== PANEL_TYPES.TRACE + ) { operatorsByDataSource = operatorsByDataSource.filter( (operator) => operator.value !== StringOperators.NOOP, ); diff --git a/frontend/src/lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi.ts b/frontend/src/lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi.ts index c0016fa0de..f89c8b025e 100644 --- a/frontend/src/lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi.ts +++ b/frontend/src/lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi.ts @@ -1,3 +1,4 @@ +import { GetQueryResultsProps } from 'store/actions/dashboard/getQueryResults'; import { MapData, MapQueryDataToApiResult, @@ -6,6 +7,7 @@ import { export const mapQueryDataToApi = ( data: Data[], nameField: Key, + tableParams?: GetQueryResultsProps['tableParams'], ): MapQueryDataToApiResult> => { const newLegendMap: Record = {}; @@ -14,6 +16,10 @@ export const mapQueryDataToApi = ( ...acc, [query[nameField] as string]: { ...query, + ...tableParams?.pagination, + ...(tableParams?.selectColumns + ? 
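To make the effect of the new rounding concrete, a small worked example against the updated getStep; the values follow directly from MaxDataPoints = 300 and the modulo-60 rounding above.

import getStep from 'lib/getStep';

// 5h window : 18000s / 300 = 60            -> 60s  (floored at DefaultStepSize)
// 24h window: 86400s / 300 = 288, 288 - 48 -> 240s (rounded down to a minute)
const step = getStep({
  start: Date.now() - 24 * 60 * 60 * 1000,
  end: Date.now(),
  inputFormat: 'ms',
}); // 240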
{ selectColumns: tableParams?.selectColumns } + : null), }, }; diff --git a/frontend/src/lib/query/createTableColumnsFromQuery.ts b/frontend/src/lib/query/createTableColumnsFromQuery.ts new file mode 100644 index 0000000000..653a859ba8 --- /dev/null +++ b/frontend/src/lib/query/createTableColumnsFromQuery.ts @@ -0,0 +1,335 @@ +import { ColumnsType } from 'antd/es/table'; +import { ColumnType } from 'antd/lib/table'; +import { FORMULA_REGEXP } from 'constants/regExp'; +import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces'; +import { toCapitalize } from 'lib/toCapitalize'; +import { ReactNode } from 'react'; +import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData'; +import { ListItem, QueryDataV3, SeriesItem } from 'types/api/widgets/getQuery'; +import { v4 as uuid } from 'uuid'; + +type CreateTableDataFromQueryParams = Pick< + QueryTableProps, + 'queryTableData' | 'query' | 'renderActionCell' +>; + +export type RowData = { + timestamp: number; + key: string; + [key: string]: string | number; +}; + +type DynamicColumn = { + key: keyof RowData; + data: (string | number)[]; + type: 'field' | 'operator'; + // sortable: boolean; +}; + +type DynamicColumns = DynamicColumn[]; + +type CreateTableDataFromQuery = ( + params: CreateTableDataFromQueryParams, +) => { + columns: ColumnsType; + dataSource: RowData[]; + rowsLength: number; +}; + +type FillColumnData = ( + queryTableData: QueryDataV3[], + dynamicColumns: DynamicColumns, +) => { filledDynamicColumns: DynamicColumns; rowsLength: number }; + +type GetDynamicColumns = ( + queryTableData: QueryDataV3[], + query: Query, +) => DynamicColumns; + +type ListItemData = ListItem['data']; +type ListItemKey = keyof ListItemData; +type SeriesItemLabels = SeriesItem['labels']; + +const isFormula = (queryName: string): boolean => + FORMULA_REGEXP.test(queryName); + +const isColumnExist = ( + columnName: string, + columns: DynamicColumns, +): boolean => { + const columnKeys = columns.map((item) => item.key); + + return columnKeys.includes(columnName); +}; + +const prepareColumnTitle = (title: string): string => { + const haveUnderscore = title.includes('_'); + + if (haveUnderscore) { + return title + .split('_') + .map((str) => toCapitalize(str)) + .join(' '); + } + + return toCapitalize(title); +}; + +const getQueryOperator = ( + queryData: IBuilderQuery[], + currentQueryName: string, +): string => { + const builderQuery = queryData.find((q) => q.queryName === currentQueryName); + + return builderQuery ? 
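A sketch of the payload shape this produces when tableParams is supplied; it assumes a Query object named query is in scope, and the query name 'A' and the column list are illustrative.

const { data } = mapQueryDataToApi(query.builder.queryData, 'queryName', {
  pagination: { offset: 0, limit: 25 },
  selectColumns: ['serviceName', 'durationNano'],
});
// data.A now carries offset, limit and selectColumns alongside the original
// builder-query fields, so each query in the v3 payload is paginated.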
builderQuery.aggregateOperator : ''; +}; + +const createLabels = ( + labels: T, + label: keyof T, + dynamicColumns: DynamicColumns, +): void => { + if (isColumnExist(label as string, dynamicColumns)) return; + + // const labelValue = labels[label]; + + // const isNumber = !Number.isNaN(parseFloat(String(labelValue))); + + const fieldObj: DynamicColumn = { + key: label as string, + data: [], + type: 'field', + // sortable: isNumber, + }; + + dynamicColumns.push(fieldObj); +}; + +const getDynamicColumns: GetDynamicColumns = (queryTableData, query) => { + const dynamicColumns: DynamicColumns = []; + + queryTableData.forEach((currentQuery) => { + if (currentQuery.list) { + currentQuery.list.forEach((listItem) => { + Object.keys(listItem.data).forEach((label) => { + createLabels( + listItem.data, + label as ListItemKey, + dynamicColumns, + ); + }); + }); + } + + if (currentQuery.series) { + if (!isColumnExist('timestamp', dynamicColumns)) { + dynamicColumns.push({ + key: 'timestamp', + data: [], + type: 'field', + // sortable: true, + }); + } + + currentQuery.series.forEach((seria) => { + Object.keys(seria.labels).forEach((label) => { + createLabels(seria.labels, label, dynamicColumns); + }); + }); + + const operator = getQueryOperator( + query.builder.queryData, + currentQuery.queryName, + ); + + if (operator === '' || isColumnExist(operator, dynamicColumns)) return; + + const operatorColumn: DynamicColumn = { + key: operator, + data: [], + type: 'operator', + // sortable: true, + }; + dynamicColumns.push(operatorColumn); + } + }); + + return dynamicColumns; +}; + +const fillEmptyRowCells = ( + unusedColumnsKeys: Set, + sourceColumns: DynamicColumns, + currentColumn: DynamicColumn, +): void => { + unusedColumnsKeys.forEach((key) => { + if (key === currentColumn.key) { + const unusedCol = sourceColumns.find((item) => item.key === key); + + if (unusedCol) { + unusedCol.data.push('N/A'); + unusedColumnsKeys.delete(key); + } + } + }); +}; + +const fillDataFromSeria = ( + seria: SeriesItem, + columns: DynamicColumns, + currentQueryName: string, +): void => { + const labelEntries = Object.entries(seria.labels); + + seria.values.forEach((value) => { + const unusedColumnsKeys = new Set( + columns.map((item) => item.key), + ); + + columns.forEach((column) => { + if (column.key === 'timestamp') { + column.data.push(value.timestamp); + unusedColumnsKeys.delete('timestamp'); + return; + } + + if (currentQueryName === column.key) { + column.data.push(parseFloat(value.value).toFixed(2)); + unusedColumnsKeys.delete(column.key); + return; + } + + labelEntries.forEach(([key, currentValue]) => { + if (column.key === key) { + column.data.push(currentValue); + unusedColumnsKeys.delete(key); + } + }); + + fillEmptyRowCells(unusedColumnsKeys, columns, column); + }); + }); +}; + +const fillDataFromList = ( + listItem: ListItem, + columns: DynamicColumns, +): void => { + columns.forEach((column) => { + if (isFormula(column.key as string)) return; + + Object.keys(listItem.data).forEach((label) => { + if (column.key === label) { + if (listItem.data[label as ListItemKey] !== '') { + column.data.push(listItem.data[label as ListItemKey].toString()); + } else { + column.data.push('N/A'); + } + } + }); + }); +}; + +const fillColumnsData: FillColumnData = (queryTableData, cols) => { + const fields = cols.filter((item) => item.type === 'field'); + const operators = cols.filter((item) => item.type === 'operator'); + const resultColumns = [...fields, ...operators]; + + queryTableData.forEach((currentQuery) => { + // const 
currentOperator = getQueryOperator( + // query.builder.queryData, + // currentQuery.queryName, + // ); + + if (currentQuery.series) { + currentQuery.series.forEach((seria) => { + fillDataFromSeria(seria, resultColumns, currentQuery.queryName); + }); + } + + if (currentQuery.list) { + currentQuery.list.forEach((listItem) => { + fillDataFromList(listItem, resultColumns); + }); + } + }); + + const rowsLength = resultColumns.length > 0 ? resultColumns[0].data.length : 0; + + return { filledDynamicColumns: resultColumns, rowsLength }; +}; + +const generateData = ( + dynamicColumns: DynamicColumns, + rowsLength: number, +): RowData[] => { + const data: RowData[] = []; + + for (let i = 0; i < rowsLength; i += 1) { + const rowData: RowData = dynamicColumns.reduce((acc, item) => { + const { key } = item; + + acc[key] = item.data[i]; + acc.key = uuid(); + + return acc; + }, {} as RowData); + + data.push(rowData); + } + + return data; +}; + +const generateTableColumns = ( + dynamicColumns: DynamicColumns, +): ColumnsType => { + const columns: ColumnsType = dynamicColumns.reduce< + ColumnsType + >((acc, item) => { + const column: ColumnType = { + dataIndex: item.key, + key: item.key, + title: prepareColumnTitle(item.key as string), + // sorter: item.sortable + // ? (a: RowData, b: RowData): number => + // (a[item.key] as number) - (b[item.key] as number) + // : false, + }; + + return [...acc, column]; + }, []); + + return columns; +}; + +export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({ + query, + queryTableData, + renderActionCell, +}) => { + const dynamicColumns = getDynamicColumns(queryTableData, query); + + const { filledDynamicColumns, rowsLength } = fillColumnsData( + queryTableData, + dynamicColumns, + ); + + const dataSource = generateData(filledDynamicColumns, rowsLength); + + const columns = generateTableColumns(filledDynamicColumns); + + const actionsCell: ColumnType | null = renderActionCell + ? 
{ + key: 'actions', + title: 'Actions', + render: (_, record): ReactNode => renderActionCell(record), + } + : null; + + if (actionsCell && dataSource.length > 0) { + columns.push(actionsCell); + } + + return { columns, dataSource, rowsLength }; +}; diff --git a/frontend/src/lib/toCapitalize.ts b/frontend/src/lib/toCapitalize.ts new file mode 100644 index 0000000000..7692dbf246 --- /dev/null +++ b/frontend/src/lib/toCapitalize.ts @@ -0,0 +1,5 @@ +export const toCapitalize = (str: string): string => { + if (!str) return ''; + + return str[0].toUpperCase() + str.slice(1); +}; diff --git a/frontend/src/pages/Logs/config.ts b/frontend/src/pages/Logs/config.ts index 60f46195bd..39a251cb05 100644 --- a/frontend/src/pages/Logs/config.ts +++ b/frontend/src/pages/Logs/config.ts @@ -25,3 +25,24 @@ export const logsOptions = ['raw', 'table']; export const defaultSelectStyle: CSSProperties = { minWidth: '6rem', }; + +export enum OrderPreferenceItems { + DESC = 'desc', + ASC = 'asc', +} + +export const orderItems: OrderPreference[] = [ + { + name: 'Descending', + enum: OrderPreferenceItems.DESC, + }, + { + name: 'Ascending', + enum: OrderPreferenceItems.ASC, + }, +]; + +export interface OrderPreference { + name: string; + enum: OrderPreferenceItems; +} diff --git a/frontend/src/pages/Logs/index.tsx b/frontend/src/pages/Logs/index.tsx index 76433a7c4d..4810d3e00b 100644 --- a/frontend/src/pages/Logs/index.tsx +++ b/frontend/src/pages/Logs/index.tsx @@ -1,4 +1,5 @@ import { Button, Col, Divider, Popover, Row, Select, Space } from 'antd'; +import { QueryParams } from 'constants/query'; import LogControls from 'container/LogControls'; import LogDetailedView from 'container/LogDetailedView'; import LogLiveTail from 'container/LogLiveTail'; @@ -6,20 +7,31 @@ import LogsAggregate from 'container/LogsAggregate'; import LogsFilters from 'container/LogsFilters'; import LogsSearchFilter from 'container/LogsSearchFilter'; import LogsTable from 'container/LogsTable'; +import history from 'lib/history'; import { useCallback, useMemo } from 'react'; -import { useDispatch } from 'react-redux'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; import { Dispatch } from 'redux'; +import { AppState } from 'store/reducers'; import AppActions from 'types/actions'; -import { SET_DETAILED_LOG_DATA } from 'types/actions/logs'; +import { SET_DETAILED_LOG_DATA, SET_LOGS_ORDER } from 'types/actions/logs'; import { ILog } from 'types/api/logs/log'; +import { ILogsReducer } from 'types/reducer/logs'; -import { defaultSelectStyle, logsOptions } from './config'; +import { + defaultSelectStyle, + logsOptions, + orderItems, + OrderPreferenceItems, +} from './config'; import { useSelectedLogView } from './hooks'; import PopoverContent from './PopoverContent'; import SpaceContainer from './styles'; function Logs(): JSX.Element { const dispatch = useDispatch>(); + const { order } = useSelector((store) => store.logs); + const location = useLocation(); const showExpandedLog = useCallback( (logData: ILog) => { @@ -67,6 +79,16 @@ function Logs(): JSX.Element { [handleViewModeOptionChange], ); + const handleChangeOrder = (value: OrderPreferenceItems): void => { + dispatch({ + type: SET_LOGS_ORDER, + payload: value, + }); + const params = new URLSearchParams(location.search); + params.set(QueryParams.order, value); + history.push({ search: params.toString() }); + }; + return ( <> Format )} + + diff --git a/frontend/src/pages/Logs/utils.ts b/frontend/src/pages/Logs/utils.ts index 
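A minimal sketch of feeding a v3 response into the new table builder; the QueryDataV3 value is hand-written, stagedQuery is assumed to be in scope, and the optional action cell is omitted.

const { columns, dataSource, rowsLength } = createTableColumnsFromQuery({
  query: stagedQuery,
  queryTableData: [
    {
      queryName: 'A',
      legend: '',
      list: null,
      series: [
        {
          labels: { service_name: 'frontend' },
          values: [{ timestamp: 1688000000000, value: '42.1234' }],
        },
      ],
    },
  ],
});
// Columns come from the series labels plus a timestamp column and one column
// per aggregate operator; every generated row is keyed with uuid().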
2d8f317a9a..8fffa3bb21 100644 --- a/frontend/src/pages/Logs/utils.ts +++ b/frontend/src/pages/Logs/utils.ts @@ -1,7 +1,29 @@ import { LogViewMode } from 'container/LogsTable'; -import { viewModeOptionList } from './config'; +import { OrderPreferenceItems, viewModeOptionList } from './config'; export const isLogViewMode = (value: unknown): value is LogViewMode => typeof value === 'string' && viewModeOptionList.some((option) => option.key === value); + +export const getIdConditions = ( + idStart: string, + idEnd: string, + order: OrderPreferenceItems, +): Record => { + const idConditions: Record = {}; + + if (idStart && order === OrderPreferenceItems.ASC) { + idConditions.idLt = idStart; + } else if (idStart) { + idConditions.idGt = idStart; + } + + if (idEnd && order === OrderPreferenceItems.ASC) { + idConditions.idGt = idEnd; + } else if (idEnd) { + idConditions.idLt = idEnd; + } + + return idConditions; +}; diff --git a/frontend/src/pages/LogsExplorer/index.tsx b/frontend/src/pages/LogsExplorer/index.tsx index fb8685717c..519538365e 100644 --- a/frontend/src/pages/LogsExplorer/index.tsx +++ b/frontend/src/pages/LogsExplorer/index.tsx @@ -1,21 +1,32 @@ import { Button, Col, Row } from 'antd'; import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; -import { LogsExplorerChart } from 'container/LogsExplorerChart'; -import { LogsExplorerViews } from 'container/LogsExplorerViews'; +import LogsExplorerChart from 'container/LogsExplorerChart'; +import LogsExplorerViews from 'container/LogsExplorerViews'; import { QueryBuilder } from 'container/QueryBuilder'; import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; +import { useMemo } from 'react'; import { DataSource } from 'types/common/queryBuilder'; // ** Styles import { ButtonWrapperStyled, WrapperStyled } from './styles'; -function LogsExporer(): JSX.Element { - const { handleRunQuery } = useQueryBuilder(); +function LogsExplorer(): JSX.Element { + const { handleRunQuery, updateAllQueriesOperators } = useQueryBuilder(); const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.LIST); - useShareBuilderUrl({ defaultValue: initialQueriesMap.logs }); + const defaultValue = useMemo( + () => + updateAllQueriesOperators( + initialQueriesMap.logs, + PANEL_TYPES.LIST, + DataSource.LOGS, + ), + [updateAllQueriesOperators], + ); + + useShareBuilderUrl(defaultValue); return ( @@ -42,4 +53,4 @@ function LogsExporer(): JSX.Element { ); } -export default LogsExporer; +export default LogsExplorer; diff --git a/frontend/src/pages/TracesExplorer/constants.ts b/frontend/src/pages/TracesExplorer/constants.ts deleted file mode 100644 index ceea4e582e..0000000000 --- a/frontend/src/pages/TracesExplorer/constants.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const CURRENT_TRACES_EXPLORER_TAB = 'currentTab'; - -export enum TracesExplorerTabs { - TIME_SERIES = 'times-series', - TRACES = 'traces', -} diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx index 57c2310d78..179a9a93e0 100644 --- a/frontend/src/pages/TracesExplorer/index.tsx +++ b/frontend/src/pages/TracesExplorer/index.tsx @@ -1,56 +1,179 @@ import { Tabs } from 'antd'; -import { initialQueriesMap } from 'constants/queryBuilder'; +import axios from 'axios'; +import { QueryParams } from 'constants/query'; +import { initialQueriesMap, PANEL_TYPES } from 
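A worked example of the new cursor helper: flipping the order also flips which log id bounds which side of the window (the id values are illustrative).

getIdConditions('id-100', 'id-200', OrderPreferenceItems.DESC);
// -> { idGt: 'id-100', idLt: 'id-200' }
getIdConditions('id-100', 'id-200', OrderPreferenceItems.ASC);
// -> { idLt: 'id-100', idGt: 'id-200' }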
'constants/queryBuilder'; +import { + COMPOSITE_QUERY, + PANEL_TYPES_QUERY, +} from 'constants/queryBuilderQueryNames'; +import ROUTES from 'constants/routes'; +import ExportPanel from 'container/ExportPanel'; +import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; import QuerySection from 'container/TracesExplorer/QuerySection'; +import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard'; +import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; -import useUrlQuery from 'hooks/useUrlQuery'; -import { useCallback, useEffect } from 'react'; -import { useHistory, useLocation } from 'react-router-dom'; +import { useNotifications } from 'hooks/useNotifications'; +import history from 'lib/history'; +import { useCallback, useEffect, useMemo } from 'react'; +import { generatePath } from 'react-router-dom'; +import { Dashboard } from 'types/api/dashboard/getAll'; +import { DataSource } from 'types/common/queryBuilder'; -import { CURRENT_TRACES_EXPLORER_TAB, TracesExplorerTabs } from './constants'; -import { Container } from './styles'; +import { ActionsWrapper, Container } from './styles'; import { getTabsItems } from './utils'; function TracesExplorer(): JSX.Element { - const urlQuery = useUrlQuery(); - const history = useHistory(); - const location = useLocation(); + const { notifications } = useNotifications(); + const { + currentQuery, + stagedQuery, + panelType, + updateAllQueriesOperators, + redirectWithQueryBuilderData, + } = useQueryBuilder(); - const currentUrlTab = urlQuery.get( - CURRENT_TRACES_EXPLORER_TAB, - ) as TracesExplorerTabs; - const currentTab = currentUrlTab || TracesExplorerTabs.TIME_SERIES; - const tabsItems = getTabsItems(); + const currentTab = panelType || PANEL_TYPES.LIST; - const redirectWithCurrentTab = useCallback( - (tabKey: string): void => { - urlQuery.set(CURRENT_TRACES_EXPLORER_TAB, tabKey); - const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; - history.push(generatedUrl); + const isMultipleQueries = useMemo( + () => + currentQuery.builder.queryData.length > 1 || + currentQuery.builder.queryFormulas.length > 0, + [currentQuery], + ); + + const isGroupByExist = useMemo(() => { + const groupByCount: number = currentQuery.builder.queryData.reduce( + (acc, query) => acc + query.groupBy.length, + 0, + ); + + return groupByCount > 0; + }, [currentQuery]); + + const defaultQuery = useMemo(() => { + const query = updateAllQueriesOperators( + initialQueriesMap.traces, + PANEL_TYPES.LIST, + DataSource.TRACES, + ); + + return { + ...query, + builder: { + ...query.builder, + queryData: [ + { + ...query.builder.queryData[0], + orderBy: [{ columnName: 'timestamp', order: 'desc' }], + }, + ], + }, + }; + }, [updateAllQueriesOperators]); + + const tabsItems = getTabsItems({ + isListViewDisabled: isMultipleQueries || isGroupByExist, + }); + + const exportDefaultQuery = useMemo( + () => + updateAllQueriesOperators( + stagedQuery || initialQueriesMap.traces, + PANEL_TYPES.TIME_SERIES, + DataSource.TRACES, + ), + [stagedQuery, updateAllQueriesOperators], + ); + + const { mutate: updateDashboard, isLoading } = useUpdateDashboard(); + + const handleExport = useCallback( + (dashboard: Dashboard | null): void => { + if (!dashboard) return; + + const updatedDashboard = addEmptyWidgetInDashboardJSONWithQuery( + dashboard, + exportDefaultQuery, + ); + + 
updateDashboard(updatedDashboard, { + onSuccess: (data) => { + const dashboardEditView = `${generatePath(ROUTES.DASHBOARD, { + dashboardId: data?.payload?.uuid, + })}/new?${QueryParams.graphType}=graph&${ + QueryParams.widgetId + }=empty&${COMPOSITE_QUERY}=${encodeURIComponent( + JSON.stringify(exportDefaultQuery), + )}`; + + history.push(dashboardEditView); + }, + onError: (error) => { + if (axios.isAxiosError(error)) { + notifications.error({ + message: error.message, + }); + } + }, + }); }, - [history, location, urlQuery], + [exportDefaultQuery, notifications, updateDashboard], ); const handleTabChange = useCallback( - (tabKey: string): void => { - redirectWithCurrentTab(tabKey); + (newPanelType: string): void => { + if (panelType === newPanelType) return; + + const query = updateAllQueriesOperators( + currentQuery, + newPanelType as GRAPH_TYPES, + DataSource.TRACES, + ); + + redirectWithQueryBuilderData(query, { [PANEL_TYPES_QUERY]: newPanelType }); }, - [redirectWithCurrentTab], + [ + currentQuery, + panelType, + redirectWithQueryBuilderData, + updateAllQueriesOperators, + ], ); - useShareBuilderUrl({ defaultValue: initialQueriesMap.traces }); + useShareBuilderUrl(defaultQuery); useEffect(() => { - if (currentUrlTab) return; + const shouldChangeView = isMultipleQueries || isGroupByExist; - redirectWithCurrentTab(TracesExplorerTabs.TIME_SERIES); - }, [currentUrlTab, redirectWithCurrentTab]); + if ( + (currentTab === PANEL_TYPES.LIST || currentTab === PANEL_TYPES.TRACE) && + shouldChangeView + ) { + handleTabChange(PANEL_TYPES.TIME_SERIES); + } + }, [currentTab, isMultipleQueries, isGroupByExist, handleTabChange]); return ( <> - + + + + + ); diff --git a/frontend/src/pages/TracesExplorer/styles.ts b/frontend/src/pages/TracesExplorer/styles.ts index 6da55b8d4d..9e68ad5a5c 100644 --- a/frontend/src/pages/TracesExplorer/styles.ts +++ b/frontend/src/pages/TracesExplorer/styles.ts @@ -3,3 +3,8 @@ import styled from 'styled-components'; export const Container = styled.div` margin: 1rem 0; `; + +export const ActionsWrapper = styled.div` + display: flex; + justify-content: flex-end; +`; diff --git a/frontend/src/pages/TracesExplorer/utils.tsx b/frontend/src/pages/TracesExplorer/utils.tsx index aff29bba58..2c0143dadc 100644 --- a/frontend/src/pages/TracesExplorer/utils.tsx +++ b/frontend/src/pages/TracesExplorer/utils.tsx @@ -1,17 +1,32 @@ import { TabsProps } from 'antd'; -import TimeSeriesView from 'container/TracesExplorer/TimeSeriesView'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import TimeSeriesView from 'container/TimeSeriesView'; +import ListView from 'container/TracesExplorer/ListView'; +import TracesView from 'container/TracesExplorer/TracesView'; +import { DataSource } from 'types/common/queryBuilder'; -import { TracesExplorerTabs } from './constants'; +interface GetTabsItemsProps { + isListViewDisabled: boolean; +} -export const getTabsItems = (): TabsProps['items'] => [ +export const getTabsItems = ({ + isListViewDisabled, +}: GetTabsItemsProps): TabsProps['items'] => [ { - label: 'Time Series', - key: TracesExplorerTabs.TIME_SERIES, - children: , + label: 'List View', + key: PANEL_TYPES.LIST, + children: , + disabled: isListViewDisabled, }, { label: 'Traces', - key: TracesExplorerTabs.TRACES, - children:
<div>Traces tab</div>
, + key: PANEL_TYPES.TRACE, + children: , + disabled: isListViewDisabled, + }, + { + label: 'Time Series', + key: PANEL_TYPES.TIME_SERIES, + children: , }, ]; diff --git a/frontend/src/providers/QueryBuilder.tsx b/frontend/src/providers/QueryBuilder.tsx index 2389ca01bc..7cb43a1a7e 100644 --- a/frontend/src/providers/QueryBuilder.tsx +++ b/frontend/src/providers/QueryBuilder.tsx @@ -16,6 +16,7 @@ import { import { COMPOSITE_QUERY } from 'constants/queryBuilderQueryNames'; import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam'; +import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; import useUrlQuery from 'hooks/useUrlQuery'; import { createIdFromObjectFields } from 'lib/createIdFromObjectFields'; import { createNewBuilderItemName } from 'lib/newQueryBuilder/createNewBuilderItemName'; @@ -29,7 +30,9 @@ import { useMemo, useState, } from 'react'; +import { useSelector } from 'react-redux'; import { useHistory, useLocation } from 'react-router-dom'; +import { AppState } from 'store/reducers'; // ** Types import { IBuilderFormula, @@ -45,6 +48,7 @@ import { QueryBuilderContextType, QueryBuilderData, } from 'types/common/queryBuilder'; +import { GlobalReducer } from 'types/reducer/globalTime'; import { v4 as uuid } from 'uuid'; export const QueryBuilderContext = createContext({ @@ -52,11 +56,11 @@ export const QueryBuilderContext = createContext({ stagedQuery: initialQueriesMap.metrics, initialDataSource: null, panelType: PANEL_TYPES.TIME_SERIES, + isEnabledQuery: false, handleSetQueryData: () => {}, handleSetFormulaData: () => {}, handleSetQueryItemData: () => {}, - handleSetPanelType: () => {}, - setupInitialDataSource: () => {}, + handleSetConfig: () => {}, removeQueryBuilderEntityByIndex: () => {}, removeQueryTypeItemByIndex: () => {}, addNewBuilderQuery: () => {}, @@ -65,6 +69,8 @@ export const QueryBuilderContext = createContext({ redirectWithQueryBuilderData: () => {}, handleRunQuery: () => {}, resetStagedQuery: () => {}, + updateAllQueriesOperators: () => initialQueriesMap.metrics, + initQueryBuilderData: () => {}, }); export function QueryBuilderProvider({ @@ -73,77 +79,125 @@ export function QueryBuilderProvider({ const urlQuery = useUrlQuery(); const history = useHistory(); const location = useLocation(); + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); const compositeQueryParam = useGetCompositeQueryParam(); + const { queryType: queryTypeParam, ...queryState } = + compositeQueryParam || initialQueriesMap.metrics; const [initialDataSource, setInitialDataSource] = useState( null, ); - const [panelType, setPanelType] = useState( - PANEL_TYPES.TIME_SERIES, - ); + const [panelType, setPanelType] = useState(null); const [currentQuery, setCurrentQuery] = useState( - initialQueryState, + queryState || initialQueryState, ); const [stagedQuery, setStagedQuery] = useState(null); - const [queryType, setQueryType] = useState( - EQueryType.QUERY_BUILDER, + const [queryType, setQueryType] = useState(queryTypeParam); + + const getElementWithActualOperator = useCallback( + ( + queryData: IBuilderQuery, + dataSource: DataSource, + currentPanelType: GRAPH_TYPES, + ): IBuilderQuery => { + const initialOperators = getOperatorsBySourceAndPanelType({ + dataSource, + panelType: currentPanelType, + }); + + const isCurrentOperatorAvailableInList = initialOperators + .map((operator) => operator.value) + .includes(queryData.aggregateOperator); + + if 
(!isCurrentOperatorAvailableInList) { + return { ...queryData, aggregateOperator: initialOperators[0].value }; + } + + return queryData; + }, + [], ); - const initQueryBuilderData = useCallback( - (query: Query): void => { - const { queryType: newQueryType, ...queryState } = query; - + const prepareQueryBuilderData = useCallback( + (query: Query): Query => { const builder: QueryBuilderData = { - queryData: queryState.builder.queryData.map((item) => ({ + queryData: query.builder.queryData.map((item) => ({ ...initialQueryBuilderFormValuesMap[ initialDataSource || DataSource.METRICS ], ...item, })), - queryFormulas: queryState.builder.queryFormulas.map((item) => ({ + queryFormulas: query.builder.queryFormulas.map((item) => ({ ...initialFormulaBuilderFormValues, ...item, })), }; - const promql: IPromQLQuery[] = queryState.promql.map((item) => ({ + const setupedQueryData = builder.queryData.map((item) => { + const currentElement: IBuilderQuery = { + ...item, + groupBy: item.groupBy.map(({ id: _, ...item }) => ({ + ...item, + id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder), + })), + aggregateAttribute: { + ...item.aggregateAttribute, + id: createIdFromObjectFields( + item.aggregateAttribute, + baseAutoCompleteIdKeysOrder, + ), + }, + }; + + return currentElement; + }); + + const promql: IPromQLQuery[] = query.promql.map((item) => ({ ...initialQueryPromQLData, ...item, })); - const clickHouse: IClickHouseQuery[] = queryState.clickhouse_sql.map( - (item) => ({ - ...initialClickHouseData, - ...item, - }), - ); - - const type = newQueryType || EQueryType.QUERY_BUILDER; + const clickHouse: IClickHouseQuery[] = query.clickhouse_sql.map((item) => ({ + ...initialClickHouseData, + ...item, + })); const newQueryState: QueryState = { clickhouse_sql: clickHouse, promql, builder: { ...builder, - queryData: builder.queryData.map((q) => ({ - ...q, - groupBy: q.groupBy.map(({ id: _, ...item }) => ({ - ...item, - id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder), - })), - aggregateAttribute: { - ...q.aggregateAttribute, - id: createIdFromObjectFields( - q.aggregateAttribute, - baseAutoCompleteIdKeysOrder, - ), - }, - })), + queryData: setupedQueryData, }, + id: query.id, + }; + + const nextQuery: Query = { + ...newQueryState, + queryType: query.queryType, + }; + + return nextQuery; + }, + [initialDataSource], + ); + + const initQueryBuilderData = useCallback( + (query: Query): void => { + const { queryType: newQueryType, ...queryState } = prepareQueryBuilderData( + query, + ); + + const type = newQueryType || EQueryType.QUERY_BUILDER; + + const newQueryState: QueryState = { + ...queryState, id: queryState.id, }; @@ -153,7 +207,19 @@ export function QueryBuilderProvider({ setCurrentQuery(newQueryState); setQueryType(type); }, - [initialDataSource], + [prepareQueryBuilderData], + ); + + const updateAllQueriesOperators = useCallback( + (query: Query, panelType: GRAPH_TYPES, dataSource: DataSource): Query => { + const queryData = query.builder.queryData.map((item) => + getElementWithActualOperator(item, dataSource, panelType), + ); + + return { ...query, builder: { ...query.builder, queryData } }; + }, + + [getElementWithActualOperator], ); const removeQueryBuilderEntityByIndex = useCallback( @@ -161,11 +227,14 @@ export function QueryBuilderProvider({ setCurrentQuery((prevState) => { const currentArray: (IBuilderQuery | IBuilderFormula)[] = prevState.builder[type]; + + const filteredArray = currentArray.filter((_, i) => index !== i); + return { ...prevState, builder: { 
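A sketch of the new provider helper as the explorer pages call it; the variable name is an assumption and the trailing comment describes the intended effect.

const defaultTracesQuery = updateAllQueriesOperators(
  initialQueriesMap.traces,
  PANEL_TYPES.LIST,
  DataSource.TRACES,
);
// Each builder.queryData item keeps its fields, but an aggregateOperator that
// is not valid for the TRACES + LIST combination is replaced with the first
// allowed operator (noop); already-valid operators are left untouched.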
...prevState.builder, - [type]: currentArray.filter((_, i) => index !== i), + [type]: filteredArray, }, }; }); @@ -199,20 +268,11 @@ export function QueryBuilderProvider({ existNames, sourceNames: alphabet, }), - ...(initialDataSource - ? { - dataSource: initialDataSource, - aggregateOperator: getOperatorsBySourceAndPanelType({ - dataSource: initialDataSource, - panelType, - })[0].value, - } - : {}), }; return newQuery; }, - [initialDataSource, panelType], + [initialDataSource], ); const createNewBuilderFormula = useCallback((formulas: IBuilderFormula[]) => { @@ -297,12 +357,6 @@ export function QueryBuilderProvider({ }); }, [createNewBuilderFormula]); - const setupInitialDataSource = useCallback( - (newInitialDataSource: DataSource | null) => - setInitialDataSource(newInitialDataSource), - [], - ); - const updateQueryBuilderData: ( arr: T[], index: number, @@ -310,7 +364,6 @@ export function QueryBuilderProvider({ ) => T[] = useCallback( (arr, index, newQueryItem) => arr.map((item, idx) => (index === idx ? newQueryItem : item)), - [], ); @@ -377,33 +430,40 @@ export function QueryBuilderProvider({ [updateQueryBuilderData], ); - const handleSetPanelType = useCallback((newPanelType: GRAPH_TYPES) => { - setPanelType(newPanelType); - }, []); - const redirectWithQueryBuilderData = useCallback( (query: Partial, searchParams?: Record) => { + const queryType = + !query.queryType || !Object.values(EQueryType).includes(query.queryType) + ? EQueryType.QUERY_BUILDER + : query.queryType; + + const builder = + !query.builder || query.builder.queryData.length === 0 + ? initialQueryState.builder + : query.builder; + + const promql = + !query.promql || query.promql.length === 0 + ? initialQueryState.promql + : query.promql; + + const clickhouseSql = + !query.clickhouse_sql || query.clickhouse_sql.length === 0 + ? initialQueryState.clickhouse_sql + : query.clickhouse_sql; + const currentGeneratedQuery: Query = { - queryType: - !query.queryType || !Object.values(EQueryType).includes(query.queryType) - ? EQueryType.QUERY_BUILDER - : query.queryType, - builder: - !query.builder || query.builder.queryData.length === 0 - ? initialQueryState.builder - : query.builder, - promql: - !query.promql || query.promql.length === 0 - ? initialQueryState.promql - : query.promql, - clickhouse_sql: - !query.clickhouse_sql || query.clickhouse_sql.length === 0 - ? 
initialQueryState.clickhouse_sql - : query.clickhouse_sql, + queryType, + builder, + promql, + clickhouse_sql: clickhouseSql, id: uuid(), }; - urlQuery.set(COMPOSITE_QUERY, JSON.stringify(currentGeneratedQuery)); + urlQuery.set( + COMPOSITE_QUERY, + encodeURIComponent(JSON.stringify(currentGeneratedQuery)), + ); if (searchParams) { Object.keys(searchParams).forEach((param) => @@ -415,12 +475,36 @@ export function QueryBuilderProvider({ history.push(generatedUrl); }, - [history, location, urlQuery], + [history, location.pathname, urlQuery], + ); + + const handleSetConfig = useCallback( + (newPanelType: GRAPH_TYPES, dataSource: DataSource | null) => { + setPanelType(newPanelType); + setInitialDataSource(dataSource); + }, + [], ); const handleRunQuery = useCallback(() => { - redirectWithQueryBuilderData({ ...currentQuery, queryType }); - }, [redirectWithQueryBuilderData, currentQuery, queryType]); + redirectWithQueryBuilderData({ + ...{ + ...currentQuery, + ...updateStepInterval( + { + builder: currentQuery.builder, + clickhouse_sql: currentQuery.clickhouse_sql, + promql: currentQuery.promql, + id: currentQuery.id, + queryType, + }, + maxTime, + minTime, + ), + }, + queryType, + }); + }, [currentQuery, queryType, maxTime, minTime, redirectWithQueryBuilderData]); const resetStagedQuery = useCallback(() => { setStagedQuery(null); @@ -458,17 +542,22 @@ export function QueryBuilderProvider({ [currentQuery, queryType], ); + const isEnabledQuery = useMemo(() => !!stagedQuery && !!panelType, [ + stagedQuery, + panelType, + ]); + const contextValues: QueryBuilderContextType = useMemo( () => ({ currentQuery: query, stagedQuery, initialDataSource, panelType, + isEnabledQuery, handleSetQueryData, handleSetFormulaData, handleSetQueryItemData, - handleSetPanelType, - setupInitialDataSource, + handleSetConfig, removeQueryBuilderEntityByIndex, removeQueryTypeItemByIndex, addNewBuilderQuery, @@ -477,17 +566,19 @@ export function QueryBuilderProvider({ redirectWithQueryBuilderData, handleRunQuery, resetStagedQuery, + updateAllQueriesOperators, + initQueryBuilderData, }), [ query, stagedQuery, initialDataSource, panelType, + isEnabledQuery, handleSetQueryData, handleSetFormulaData, handleSetQueryItemData, - handleSetPanelType, - setupInitialDataSource, + handleSetConfig, removeQueryBuilderEntityByIndex, removeQueryTypeItemByIndex, addNewBuilderQuery, @@ -496,6 +587,8 @@ export function QueryBuilderProvider({ redirectWithQueryBuilderData, handleRunQuery, resetStagedQuery, + updateAllQueriesOperators, + initQueryBuilderData, ], ); diff --git a/frontend/src/store/actions/dashboard/getQueryResults.ts b/frontend/src/store/actions/dashboard/getQueryResults.ts index c139c232c3..1487b3f7c8 100644 --- a/frontend/src/store/actions/dashboard/getQueryResults.ts +++ b/frontend/src/store/actions/dashboard/getQueryResults.ts @@ -3,24 +3,27 @@ // @ts-nocheck import { getMetricsQueryRange } from 'api/metrics/getQueryRange'; +import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import getStartEndRangeTime from 'lib/getStartEndRangeTime'; import getStep from 'lib/getStep'; +import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld'; import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi'; import { isEmpty } from 'lodash-es'; -import { GRAPH_TYPES } from 'container/NewDashboard/ComponentsSlider'; +import 
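Since the composite query is now encodeURIComponent-ed before being written to the URL, consumers have to decode it before JSON.parse, as the saveDashboard change later in this patch does; a minimal sketch:

const raw = new URLSearchParams(window.location.search).get(COMPOSITE_QUERY);
const compositeQuery: Query | null = raw
  ? JSON.parse(decodeURIComponent(raw))
  : null;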
store from 'store'; import { SuccessResponse } from 'types/api'; -import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { EQueryType } from 'types/common/dashboard'; -import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld'; -import getStartEndRangeTime from 'lib/getStartEndRangeTime'; +import { Pagination } from 'hooks/queryPagination'; export async function GetMetricQueryRange({ query, globalSelectedInterval, graphType, selectedTime, + tableParams, variables = {}, params = {}, }: GetQueryResultsProps): Promise> { @@ -37,8 +40,9 @@ export async function GetMetricQueryRange({ switch (query.queryType) { case EQueryType.QUERY_BUILDER: { const { queryData: data, queryFormulas } = query.builder; - const currentQueryData = mapQueryDataToApi(data, 'queryName'); + const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams); const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName'); + const builderQueries = { ...currentQueryData.data, ...currentFormulas.data, @@ -89,7 +93,11 @@ export async function GetMetricQueryRange({ const response = await getMetricsQueryRange({ start: parseInt(start, 10) * 1e3, end: parseInt(end, 10) * 1e3, - step: getStep({ start, end, inputFormat: 'ms' }), + step: getStep({ + start: store.getState().globalTime.minTime, + end: store.getState().globalTime.maxTime, + inputFormat: 'ns', + }), variables, ...QueryPayload, ...params, @@ -135,4 +143,8 @@ export interface GetQueryResultsProps { globalSelectedInterval: Time; variables?: Record; params?: Record; + tableParams?: { + pagination?: Pagination; + selectColumns?: any; + }; } diff --git a/frontend/src/store/actions/dashboard/saveDashboard.ts b/frontend/src/store/actions/dashboard/saveDashboard.ts index 107fecee25..14e62fedac 100644 --- a/frontend/src/store/actions/dashboard/saveDashboard.ts +++ b/frontend/src/store/actions/dashboard/saveDashboard.ts @@ -4,6 +4,7 @@ import { AxiosError } from 'axios'; import { COMPOSITE_QUERY } from 'constants/queryBuilderQueryNames'; import ROUTES from 'constants/routes'; import { ITEMS } from 'container/NewDashboard/ComponentsSlider/menuItems'; +import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; import history from 'lib/history'; import { Layout } from 'react-grid-layout'; import { generatePath } from 'react-router-dom'; @@ -88,9 +89,14 @@ export const SaveDashboard = ({ const allLayout = getAllLayout(); const params = new URLSearchParams(window.location.search); const compositeQuery = params.get(COMPOSITE_QUERY); + const { maxTime, minTime } = store.getState().globalTime; const query = compositeQuery - ? JSON.parse(compositeQuery) - : selectedWidget.query; + ? 
updateStepInterval( + JSON.parse(decodeURIComponent(compositeQuery)), + maxTime, + minTime, + ) + : updateStepInterval(selectedWidget.query, maxTime, minTime); const response = await updateDashboardApi({ data: { diff --git a/frontend/src/store/reducers/logs.ts b/frontend/src/store/reducers/logs.ts index 06c6b41b70..0d0a69a1a4 100644 --- a/frontend/src/store/reducers/logs.ts +++ b/frontend/src/store/reducers/logs.ts @@ -1,4 +1,5 @@ import { parseQuery } from 'lib/logql'; +import { OrderPreferenceItems } from 'pages/Logs/config'; import { ADD_SEARCH_FIELD_QUERY_STRING, FLUSH_LOGS, @@ -17,6 +18,7 @@ import { SET_LOG_LINES_PER_PAGE, SET_LOGS, SET_LOGS_AGGREGATE_SERIES, + SET_LOGS_ORDER, SET_SEARCH_QUERY_PARSED_PAYLOAD, SET_SEARCH_QUERY_STRING, SET_VIEW_MODE, @@ -37,7 +39,7 @@ const initialState: ILogsReducer = { parsedQuery: [], }, logs: [], - logLinesPerPage: 25, + logLinesPerPage: 200, linesPerRow: 2, viewMode: 'raw', idEnd: '', @@ -49,6 +51,10 @@ const initialState: ILogsReducer = { liveTailStartRange: 15, selectedLogId: null, detailedLog: null, + order: + (new URLSearchParams(window.location.search).get( + 'order', + ) as ILogsReducer['order']) ?? OrderPreferenceItems.DESC, }; export const LogsReducer = ( @@ -129,6 +135,17 @@ export const LogsReducer = ( logs: logsData, }; } + + case SET_LOGS_ORDER: { + const order = action.payload; + return { + ...state, + order, + idStart: '', + idEnd: '', + }; + } + case SET_LOG_LINES_PER_PAGE: { return { ...state, diff --git a/frontend/src/types/actions/logs.ts b/frontend/src/types/actions/logs.ts index bd400f9204..fbaa14a345 100644 --- a/frontend/src/types/actions/logs.ts +++ b/frontend/src/types/actions/logs.ts @@ -1,5 +1,7 @@ import { LogViewMode } from 'container/LogsTable'; +import { Pagination } from 'hooks/queryPagination'; import { ILogQLParsedQueryItem } from 'lib/logql/types'; +import { OrderPreferenceItems } from 'pages/Logs/config'; import { IField, IFieldMoveToSelected, IFields } from 'types/api/logs/fields'; import { TLogsLiveTailState } from 'types/api/logs/liveTail'; import { ILog } from 'types/api/logs/log'; @@ -33,6 +35,7 @@ export const SET_LINES_PER_ROW = 'SET_LINES_PER_ROW'; export const SET_VIEW_MODE = 'SET_VIEW_MODE'; export const UPDATE_SELECTED_FIELDS = 'LOGS_UPDATE_SELECTED_FIELDS'; export const UPDATE_INTERESTING_FIELDS = 'LOGS_UPDATE_INTERESTING_FIELDS'; +export const SET_LOGS_ORDER = 'SET_LOGS_ORDER'; export interface GetFields { type: typeof GET_FIELDS; @@ -70,7 +73,7 @@ export interface UpdateLogs { export interface SetLogsLinesPerPage { type: typeof SET_LOG_LINES_PER_PAGE; payload: { - logsLinesPerPage: number; + logsLinesPerPage: Pagination['limit']; }; } @@ -140,6 +143,11 @@ export interface UpdateSelectedInterestFields { }; } +export interface SetLogsOrder { + type: typeof SET_LOGS_ORDER; + payload: OrderPreferenceItems; +} + export type LogsActions = | GetFields | SetFields @@ -163,4 +171,5 @@ export type LogsActions = | SetLiveTailStartTime | SetLinesPerRow | SetViewMode - | UpdateSelectedInterestFields; + | UpdateSelectedInterestFields + | SetLogsOrder; diff --git a/frontend/src/types/api/logs/log.ts b/frontend/src/types/api/logs/log.ts index ef61ba9871..eb862daa9c 100644 --- a/frontend/src/types/api/logs/log.ts +++ b/frontend/src/types/api/logs/log.ts @@ -1,4 +1,5 @@ export interface ILog { + date: string; timestamp: number; id: string; traceId: string; diff --git a/frontend/src/types/api/metrics/getQueryRange.ts b/frontend/src/types/api/metrics/getQueryRange.ts index f8c32c29a3..5dd80a451f 100644 --- 
a/frontend/src/types/api/metrics/getQueryRange.ts +++ b/frontend/src/types/api/metrics/getQueryRange.ts @@ -5,6 +5,7 @@ export interface MetricRangePayloadProps { data: { result: QueryData[]; resultType: string; + newResult: MetricRangePayloadV3; }; } diff --git a/frontend/src/types/api/widgets/getQuery.ts b/frontend/src/types/api/widgets/getQuery.ts index 60d679c36e..0b36af1541 100644 --- a/frontend/src/types/api/widgets/getQuery.ts +++ b/frontend/src/types/api/widgets/getQuery.ts @@ -1,8 +1,12 @@ +import { ILog } from '../logs/log'; + export interface PayloadProps { status: 'success' | 'error'; result: QueryData[]; } +export type ListItem = { timestamp: string; data: Omit }; + export interface QueryData { metric: { [key: string]: string; @@ -20,10 +24,10 @@ export interface SeriesItem { } export interface QueryDataV3 { - list: null; + list: ListItem[] | null; queryName: string; legend?: string; - series: SeriesItem[]; + series: SeriesItem[] | null; } export interface Props { diff --git a/frontend/src/types/common/operations.types.ts b/frontend/src/types/common/operations.types.ts index 409f66cc5c..d5872c8bbe 100644 --- a/frontend/src/types/common/operations.types.ts +++ b/frontend/src/types/common/operations.types.ts @@ -18,6 +18,7 @@ export type HandleChangeQueryData = < export type UseQueryOperations = ( params: UseQueryOperationsParams, ) => { + isTracePanelType: boolean; isMetricsDataSource: boolean; operators: SelectOption[]; listOfAdditionalFilters: string[]; diff --git a/frontend/src/types/common/queryBuilder.ts b/frontend/src/types/common/queryBuilder.ts index 16b6f8cd08..4b6801eccf 100644 --- a/frontend/src/types/common/queryBuilder.ts +++ b/frontend/src/types/common/queryBuilder.ts @@ -143,6 +143,7 @@ export type PanelTypeKeys = | 'VALUE' | 'TABLE' | 'LIST' + | 'TRACE' | 'EMPTY_WIDGET'; export type ReduceOperators = 'last' | 'sum' | 'avg' | 'max' | 'min'; @@ -156,7 +157,8 @@ export type QueryBuilderContextType = { currentQuery: Query; stagedQuery: Query | null; initialDataSource: DataSource | null; - panelType: GRAPH_TYPES; + panelType: GRAPH_TYPES | null; + isEnabledQuery: boolean; handleSetQueryData: (index: number, queryData: IBuilderQuery) => void; handleSetFormulaData: (index: number, formulaData: IBuilderFormula) => void; handleSetQueryItemData: ( @@ -164,8 +166,10 @@ export type QueryBuilderContextType = { type: EQueryType.PROM | EQueryType.CLICKHOUSE, newQueryData: IPromQLQuery | IClickHouseQuery, ) => void; - handleSetPanelType: (newPanelType: GRAPH_TYPES) => void; - setupInitialDataSource: (newInitialDataSource: DataSource | null) => void; + handleSetConfig: ( + newPanelType: GRAPH_TYPES, + dataSource: DataSource | null, + ) => void; removeQueryBuilderEntityByIndex: ( type: keyof QueryBuilderData, index: number, @@ -183,6 +187,12 @@ export type QueryBuilderContextType = { ) => void; handleRunQuery: () => void; resetStagedQuery: () => void; + updateAllQueriesOperators: ( + queryData: Query, + panelType: GRAPH_TYPES, + dataSource: DataSource, + ) => Query; + initQueryBuilderData: (query: Query) => void; }; export type QueryAdditionalFilter = { diff --git a/frontend/src/types/reducer/logs.ts b/frontend/src/types/reducer/logs.ts index 762afc199c..52a216f62f 100644 --- a/frontend/src/types/reducer/logs.ts +++ b/frontend/src/types/reducer/logs.ts @@ -1,5 +1,7 @@ import { LogViewMode } from 'container/LogsTable'; +import { Pagination } from 'hooks/queryPagination'; import { ILogQLParsedQueryItem } from 'lib/logql/types'; +import { OrderPreferenceItems } from 
'pages/Logs/config'; import { IFields } from 'types/api/logs/fields'; import { TLogsLiveTailState } from 'types/api/logs/liveTail'; import { ILog } from 'types/api/logs/log'; @@ -12,7 +14,7 @@ export interface ILogsReducer { parsedQuery: ILogQLParsedQueryItem[]; }; logs: ILog[]; - logLinesPerPage: number; + logLinesPerPage: Pagination['limit']; linesPerRow: number; viewMode: LogViewMode; idEnd: string; @@ -24,6 +26,7 @@ export interface ILogsReducer { detailedLog: null | ILog; liveTail: TLogsLiveTailState; liveTailStartRange: number; // time in minutes + order: OrderPreferenceItems; } export default ILogsReducer; diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index fb59650982..7185581ab7 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -716,7 +716,7 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro return &services, nil } -func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError) { +func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig) (*map[string][]string, *model.ApiError) { operations := map[string][]string{} query := fmt.Sprintf(`SELECT DISTINCT name, serviceName FROM %s.%s`, r.TraceDB, r.topLevelOperationsTable) @@ -737,18 +737,21 @@ func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context) (*map[stri if _, ok := operations[serviceName]; !ok { operations[serviceName] = []string{} } + if skipConfig.ShouldSkip(serviceName, name) { + continue + } operations[serviceName] = append(operations[serviceName], name) } return &operations, nil } -func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) { +func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError) { if r.indexTable == "" { return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable} } - topLevelOps, apiErr := r.GetTopLevelOperations(ctx) + topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig) if apiErr != nil { return nil, apiErr } @@ -839,9 +842,9 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G return &serviceItems, nil } -func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError) { +func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError) { - topLevelOps, apiErr := r.GetTopLevelOperations(ctx) + topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig) if apiErr != nil { return nil, apiErr } @@ -4220,8 +4223,11 @@ func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([ for idx, v := range vars { if columnNames[idx] == "timestamp" { t = time.Unix(0, int64(*v.(*uint64))) + } else if columnNames[idx] == "timestamp_datetime" { + t = *v.(*time.Time) + } else { + row[columnNames[idx]] = v } - row[columnNames[idx]] = v } rowList = append(rowList, &v3.Row{Timestamp: t, Data: row}) } @@ -4296,6 +4302,8 @@ func (r *ClickHouseReader) GetTraceAggregateAttributes(ctx context.Context, req if err := rows.Scan(&tagKey, &tagType, &dataType, &isColumn); err != nil { 
return nil, fmt.Errorf("error while scanning rows: %s", err.Error()) } + // TODO: Remove this once the column name are updated in the table + tagKey = tempHandleFixedColumns(tagKey) key := v3.AttributeKey{ Key: tagKey, DataType: v3.AttributeKeyDataType(dataType), @@ -4335,6 +4343,8 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi if err := rows.Scan(&tagKey, &tagType, &dataType, &isColumn); err != nil { return nil, fmt.Errorf("error while scanning rows: %s", err.Error()) } + // TODO: Remove this once the column name are updated in the table + tagKey = tempHandleFixedColumns(tagKey) key := v3.AttributeKey{ Key: tagKey, DataType: v3.AttributeKeyDataType(dataType), @@ -4346,6 +4356,19 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi return &response, nil } +// tempHandleFixedColumns is a temporary function to handle the fixed columns whose name has been changed in AttributeKeys Table +func tempHandleFixedColumns(tagKey string) string { + switch { + case tagKey == "traceId": + tagKey = "traceID" + case tagKey == "spanId": + tagKey = "spanID" + case tagKey == "parentSpanId": + tagKey = "parentSpanID" + } + return tagKey +} + func (r *ClickHouseReader) GetTraceAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) { var query string diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 4eb48d1064..604b1f046a 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -63,6 +63,7 @@ type APIHandler struct { basePath string apiPrefix string reader interfaces.Reader + skipConfig *model.SkipConfig appDao dao.ModelDao alertManager am.Manager ruleManager *rules.Manager @@ -81,6 +82,7 @@ type APIHandlerOpts struct { // business data reader e.g. 
clickhouse Reader interfaces.Reader + SkipConfig *model.SkipConfig // dao layer to perform crud on app objects like dashboard, alerts etc AppDao dao.ModelDao @@ -102,6 +104,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { aH := &APIHandler{ reader: opts.Reader, appDao: opts.AppDao, + skipConfig: opts.SkipConfig, alertManager: alertManager, ruleManager: opts.RuleManager, featureFlags: opts.FeatureFlags, @@ -1316,7 +1319,7 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request) return } - result, apiErr := aH.reader.GetServiceOverview(r.Context(), query) + result, apiErr := aH.reader.GetServiceOverview(r.Context(), query, aH.skipConfig) if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) { return } @@ -1327,7 +1330,7 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request) func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) { - result, apiErr := aH.reader.GetTopLevelOperations(r.Context()) + result, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig) if apiErr != nil { RespondError(w, apiErr, nil) return @@ -1343,7 +1346,7 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) { return } - result, apiErr := aH.reader.GetServices(r.Context(), query) + result, apiErr := aH.reader.GetServices(r.Context(), query, aH.skipConfig) if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) { return } @@ -2681,6 +2684,11 @@ func (aH *APIHandler) getSpanKeysV3(ctx context.Context, queryRangeParams *v3.Qu if err != nil { return nil, err } + // Add timestamp as a span key to allow ordering by timestamp + spanKeys["timestamp"] = v3.AttributeKey{ + Key: "timestamp", + IsColumn: true, + } return spanKeys, nil } } @@ -2722,7 +2730,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que return } - if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeList { + if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeList || queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTrace { result, err, errQuriesByName = aH.execClickHouseListQueries(r.Context(), queries) } else { result, err, errQuriesByName = aH.execClickHouseGraphQueries(r.Context(), queries) diff --git a/pkg/query-service/app/logs/v3/enrich_query.go b/pkg/query-service/app/logs/v3/enrich_query.go index 88f9097814..5cf15618e2 100644 --- a/pkg/query-service/app/logs/v3/enrich_query.go +++ b/pkg/query-service/app/logs/v3/enrich_query.go @@ -131,7 +131,7 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu // check if the field is present in the fields map if existingField, ok := fields[field.Key]; ok { if existingField.IsColumn { - return field + return existingField } field.Type = existingField.Type field.DataType = existingField.DataType diff --git a/pkg/query-service/app/logs/v3/query_builder.go b/pkg/query-service/app/logs/v3/query_builder.go index 6ff58a2497..1e49250865 100644 --- a/pkg/query-service/app/logs/v3/query_builder.go +++ b/pkg/query-service/app/logs/v3/query_builder.go @@ -321,19 +321,13 @@ func orderByAttributeKeyTags(panelType v3.PanelType, aggregatorOperator v3.Aggre } orderByArray := orderBy(panelType, items, groupTags) - found := false - for i := 0; i < len(orderByArray); i++ { - if strings.Compare(orderByArray[i], constants.TIMESTAMP) == 0 { - orderByArray[i] = "ts" - break - } - } - if !found { - if aggregatorOperator == v3.AggregateOperatorNoOp { + if 
panelType == v3.PanelTypeList { + if len(orderByArray) == 0 { orderByArray = append(orderByArray, constants.TIMESTAMP) - } else { - orderByArray = append(orderByArray, "ts") } + } else { + // since in other aggregation operator we will have to add ts as it will not be present in group by + orderByArray = append(orderByArray, "ts") } str := strings.Join(orderByArray, ",") diff --git a/pkg/query-service/app/logs/v3/query_builder_test.go b/pkg/query-service/app/logs/v3/query_builder_test.go index 5103c7a177..2712b8a874 100644 --- a/pkg/query-service/app/logs/v3/query_builder_test.go +++ b/pkg/query-service/app/logs/v3/query_builder_test.go @@ -589,7 +589,7 @@ var testBuildLogsQueryData = []struct { }, { Name: "Test Noop", - PanelType: v3.PanelTypeGraph, + PanelType: v3.PanelTypeList, Start: 1680066360726210000, End: 1680066458000000000, Step: 60, @@ -605,6 +605,25 @@ var testBuildLogsQueryData = []struct { "CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " + "from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by timestamp", }, + { + Name: "Test Noop order by custom", + PanelType: v3.PanelTypeList, + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + SelectColumns: []v3.AttributeKey{}, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, + OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC", IsColumn: true}}, + }, + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," + + "CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," + + "CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " + + "from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by method ASC", + }, { Name: "Test aggregate with having clause", PanelType: v3.PanelTypeGraph, diff --git a/pkg/query-service/app/metrics/query_builder.go b/pkg/query-service/app/metrics/query_builder.go index 16620bf7cc..435e011dbd 100644 --- a/pkg/query-service/app/metrics/query_builder.go +++ b/pkg/query-service/app/metrics/query_builder.go @@ -45,7 +45,7 @@ var AggregateOperatorToSQLFunc = map[model.AggregateOperator]string{ } // See https://github.com/SigNoz/signoz/issues/2151#issuecomment-1467249056 -var rateWithoutNegative = `if (runningDifference(value) < 0 OR runningDifference(ts) < 0, nan, runningDifference(value)/runningDifference(ts))` +var rateWithoutNegative = `if (runningDifference(value) < 0 OR runningDifference(ts) <= 0, nan, runningDifference(value)/runningDifference(ts))` var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"} diff --git a/pkg/query-service/app/metrics/v3/query_builder.go b/pkg/query-service/app/metrics/v3/query_builder.go index dc5aadb618..f7e3956cca 100644 --- a/pkg/query-service/app/metrics/v3/query_builder.go +++ b/pkg/query-service/app/metrics/v3/query_builder.go @@ -37,10 +37,14 @@ var 
aggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{ v3.AggregateOperatorRateAvg: "avg", v3.AggregateOperatorRateMax: "max", v3.AggregateOperatorRateMin: "min", + v3.AggregateOperatorSumRate: "sum", + v3.AggregateOperatorAvgRate: "avg", + v3.AggregateOperatorMaxRate: "max", + v3.AggregateOperatorMinRate: "min", } // See https://github.com/SigNoz/signoz/issues/2151#issuecomment-1467249056 -var rateWithoutNegative = `if (runningDifference(value) < 0 OR runningDifference(ts) < 0, nan, runningDifference(value)/runningDifference(ts))` +var rateWithoutNegative = `if (runningDifference(value) < 0 OR runningDifference(ts) <= 0, nan, runningDifference(value)/runningDifference(ts))` // buildMetricsTimeSeriesFilterQuery builds the sub-query to be used for filtering // timeseries based on search criteria @@ -212,7 +216,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str query = fmt.Sprintf(query, "labels as fullLabels,", subQuery) return query, nil - case v3.AggregateOperatorSumRate: + case v3.AggregateOperatorSumRate, v3.AggregateOperatorAvgRate, v3.AggregateOperatorMaxRate, v3.AggregateOperatorMinRate: rateGroupBy := "fingerprint, " + groupBy rateGroupTags := "fingerprint, " + groupTags rateOrderBy := "fingerprint, " + orderBy @@ -222,7 +226,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str ) // labels will be same so any should be fine query := `SELECT %s ts, ` + rateWithoutNegative + `as value FROM(%s) WHERE isNaN(value) = 0` query = fmt.Sprintf(query, groupTags, subQuery) - query = fmt.Sprintf(`SELECT %s ts, sum(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, query, groupBy, orderBy) + query = fmt.Sprintf(`SELECT %s ts, %s(value) as value FROM (%s) GROUP BY %s ORDER BY %s ts`, groupTags, aggregateOperatorToSQLFunc[mq.AggregateOperator], query, groupBy, orderBy) return query, nil case v3.AggregateOperatorRateSum, diff --git a/pkg/query-service/app/metrics/v3/query_builder_test.go b/pkg/query-service/app/metrics/v3/query_builder_test.go index ff35ed15f2..7319236254 100644 --- a/pkg/query-service/app/metrics/v3/query_builder_test.go +++ b/pkg/query-service/app/metrics/v3/query_builder_test.go @@ -234,3 +234,56 @@ func TestBuildQueryOperators(t *testing.T) { }) } } + +func TestBuildQueryXRate(t *testing.T) { + t.Run("TestBuildQueryXRate", func(t *testing.T) { + + tmpl := `SELECT ts, %s(value) as value FROM (SELECT ts, if (runningDifference(value) < 0 OR runningDifference(ts) <= 0, nan, runningDifference(value)/runningDifference(ts))as value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 0 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 GLOBAL INNER JOIN (SELECT fingerprint FROM signoz_metrics.distributed_time_series_v2 WHERE metric_name = 'name') as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991982000 AND timestamp_ms <= 1651078382000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(value) = 0) GROUP BY ts ORDER BY ts` + + cases := []struct { + aggregateOperator v3.AggregateOperator + expectedQuery string + }{ + { + aggregateOperator: v3.AggregateOperatorAvgRate, + expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorAvgRate]), + }, + { + aggregateOperator: v3.AggregateOperatorMaxRate, + expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorMaxRate]), + }, + { + aggregateOperator: v3.AggregateOperatorMinRate, + 
expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorMinRate]), + }, + { + aggregateOperator: v3.AggregateOperatorSumRate, + expectedQuery: fmt.Sprintf(tmpl, aggregateOperatorToSQLFunc[v3.AggregateOperatorSumRate]), + }, + } + + for _, c := range cases { + + q := &v3.QueryRangeParamsV3{ + Start: 1650991982000, + End: 1651078382000, + Step: 60, + CompositeQuery: &v3.CompositeQuery{ + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + AggregateAttribute: v3.AttributeKey{Key: "name"}, + AggregateOperator: c.aggregateOperator, + Expression: "A", + }, + }, + QueryType: v3.QueryTypeBuilder, + PanelType: v3.PanelTypeGraph, + }, + } + query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"]) + require.NoError(t, err) + require.Equal(t, query, c.expectedQuery) + } + }) +} diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index 038b06f312..92325acd45 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ b/pkg/query-service/app/queryBuilder/query_builder.go @@ -32,6 +32,8 @@ var SupportedFunctions = []string{ "atan", "degrees", "radians", + "now", + "toUnixTimestamp", } var EvalFuncs = map[string]govaluate.ExpressionFunction{} diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 10a172e000..293d3f8753 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -41,9 +41,10 @@ import ( ) type ServerOptions struct { - PromConfigPath string - HTTPHostPort string - PrivateHostPort string + PromConfigPath string + SkipTopLvlOpsPath string + HTTPHostPort string + PrivateHostPort string // alert specific params DisableRules bool RuleRepoURL string @@ -105,6 +106,14 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { } else { return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage) } + var skipConfig *model.SkipConfig + if serverOptions.SkipTopLvlOpsPath != "" { + // read skip config + skipConfig, err = model.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath) + if err != nil { + return nil, err + } + } <-readerReady rm, err := makeRulesManager(serverOptions.PromConfigPath, constants.GetAlertManagerApiPrefix(), serverOptions.RuleRepoURL, localDB, reader, serverOptions.DisableRules, fm) @@ -115,6 +124,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { telemetry.GetInstance().SetReader(reader) apiHandler, err := NewAPIHandler(APIHandlerOpts{ Reader: reader, + SkipConfig: skipConfig, AppDao: dao.DB(), RuleManager: rm, FeatureFlags: fm, diff --git a/pkg/query-service/app/traces/v3/query_builder.go b/pkg/query-service/app/traces/v3/query_builder.go index d71f802c8c..49572241d9 100644 --- a/pkg/query-service/app/traces/v3/query_builder.go +++ b/pkg/query-service/app/traces/v3/query_builder.go @@ -95,7 +95,7 @@ func enrichKeyWithMetadata(key v3.AttributeKey, keys map[string]v3.AttributeKey) } // getSelectLabels returns the select labels for the query based on groupBy and aggregateOperator -func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey, keys map[string]v3.AttributeKey) (string, error) { +func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey, keys map[string]v3.AttributeKey) string { var selectLabels string if aggregatorOperator == v3.AggregateOperatorNoOp { selectLabels = "" @@ -105,7 +105,16 @@ func 
getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.Attri selectLabels += fmt.Sprintf(", %s as `%s`", filterName, tag.Key) } } - return selectLabels, nil + return selectLabels +} + +func getSelectColumns(sc []v3.AttributeKey, keys map[string]v3.AttributeKey) string { + var columns []string + for _, tag := range sc { + columnName := getColumnName(tag, keys) + columns = append(columns, fmt.Sprintf("%s as `%s` ", columnName, tag.Key)) + } + return strings.Join(columns, ",") } // getZerosForEpochNano returns the number of zeros to be appended to the epoch time for converting it to nanoseconds @@ -139,7 +148,9 @@ func buildTracesFilterQuery(fs *v3.FilterSet, keys map[string]v3.AttributeKey) ( return "", fmt.Errorf("invalid value for key %s: %v", item.Key.Key, err) } } - fmtVal = utils.ClickHouseFormattedValue(val) + if val != nil { + fmtVal = utils.ClickHouseFormattedValue(val) + } if operator, ok := tracesOperatorMappingV3[item.Operator]; ok { switch item.Operator { case v3.FilterOperatorContains, v3.FilterOperatorNotContains: @@ -208,7 +219,7 @@ func handleEmptyValuesInGroupBy(keys map[string]v3.AttributeKey, groupBy []v3.At return "", nil } -func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName string, keys map[string]v3.AttributeKey) (string, error) { +func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName string, keys map[string]v3.AttributeKey, panelType v3.PanelType) (string, error) { filterSubQuery, err := buildTracesFilterQuery(mq.Filters, keys) if err != nil { @@ -217,10 +228,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str // timerange will be sent in epoch millisecond spanIndexTableTimeFilter := fmt.Sprintf("(timestamp >= '%d' AND timestamp <= '%d')", start*getZerosForEpochNano(start), end*getZerosForEpochNano(end)) - selectLabels, err := getSelectLabels(mq.AggregateOperator, mq.GroupBy, keys) - if err != nil { - return "", err - } + selectLabels := getSelectLabels(mq.AggregateOperator, mq.GroupBy, keys) having := having(mq.Having) if having != "" { @@ -234,7 +242,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str "from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_TABLENAME + " where " + spanIndexTableTimeFilter + "%s " + "group by %s%s " + - "order by %sts" + "order by %s" emptyValuesInGroupByFilter, err := handleEmptyValuesInGroupBy(keys, mq.GroupBy) if err != nil { @@ -243,7 +251,8 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str filterSubQuery += emptyValuesInGroupByFilter groupBy := groupByAttributeKeyTags(keys, mq.GroupBy...) - orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy) + enrichedOrderBy := enrichOrderBy(mq.OrderBy, keys) + orderBy := orderByAttributeKeyTags(panelType, enrichedOrderBy, mq.GroupBy, keys) aggregationKey := "" if mq.AggregateAttribute.Key != "" { @@ -297,15 +306,48 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str query := fmt.Sprintf(queryTmpl, step, op, filterSubQuery, groupBy, having, orderBy) return query, nil case v3.AggregateOperatorNoOp: - // queryTmpl := constants.TracesSQLSelect + "from " + constants.SIGNOZ_TRACE_DBNAME + "." 
+ constants.SIGNOZ_SPAN_INDEX_TABLENAME + " where %s %s" - // query := fmt.Sprintf(queryTmpl, spanIndexTableTimeFilter, filterSubQuery) - // return query, nil - return "", fmt.Errorf("not implemented, part of traces page") + var query string + if panelType == v3.PanelTypeTrace { + withSubQuery := fmt.Sprintf(constants.TracesExplorerViewSQLSelectWithSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, spanIndexTableTimeFilter, filterSubQuery) + withSubQuery = addLimitToQuery(withSubQuery, mq.Limit, panelType) + if mq.Offset != 0 { + withSubQuery = addOffsetToQuery(withSubQuery, mq.Offset) + } + query = withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME) + } else if panelType == v3.PanelTypeList { + if len(mq.SelectColumns) == 0 { + return "", fmt.Errorf("select columns cannot be empty for panelType %s", panelType) + } + selectColumns := getSelectColumns(mq.SelectColumns, keys) + queryNoOpTmpl := fmt.Sprintf("SELECT timestamp as timestamp_datetime, spanID, traceID, "+"%s ", selectColumns) + "from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_TABLENAME + " where %s %s" + " order by %s" + query = fmt.Sprintf(queryNoOpTmpl, spanIndexTableTimeFilter, filterSubQuery, orderBy) + } else { + return "", fmt.Errorf("unsupported aggregate operator %s for panelType %s", mq.AggregateOperator, panelType) + } + return query, nil default: - return "", fmt.Errorf("unsupported aggregate operator") + return "", fmt.Errorf("unsupported aggregate operator %s", mq.AggregateOperator) } } +func enrichOrderBy(items []v3.OrderBy, keys map[string]v3.AttributeKey) []v3.OrderBy { + enrichedItems := []v3.OrderBy{} + for i := 0; i < len(items); i++ { + attributeKey := enrichKeyWithMetadata(v3.AttributeKey{ + Key: items[i].ColumnName, + }, keys) + enrichedItems = append(enrichedItems, v3.OrderBy{ + ColumnName: items[i].ColumnName, + Order: items[i].Order, + Key: attributeKey.Key, + DataType: attributeKey.DataType, + Type: attributeKey.Type, + IsColumn: attributeKey.IsColumn, + }) + } + return enrichedItems +} + // groupBy returns a string of comma separated tags for group by clause // `ts` is always added to the group by clause func groupBy(tags ...string) string { @@ -316,47 +358,77 @@ func groupBy(tags ...string) string { func groupByAttributeKeyTags(keys map[string]v3.AttributeKey, tags ...v3.AttributeKey) string { groupTags := []string{} for _, tag := range tags { - groupTags = append(groupTags, tag.Key) + groupTags = append(groupTags, fmt.Sprintf("`%s`", tag.Key)) } return groupBy(groupTags...) 
} // orderBy returns a string of comma separated tags for order by clause +// if there are remaining items which are not present in tags they are also added // if the order is not specified, it defaults to ASC -func orderBy(items []v3.OrderBy, tags []string) string { +func orderBy(panelType v3.PanelType, items []v3.OrderBy, tags []string, keys map[string]v3.AttributeKey) []string { var orderBy []string + + // create a lookup + addedToOrderBy := map[string]bool{} + itemsLookup := map[string]v3.OrderBy{} + + for i := 0; i < len(items); i++ { + addedToOrderBy[items[i].ColumnName] = false + itemsLookup[items[i].ColumnName] = items[i] + } + for _, tag := range tags { - found := false - for _, item := range items { - if item.ColumnName == tag { - found = true - orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order)) - break - } - } - if !found { - orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag)) + if item, ok := itemsLookup[tag]; ok { + orderBy = append(orderBy, fmt.Sprintf("`%s` %s", item.ColumnName, item.Order)) + addedToOrderBy[item.ColumnName] = true + } else { + orderBy = append(orderBy, fmt.Sprintf("`%s` ASC", tag)) } } - // users might want to order by value of aggreagation + // users might want to order by value of aggregation for _, item := range items { if item.ColumnName == constants.SigNozOrderByValue { orderBy = append(orderBy, fmt.Sprintf("value %s", item.Order)) + addedToOrderBy[item.ColumnName] = true } } - return strings.Join(orderBy, ",") + + // add the remaining items + if panelType == v3.PanelTypeList { + for _, item := range items { + // since these are not present in tags we will have to select them correctly + // for list view there is no need to check if it was added since they wont be added yet but this is just for safety + if !addedToOrderBy[item.ColumnName] { + attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn} + name := getColumnName(attr, keys) + + if item.IsColumn { + orderBy = append(orderBy, fmt.Sprintf("`%s` %s", name, item.Order)) + } else { + orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order)) + } + } + } + } + return orderBy } -func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string { +func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []v3.AttributeKey, keys map[string]v3.AttributeKey) string { var groupTags []string for _, tag := range tags { groupTags = append(groupTags, tag.Key) } - str := orderBy(items, groupTags) - if len(str) > 0 { - str = str + "," + orderByArray := orderBy(panelType, items, groupTags, keys) + + if panelType == v3.PanelTypeList && len(orderByArray) == 0 { + orderByArray = append(orderByArray, constants.TIMESTAMP+" DESC") + } else if panelType == v3.PanelTypeGraph || panelType == v3.PanelTypeTable { + orderByArray = append(orderByArray, "ts") } + + str := strings.Join(orderByArray, ",") return str } @@ -393,10 +465,7 @@ func addLimitToQuery(query string, limit uint64, panelType v3.PanelType) string if limit == 0 { limit = 100 } - if panelType == v3.PanelTypeList { - return fmt.Sprintf("%s LIMIT %d", query, limit) - } - return query + return fmt.Sprintf("%s LIMIT %d", query, limit) } func addOffsetToQuery(query string, offset uint64) string { @@ -404,17 +473,19 @@ func addOffsetToQuery(query string, offset uint64) string { } func PrepareTracesQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, keys map[string]v3.AttributeKey) (string, error) { - 
query, err := buildTracesQuery(start, end, mq.StepInterval, mq, constants.SIGNOZ_SPAN_INDEX_TABLENAME, keys) + query, err := buildTracesQuery(start, end, mq.StepInterval, mq, constants.SIGNOZ_SPAN_INDEX_TABLENAME, keys, panelType) if err != nil { return "", err } if panelType == v3.PanelTypeValue { query, err = reduceToQuery(query, mq.ReduceTo, mq.AggregateOperator) } - query = addLimitToQuery(query, mq.Limit, panelType) + if panelType == v3.PanelTypeList { + query = addLimitToQuery(query, mq.Limit, panelType) - if mq.Offset != 0 { - query = addOffsetToQuery(query, mq.Offset) + if mq.Offset != 0 { + query = addOffsetToQuery(query, mq.Offset) + } } return query, err } diff --git a/pkg/query-service/app/traces/v3/query_builder_test.go b/pkg/query-service/app/traces/v3/query_builder_test.go index 9cda548b9e..023a636d12 100644 --- a/pkg/query-service/app/traces/v3/query_builder_test.go +++ b/pkg/query-service/app/traces/v3/query_builder_test.go @@ -244,13 +244,51 @@ var testGetSelectLabelsData = []struct { func TestGetSelectLabels(t *testing.T) { for _, tt := range testGetSelectLabelsData { Convey("testGetSelectLabelsData", t, func() { - selectLabels, err := getSelectLabels(tt.AggregateOperator, tt.GroupByTags, map[string]v3.AttributeKey{}) - So(err, ShouldBeNil) + selectLabels := getSelectLabels(tt.AggregateOperator, tt.GroupByTags, map[string]v3.AttributeKey{}) So(selectLabels, ShouldEqual, tt.SelectLabels) }) } } +var testGetSelectColumnsData = []struct { + Name string + sc []v3.AttributeKey + SelectColumns string +}{ + { + Name: "select columns attribute", + sc: []v3.AttributeKey{{Key: "user.name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + SelectColumns: "stringTagMap['user.name'] as `user.name` ", + }, + { + Name: "select columns resource", + sc: []v3.AttributeKey{{Key: "user.name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}}, + SelectColumns: "resourceTagsMap['user.name'] as `user.name` ", + }, + { + Name: "select columns attribute and resource", + sc: []v3.AttributeKey{ + {Key: "user.name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, + {Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, + }, + SelectColumns: "resourceTagsMap['user.name'] as `user.name` ,stringTagMap['host'] as `host` ", + }, + { + Name: "select columns fixed column", + sc: []v3.AttributeKey{{Key: "host", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, + SelectColumns: "host as `host` ", + }, +} + +func TestGetSelectColumns(t *testing.T) { + for _, tt := range testGetSelectColumnsData { + Convey("testGetSelectColumnsData", t, func() { + selectColumns := getSelectColumns(tt.sc, map[string]v3.AttributeKey{}) + So(selectColumns, ShouldEqual, tt.SelectColumns) + }) + } +} + var testGetZerosForEpochNanoData = []struct { Name string Epoch int64 @@ -282,13 +320,15 @@ func TestGetZerosForEpochNano(t *testing.T) { } var testOrderBy = []struct { - Name string - Items []v3.OrderBy - Tags []string - Result string + Name string + PanelType v3.PanelType + Items []v3.OrderBy + Tags []string + Result []string }{ { - Name: "Test 1", + Name: "Test 1", + PanelType: v3.PanelTypeGraph, Items: []v3.OrderBy{ { ColumnName: "name", @@ -300,10 +340,11 @@ var testOrderBy = []struct { }, }, Tags: []string{"name"}, - Result: "name asc,value desc", + Result: []string{"`name` asc", "value desc"}, }, { - Name: "Test 2", + Name: "Test 2", + PanelType: v3.PanelTypeList, Items: 
[]v3.OrderBy{ { ColumnName: "name", @@ -315,10 +356,11 @@ var testOrderBy = []struct { }, }, Tags: []string{"name", "bytes"}, - Result: "name asc,bytes asc", + Result: []string{"`name` asc", "`bytes` asc"}, }, { - Name: "Test 3", + Name: "Test 3", + PanelType: v3.PanelTypeList, Items: []v3.OrderBy{ { ColumnName: "name", @@ -334,15 +376,70 @@ var testOrderBy = []struct { }, }, Tags: []string{"name", "bytes"}, - Result: "name asc,bytes asc,value asc", + Result: []string{"`name` asc", "`bytes` asc", "value asc"}, + }, + { + Name: "Test 4", + PanelType: v3.PanelTypeList, + Items: []v3.OrderBy{ + { + ColumnName: "name", + Order: "asc", + }, + { + ColumnName: "bytes", + Order: "asc", + }, + { + ColumnName: "response_time", + Order: "desc", + Key: "response_time", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + }, + }, + Tags: []string{"name", "bytes"}, + Result: []string{"`name` asc", "`bytes` asc", "stringTagMap['response_time'] desc"}, + }, + { + Name: "Test 5", + PanelType: v3.PanelTypeList, + Items: []v3.OrderBy{ + { + ColumnName: "name", + Order: "asc", + Key: "name", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + IsColumn: true, + }, + { + ColumnName: "bytes", + Order: "asc", + Key: "bytes", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + IsColumn: true, + }, + { + ColumnName: "response_time", + Order: "desc", + }, + }, + Tags: []string{}, + Result: []string{"`name` asc", "`bytes` asc", "stringTagMap['response_time'] desc"}, }, } func TestOrderBy(t *testing.T) { for _, tt := range testOrderBy { Convey("testOrderBy", t, func() { - res := orderBy(tt.Items, tt.Tags) - So(res, ShouldEqual, tt.Result) + res := orderBy(tt.PanelType, tt.Items, tt.Tags, map[string]v3.AttributeKey{ + "name": {Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + "bytes": {Key: "bytes", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + "response_time": {Key: "response_time", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: false}, + }) + So(res, ShouldResemble, tt.Result) }) } } @@ -357,6 +454,7 @@ var testBuildTracesQueryData = []struct { TableName string AggregateOperator v3.AggregateOperator ExpectedQuery string + PanelType v3.PanelType }{ { Name: "Test aggregate count on fixed column of float64 type", @@ -373,6 +471,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate rate without aggregate attribute", @@ -388,6 +487,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count()/60 as value from" + " signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <=" + " '1680066458000000000') group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count on fixed column of float64 type with filter", @@ -406,6 +506,7 @@ var testBuildTracesQueryData = []struct { " toFloat64(count()) as value from signoz_traces.distributed_signoz_index_v2" + " where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " AND stringTagMap['customer_id'] 
= '10001' group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count on fixed column of bool type", @@ -422,6 +523,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count on a attribute", @@ -438,6 +540,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " AND has(stringTagMap, 'user_name') group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count on a fixed column of string type", @@ -454,6 +557,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " AND name != '' group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count with filter", @@ -473,6 +577,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " AND numberTagMap['bytes'] > 100.000000 AND has(stringTagMap, 'user_name') group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count distinct and order by value", @@ -490,6 +595,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(name))) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " group by ts order by value ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count distinct on string key", @@ -506,6 +612,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(stringTagMap['name'])))" + " as value from signoz_traces.distributed_signoz_index_v2 where" + " (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') group by ts order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count distinct with filter and groupBy", @@ -531,8 +638,9 @@ var testBuildTracesQueryData = []struct { "toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['http.method'] = 'GET' AND resourceTagsMap['x'] != 'abc' " + - "AND has(stringTagMap, 'http.method') group by http.method,ts " + - "order by http.method ASC,ts", + "AND has(stringTagMap, 'http.method') group by `http.method`,ts " + + "order by `http.method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate count with multiple filter,groupBy and orderBy", @@ -562,8 +670,9 @@ var testBuildTracesQueryData = 
[]struct { "toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' AND resourceTagsMap['x'] != 'abc' " + - "AND has(stringTagMap, 'method') AND has(resourceTagsMap, 'x') group by method,x,ts " + - "order by method ASC,x ASC,ts", + "AND has(stringTagMap, 'method') AND has(resourceTagsMap, 'x') group by `method`,`x`,ts " + + "order by `method` ASC,`x` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate avg", @@ -589,8 +698,9 @@ var testBuildTracesQueryData = []struct { "from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate sum", @@ -616,8 +726,9 @@ var testBuildTracesQueryData = []struct { "from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate min", @@ -643,8 +754,9 @@ var testBuildTracesQueryData = []struct { "from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate max", @@ -670,8 +782,9 @@ var testBuildTracesQueryData = []struct { "from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate PXX", @@ -693,8 +806,9 @@ var testBuildTracesQueryData = []struct { "quantile(0.05)(bytes) as value " + "from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate RateSum", @@ -714,7 +828,8 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, stringTagMap['method'] as `method`" + ", sum(bytes)/60 as value from signoz_traces.distributed_signoz_index_v2 " + "where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + - " AND has(stringTagMap, 'method') group by method,ts order by method ASC,ts", + " AND has(stringTagMap, 'method') group by `method`,ts order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test 
aggregate rate", @@ -734,8 +849,9 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, stringTagMap['method'] as `method`" + ", count(numberTagMap['bytes'])/60 as value " + "from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate RateSum without fixed column", @@ -756,8 +872,9 @@ var testBuildTracesQueryData = []struct { "stringTagMap['method'] as `method`, " + "sum(numberTagMap['bytes'])/60 as value " + "from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + - "AND has(stringTagMap, 'method') group by method,ts " + - "order by method ASC,ts", + "AND has(stringTagMap, 'method') group by `method`,ts " + + "order by `method` ASC,ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test aggregate with having clause", @@ -781,6 +898,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(stringTagMap['name']))) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + " group by ts having value > 10 order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test count aggregate with having clause and filters", @@ -808,6 +926,7 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count()) as value from " + "signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' AND has(stringTagMap, 'name') group by ts having value > 10 order by ts", + PanelType: v3.PanelTypeGraph, }, { Name: "Test count distinct aggregate with having clause and filters", @@ -835,32 +954,104 @@ var testBuildTracesQueryData = []struct { ExpectedQuery: "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toFloat64(count(distinct(stringTagMap['name']))) as value" + " from signoz_traces.distributed_signoz_index_v2 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " + "AND stringTagMap['method'] = 'GET' group by ts having value > 10 order by ts", + PanelType: v3.PanelTypeGraph, + }, + { + Name: "Test Noop list view", + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + SelectColumns: []v3.AttributeKey{ + {Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + }, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, + }, + ExpectedQuery: "SELECT timestamp as timestamp_datetime, spanID, traceID," + + " name as `name` from signoz_traces.distributed_signoz_index_v2 where " + + "(timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') order by timestamp DESC", + PanelType: v3.PanelTypeList, + }, + { + Name: "Test Noop list view with order by", + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + SelectColumns: 
[]v3.AttributeKey{ + {Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + }, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, + OrderBy: []v3.OrderBy{{ColumnName: "name", Order: "ASC"}}, + }, + ExpectedQuery: "SELECT timestamp as timestamp_datetime, spanID, traceID," + + " name as `name` from signoz_traces.distributed_signoz_index_v2 where " + + "(timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') order by `name` ASC", + PanelType: v3.PanelTypeList, + }, + { + Name: "Test Noop list view with order by and filter", + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + SelectColumns: []v3.AttributeKey{ + {Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + }, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "GET", Operator: "="}, + }}, + OrderBy: []v3.OrderBy{{ColumnName: "name", Order: "ASC"}}, + }, + ExpectedQuery: "SELECT timestamp as timestamp_datetime, spanID, traceID," + + " name as `name` from signoz_traces.distributed_signoz_index_v2 where " + + "(timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000')" + + " AND stringTagMap['method'] = 'GET' order by `name` ASC", + PanelType: v3.PanelTypeList, + }, + { + Name: "Test Noop trace view", + Start: 1680066360726210000, + End: 1680066458000000000, + Step: 60, + BuilderQuery: &v3.BuilderQuery{ + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + Expression: "A", + Filters: &v3.FilterSet{ + Operator: "AND", Items: []v3.FilterItem{ + {Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "GET", Operator: "="}, + }, + }, + }, + ExpectedQuery: "WITH subQuery AS (SELECT distinct on (traceID) traceID, durationNano, serviceName," + + " name FROM signoz_traces.distributed_signoz_index_v2 WHERE parentSpanID = '' AND (timestamp >= '1680066360726210000' AND " + + "timestamp <= '1680066458000000000') AND stringTagMap['method'] = 'GET' ORDER BY durationNano DESC LIMIT 100)" + + " SELECT subQuery.serviceName, subQuery.name, count() AS span_count, subQuery.durationNano, traceID" + + " FROM signoz_traces.distributed_signoz_index_v2 INNER JOIN subQuery ON distributed_signoz_index_v2.traceID" + + " = subQuery.traceID GROUP BY traceID, subQuery.durationNano, subQuery.name, subQuery.serviceName " + + "ORDER BY subQuery.durationNano desc;", + PanelType: v3.PanelTypeTrace, }, - // { - // Name: "Test Noop", - // Start: 1680066360726210000, - // End: 1680066458000000000, - // Step: 60, - // BuilderQuery: &v3.BuilderQuery{ - // SelectColumns: []v3.AttributeKey{}, - // QueryName: "A", - // AggregateOperator: v3.AggregateOperatorNoOp, - // Expression: "A", - // Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}, - // // GroupBy: []v3.AttributeKey{{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}}, - // // OrderBy: []v3.OrderBy{{ColumnName: "method", Order: "ASC"}}, - // }, - // ExpectedQuery: "", - // }, } func TestBuildTracesQuery(t *testing.T) { for _, tt := range testBuildTracesQueryData { Convey("TestBuildTracesQuery", t, func() 
{ - query, err := buildTracesQuery(tt.Start, tt.End, tt.Step, tt.BuilderQuery, tt.TableName, map[string]v3.AttributeKey{}) + query, err := buildTracesQuery(tt.Start, tt.End, tt.Step, tt.BuilderQuery, tt.TableName, map[string]v3.AttributeKey{ + "name": {Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, + }, tt.PanelType) So(err, ShouldBeNil) So(query, ShouldEqual, tt.ExpectedQuery) - }) } } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index d86d6df205..31d02b19f6 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -236,6 +236,11 @@ const ( "CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64," + "CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," + "CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " + TracesExplorerViewSQLSelectWithSubQuery = "WITH subQuery AS (SELECT distinct on (traceID) traceID, durationNano, " + + "serviceName, name FROM %s.%s WHERE parentSpanID = '' AND %s %s ORDER BY durationNano DESC " + TracesExplorerViewSQLSelectQuery = "SELECT subQuery.serviceName, subQuery.name, count() AS " + + "span_count, subQuery.durationNano, traceID FROM %s.%s INNER JOIN subQuery ON %s.traceID = subQuery.traceID GROUP " + + "BY traceID, subQuery.durationNano, subQuery.name, subQuery.serviceName ORDER BY subQuery.durationNano desc;" ) // ReservedColumnTargetAliases identifies result value from a user diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go index c4d9bbdbb6..b6a8015fc0 100644 --- a/pkg/query-service/interfaces/interface.go +++ b/pkg/query-service/interfaces/interface.go @@ -21,9 +21,9 @@ type Reader interface { GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError) GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError) - GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, *model.ApiError) - GetTopLevelOperations(ctx context.Context) (*map[string][]string, *model.ApiError) - GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) + GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError) + GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig) (*map[string][]string, *model.ApiError) + GetServices(ctx context.Context, query *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError) GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError) GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error) GetServicesList(ctx context.Context) (*[]string, error) diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index 10bfe67306..9d769a0940 100644 --- a/pkg/query-service/main.go +++ b/pkg/query-service/main.go @@ -26,7 +26,7 @@ func initZapLog() *zap.Logger { } func main() { - var promConfigPath string + var promConfigPath, skipTopLvlOpsPath string // disables rule execution but allows change to the rule definition var disableRules bool 
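For context on the skipConfig parameter that the Reader interface methods above now accept: the SkipConfig model added later in this patch (pkg/query-service/model/config.go) lists, per service, the operation names that GetTopLevelOperations should drop. A minimal sketch of that behaviour, using hypothetical service and operation names and assuming the struct and ShouldSkip method exactly as added in this diff:

    package main

    import (
    	"fmt"

    	"go.signoz.io/signoz/pkg/query-service/model"
    )

    func main() {
    	// Hypothetical skip rules: drop health-check operations for the "frontend" service.
    	skip := &model.SkipConfig{
    		Services: []model.ServiceSkipConfig{
    			{Name: "frontend", Operations: []string{"GET /health"}},
    		},
    	}

    	// GetTopLevelOperations skips an operation whenever ShouldSkip returns true.
    	fmt.Println(skip.ShouldSkip("frontend", "GET /health")) // true  -> filtered out
    	fmt.Println(skip.ShouldSkip("frontend", "GET /orders")) // false -> kept
    	fmt.Println(skip.ShouldSkip("backend", "GET /health"))  // false -> other services unaffected
    }

The matching YAML, per the yaml tags on SkipConfig, is a services list whose entries each carry a name and an operations array; its path is passed to query-service through the skip-top-level-ops flag wired up in the next hunk.
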
@@ -35,6 +35,7 @@ func main() { var ruleRepoURL string flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") + flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)") flag.Parse() @@ -47,11 +48,12 @@ func main() { version.PrintVersion() serverOptions := &app.ServerOptions{ - HTTPHostPort: constants.HTTPHostPort, - PromConfigPath: promConfigPath, - PrivateHostPort: constants.PrivateHostPort, - DisableRules: disableRules, - RuleRepoURL: ruleRepoURL, + HTTPHostPort: constants.HTTPHostPort, + PromConfigPath: promConfigPath, + SkipTopLvlOpsPath: skipTopLvlOpsPath, + PrivateHostPort: constants.PrivateHostPort, + DisableRules: disableRules, + RuleRepoURL: ruleRepoURL, } // Read the jwt secret key diff --git a/pkg/query-service/model/config.go b/pkg/query-service/model/config.go new file mode 100644 index 0000000000..d1d23385ad --- /dev/null +++ b/pkg/query-service/model/config.go @@ -0,0 +1,57 @@ +package model + +import ( + "os" + + "gopkg.in/yaml.v2" +) + +type SkipConfig struct { + Services []ServiceSkipConfig `yaml:"services"` +} + +type ServiceSkipConfig struct { + Name string `yaml:"name"` + Operations []string `yaml:"operations"` +} + +func (s *SkipConfig) ShouldSkip(serviceName, name string) bool { + for _, service := range s.Services { + if service.Name == serviceName { + for _, operation := range service.Operations { + if name == operation { + return true + } + } + } + } + return false +} + +func ReadYaml(path string, v interface{}) error { + f, err := os.Open(path) + if err != nil { + return err + } + defer f.Close() + + decoder := yaml.NewDecoder(f) + err = decoder.Decode(v) + if err != nil { + return err + } + return nil +} + +func ReadSkipConfig(path string) (*SkipConfig, error) { + if path == "" { + return &SkipConfig{}, nil + } + + skipConfig := &SkipConfig{} + err := ReadYaml(path, skipConfig) + if err != nil { + return nil, err + } + return skipConfig, nil +} diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go index 7d3028b3d8..57f290f133 100644 --- a/pkg/query-service/model/v3/v3.go +++ b/pkg/query-service/model/v3/v3.go @@ -178,11 +178,12 @@ const ( PanelTypeGraph PanelType = "graph" PanelTypeTable PanelType = "table" PanelTypeList PanelType = "list" + PanelTypeTrace PanelType = "trace" ) func (p PanelType) Validate() error { switch p { - case PanelTypeValue, PanelTypeGraph, PanelTypeTable, PanelTypeList: + case PanelTypeValue, PanelTypeGraph, PanelTypeTable, PanelTypeList, PanelTypeTrace: return nil default: return fmt.Errorf("invalid panel type: %s", p) diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go index 39d0aa0cad..f6e79a1643 100644 --- a/pkg/query-service/rules/thresholdRule.go +++ b/pkg/query-service/rules/thresholdRule.go @@ -22,7 +22,6 @@ import ( querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate" "go.signoz.io/signoz/pkg/query-service/utils/times" "go.signoz.io/signoz/pkg/query-service/utils/timestamp" - "go.signoz.io/signoz/pkg/query-service/utils/value" logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3" metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3" @@ -327,7 +326,7 @@ func (r *ThresholdRule) SendAlerts(ctx context.Context, ts 
diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go
index 7d3028b3d8..57f290f133 100644
--- a/pkg/query-service/model/v3/v3.go
+++ b/pkg/query-service/model/v3/v3.go
@@ -178,11 +178,12 @@ const (
 	PanelTypeGraph PanelType = "graph"
 	PanelTypeTable PanelType = "table"
 	PanelTypeList  PanelType = "list"
+	PanelTypeTrace PanelType = "trace"
 )
 
 func (p PanelType) Validate() error {
 	switch p {
-	case PanelTypeValue, PanelTypeGraph, PanelTypeTable, PanelTypeList:
+	case PanelTypeValue, PanelTypeGraph, PanelTypeTable, PanelTypeList, PanelTypeTrace:
 		return nil
 	default:
 		return fmt.Errorf("invalid panel type: %s", p)
diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go
index 39d0aa0cad..f6e79a1643 100644
--- a/pkg/query-service/rules/thresholdRule.go
+++ b/pkg/query-service/rules/thresholdRule.go
@@ -22,7 +22,6 @@ import (
 	querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
 	"go.signoz.io/signoz/pkg/query-service/utils/times"
 	"go.signoz.io/signoz/pkg/query-service/utils/timestamp"
-	"go.signoz.io/signoz/pkg/query-service/utils/value"
 
 	logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
 	metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
@@ -327,7 +326,7 @@ func (r *ThresholdRule) SendAlerts(ctx context.Context, ts time.Time, resendDela
 }
 
 func (r *ThresholdRule) CheckCondition(v float64) bool {
-	if value.IsNaN(v) {
+	if math.IsNaN(v) {
 		zap.S().Debugf("msg:", "found NaN in rule condition", "\t rule name:", r.Name())
 		return false
 	}
@@ -355,21 +354,37 @@ func (r *ThresholdRule) CheckCondition(v float64) bool {
 
 func (r *ThresholdRule) prepareQueryRange(ts time.Time) *v3.QueryRangeParamsV3 {
 	// todo(amol): add 30 seconds to evalWindow for rate calc
+	// todo(srikanthccv): make this configurable
+	// 2 minutes is reasonable time to wait for data to be available
+	// 60 seconds (SDK) + 10 seconds (batch) + rest for n/w + serialization + write to disk etc..
+	start := ts.Add(-time.Duration(r.evalWindow)).UnixMilli() - 2*60*1000
+	end := ts.UnixMilli() - 2*60*1000
+
+	// round to minute otherwise we could potentially miss data
+	start = start - (start % (60 * 1000))
+	end = end - (end % (60 * 1000))
+
 	if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
 		return &v3.QueryRangeParamsV3{
-			Start:          ts.Add(-time.Duration(r.evalWindow)).UnixMilli(),
-			End:            ts.UnixMilli(),
-			Step:           30,
+			Start:          start,
+			End:            end,
+			Step:           60,
 			CompositeQuery: r.ruleCondition.CompositeQuery,
 			Variables:      make(map[string]interface{}, 0),
 		}
 	}
 
+	if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
+		for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
+			q.StepInterval = 60
+		}
+	}
+
 	// default mode
 	return &v3.QueryRangeParamsV3{
-		Start:          ts.Add(-time.Duration(r.evalWindow)).UnixMilli(),
-		End:            ts.UnixMilli(),
-		Step:           30,
+		Start:          start,
+		End:            end,
+		Step:           60,
 		CompositeQuery: r.ruleCondition.CompositeQuery,
 	}
 }
@@ -476,7 +491,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
 			}
 		}
 
-		if value.IsNaN(sample.Point.V) {
+		if math.IsNaN(sample.Point.V) {
 			continue
 		}
 
@@ -521,7 +536,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
 		// we skip the first record to support rate cases correctly
 		// improvement(amol): explore approaches to limit this only for
 		// rate uses cases
-		if exists, _ := skipFirstRecord[labelHash]; exists {
+		if exists := skipFirstRecord[labelHash]; exists {
 			resultMap[labelHash] = sample
 		} else {
 			// looks like the first record for this label combo, skip it
@@ -545,7 +560,9 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
 			result = append(result, sample)
 		}
 	}
 
-	zap.S().Debugf("ruleid:", r.ID(), "\t result (found alerts):", len(result))
+	if len(result) != 0 {
+		zap.S().Infof("For rule %s, with ClickHouseQuery %s, found %d alerts", r.ID(), query, len(result))
+	}
 	return result, nil
 }
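The prepareQueryRange change above replaces the raw [ts - evalWindow, ts] range with one shifted back by two minutes (to wait for late-arriving data) and snapped to minute boundaries, with a fixed 60-second step. The standalone sketch below reproduces only that arithmetic, using an illustrative timestamp and evaluation window, to make the effect concrete; the hard-coded delay and rounding mirror the values in the diff.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Illustrative inputs; in the rule these come from r.evalWindow and the
	// evaluation timestamp ts.
	evalWindow := 5 * time.Minute
	ts := time.Date(2023, 7, 10, 12, 34, 56, 0, time.UTC)

	// Shift both ends back ~2 minutes so SDK export, batching and network
	// delays do not make the freshest data look missing.
	const delayMs = 2 * 60 * 1000
	start := ts.Add(-evalWindow).UnixMilli() - delayMs
	end := ts.UnixMilli() - delayMs

	// Round down to the minute so a partial minute at either end is not lost.
	start -= start % (60 * 1000)
	end -= end % (60 * 1000)

	fmt.Println(time.UnixMilli(start).UTC()) // 2023-07-10 12:27:00 +0000 UTC
	fmt.Println(time.UnixMilli(end).UTC())   // 2023-07-10 12:32:00 +0000 UTC
}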
"--feature-gates=-pkg.translator.prometheus.NormalizeName"] volumes: - ./otel-collector-metrics-config.yaml:/etc/otel-collector-metrics-config.yaml @@ -219,7 +219,7 @@ services: - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces load-hotrod: - image: "grubykarol/locust:1.2.3-python3.9-alpine3.12" + image: "signoz/locust:1.2.3" container_name: load-hotrod hostname: load-hotrod environment: diff --git a/sample-apps/hotrod/hotrod-install.sh b/sample-apps/hotrod/hotrod-install.sh index dc839d0535..197f18d76e 100755 --- a/sample-apps/hotrod/hotrod-install.sh +++ b/sample-apps/hotrod/hotrod-install.sh @@ -15,7 +15,7 @@ fi # Locust's docker image if [[ -z $LOCUST_IMAGE ]]; then - LOCUST_REPO="${LOCUST_REPO:-grubykarol/locust}" + LOCUST_REPO="${LOCUST_REPO:-signoz/locust}" LOCUST_TAG="${LOCUST_TAG:-0.8.1-py3.6}" LOCUST_IMAGE="${LOCUST_REPO}:${LOCUST_TAG}" fi diff --git a/sample-apps/hotrod/hotrod.yaml b/sample-apps/hotrod/hotrod.yaml index 63d7fc88de..0793ec3d95 100644 --- a/sample-apps/hotrod/hotrod.yaml +++ b/sample-apps/hotrod/hotrod.yaml @@ -96,7 +96,7 @@ spec: role: locust-master spec: containers: - - image: grubykarol/locust:0.8.1-py3.6 + - image: signoz/locust:1.2.3 imagePullPolicy: IfNotPresent name: locust-master env: @@ -173,7 +173,7 @@ spec: role: locust-slave spec: containers: - - image: grubykarol/locust:0.8.1-py3.6 + - image: signoz/locust:1.2.3 imagePullPolicy: IfNotPresent name: locust-slave env: