diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index be454590f3..4cb3979cea 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -19,4 +19,4 @@ jobs: - name: 'Dependency Review' with: fail-on-severity: high - uses: actions/dependency-review-action@v2 + uses: actions/dependency-review-action@v3 diff --git a/.github/workflows/e2e-k3s.yaml b/.github/workflows/e2e-k3s.yaml index 770a2f4df3..ef5911f9b0 100644 --- a/.github/workflows/e2e-k3s.yaml +++ b/.github/workflows/e2e-k3s.yaml @@ -15,6 +15,11 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Setup golang + uses: actions/setup-go@v4 + with: + go-version: "1.21" + - name: Build query-service image env: DEV_BUILD: 1 @@ -65,9 +70,9 @@ jobs: - name: Kick off a sample-app workload run: | # start the locust swarm - kubectl -n sample-application run strzal --image=djbingham/curl \ - --restart='OnFailure' -i --rm --command -- curl -X POST -F \ - 'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm + kubectl --namespace sample-application run strzal --image=djbingham/curl \ + --restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \ + 'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm - name: Get short commit SHA, display tunnel URL and IP Address of the worker node id: get-subdomain diff --git a/.github/workflows/push.yaml b/.github/workflows/push.yaml index e4e5171e33..724b4ce1ca 100644 --- a/.github/workflows/push.yaml +++ b/.github/workflows/push.yaml @@ -20,13 +20,13 @@ jobs: with: go-version: "1.21" - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: version: latest - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ 
secrets.DOCKERHUB_TOKEN }} @@ -64,13 +64,13 @@ jobs: with: go-version: "1.21" - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: version: latest - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -115,11 +115,11 @@ jobs: run: npm run lint continue-on-error: true - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: version: latest - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -164,11 +164,11 @@ jobs: run: npm run lint continue-on-error: true - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: version: latest - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/staging-deployment.yaml b/.github/workflows/staging-deployment.yaml index ed13dc00e1..9b7a5121b2 100644 --- a/.github/workflows/staging-deployment.yaml +++ b/.github/workflows/staging-deployment.yaml @@ -11,7 +11,7 @@ jobs: environment: staging steps: - name: Executing remote ssh commands using ssh key - uses: appleboy/ssh-action@v0.1.8 + uses: appleboy/ssh-action@v1.0.3 env: GITHUB_BRANCH: develop GITHUB_SHA: ${{ github.sha }} diff --git a/.github/workflows/testing-deployment.yaml b/.github/workflows/testing-deployment.yaml index 799222ee3e..efb3d58a79 100644 --- a/.github/workflows/testing-deployment.yaml +++ b/.github/workflows/testing-deployment.yaml @@ -11,7 +11,7 @@ jobs: if: ${{ github.event.label.name == 
'testing-deploy' }} steps: - name: Executing remote ssh commands using ssh key - uses: appleboy/ssh-action@v0.1.8 + uses: appleboy/ssh-action@v1.0.3 env: GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }} GITHUB_SHA: ${{ github.sha }} @@ -33,8 +33,11 @@ jobs: git add . git stash push -m "stashed on $(date --iso-8601=seconds)" git fetch origin - git checkout ${GITHUB_BRANCH} + git checkout develop git pull + # This is added to include the scenerio when new commit in PR is force-pushed + git branch -D ${GITHUB_BRANCH} + git checkout --track origin/${GITHUB_BRANCH} make build-ee-query-service-amd64 make build-frontend-amd64 make run-signoz \ No newline at end of file diff --git a/README.md b/README.md index ab7f9862aa..a17c67e498 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,7 @@ We support [OpenTelemetry](https://opentelemetry.io) as the library which you ca - Java - Python -- NodeJS +- Node.js - Go - PHP - .NET diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 9ac386de77..edc5ead22c 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -1,7 +1,7 @@ version: "3.9" x-clickhouse-defaults: &clickhouse-defaults - image: clickhouse/clickhouse-server:23.11.1-alpine + image: clickhouse/clickhouse-server:24.1.2-alpine tty: true deploy: restart_policy: @@ -146,7 +146,7 @@ services: condition: on-failure query-service: - image: signoz/query-service:0.38.2 + image: signoz/query-service:0.39.0 command: [ "-config=/root/config/prometheus.yml", @@ -186,7 +186,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:0.38.2 + image: signoz/frontend:0.39.0 deploy: restart_policy: condition: on-failure @@ -199,7 +199,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.88.11 + image: signoz/signoz-otel-collector:0.88.12 
command: [ "--config=/etc/otel-collector-config.yaml", @@ -237,7 +237,7 @@ services: - query-service otel-collector-migrator: - image: signoz/signoz-schema-migrator:0.88.11 + image: signoz/signoz-schema-migrator:0.88.12 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml index 424d717b09..8f13b6506f 100644 --- a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml @@ -123,15 +123,7 @@ exporters: clickhouselogsexporter: dsn: tcp://clickhouse:9000/ docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} - timeout: 5s - sending_queue: - queue_size: 100 - retry_on_failure: - enabled: true - initial_interval: 5s - max_interval: 30s - max_elapsed_time: 300s - + timeout: 10s extensions: health_check: endpoint: 0.0.0.0:13133 diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml index 333427103a..61e03804f4 100644 --- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -19,7 +19,7 @@ services: - ZOO_AUTOPURGE_INTERVAL=1 clickhouse: - image: clickhouse/clickhouse-server:23.7.3-alpine + image: clickhouse/clickhouse-server:24.1.2-alpine container_name: signoz-clickhouse # ports: # - "9000:9000" @@ -66,7 +66,7 @@ services: - --storage.path=/data otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.11} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -81,7 +81,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` otel-collector: container_name: signoz-otel-collector - image: signoz/signoz-otel-collector:0.88.11 + image: signoz/signoz-otel-collector:0.88.12 command: [ "--config=/etc/otel-collector-config.yaml", diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 6fdaf92406..cb77c4c024 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -3,7 +3,7 @@ version: "2.4" x-clickhouse-defaults: &clickhouse-defaults restart: on-failure # addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab - image: clickhouse/clickhouse-server:23.11.1-alpine + image: clickhouse/clickhouse-server:24.1.2-alpine tty: true depends_on: - zookeeper-1 @@ -164,7 +164,7 @@ services: # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` query-service: - image: signoz/query-service:${DOCKER_TAG:-0.38.2} + image: signoz/query-service:${DOCKER_TAG:-0.39.0} container_name: signoz-query-service command: [ @@ -203,7 +203,7 @@ services: <<: *db-depend frontend: - image: signoz/frontend:${DOCKER_TAG:-0.38.2} + image: signoz/frontend:${DOCKER_TAG:-0.39.0} container_name: signoz-frontend restart: on-failure depends_on: @@ -215,7 +215,7 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.11} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -229,7 +229,7 @@ services: otel-collector: - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.11} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.12} container_name: signoz-otel-collector command: [ diff --git a/deploy/docker/clickhouse-setup/otel-collector-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-config.yaml index f3d6900e6c..8211364efb 100644 --- a/deploy/docker/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker/clickhouse-setup/otel-collector-config.yaml @@ -135,14 +135,7 @@ exporters: clickhouselogsexporter: dsn: tcp://clickhouse:9000/ docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} - timeout: 5s - sending_queue: - queue_size: 100 - retry_on_failure: - enabled: true - initial_interval: 5s - max_interval: 30s - max_elapsed_time: 300s + timeout: 10s service: telemetry: diff --git a/ee/query-service/Dockerfile b/ee/query-service/Dockerfile index 769f18756f..55ed33aa60 100644 --- a/ee/query-service/Dockerfile +++ b/ee/query-service/Dockerfile @@ -1,5 +1,5 @@ # use a minimal alpine image -FROM alpine:3.18.5 +FROM alpine:3.18.6 # Add Maintainer Info LABEL maintainer="signoz" diff --git a/ee/query-service/usage/manager.go 
b/ee/query-service/usage/manager.go index 28d4e3020e..99158b4345 100644 --- a/ee/query-service/usage/manager.go +++ b/ee/query-service/usage/manager.go @@ -139,17 +139,13 @@ func (lm *Manager) UploadUsage() { zap.S().Info("uploading usage data") - // Try to get the org name orgName := "" - orgNames, err := lm.modelDao.GetOrgs(ctx) - if err != nil { - zap.S().Errorf("failed to get org data: %v", zap.Error(err)) - } else { - if len(orgNames) != 1 { - zap.S().Errorf("expected one org but got %d orgs", len(orgNames)) - } else { - orgName = orgNames[0].Name - } + orgNames, orgError := lm.modelDao.GetOrgs(ctx) + if orgError != nil { + zap.S().Errorf("failed to get org data: %v", zap.Error(orgError)) + } + if len(orgNames) == 1 { + orgName = orgNames[0].Name } usagesPayload := []model.Usage{} diff --git a/frontend/jest.config.ts b/frontend/jest.config.ts index c9a67b3d26..20bf44bf96 100644 --- a/frontend/jest.config.ts +++ b/frontend/jest.config.ts @@ -22,7 +22,7 @@ const config: Config.InitialOptions = { '^.+\\.(js|jsx)$': 'babel-jest', }, transformIgnorePatterns: [ - 'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios)/)', + 'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens)/)', ], setupFilesAfterEnv: ['jest.setup.ts'], testPathIgnorePatterns: ['/node_modules/', '/public/'], diff --git a/frontend/package.json b/frontend/package.json index c104f3715b..1a7acae5ad 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -36,7 +36,9 @@ "@mdx-js/loader": "2.3.0", "@mdx-js/react": "2.3.0", "@monaco-editor/react": "^4.3.1", - "@signozhq/design-tokens": "0.0.6", + "@radix-ui/react-tabs": "1.0.4", + "@radix-ui/react-tooltip": "1.0.7", + "@signozhq/design-tokens": "0.0.8", "@uiw/react-md-editor": "3.23.5", "@xstate/react": "^3.0.0", "ansi-to-html": "0.7.2", diff --git a/frontend/public/Icons/awwSnap.svg b/frontend/public/Icons/awwSnap.svg new file mode 
100644 index 0000000000..19088a1898 --- /dev/null +++ b/frontend/public/Icons/awwSnap.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/emptyState.svg b/frontend/public/Icons/emptyState.svg new file mode 100644 index 0000000000..b00fbb67ab --- /dev/null +++ b/frontend/public/Icons/emptyState.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/loading-plane.gif b/frontend/public/Icons/loading-plane.gif new file mode 100644 index 0000000000..9d5817746b Binary files /dev/null and b/frontend/public/Icons/loading-plane.gif differ diff --git a/frontend/public/Icons/promQL.svg b/frontend/public/Icons/promQL.svg new file mode 100644 index 0000000000..d02bcc2a4d --- /dev/null +++ b/frontend/public/Icons/promQL.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Icons/tetra-pack.svg b/frontend/public/Icons/tetra-pack.svg new file mode 100644 index 0000000000..13fd4f25bf --- /dev/null +++ b/frontend/public/Icons/tetra-pack.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Images/eyesEmoji.svg b/frontend/public/Images/eyesEmoji.svg new file mode 100644 index 0000000000..5d7e56aa2c --- /dev/null +++ b/frontend/public/Images/eyesEmoji.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/frontend/public/Logos/rust.png b/frontend/public/Logos/rust.png index 2637dc9314..08acc5d0b4 100644 Binary files a/frontend/public/Logos/rust.png and b/frontend/public/Logos/rust.png differ diff --git a/frontend/public/Logos/swift.png b/frontend/public/Logos/swift.png new file mode 100644 index 0000000000..79429f1c95 Binary files /dev/null and b/frontend/public/Logos/swift.png differ diff --git a/frontend/public/locales/en/explorer.json b/frontend/public/locales/en/explorer.json index b4ffa6148c..65e4fc2053 100644 --- a/frontend/public/locales/en/explorer.json +++ b/frontend/public/locales/en/explorer.json @@ -1,3 +1,4 @@ { - "name_of_the_view": "Name of the view" 
+ "name_of_the_view": "Name of the view", + "delete_confirm_message": "Are you sure you want to delete {{viewName}} view? Deleting a view is irreversible and cannot be undone." } \ No newline at end of file diff --git a/frontend/public/locales/en/titles.json b/frontend/public/locales/en/titles.json index 71ec805100..82fad7f472 100644 --- a/frontend/public/locales/en/titles.json +++ b/frontend/public/locales/en/titles.json @@ -39,5 +39,8 @@ "LIST_LICENSES": "SigNoz | List of Licenses", "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", "SUPPORT": "SigNoz | Support", - "DEFAULT": "Open source Observability Platform | SigNoz" + "LOGS_SAVE_VIEWS": "SigNoz | Logs Save Views", + "TRACES_SAVE_VIEWS": "SigNoz | Traces Save Views", + "DEFAULT": "Open source Observability Platform | SigNoz", + "SHORTCUTS": "SigNoz | Shortcuts" } diff --git a/frontend/scripts/typecheck-staged.sh b/frontend/scripts/typecheck-staged.sh index ea4fdaad86..e67cf54a72 100644 --- a/frontend/scripts/typecheck-staged.sh +++ b/frontend/scripts/typecheck-staged.sh @@ -9,7 +9,7 @@ done # create temporary tsconfig which includes only passed files str="{ \"extends\": \"./tsconfig.json\", - \"include\": [\"src/types/global.d.ts\",\"src/typings/window.ts\", $files] + \"include\": [\"src/types/global.d.ts\",\"src/typings/window.ts\", \"src/typings/chartjs-adapter-date-fns.d.ts\", \"src/typings/environment.ts\" ,$files] }" echo $str > tsconfig.tmp @@ -22,4 +22,4 @@ code=$? 
# delete temp config rm ./tsconfig.tmp -exit $code \ No newline at end of file +exit $code diff --git a/frontend/src/AppRoutes/Private.tsx b/frontend/src/AppRoutes/Private.tsx index f559dc633f..aafaa932af 100644 --- a/frontend/src/AppRoutes/Private.tsx +++ b/frontend/src/AppRoutes/Private.tsx @@ -160,7 +160,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { if (currentRoute) { const { isPrivate, key } = currentRoute; - if (isPrivate && key !== ROUTES.WORKSPACE_LOCKED) { + if (isPrivate && key !== String(ROUTES.WORKSPACE_LOCKED)) { handlePrivateRoutes(key); } else { // no need to fetch the user and make user fetching false diff --git a/frontend/src/AppRoutes/index.tsx b/frontend/src/AppRoutes/index.tsx index 4b7d30c5ff..f1927c9a4c 100644 --- a/frontend/src/AppRoutes/index.tsx +++ b/frontend/src/AppRoutes/index.tsx @@ -8,6 +8,7 @@ import { LOCALSTORAGE } from 'constants/localStorage'; import ROUTES from 'constants/routes'; import AppLayout from 'container/AppLayout'; import useAnalytics from 'hooks/analytics/useAnalytics'; +import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys'; import { useThemeConfig } from 'hooks/useDarkMode'; import useGetFeatureFlag from 'hooks/useGetFeatureFlag'; import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense'; @@ -177,22 +178,24 @@ function App(): JSX.Element { - - }> - - {routes.map(({ path, component, exact }) => ( - - ))} + + + }> + + {routes.map(({ path, component, exact }) => ( + + ))} - - - - + + + + + diff --git a/frontend/src/AppRoutes/pageComponents.ts b/frontend/src/AppRoutes/pageComponents.ts index aca7de9730..0a2e0e59f2 100644 --- a/frontend/src/AppRoutes/pageComponents.ts +++ b/frontend/src/AppRoutes/pageComponents.ts @@ -15,9 +15,20 @@ export const ServiceMapPage = Loadable( () => import(/* webpackChunkName: "ServiceMapPage" */ 'modules/Servicemap'), ); +export const LogsSaveViews = Loadable( + () => import(/* webpackChunkName: "LogsSaveViews" */ 'pages/LogsModulePage'), 
// TODO: Add a wrapper so that the same component can be used in traces +); + export const TracesExplorer = Loadable( () => - import(/* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesExplorer'), + import( + /* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesModulePage' + ), +); + +export const TracesSaveViews = Loadable( + () => + import(/* webpackChunkName: "Traces Save Views" */ 'pages/TracesModulePage'), ); export const TraceFilter = Loadable( @@ -171,3 +182,7 @@ export const WorkspaceBlocked = Loadable( () => import(/* webpackChunkName: "WorkspaceLocked" */ 'pages/WorkspaceLocked'), ); + +export const ShortcutsPage = Loadable( + () => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'), +); diff --git a/frontend/src/AppRoutes/routes.ts b/frontend/src/AppRoutes/routes.ts index f6923c809a..e9d202c420 100644 --- a/frontend/src/AppRoutes/routes.ts +++ b/frontend/src/AppRoutes/routes.ts @@ -1,4 +1,5 @@ import ROUTES from 'constants/routes'; +import Shortcuts from 'pages/Shortcuts/Shortcuts'; import WorkspaceBlocked from 'pages/WorkspaceLocked'; import { RouteProps } from 'react-router-dom'; @@ -21,6 +22,7 @@ import { Logs, LogsExplorer, LogsIndexToFields, + LogsSaveViews, MySettings, NewDashboardPage, OldLogsExplorer, @@ -39,6 +41,7 @@ import { TraceDetail, TraceFilter, TracesExplorer, + TracesSaveViews, UnAuthorized, UsageExplorerPage, } from './pageComponents'; @@ -86,6 +89,13 @@ const routes: AppRoutes[] = [ exact: true, key: 'SERVICE_MAP', }, + { + path: ROUTES.LOGS_SAVE_VIEWS, + component: LogsSaveViews, + isPrivate: true, + exact: true, + key: 'LOGS_SAVE_VIEWS', + }, { path: ROUTES.TRACE_DETAIL, exact: true, @@ -163,6 +173,13 @@ const routes: AppRoutes[] = [ isPrivate: true, key: 'TRACES_EXPLORER', }, + { + path: ROUTES.TRACES_SAVE_VIEWS, + exact: true, + component: TracesSaveViews, + isPrivate: true, + key: 'TRACES_SAVE_VIEWS', + }, { path: ROUTES.CHANNELS_NEW, exact: true, @@ -303,6 +320,13 @@ const routes: AppRoutes[] = [ 
isPrivate: true, key: 'WORKSPACE_LOCKED', }, + { + path: ROUTES.SHORTCUTS, + exact: true, + component: Shortcuts, + isPrivate: true, + key: 'SHORTCUTS', + }, ]; export const SUPPORT_ROUTE: AppRoutes = { diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss index 6e38e84300..14f80a9b93 100644 --- a/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss +++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss @@ -33,6 +33,7 @@ .timeSelection-input { display: flex; gap: 8px; + height: 33px; align-items: center; padding: 4px 8px; padding-left: 0px !important; @@ -59,6 +60,26 @@ font-weight: 400 !important; } +.info-text { + display: flex; + align-items: center; + justify-content: center; + padding: 4px; + cursor: default; + color: var(--bg-vanilla-400, #c0c1c3) !important; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; +} + +.info-text:hover { + &.ant-btn-text { + background-color: unset !important; + } +} + .lightMode { .time-options-container { .time-options-item { @@ -93,4 +114,8 @@ color: rgba($color: #000000, $alpha: 0.4); } } + + .info-text { + color: var(--bg-slate-400) !important; + } } diff --git a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx index 4ded57d250..8be31b78e9 100644 --- a/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx +++ b/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx @@ -4,22 +4,44 @@ import './CustomTimePicker.styles.scss'; import { Input, Popover, Tooltip, Typography } from 'antd'; import cx from 'classnames'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; import { Options } from 'container/TopNav/DateTimeSelection/config'; +import { + FixedDurationSuggestionOptions, + RelativeDurationSuggestionOptions, +} from 
'container/TopNav/DateTimeSelectionV2/config'; import dayjs from 'dayjs'; +import { defaultTo, noop } from 'lodash-es'; import debounce from 'lodash-es/debounce'; import { CheckCircle, ChevronDown, Clock } from 'lucide-react'; -import { ChangeEvent, useEffect, useState } from 'react'; +import { + ChangeEvent, + Dispatch, + SetStateAction, + useEffect, + useState, +} from 'react'; +import { useLocation } from 'react-router-dom'; import { popupContainer } from 'utils/selectPopupContainer'; +import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent'; + const maxAllowedMinTimeInMonths = 6; interface CustomTimePickerProps { onSelect: (value: string) => void; onError: (value: boolean) => void; - items: any[]; selectedValue: string; selectedTime: string; onValidCustomDateChange: ([t1, t2]: any[]) => void; + open: boolean; + setOpen: Dispatch>; + items: any[]; + newPopover?: boolean; + customDateTimeVisible?: boolean; + setCustomDTPickerVisible?: Dispatch>; + onCustomDateHandler?: (dateTimeRange: DateTimeRangeType) => void; + handleGoLive?: () => void; } function CustomTimePicker({ @@ -28,9 +50,15 @@ function CustomTimePicker({ items, selectedValue, selectedTime, + open, + setOpen, onValidCustomDateChange, + newPopover, + customDateTimeVisible, + setCustomDTPickerVisible, + onCustomDateHandler, + handleGoLive, }: CustomTimePickerProps): JSX.Element { - const [open, setOpen] = useState(false); const [ selectedTimePlaceholderValue, setSelectedTimePlaceholderValue, @@ -41,6 +69,7 @@ function CustomTimePicker({ const [inputErrorMessage, setInputErrorMessage] = useState( null, ); + const location = useLocation(); const [isInputFocused, setIsInputFocused] = useState(false); const getSelectedTimeRangeLabel = ( @@ -56,6 +85,20 @@ function CustomTimePicker({ return Options[index].label; } } + for ( + let index = 0; + index < RelativeDurationSuggestionOptions.length; + index++ + ) { + if (RelativeDurationSuggestionOptions[index].value === selectedTime) { + return 
RelativeDurationSuggestionOptions[index].label; + } + } + for (let index = 0; index < FixedDurationSuggestionOptions.length; index++) { + if (FixedDurationSuggestionOptions[index].value === selectedTime) { + return FixedDurationSuggestionOptions[index].label; + } + } return ''; }; @@ -111,7 +154,7 @@ function CustomTimePicker({ break; } - if (minTime && minTime < maxAllowedMinTime) { + if (minTime && (!minTime.isValid() || minTime < maxAllowedMinTime)) { setInputStatus('error'); onError(true); setInputErrorMessage('Please enter time less than 6 months'); @@ -140,19 +183,25 @@ function CustomTimePicker({ debouncedHandleInputChange(inputValue); }; + const handleSelect = (label: string, value: string): void => { + onSelect(value); + setSelectedTimePlaceholderValue(label); + setInputStatus(''); + onError(false); + setInputErrorMessage(null); + setInputValue(''); + if (value !== 'custom') { + hide(); + } + }; + const content = (
- {items.map(({ value, label }) => ( + {items?.map(({ value, label }) => (
{ - onSelect(value); - setSelectedTimePlaceholderValue(label); - setInputStatus(''); - onError(false); - setInputErrorMessage(null); - setInputValue(''); - hide(); + handleSelect(label, value); }} key={value} className={cx( @@ -175,6 +224,15 @@ function CustomTimePicker({ setIsInputFocused(false); }; + // this is required as TopNav component wraps the components and we need to clear the state on path change + useEffect(() => { + setInputStatus(''); + onError(false); + setInputErrorMessage(null); + setInputValue(''); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.pathname]); + return (
+ ) : ( + content + ) + } arrow={false} + trigger="hover" open={open} onOpenChange={handleOpenChange} - trigger={['click']} style={{ padding: 0, }} @@ -236,3 +310,11 @@ function CustomTimePicker({ } export default CustomTimePicker; + +CustomTimePicker.defaultProps = { + newPopover: false, + customDateTimeVisible: false, + setCustomDTPickerVisible: noop, + onCustomDateHandler: noop, + handleGoLive: noop, +}; diff --git a/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx b/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx new file mode 100644 index 0000000000..e7bd571f35 --- /dev/null +++ b/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx @@ -0,0 +1,133 @@ +import './CustomTimePicker.styles.scss'; + +import { Button, DatePicker } from 'antd'; +import cx from 'classnames'; +import ROUTES from 'constants/routes'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; +import { + Option, + RelativeDurationSuggestionOptions, +} from 'container/TopNav/DateTimeSelectionV2/config'; +import dayjs, { Dayjs } from 'dayjs'; +import { Dispatch, SetStateAction, useMemo } from 'react'; +import { useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +interface CustomTimePickerPopoverContentProps { + options: any[]; + setIsOpen: Dispatch>; + customDateTimeVisible: boolean; + setCustomDTPickerVisible: Dispatch>; + onCustomDateHandler: (dateTimeRange: DateTimeRangeType) => void; + onSelectHandler: (label: string, value: string) => void; + handleGoLive: () => void; + selectedTime: string; +} + +function CustomTimePickerPopoverContent({ + options, + setIsOpen, + customDateTimeVisible, + setCustomDTPickerVisible, + onCustomDateHandler, + onSelectHandler, + handleGoLive, + selectedTime, +}: CustomTimePickerPopoverContentProps): JSX.Element { + const { 
RangePicker } = DatePicker; + const { pathname } = useLocation(); + + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); + + const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [ + pathname, + ]); + + const disabledDate = (current: Dayjs): boolean => { + const currentDay = dayjs(current); + return currentDay.isAfter(dayjs()); + }; + + const onPopoverClose = (visible: boolean): void => { + if (!visible) { + setCustomDTPickerVisible(false); + } + setIsOpen(visible); + }; + + const onModalOkHandler = (date_time: any): void => { + if (date_time?.[1]) { + onPopoverClose(false); + } + onCustomDateHandler(date_time); + }; + function getTimeChips(options: Option[]): JSX.Element { + return ( +
+ {options.map((option) => ( + + ))} +
+ ); + } + + return ( +
+
+ {isLogsExplorerPage && ( + + )} + {options.map((option) => ( + + ))} +
+
+ {selectedTime === 'custom' || customDateTimeVisible ? ( + + ) : ( +
+
RELATIVE TIMES
+
{getTimeChips(RelativeDurationSuggestionOptions)}
+
+ )} +
+
+ ); +} + +export default CustomTimePickerPopoverContent; diff --git a/frontend/src/components/ExplorerCard/ExplorerCard.tsx b/frontend/src/components/ExplorerCard/ExplorerCard.tsx index b55f019bc1..73d7a24c74 100644 --- a/frontend/src/components/ExplorerCard/ExplorerCard.tsx +++ b/frontend/src/components/ExplorerCard/ExplorerCard.tsx @@ -6,7 +6,6 @@ import { } from '@ant-design/icons'; import { Button, - Card, Col, Dropdown, MenuProps, @@ -152,95 +151,100 @@ function ExplorerCard({ const saveButtonType = isQueryUpdated ? 'default' : 'primary'; const saveButtonIcon = isQueryUpdated ? null : ; + const showSaveView = false; + return ( <> - - - - - Query Builder - - - - - - {viewsData?.data.data && viewsData?.data.data.length && ( - - + {viewsData?.data.data.map((view) => ( + + + + ))} + + + )} + {isQueryUpdated && ( + + )} + + } + showArrow={false} + open={isOpen} + onOpenChange={handleOpenChange} > - Save changes - - )} - - } - showArrow={false} - open={isOpen} - onOpenChange={handleOpenChange} - > - - - - {viewKey && ( - - - - )} - - - - - {children} + +
+ + {viewKey && ( + + + + )} + + + + + )} + +
{children}
); } diff --git a/frontend/src/components/ExplorerCard/styles.ts b/frontend/src/components/ExplorerCard/styles.ts index 63ed068c53..56c3d38c75 100644 --- a/frontend/src/components/ExplorerCard/styles.ts +++ b/frontend/src/components/ExplorerCard/styles.ts @@ -3,6 +3,7 @@ import styled, { CSSProperties } from 'styled-components'; export const ExplorerCardHeadContainer = styled(Card)` margin: 1rem 0; + padding: 0; `; export const OffSetCol = styled(Col)` diff --git a/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx b/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx index 7efc8a65cd..1fdb29d281 100644 --- a/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx +++ b/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx @@ -1,4 +1,4 @@ -import { fireEvent, render, screen } from '@testing-library/react'; +import { render, screen } from '@testing-library/react'; import ROUTES from 'constants/routes'; import MockQueryClientProvider from 'providers/test/MockQueryClientProvider'; import { DataSource } from 'types/common/queryBuilder'; @@ -46,7 +46,7 @@ describe('ExplorerCard', () => { child , ); - expect(screen.getByText('Query Builder')).toBeInTheDocument(); + expect(screen.queryByText('Query Builder')).not.toBeInTheDocument(); }); it('renders a save view button', () => { @@ -55,19 +55,6 @@ describe('ExplorerCard', () => { child , ); - expect(screen.getByText('Save view')).toBeInTheDocument(); - }); - - it('should see all the view listed in dropdown', async () => { - const screen = render( - Mock Children, - ); - const selectPlaceholder = screen.getByText('Select a view'); - - fireEvent.mouseDown(selectPlaceholder); - const viewNameText = await screen.getAllByText('View 1'); - viewNameText.forEach((element) => { - expect(element).toBeInTheDocument(); - }); + expect(screen.queryByText('Save view')).not.toBeInTheDocument(); }); }); diff --git a/frontend/src/components/ExplorerCard/utils.ts 
b/frontend/src/components/ExplorerCard/utils.ts index 3a2eeac95f..48af06bb3d 100644 --- a/frontend/src/components/ExplorerCard/utils.ts +++ b/frontend/src/components/ExplorerCard/utils.ts @@ -14,7 +14,7 @@ import { SaveViewHandlerProps, } from './types'; -const showErrorNotification = ( +export const showErrorNotification = ( notifications: NotificationInstance, err: Error, ): void => { @@ -90,6 +90,14 @@ export const isQueryUpdatedInView = ({ // Omitting id from aggregateAttribute and groupBy const updatedCurrentQuery = omitIdFromQuery(stagedQuery); + if ( + updatedCurrentQuery?.builder === undefined || + updatedCurrentQuery.clickhouse_sql === undefined || + updatedCurrentQuery.promql === undefined + ) { + return false; + } + return ( panelType !== currentPanelType || !isEqual(query.builder, updatedCurrentQuery?.builder) || diff --git a/frontend/src/components/LogDetail/LogDetail.interfaces.ts b/frontend/src/components/LogDetail/LogDetail.interfaces.ts index a67dfc10c8..991fb4488e 100644 --- a/frontend/src/components/LogDetail/LogDetail.interfaces.ts +++ b/frontend/src/components/LogDetail/LogDetail.interfaces.ts @@ -3,8 +3,11 @@ import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC'; import { ActionItemProps } from 'container/LogDetailedView/ActionItem'; import { ILog } from 'types/api/logs/log'; +import { VIEWS } from './constants'; + export type LogDetailProps = { log: ILog | null; + selectedTab: VIEWS; } & Pick & - Pick & + Partial> & Pick; diff --git a/frontend/src/components/LogDetail/LogDetails.styles.scss b/frontend/src/components/LogDetail/LogDetails.styles.scss new file mode 100644 index 0000000000..0dcdc1e5c1 --- /dev/null +++ b/frontend/src/components/LogDetail/LogDetails.styles.scss @@ -0,0 +1,230 @@ +.log-detail-drawer { + border-left: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + box-shadow: -4px 10px 16px 2px rgba(0, 0, 0, 0.2); + + .ant-drawer-header { + padding: 8px 16px; + border-bottom: none; + + align-items: 
stretch; + + border-bottom: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + } + + .ant-drawer-close { + margin-inline-end: 0px; + } + + .ant-drawer-body { + padding: 16px; + } + + .title { + color: var(--text-vanilla-400); + font-family: Inter; + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + } + + .radio-button { + display: flex; + align-items: center; + justify-content: center; + padding-top: var(--padding-1); + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + } + + .log-detail-drawer__log { + width: 100%; + display: flex; + align-items: center; + gap: 4px; + position: relative; + + .log-body { + font-family: 'SF Mono'; + font-family: 'Space Mono', monospace; + + font-size: var(--font-size-sm); + font-weight: var(--font-weight-normal); + line-height: 18px; + letter-spacing: -0.07px; + white-space: nowrap; + text-overflow: ellipsis; + overflow: hidden; + color: var(--text-vanilla-400); + opacity: 0.6; + } + + .log-type-indicator { + height: 24px; + border: 2px solid var(--bg-slate-400); + border-radius: 5px; + margin-left: 0; + + &.INFO { + border-color: #1d212d; + } + + &.WARNING { + border-color: #ffcd56; + } + + &.ERROR { + border-color: #e5484d; + } + } + + .log-overflow-shadow { + background: linear-gradient(270deg, #121317 10.4%, rgba(18, 19, 23, 0) 100%); + + width: 196px; + position: absolute; + right: 0; + } + } + + .tabs-and-search { + display: flex; + justify-content: space-between; + align-items: center; + margin: 16px 0; + + .action-btn { + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + display: flex; + align-items: center; + justify-content: center; + } + + .json-action-btn { + display: flex; + gap: 8px; + } + } + + .views-tabs { + color: 
var(--text-vanilla-400); + + .view-title { + display: flex; + gap: var(--margin-2); + align-items: center; + justify-content: center; + font-size: var(--font-size-xs); + font-style: normal; + font-weight: var(--font-weight-normal); + } + + .tab { + border: 1px solid var(--bg-slate-400); + width: 114px; + } + + .tab::before { + background: var(--bg-slate-400); + } + + .selected_view { + background: var(--bg-slate-300); + color: var(--text-vanilla-100); + border: 1px solid var(--bg-slate-400); + } + + .selected_view::before { + background: var(--bg-slate-400); + } + } + + .search-input { + margin-top: var(--margin-2); + border: 1px solid var(--bg-slate-400); + height: 46px; + padding: var(--padding-1) var(--padding-2); + box-shadow: none; + border-radius: 0; + } + + .ant-drawer-close { + padding: 0px; + } +} + +.lightMode { + .ant-drawer-header { + border-bottom: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + } + + .log-detail-drawer { + .title { + color: var(--text-ink-300); + } + + .log-detail-drawer__log { + .log-overflow-shadow { + background: linear-gradient( + 270deg, + var(--bg-vanilla-100) 10.4%, + rgba(255, 255, 255, 0) 100% + ); + } + + .log-type-indicator { + border: 2px solid var(--bg-vanilla-400); + } + + .ant-typography { + color: var(--text-ink-300); + background: transparent; + } + } + + .radio-button { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + + .views-tabs { + .tab { + background: var(--bg-vanilla-100); + } + + .selected_view { + background: var(--bg-vanilla-300); + border: 1px solid var(--bg-slate-300); + color: var(--text-ink-400); + } + + .selected_view::before { + background: var(--bg-vanilla-300); + border-left: 1px solid var(--bg-slate-300); + } + } + + .tabs-and-search { + .action-btn { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + } + + .search-input { + border: 1px solid 
var(--bg-vanilla-200); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + } +} diff --git a/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss new file mode 100644 index 0000000000..e3da355621 --- /dev/null +++ b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss @@ -0,0 +1,10 @@ +.query-builder-search-wrapper { + margin-top: 10px; + height: 46px; + border: 1px solid var(--bg-slate-400); + border-bottom: none; + + .ant-select-selector { + border: none !important; + } +} \ No newline at end of file diff --git a/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx new file mode 100644 index 0000000000..72d18b7e12 --- /dev/null +++ b/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx @@ -0,0 +1,77 @@ +import './QueryBuilderSearchWrapper.styles.scss'; + +import useInitialQuery from 'container/LogsExplorerContext/useInitialQuery'; +import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch'; +import { Dispatch, SetStateAction, useEffect } from 'react'; +import { ILog } from 'types/api/logs/log'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +function QueryBuilderSearchWrapper({ + log, + filters, + contextQuery, + isEdit, + suffixIcon, + setFilters, + setContextQuery, +}: QueryBuilderSearchWraperProps): JSX.Element { + const initialContextQuery = useInitialQuery(log); + + useEffect(() => { + setContextQuery(initialContextQuery); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const handleSearch = (tagFilters: TagFilter): void => { + const tagFiltersLength = tagFilters.items.length; + + if ( + (!tagFiltersLength && (!filters || !filters.items.length)) || + tagFiltersLength === filters?.items.length || + !contextQuery + ) + return; + + const nextQuery: Query = { + 
...contextQuery, + builder: { + ...contextQuery.builder, + queryData: contextQuery.builder.queryData.map((item) => ({ + ...item, + filters: tagFilters, + })), + }, + }; + + setFilters({ ...tagFilters }); + setContextQuery({ ...nextQuery }); + }; + + // eslint-disable-next-line react/jsx-no-useless-fragment + if (!contextQuery || !isEdit) return <>; + + return ( + + ); +} + +interface QueryBuilderSearchWraperProps { + log: ILog; + isEdit: boolean; + contextQuery: Query | undefined; + setContextQuery: Dispatch>; + filters: TagFilter | null; + setFilters: Dispatch>; + suffixIcon?: React.ReactNode; +} + +QueryBuilderSearchWrapper.defaultProps = { + suffixIcon: undefined, +}; + +export default QueryBuilderSearchWrapper; diff --git a/frontend/src/components/LogDetail/constants.ts b/frontend/src/components/LogDetail/constants.ts new file mode 100644 index 0000000000..92199d4441 --- /dev/null +++ b/frontend/src/components/LogDetail/constants.ts @@ -0,0 +1,7 @@ +export const VIEW_TYPES = { + OVERVIEW: 'OVERVIEW', + JSON: 'JSON', + CONTEXT: 'CONTEXT', +} as const; + +export type VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES]; diff --git a/frontend/src/components/LogDetail/index.tsx b/frontend/src/components/LogDetail/index.tsx index b787322ca7..ffa1c07e9c 100644 --- a/frontend/src/components/LogDetail/index.tsx +++ b/frontend/src/components/LogDetail/index.tsx @@ -1,50 +1,207 @@ -import { Drawer, Tabs } from 'antd'; -import JSONView from 'container/LogDetailedView/JsonView'; -import TableView from 'container/LogDetailedView/TableView'; -import { useMemo } from 'react'; +/* eslint-disable sonarjs/cognitive-complexity */ +import './LogDetails.styles.scss'; +import { Color, Spacing } from '@signozhq/design-tokens'; +import { Button, Divider, Drawer, Radio, Tooltip, Typography } from 'antd'; +import { RadioChangeEvent } from 'antd/lib'; +import cx from 'classnames'; +import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator'; +import ContextView from 
'container/LogDetailedView/ContextView/ContextView'; +import JSONView from 'container/LogDetailedView/JsonView'; +import Overview from 'container/LogDetailedView/Overview'; +import { aggregateAttributesResourcesToString } from 'container/LogDetailedView/utils'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useNotifications } from 'hooks/useNotifications'; +import { + Braces, + Copy, + Filter, + HardHat, + Table, + TextSelect, + X, +} from 'lucide-react'; +import { useState } from 'react'; +import { useCopyToClipboard } from 'react-use'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +import { VIEW_TYPES, VIEWS } from './constants'; import { LogDetailProps } from './LogDetail.interfaces'; +import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper'; function LogDetail({ log, onClose, onAddToQuery, onClickActionItem, + selectedTab, }: LogDetailProps): JSX.Element { - const items = useMemo( - () => [ - { - label: 'Table', - key: '1', - children: log && ( - - ), - }, - { - label: 'JSON', - key: '2', - children: log && , - }, - ], - [log, onAddToQuery, onClickActionItem], - ); + const [, copyToClipboard] = useCopyToClipboard(); + const [selectedView, setSelectedView] = useState(selectedTab); + + const [isFilterVisibile, setIsFilterVisible] = useState(false); + + const [contextQuery, setContextQuery] = useState(); + const [filters, setFilters] = useState(null); + const [isEdit, setIsEdit] = useState(false); + + const isDarkMode = useIsDarkMode(); + + const { notifications } = useNotifications(); + + const LogJsonData = log ? 
aggregateAttributesResourcesToString(log) : ''; + + const handleModeChange = (e: RadioChangeEvent): void => { + setSelectedView(e.target.value); + setIsEdit(false); + setIsFilterVisible(false); + }; + + const handleFilterVisible = (): void => { + setIsFilterVisible(!isFilterVisibile); + setIsEdit(!isEdit); + }; + + const drawerCloseHandler = ( + e: React.MouseEvent | React.KeyboardEvent, + ): void => { + if (onClose) { + onClose(e); + } + }; + + const handleJSONCopy = (): void => { + copyToClipboard(LogJsonData); + notifications.success({ + message: 'Copied to clipboard', + }); + }; + + if (!log) { + // eslint-disable-next-line react/jsx-no-useless-fragment + return <>; + } + + const logType = log?.attributes_string?.log_level || LogType.INFO; return ( + + Log details + + } placement="right" - closable - onClose={onClose} + // closable + onClose={drawerCloseHandler} open={log !== null} - style={{ overscrollBehavior: 'contain' }} + style={{ + overscrollBehavior: 'contain', + background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100, + }} + className="log-detail-drawer" destroyOnClose + closeIcon={} > - +
+ + + {log?.body} + + +
 
+
+ +
+ + +
+ + Overview + + + +
+ + JSON +
+
+ +
+ + Context +
+
+ + + {selectedView === VIEW_TYPES.JSON && ( +
+
+ )} + + {selectedView === VIEW_TYPES.CONTEXT && ( + - - {isLogsExplorerPage && ( - <> - - - - )} - - {activeContextLog && ( - )} - - + + {activeContextLog && ( + + )} + ); } +ListLogView.defaultProps = { + activeLog: null, +}; + export default ListLogView; diff --git a/frontend/src/components/Logs/ListLogView/styles.ts b/frontend/src/components/Logs/ListLogView/styles.ts index 0db1baafe5..79812c4400 100644 --- a/frontend/src/components/Logs/ListLogView/styles.ts +++ b/frontend/src/components/Logs/ListLogView/styles.ts @@ -7,6 +7,7 @@ export const Container = styled(Card)<{ }>` width: 100% !important; margin-bottom: 0.3rem; + cursor: pointer; .ant-card-body { padding: 0.3rem 0.6rem; } @@ -29,11 +30,13 @@ export const TextContainer = styled.div` export const LogContainer = styled.div` margin-left: 0.5rem; + display: flex; + flex-direction: column; + gap: 6px; `; export const LogText = styled.div` display: inline-block; - text-overflow: ellipsis; overflow: hidden; white-space: nowrap; diff --git a/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.styles.scss b/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.styles.scss new file mode 100644 index 0000000000..6044671ea1 --- /dev/null +++ b/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.styles.scss @@ -0,0 +1,44 @@ +.log-line-action-buttons { + display: flex; + position: absolute; + transform: translate(-50%, -50%); + top: 50%; + right: 0; + cursor: pointer; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-400); + + .ant-btn-default { + border: none; + box-shadow: none; + padding: 9px; + justify-content: center; + align-items: center; + display: flex; + + &.active-tab { + background-color: var(--bg-slate-400); + } + } + + .copy-log-btn { + border-left: 1px solid var(--bg-slate-400); + border-color: var(--bg-slate-400) !important; + } +} + +.lightMode { + .log-line-action-buttons { + border: 1px solid 
var(--bg-vanilla-400); + background: var(--bg-vanilla-400); + + .ant-btn-default { + } + + .copy-log-btn { + border-left: 1px solid var(--bg-vanilla-400); + border-color: var(--bg-vanilla-400) !important; + } + } +} diff --git a/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.tsx b/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.tsx new file mode 100644 index 0000000000..af6da290fb --- /dev/null +++ b/frontend/src/components/Logs/LogLinesActionButtons/LogLinesActionButtons.tsx @@ -0,0 +1,42 @@ +import './LogLinesActionButtons.styles.scss'; + +import { LinkOutlined } from '@ant-design/icons'; +import { Button, Tooltip } from 'antd'; +import { TextSelect } from 'lucide-react'; +import { MouseEventHandler } from 'react'; + +export interface LogLinesActionButtonsProps { + handleShowContext: MouseEventHandler; + onLogCopy: MouseEventHandler; + customClassName?: string; +} +export default function LogLinesActionButtons({ + handleShowContext, + onLogCopy, + customClassName = '', +}: LogLinesActionButtonsProps): JSX.Element { + return ( +
+ +
+ ); +} + +LogLinesActionButtons.defaultProps = { + customClassName: '', +}; diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss new file mode 100644 index 0000000000..6d2429b592 --- /dev/null +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss @@ -0,0 +1,30 @@ +.log-state-indicator { + padding-left: 8px; + + .line { + margin: 0 8px; + min-height: 24px; + height: 100%; + width: 3px; + border-radius: 50px; + background-color: transparent; + + &.INFO { + background-color: #1d212d; + } + + &.WARNING { + background-color: #ffcd56; + } + + &.ERROR { + background-color: #e5484d; + } + } + + &.isActive { + .line { + background-color: var(--bg-robin-400, #7190f9); + } + } +} diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx new file mode 100644 index 0000000000..4c9b7de903 --- /dev/null +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx @@ -0,0 +1,28 @@ +import './LogStateIndicator.styles.scss'; + +import cx from 'classnames'; + +export const LogType = { + INFO: 'INFO', + WARNING: 'WARNING', + ERROR: 'ERROR', +}; +function LogStateIndicator({ + type, + isActive, +}: { + type: string; + isActive?: boolean; +}): JSX.Element { + return ( +
+
+
+ ); +} + +LogStateIndicator.defaultProps = { + isActive: false, +}; + +export default LogStateIndicator; diff --git a/frontend/src/components/Logs/RawLogView/RawLogView.styles.scss b/frontend/src/components/Logs/RawLogView/RawLogView.styles.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/components/Logs/RawLogView/index.tsx b/frontend/src/components/Logs/RawLogView/index.tsx index bdf623daf4..94c9dbe1bb 100644 --- a/frontend/src/components/Logs/RawLogView/index.tsx +++ b/frontend/src/components/Logs/RawLogView/index.tsx @@ -1,11 +1,9 @@ -import { - ExpandAltOutlined, - LinkOutlined, - MonitorOutlined, -} from '@ant-design/icons'; +import './RawLogView.styles.scss'; + import Convert from 'ansi-to-html'; -import { Button, DrawerProps, Tooltip } from 'antd'; +import { DrawerProps } from 'antd'; import LogDetail from 'components/LogDetail'; +import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants'; import LogsExplorerContext from 'container/LogsExplorerContext'; import dayjs from 'dayjs'; import dompurify from 'dompurify'; @@ -14,7 +12,7 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; // hooks import { useIsDarkMode } from 'hooks/useDarkMode'; import { FlatLogData } from 'lib/logs/flatLogData'; -import { isEmpty, isUndefined } from 'lodash-es'; +import { isEmpty, isNumber, isUndefined } from 'lodash-es'; import { KeyboardEvent, MouseEvent, @@ -24,13 +22,12 @@ import { useState, } from 'react'; +import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButtons'; +import LogStateIndicator, { + LogType, +} from '../LogStateIndicator/LogStateIndicator'; // styles -import { - ActionButtonsWrapper, - ExpandIconWrapper, - RawLogContent, - RawLogViewContainer, -} from './styles'; +import { RawLogContent, RawLogViewContainer } from './styles'; import { RawLogViewProps } from './types'; const convert = new Convert(); @@ -50,7 +47,6 @@ function RawLogView({ const { activeLog: activeContextLog, - 
onSetActiveLog: handleSetActiveContextLog, onClearActiveLog: handleClearActiveContextLog, } = useActiveLog(); const { @@ -61,12 +57,15 @@ function RawLogView({ } = useActiveLog(); const [hasActionButtons, setHasActionButtons] = useState(false); + const [selectedTab, setSelectedTab] = useState(); const isDarkMode = useIsDarkMode(); const isReadOnlyLog = !isLogsExplorerPage || isReadOnly; const severityText = data.severity_text ? `${data.severity_text} |` : ''; + const logType = data?.attributes_string?.log_level || LogType.INFO; + const updatedSelecedFields = useMemo( () => selectedFields.filter((e) => e.name !== 'id'), [selectedFields], @@ -74,7 +73,14 @@ function RawLogView({ const attributesValues = updatedSelecedFields .map((field) => flattenLogData[field.name]) - .filter((attribute) => !isUndefined(attribute) && !isEmpty(attribute)); + .filter((attribute) => { + // loadash isEmpty doesnot work with numbers + if (isNumber(attribute)) { + return true; + } + + return !isUndefined(attribute) && !isEmpty(attribute); + }); let attributesText = attributesValues.join(' | '); @@ -98,6 +104,7 @@ function RawLogView({ if (activeContextLog || isReadOnly) return; onSetActiveLog(data); + setSelectedTab(VIEW_TYPES.OVERVIEW); }, [activeContextLog, isReadOnly, data, onSetActiveLog]); const handleCloseLogDetail: DrawerProps['onClose'] = useCallback( @@ -108,6 +115,7 @@ function RawLogView({ event.stopPropagation(); onClearActiveLog(); + setSelectedTab(undefined); }, [onClearActiveLog], ); @@ -128,9 +136,11 @@ function RawLogView({ (event) => { event.preventDefault(); event.stopPropagation(); - handleSetActiveContextLog(data); + // handleSetActiveContextLog(data); + setSelectedTab(VIEW_TYPES.CONTEXT); + onSetActiveLog(data); }, - [data, handleSetActiveContextLog], + [data, onSetActiveLog], ); const html = useMemo( @@ -147,37 +157,30 @@ function RawLogView({ align="middle" $isDarkMode={isDarkMode} $isReadOnly={isReadOnly} - $isActiveLog={isHighlighted} + 
$isHightlightedLog={isHighlighted} + $isActiveLog={isActiveLog} onMouseEnter={handleMouseEnter} onMouseLeave={handleMouseLeave} > - {!isReadOnly && ( - - - - )} + {hasActionButtons && ( - - - + + + + + + )} + +
+ {!addNewColumn &&
} + + {addNewColumn && ( +
+
+ {' '} + columns + {' '} +
+ + +
+ )} + +
+ {!addNewColumn && ( +
+ columns + {' '} +
+ )} + +
+ {addColumn?.value?.map(({ key, id }) => ( +
+
+ + {key} + +
+ addColumn.onRemove(id as string)} + /> +
+ ))} +
+ + {addColumn?.isFetching && ( +
Loading ...
+ )} + + {addNewColumn && + addColumn && + addColumn.value.length > 0 && + addColumn.options && + addColumn?.options?.length > 0 && ( + + )} + + {addNewColumn && ( +
+ {addColumn?.options?.map(({ label, value }) => ( +
{ + console.log('coluimn name', label, value); + + eve.stopPropagation(); + + if (addColumn && addColumn?.onSelect) { + addColumn?.onSelect(value, { label, disabled: false }); + } + }} + > +
+ + {label} + +
+
+ ))} +
+ )} +
+
+ + )} +
+ ); +} diff --git a/frontend/src/components/ReleaseNote/index.tsx b/frontend/src/components/ReleaseNote/index.tsx index bfabbd2637..bf788e6a77 100644 --- a/frontend/src/components/ReleaseNote/index.tsx +++ b/frontend/src/components/ReleaseNote/index.tsx @@ -25,11 +25,12 @@ const allComponentMap: ComponentMapType[] = [ if (!path) { return false; } - const allowedPaths = [ + const allowedPaths: string[] = [ ROUTES.LIST_ALL_ALERT, ROUTES.APPLICATION, ROUTES.ALL_DASHBOARD, ]; + return ( userFlags?.ReleaseNote0120Hide !== 'Y' && allowedPaths.includes(path) && diff --git a/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss b/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss index 2bd0606abd..31026f4f12 100644 --- a/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss +++ b/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss @@ -1,25 +1,31 @@ .DynamicColumnTable { - display: flex; - flex-direction: column; - width: 100%; + display: flex; + flex-direction: column; + width: 100%; - .dynamicColumnTable-button { - align-self: flex-end; - margin: 10px 0; - } + .dynamicColumnTable-button { + align-self: flex-end; + margin: 10px 0; + + &.filter-btn { + display: flex; + align-items: center; + justify-content: center; + } + } } .dynamicColumnsTable-items { - display: flex; - width: 10.625rem; - justify-content: space-between; - align-items: center; + display: flex; + width: 10.625rem; + justify-content: space-between; + align-items: center; } @media (max-width: 768px) { - .dynamicColumnsTable-items { - flex-direction: column; - width: auto; - text-align: center; - } -} \ No newline at end of file + .dynamicColumnsTable-items { + flex-direction: column; + width: auto; + text-align: center; + } +} diff --git a/frontend/src/components/ResizeTable/DynamicColumnTable.tsx b/frontend/src/components/ResizeTable/DynamicColumnTable.tsx index c0d77c967b..401517206f 100644 --- 
a/frontend/src/components/ResizeTable/DynamicColumnTable.tsx +++ b/frontend/src/components/ResizeTable/DynamicColumnTable.tsx @@ -1,9 +1,9 @@ /* eslint-disable react/jsx-props-no-spreading */ import './DynamicColumnTable.syles.scss'; -import { SettingOutlined } from '@ant-design/icons'; import { Button, Dropdown, MenuProps, Switch } from 'antd'; import { ColumnsType } from 'antd/lib/table'; +import { SlidersHorizontal } from 'lucide-react'; import { memo, useEffect, useState } from 'react'; import { popupContainer } from 'utils/selectPopupContainer'; @@ -90,9 +90,9 @@ function DynamicColumnTable({ trigger={['click']} > + + +
+ +
+ + + + + + + +
+ + + Save this view} + open={isSaveModalOpen} + closable + onCancel={hideSaveViewModal} + footer={[ + , + ]} + > + Label +
+ setColor(hex)} + /> + setNewViewName(e.target.value)} + /> +
+
+ + + + + + ); +} + +export interface ExplorerOptionsProps { + isLoading?: boolean; + onExport: (dashboard: Dashboard | null) => void; + query: Query | null; + disabled: boolean; + sourcepage: DataSource; +} + +ExplorerOptions.defaultProps = { isLoading: false }; + +export default ExplorerOptions; diff --git a/frontend/src/container/ExplorerOptions/types.ts b/frontend/src/container/ExplorerOptions/types.ts new file mode 100644 index 0000000000..398fe0d8a0 --- /dev/null +++ b/frontend/src/container/ExplorerOptions/types.ts @@ -0,0 +1,28 @@ +import { NotificationInstance } from 'antd/es/notification/interface'; +import { AxiosResponse } from 'axios'; +import { SaveViewWithNameProps } from 'components/ExplorerCard/types'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { Dispatch, SetStateAction } from 'react'; +import { UseMutateAsyncFunction } from 'react-query'; +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { SaveViewPayloadProps, SaveViewProps } from 'types/api/saveViews/types'; +import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder'; + +export interface SaveNewViewHandlerProps { + viewName: string; + compositeQuery: ICompositeMetricQuery; + sourcePage: DataSource; + extraData: SaveViewProps['extraData']; + panelType: PANEL_TYPES | null; + notifications: NotificationInstance; + refetchAllView: SaveViewWithNameProps['refetchAllView']; + saveViewAsync: UseMutateAsyncFunction< + AxiosResponse, + Error, + SaveViewProps, + SaveViewPayloadProps + >; + handlePopOverClose: SaveViewWithNameProps['handlePopOverClose']; + redirectWithQueryBuilderData: QueryBuilderContextType['redirectWithQueryBuilderData']; + setNewViewName: Dispatch>; +} diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts new file mode 100644 index 0000000000..e3ac710609 --- /dev/null +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -0,0 +1,69 @@ +import { Color } 
from '@signozhq/design-tokens'; +import { showErrorNotification } from 'components/ExplorerCard/utils'; +import { QueryParams } from 'constants/query'; +import ROUTES from 'constants/routes'; +import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; +import { DataSource } from 'types/common/queryBuilder'; + +import { SaveNewViewHandlerProps } from './types'; + +export const getRandomColor = (): Color => { + const colorKeys = Object.keys(Color) as (keyof typeof Color)[]; + const randomKey = colorKeys[Math.floor(Math.random() * colorKeys.length)]; + return Color[randomKey]; +}; + +export const DATASOURCE_VS_ROUTES: Record = { + [DataSource.METRICS]: '', + [DataSource.TRACES]: ROUTES.TRACES_EXPLORER, + [DataSource.LOGS]: ROUTES.LOGS_EXPLORER, +}; + +export const saveNewViewHandler = ({ + saveViewAsync, + refetchAllView, + notifications, + handlePopOverClose, + viewName, + compositeQuery, + sourcePage, + extraData, + redirectWithQueryBuilderData, + panelType, + setNewViewName, +}: SaveNewViewHandlerProps): void => { + saveViewAsync( + { + viewName, + compositeQuery, + sourcePage, + extraData, + }, + { + onSuccess: (data) => { + refetchAllView(); + redirectWithQueryBuilderData(mapQueryDataFromApi(compositeQuery), { + [QueryParams.panelTypes]: panelType, + [QueryParams.viewName]: viewName, + [QueryParams.viewKey]: data.data.data, + }); + notifications.success({ + message: 'View Saved Successfully', + }); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + onSettled: () => { + handlePopOverClose(); + setNewViewName(''); + }, + }, + ); +}; + +export const generateRGBAFromHex = (hex: string, opacity: number): string => + `rgba(${parseInt(hex.slice(1, 3), 16)}, ${parseInt( + hex.slice(3, 5), + 16, + )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; diff --git a/frontend/src/container/ExportPanel/ExportPanel.tsx b/frontend/src/container/ExportPanel/ExportPanelContainer.tsx similarity index 95% rename from 
frontend/src/container/ExportPanel/ExportPanel.tsx rename to frontend/src/container/ExportPanel/ExportPanelContainer.tsx index eda643dde9..df2d4f8720 100644 --- a/frontend/src/container/ExportPanel/ExportPanel.tsx +++ b/frontend/src/container/ExportPanel/ExportPanelContainer.tsx @@ -16,7 +16,10 @@ import { } from './styles'; import { filterOptions, getSelectOptions } from './utils'; -function ExportPanel({ isLoading, onExport }: ExportPanelProps): JSX.Element { +function ExportPanelContainer({ + isLoading, + onExport, +}: ExportPanelProps): JSX.Element { const { t } = useTranslation(['dashboard']); const [selectedDashboardId, setSelectedDashboardId] = useState( @@ -118,4 +121,4 @@ function ExportPanel({ isLoading, onExport }: ExportPanelProps): JSX.Element { ); } -export default ExportPanel; +export default ExportPanelContainer; diff --git a/frontend/src/container/ExportPanel/index.tsx b/frontend/src/container/ExportPanel/index.tsx index c3751c7e58..f302d83212 100644 --- a/frontend/src/container/ExportPanel/index.tsx +++ b/frontend/src/container/ExportPanel/index.tsx @@ -1,13 +1,9 @@ -import { AlertOutlined, AreaChartOutlined } from '@ant-design/icons'; -import { Button, Modal, Space } from 'antd'; -import { QueryParams } from 'constants/query'; -import ROUTES from 'constants/routes'; -import history from 'lib/history'; +import { Modal } from 'antd'; import { useCallback, useState } from 'react'; import { Dashboard } from 'types/api/dashboard/getAll'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; -import ExportPanelContainer from './ExportPanel'; +import ExportPanelContainer from './ExportPanelContainer'; function ExportPanel({ isLoading, @@ -20,53 +16,25 @@ function ExportPanel({ setIsExport(value); }, []); - const onCreateAlertsHandler = useCallback(() => { - history.push( - `${ROUTES.ALERTS_NEW}?${QueryParams.compositeQuery}=${encodeURIComponent( - JSON.stringify(query), - )}`, - ); - }, [query]); - const onCancel = (value: boolean) => (): 
void => { onModalToggle(value); }; - const onAddToDashboard = (): void => { - setIsExport(true); - }; - return ( - <> - - - - - - - - - - + + + ); } diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index 73b1f4715e..97b540df35 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -5,6 +5,7 @@ import GridPanelSwitch from 'container/GridPanelSwitch'; import { getFormatNameByOptionId } from 'container/NewWidget/RightContainer/alertFomatCategories'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useResizeObserver } from 'hooks/useDimensions'; @@ -28,7 +29,7 @@ export interface ChartPreviewProps { query: Query | null; graphType?: PANEL_TYPES; selectedTime?: timePreferenceType; - selectedInterval?: Time; + selectedInterval?: Time | TimeV2; headline?: JSX.Element; alertDef?: AlertDef; userQueryKey?: string; diff --git a/frontend/src/container/FormAlertRules/QuerySection.styles.scss b/frontend/src/container/FormAlertRules/QuerySection.styles.scss new file mode 100644 index 0000000000..ee3f4892af --- /dev/null +++ b/frontend/src/container/FormAlertRules/QuerySection.styles.scss @@ -0,0 +1,61 @@ +.alert-tabs { + .ant-tabs-tab { + border: none !important; + margin-left: 0px !important; + padding: 0px !important; + + .nav-btns { + display: flex; + align-items: center; + justify-content: center; + + .prom-ql-icon { + height: 14px; + width: 14px; + } + } + .ant-btn-default { + border-color: transparent; + } + } + .ant-tabs-tab-active { + .nav-btns { + background: var(--bg-slate-400) !important; + 
} + } + + .ant-tabs-nav { + margin: 0px; + margin-bottom: 0.5rem; + } + .ant-tabs-nav::before { + border-bottom: none !important; + } + .ant-tabs-nav-list { + border: 1px solid var(--bg-slate-200); + } + .ant-tabs-tab + .ant-tabs-tab { + border-left: 1px solid var(--bg-slate-200) !important; + } + + .stage-run-query { + display: flex; + align-items: center; + } +} + +.lightMode { + .alert-tabs { + .ant-tabs-nav-list { + border: 1px solid var(--bg-vanilla-300); + } + .ant-tabs-tab + .ant-tabs-tab { + border-left: 1px solid var(--bg-vanilla-200) !important; + } + .ant-tabs-tab-active { + .nav-btns { + background: var(--bg-vanilla-300) !important; + } + } + } +} diff --git a/frontend/src/container/FormAlertRules/QuerySection.tsx b/frontend/src/container/FormAlertRules/QuerySection.tsx index 4ccac39fc4..619e61af2f 100644 --- a/frontend/src/container/FormAlertRules/QuerySection.tsx +++ b/frontend/src/container/FormAlertRules/QuerySection.tsx @@ -1,8 +1,11 @@ -import { Button, Tabs } from 'antd'; +import './QuerySection.styles.scss'; + +import { Button, Tabs, Tooltip } from 'antd'; import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { QueryBuilder } from 'container/QueryBuilder'; -import { useMemo } from 'react'; +import { Atom, Play, Terminal } from 'lucide-react'; +import { useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; @@ -22,6 +25,7 @@ function QuerySection({ }: QuerySectionProps): JSX.Element { // init namespace for translations const { t } = useTranslation('alerts'); + const [currentTab, setCurrentTab] = useState(queryCategory); const { featureResponse } = useSelector( (state) => state.app, @@ -31,6 +35,7 @@ function QuerySection({ featureResponse.refetch().then(() => { setQueryCategory(queryType as EQueryType); }); + setCurrentTab(queryType as EQueryType); }; const 
renderPromqlUI = (): JSX.Element => ; @@ -49,22 +54,61 @@ function QuerySection({ const tabs = [ { - label: t('tab_qb'), + label: ( + + + + ), key: EQueryType.QUERY_BUILDER, }, { - label: t('tab_chquery'), + label: ( + + + + ), key: EQueryType.CLICKHOUSE, }, ]; const items = useMemo( () => [ - { label: t('tab_qb'), key: EQueryType.QUERY_BUILDER }, - { label: t('tab_chquery'), key: EQueryType.CLICKHOUSE }, - { label: t('tab_promql'), key: EQueryType.PROM }, + { + label: ( + + + + ), + key: EQueryType.QUERY_BUILDER, + }, + { + label: ( + + + + ), + key: EQueryType.CLICKHOUSE, + }, + { + label: ( + + + + ), + key: EQueryType.PROM, + }, ], - [t], + [], ); const renderTabs = (typ: AlertTypes): JSX.Element | null => { @@ -73,40 +117,54 @@ function QuerySection({ case AlertTypes.LOGS_BASED_ALERT: case AlertTypes.EXCEPTIONS_BASED_ALERT: return ( - - - - } - items={tabs} - /> +
+ + + + } + items={tabs} + /> +
); case AlertTypes.METRICS_BASED_ALERT: default: return ( - - - - } - items={items} - /> +
+ + + + } + items={items} + /> +
); } }; @@ -126,8 +184,8 @@ function QuerySection({ <> {t('alert_form_step1')} -
{renderTabs(alertType)}
- {renderQuerySection(queryCategory)} +
{renderTabs(alertType)}
+ {renderQuerySection(currentTab)}
); diff --git a/frontend/src/container/FormAlertRules/RuleOptions.tsx b/frontend/src/container/FormAlertRules/RuleOptions.tsx index 0fa5e404e7..e4ae376c73 100644 --- a/frontend/src/container/FormAlertRules/RuleOptions.tsx +++ b/frontend/src/container/FormAlertRules/RuleOptions.tsx @@ -83,7 +83,7 @@ function RuleOptions({ ); - const renderThresholdMatchOpts = (): JSX.Element => ( + const renderMatchOpts = (): JSX.Element => ( ); - const renderPromMatchOpts = (): JSX.Element => ( - handleMatchOptChange(value)} - > - {t('option_atleastonce')} - - ); + const onChangeEvalWindow = (value: string | unknown): void => { + const ew = (value as string) || alertDef.evalWindow; + setAlertDef({ + ...alertDef, + evalWindow: ew, + }); + }; const renderEvalWindows = (): JSX.Element => ( { - const ew = (value as string) || alertDef.evalWindow; - setAlertDef({ - ...alertDef, - evalWindow: ew, - }); - }} + onChange={onChangeEvalWindow} > {t('option_5min')} {t('option_10min')} @@ -133,6 +123,20 @@ function RuleOptions({ ); + const renderPromEvalWindows = (): JSX.Element => ( + + {t('option_5min')} + {t('option_10min')} + {t('option_15min')} + + ); + const renderThresholdRuleOpts = (): JSX.Element => ( @@ -147,7 +151,7 @@ function RuleOptions({ onChange={onChangeSelectedQueryName} /> is - {renderCompareOps()} {t('text_condition2')} {renderThresholdMatchOpts()}{' '} + {renderCompareOps()} {t('text_condition2')} {renderMatchOpts()}{' '} {t('text_condition3')} {renderEvalWindows()} @@ -167,7 +171,8 @@ function RuleOptions({ onChange={onChangeSelectedQueryName} /> is - {renderCompareOps()} {t('text_condition2')} {renderPromMatchOpts()} + {renderCompareOps()} {t('text_condition2')} {renderMatchOpts()} + {t('text_condition3')} {renderPromEvalWindows()} ); diff --git a/frontend/src/container/FormAlertRules/index.tsx b/frontend/src/container/FormAlertRules/index.tsx index f8c9d36a91..b87f025f9d 100644 --- a/frontend/src/container/FormAlertRules/index.tsx +++ 
b/frontend/src/container/FormAlertRules/index.tsx @@ -142,6 +142,10 @@ function FormAlertRules({ // onQueryCategoryChange handles changes to query category // in state as well as sets additional defaults const onQueryCategoryChange = (val: EQueryType): void => { + const element = document.getElementById('top'); + if (element) { + element.scrollIntoView({ behavior: 'smooth' }); + } if (val === EQueryType.PROM) { setAlertDef({ ...alertDef, @@ -465,7 +469,7 @@ function FormAlertRules({ <> {Element} - +
- + {category.name} @@ -542,38 +546,43 @@ function GeneralSettings({ hide={!!retentionField.hide} /> ))} - - - {category.statusComponent} - - - onModalToggleHandler(category.name.toLowerCase() as TTTLType) - } - onOk={(): Promise => - onOkHandler(category.name.toLowerCase() as TTTLType) - } - centered - open={category.save.modal} - confirmLoading={category.save.apiLoading} - > - - {t('retention_confirmation_description', { - name: category.name.toLowerCase(), - })} - - + + {!isCloudUserVal && ( + <> + + + {category.statusComponent} + + + onModalToggleHandler(category.name.toLowerCase() as TTTLType) + } + onOk={(): Promise => + onOkHandler(category.name.toLowerCase() as TTTLType) + } + centered + open={category.save.modal} + confirmLoading={category.save.apiLoading} + > + + {t('retention_confirmation_description', { + name: category.name.toLowerCase(), + })} + + + + )} @@ -587,16 +596,20 @@ function GeneralSettings({ {Element} - + {!isCloudUserVal && ( + + )} {errorText && {errorText}} {renderConfig} + + {isCloudUserVal && } ); diff --git a/frontend/src/container/GeneralSettings/Retention.tsx b/frontend/src/container/GeneralSettings/Retention.tsx index 6228391503..172d1ee0c6 100644 --- a/frontend/src/container/GeneralSettings/Retention.tsx +++ b/frontend/src/container/GeneralSettings/Retention.tsx @@ -8,6 +8,7 @@ import { useRef, useState, } from 'react'; +import { isCloudUser } from 'utils/app'; import { Input, @@ -85,9 +86,13 @@ function Retention({ func(null); } }; + if (hide) { return null; } + + const isCloudUserVal = isCloudUser(); + return ( @@ -98,12 +103,14 @@ function Retention({ = 0 ? 
selectedValue : ''} + disabled={isCloudUserVal} onChange={(e): void => onChangeHandler(e, setSelectedValue)} style={{ width: 75 }} /> setFieldSearchInput(e.target.value)} + /> + )} + + + + ), + className: 'collapse-content attribute-collapse', + }, + ]} + /> + + ); +} + +export default Overview; diff --git a/frontend/src/container/LogDetailedView/TableView.styles.scss b/frontend/src/container/LogDetailedView/TableView.styles.scss new file mode 100644 index 0000000000..89101bf7b2 --- /dev/null +++ b/frontend/src/container/LogDetailedView/TableView.styles.scss @@ -0,0 +1,80 @@ +.attribute-table-container { + .ant-table { + background: var(--bg-ink-400); + + .ant-table-row:hover { + .ant-table-cell { + .value-field { + display: flex; + justify-content: space-between; + align-items: center; + .action-btn { + display: flex; + gap: 4px; + } + } + } + } + + .ant-table-cell { + border: 1px solid var(--bg-slate-500); + } + + .attribute-name { + .ant-btn { + &:hover { + background-color: none !important; + } + } + } + + .value-field-container { + background: rgba(22, 25, 34, 0.4); + + .value-field { + position: relative; + } + + .action-btn { + display: none; + width: max-content; + position: absolute; + padding: 0 16px; + right: 0; + + .filter-btn { + display: flex; + align-items: center; + border: none; + box-shadow: none; + border-radius: 2px; + background: var(--bg-slate-400); + + height: 24px; + } + } + } + } +} + +.lightMode { + .attribute-table-container { + .ant-table { + background: var(--bg-vanilla-100); + } + + .ant-table-cell { + border: 1px solid var(--bg-vanilla-200); + } + + .value-field-container { + background: var(--bg-vanilla-300); + + .action-btn { + .filter-btn { + background: var(--bg-vanilla-300); + } + } + } + } +} diff --git a/frontend/src/container/LogDetailedView/TableView.tsx b/frontend/src/container/LogDetailedView/TableView.tsx index 929e827dc3..29224e65ae 100644 --- a/frontend/src/container/LogDetailedView/TableView.tsx +++ 
b/frontend/src/container/LogDetailedView/TableView.tsx @@ -1,16 +1,21 @@ -import { orange } from '@ant-design/colors'; +import './TableView.styles.scss'; + import { LinkOutlined } from '@ant-design/icons'; -import { Input, Space, Tooltip, Tree } from 'antd'; +import { Color } from '@signozhq/design-tokens'; +import { Button, Space, Spin, Tooltip, Tree, Typography } from 'antd'; import { ColumnsType } from 'antd/es/table'; import AddToQueryHOC, { AddToQueryHOCProps, } from 'components/Logs/AddToQueryHOC'; import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC'; import { ResizeTable } from 'components/ResizeTable'; +import { OPERATORS } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; import history from 'lib/history'; import { fieldSearchFilter } from 'lib/logs/fieldSearch'; +import { removeJSONStringifyQuotes } from 'lib/removeJSONStringifyQuotes'; import { isEmpty } from 'lodash-es'; +import { ArrowDownToDot, ArrowUpFromDot } from 'lucide-react'; import { useMemo, useState } from 'react'; import { useDispatch } from 'react-redux'; import { generatePath } from 'react-router-dom'; @@ -19,7 +24,7 @@ import AppActions from 'types/actions'; import { SET_DETAILED_LOG_DATA } from 'types/actions/logs'; import { ILog } from 'types/api/logs/log'; -import ActionItem, { ActionItemProps } from './ActionItem'; +import { ActionItemProps } from './ActionItem'; import FieldRenderer from './FieldRenderer'; import { filterKeyForField, @@ -34,25 +39,53 @@ const RESTRICTED_FIELDS = ['timestamp']; interface TableViewProps { logData: ILog; + fieldSearchInput: string; } type Props = TableViewProps & - Pick & - Pick; + Partial> & + Pick; function TableView({ logData, + fieldSearchInput, onAddToQuery, onClickActionItem, }: Props): JSX.Element | null { - const [fieldSearchInput, setFieldSearchInput] = useState(''); - const dispatch = useDispatch>(); + const [isfilterInLoading, setIsFilterInLoading] = useState(false); + const [isfilterOutLoading, 
setIsFilterOutLoading] = useState(false); const flattenLogData: Record | null = useMemo( () => (logData ? flattenObject(logData) : null), [logData], ); + + const handleClick = ( + operator: string, + fieldKey: string, + fieldValue: string, + ): void => { + const validatedFieldValue = removeJSONStringifyQuotes(fieldValue); + if (onClickActionItem) { + onClickActionItem(fieldKey, validatedFieldValue, operator); + } + }; + + const onClickHandler = ( + operator: string, + fieldKey: string, + fieldValue: string, + ) => (): void => { + handleClick(operator, fieldKey, fieldValue); + if (operator === OPERATORS.IN) { + setIsFilterInLoading(true); + } + if (operator === OPERATORS.NIN) { + setIsFilterOutLoading(true); + } + }; + if (logData === null) { return null; } @@ -95,24 +128,6 @@ function TableView({ } const columns: ColumnsType = [ - { - title: 'Action', - width: 11, - render: (fieldData: Record): JSX.Element | null => { - const fieldFilterKey = filterKeyForField(fieldData.field); - - if (!RESTRICTED_FIELDS.includes(fieldFilterKey)) { - return ( - - ); - } - return null; - }, - }, { title: 'Field', dataIndex: 'field', @@ -120,6 +135,7 @@ function TableView({ width: 50, align: 'left', ellipsis: true, + className: 'attribute-name', render: (field: string, record): JSX.Element => { const renderedField = ; @@ -127,8 +143,8 @@ function TableView({ const traceId = flattenLogData[record.field]; return ( - - {renderedField} + + {renderedField} {traceId && ( @@ -166,15 +182,15 @@ function TableView({ }, { title: 'Value', - dataIndex: 'value', key: 'value', width: 70, ellipsis: false, - render: (field, record): JSX.Element => { - const textToCopy = field.slice(1, -1); + className: 'value-field-container attribute-value', + render: (fieldData: Record, record): JSX.Element => { + const textToCopy = fieldData.value.slice(1, -1); if (record.field === 'body') { - const parsedBody = recursiveParseJSON(field); + const parsedBody = recursiveParseJSON(fieldData.value); if 
(!isEmpty(parsedBody)) { return ( @@ -182,30 +198,62 @@ function TableView({ } } + const fieldFilterKey = filterKeyForField(fieldData.field); + return ( - - {removeEscapeCharacters(field)} - +
+ + + {removeEscapeCharacters(fieldData.value)} + + + + +
); }, }, ]; return ( - <> - setFieldSearchInput(e.target.value)} - /> - - + ); } diff --git a/frontend/src/container/LogDetailedView/index.tsx b/frontend/src/container/LogDetailedView/index.tsx index 588cc7e240..4ff7ab6a99 100644 --- a/frontend/src/container/LogDetailedView/index.tsx +++ b/frontend/src/container/LogDetailedView/index.tsx @@ -1,4 +1,5 @@ import LogDetail from 'components/LogDetail'; +import { VIEW_TYPES } from 'components/LogDetail/constants'; import ROUTES from 'constants/routes'; import { getGeneratedFilterQueryString } from 'lib/getGeneratedFilterQueryString'; import getStep from 'lib/getStep'; @@ -136,6 +137,7 @@ function LogDetailedView({ return ( }; return escapeMap[char as keyof typeof escapeMap]; }); + +export function removeExtraSpaces(input: string): string { + return input.replace(/\s+/g, ' ').trim(); +} diff --git a/frontend/src/container/LogExplorerQuerySection/LogsExplorerQuerySection.styles.scss b/frontend/src/container/LogExplorerQuerySection/LogsExplorerQuerySection.styles.scss new file mode 100644 index 0000000000..db79557a71 --- /dev/null +++ b/frontend/src/container/LogExplorerQuerySection/LogsExplorerQuerySection.styles.scss @@ -0,0 +1,32 @@ +.qb-search-view-container { + padding: 8px 16px; + border-top: 1px solid var(--bg-slate-400, #1d212d); + border-bottom: 1px solid var(--bg-slate-400, #1d212d); + + .ant-select-selector { + border-radius: 2px; + border: 1px solid var(--bg-slate-400) !important; + background-color: var(--bg-ink-300) !important; + + input { + font-size: 12px; + } + + .ant-tag .ant-typography { + font-size: 12px; + } + } +} + +.lightMode { + .qb-search-view-container { + border-top: 1px solid var(--bg-vanilla-300); + border-bottom: 1px solid var(--bg-vanilla-300); + + .ant-select-selector { + border-color: var(--bg-vanilla-300) !important; + background-color: var(--bg-vanilla-100) !important; + color: var(--bg-ink-200); + } + } +} diff --git a/frontend/src/container/LogExplorerQuerySection/index.tsx 
b/frontend/src/container/LogExplorerQuerySection/index.tsx index 165bc8c20f..8653a75caf 100644 --- a/frontend/src/container/LogExplorerQuerySection/index.tsx +++ b/frontend/src/container/LogExplorerQuerySection/index.tsx @@ -1,4 +1,5 @@ -import { Button } from 'antd'; +import './LogsExplorerQuerySection.styles.scss'; + import { initialQueriesMap, OPERATORS, @@ -7,17 +8,29 @@ import { import ExplorerOrderBy from 'container/ExplorerOrderBy'; import { QueryBuilder } from 'container/QueryBuilder'; import { OrderByFilterProps } from 'container/QueryBuilder/filters/OrderByFilter/OrderByFilter.interfaces'; +import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch'; import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces'; import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations'; import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; -import { ButtonWrapperStyled } from 'pages/LogsExplorer/styles'; -import { prepareQueryWithDefaultTimestamp } from 'pages/LogsExplorer/utils'; +import { + prepareQueryWithDefaultTimestamp, + SELECTED_VIEWS, +} from 'pages/LogsExplorer/utils'; import { memo, useCallback, useMemo } from 'react'; +import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { DataSource } from 'types/common/queryBuilder'; -function LogExplorerQuerySection(): JSX.Element { - const { handleRunQuery, updateAllQueriesOperators } = useQueryBuilder(); +function LogExplorerQuerySection({ + selectedView, +}: { + selectedView: SELECTED_VIEWS; +}): JSX.Element { + const { currentQuery, updateAllQueriesOperators } = useQueryBuilder(); + + const query = currentQuery?.builder?.queryData[0] || null; + const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.LIST); const defaultValue = useMemo(() => { 
const updatedQuery = updateAllQueriesOperators( @@ -45,6 +58,12 @@ function LogExplorerQuerySection(): JSX.Element { return config; }, [panelTypes]); + const { handleChangeQueryData } = useQueryOperations({ + index: 0, + query, + filterConfigs, + }); + const renderOrderBy = useCallback( ({ query, onChange }: OrderByFilterProps): JSX.Element => ( @@ -59,20 +78,34 @@ function LogExplorerQuerySection(): JSX.Element { [panelTypes, renderOrderBy], ); + const handleChangeTagFilters = useCallback( + (value: IBuilderQuery['filters']) => { + handleChangeQueryData('filters', value); + }, + [handleChangeQueryData], + ); + return ( - - - - } - /> + <> + {selectedView === SELECTED_VIEWS.SEARCH && ( +
+ +
+ )} + + {selectedView === SELECTED_VIEWS.QUERY_BUILDER && ( + + )} + ); } diff --git a/frontend/src/container/LogsContextList/LogsContextList.styles.scss b/frontend/src/container/LogsContextList/LogsContextList.styles.scss new file mode 100644 index 0000000000..18bb285140 --- /dev/null +++ b/frontend/src/container/LogsContextList/LogsContextList.styles.scss @@ -0,0 +1,36 @@ +.context-logs-list { + position: relative; + + .show-more-button { + position: absolute; + z-index: 1; + opacity: 1; + + &.up { + top: 0; + } + + &.down { + bottom: 0; + } + } + + .virtuoso-list { + &::-webkit-scrollbar { + width: 0.1rem; + height: 0.1rem; + } + } + + &.logs-context-list-asc { + .virtuoso-list { + padding-top: 16px; + } + } + + &.logs-context-list-desc { + .virtuoso-list { + padding-bottom: 16px; + } + } +} diff --git a/frontend/src/container/LogsContextList/ShowButton.styles.scss b/frontend/src/container/LogsContextList/ShowButton.styles.scss new file mode 100644 index 0000000000..ee765f5c08 --- /dev/null +++ b/frontend/src/container/LogsContextList/ShowButton.styles.scss @@ -0,0 +1,31 @@ +.show-more-button { + background-color: var(--bg-slate-400); + color: var(--bg-vanilla-100); + display: flex; + padding: 4px 8px; + align-items: center; + gap: 3px; + border: none; + margin: 0; +} + +.show-more-button { + &.disabled { + background-color: var(--bg-slate-200); + color: var(--bg-vanilla-400); + } +} + +.lightMode { + .show-more-button { + background-color: var(--bg-vanilla-300); + color: var(--bg-slate-400); + } + + .show-more-button { + &.disabled { + background-color: var(--bg-vanilla-300); + color: var(--bg-vanilla-400); + } + } +} \ No newline at end of file diff --git a/frontend/src/container/LogsContextList/ShowButton.tsx b/frontend/src/container/LogsContextList/ShowButton.tsx index 7240800af7..e687a5610d 100644 --- a/frontend/src/container/LogsContextList/ShowButton.tsx +++ b/frontend/src/container/LogsContextList/ShowButton.tsx @@ -1,7 +1,10 @@ -import { Button, 
Typography } from 'antd'; -import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; +import './ShowButton.styles.scss'; -import { ShowButtonWrapper } from './styles'; +import { Color } from '@signozhq/design-tokens'; +import { Button } from 'antd'; +import cx from 'classnames'; +import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; +import { ArrowDown, ArrowUp, Ban } from 'lucide-react'; interface ShowButtonProps { isLoading: boolean; @@ -16,20 +19,35 @@ function ShowButton({ order, onClick, }: ShowButtonProps): JSX.Element { + const getIcons = (): JSX.Element => { + if (order === ORDERBY_FILTERS.ASC) { + return isDisabled ? ( + + ) : ( + + ); + } + return isDisabled ? ( + + ) : ( + + ); + }; + return ( - - - Showing 10 lines {order === ORDERBY_FILTERS.ASC ? 'after' : 'before'} match - - - + ); } diff --git a/frontend/src/container/LogsContextList/configs.ts b/frontend/src/container/LogsContextList/configs.ts index 2fbb159b9d..baa3b39420 100644 --- a/frontend/src/container/LogsContextList/configs.ts +++ b/frontend/src/container/LogsContextList/configs.ts @@ -1,6 +1,6 @@ import { OrderByPayload } from 'types/api/queryBuilder/queryBuilderData'; -export const INITIAL_PAGE_SIZE = 5; +export const INITIAL_PAGE_SIZE = 10; export const LOGS_MORE_PAGE_SIZE = 10; export const getOrderByTimestamp = (order: string): OrderByPayload => ({ diff --git a/frontend/src/container/LogsContextList/index.tsx b/frontend/src/container/LogsContextList/index.tsx index d473ddb20a..5bb47d6ce5 100644 --- a/frontend/src/container/LogsContextList/index.tsx +++ b/frontend/src/container/LogsContextList/index.tsx @@ -1,3 +1,5 @@ +import './LogsContextList.styles.scss'; + import RawLogView from 'components/Logs/RawLogView'; import Spinner from 'components/Spinner'; import { PANEL_TYPES } from 'constants/queryBuilder'; @@ -21,6 +23,7 @@ import { EmptyText, ListContainer } from './styles'; import { getRequestData } from './utils'; 
interface LogsContextListProps { + className?: string; isEdit: boolean; query: Query; log: ILog; @@ -29,6 +32,7 @@ interface LogsContextListProps { } function LogsContextList({ + className, isEdit, query, log, @@ -166,7 +170,7 @@ function LogsContextList({ ); return ( - <> +
{order === ORDERBY_FILTERS.ASC && ( } )} - +
); } +LogsContextList.defaultProps = { + className: '', +}; + export default memo(LogsContextList); diff --git a/frontend/src/container/LogsContextList/styles.ts b/frontend/src/container/LogsContextList/styles.ts index 85cf3128f1..9d9d1379b6 100644 --- a/frontend/src/container/LogsContextList/styles.ts +++ b/frontend/src/container/LogsContextList/styles.ts @@ -1,19 +1,14 @@ -import { Space, Typography } from 'antd'; -import { themeColors } from 'constants/theme'; +import { Color } from '@signozhq/design-tokens'; +import { Typography } from 'antd'; import styled from 'styled-components'; export const ListContainer = styled.div<{ $isDarkMode: boolean }>` position: relative; - margin: 0 -1.5rem; - height: 10rem; - overflow-y: scroll; + height: 21rem; + overflow: hidden; background-color: ${({ $isDarkMode }): string => - $isDarkMode ? themeColors.darkGrey : themeColors.lightgrey}; -`; - -export const ShowButtonWrapper = styled(Space)` - margin: 0.625rem 0; + $isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100}; `; export const EmptyText = styled(Typography)` diff --git a/frontend/src/container/LogsError/LogsError.styles.scss b/frontend/src/container/LogsError/LogsError.styles.scss new file mode 100644 index 0000000000..789357122f --- /dev/null +++ b/frontend/src/container/LogsError/LogsError.styles.scss @@ -0,0 +1,42 @@ +.logs-error-container { + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + height: 240px; + + .logs-error-content { + display: flex; + flex-direction: column; + gap: 4px; + + color: var(--text-vanilla-400); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 18px; /* 128.571% */ + letter-spacing: -0.07px; + + .aww-snap { + font-weight: 600; + } + + .error-state-svg { + height: 50px; + width: 50px; + } + + .contact-support { + display: flex; + align-items: center; + margin-top: 8px; + gap: 4px; + + .text { + color: var(--text-robin-400); + font-weight: 500; + } + 
} + } +} diff --git a/frontend/src/container/LogsError/LogsError.tsx b/frontend/src/container/LogsError/LogsError.tsx new file mode 100644 index 0000000000..0b3c9f501d --- /dev/null +++ b/frontend/src/container/LogsError/LogsError.tsx @@ -0,0 +1,26 @@ +import './LogsError.styles.scss'; + +import { Typography } from 'antd'; +import { ArrowRight } from 'lucide-react'; + +export default function LogsError(): JSX.Element { + return ( +
+
+ error-emoji + + Aw snap :/ Something went wrong. Please + try again or contact support. + +
+ Contact Support + +
+
+
+ ); +} diff --git a/frontend/src/container/LogsExplorerChart/LogsExplorerChart.styled.ts b/frontend/src/container/LogsExplorerChart/LogsExplorerChart.styled.ts index 6fbe2d2e23..fec5dc1f0c 100644 --- a/frontend/src/container/LogsExplorerChart/LogsExplorerChart.styled.ts +++ b/frontend/src/container/LogsExplorerChart/LogsExplorerChart.styled.ts @@ -2,10 +2,13 @@ import { Card } from 'antd'; import styled from 'styled-components'; export const CardStyled = styled(Card)` + border: none !important; position: relative; - margin: 0.5rem 0 3.1rem 0; + margin-bottom: 16px; .ant-card-body { - height: 20vh; + height: 200px; min-height: 200px; + padding: 0 16px 16px 16px; + font-family: 'Space Mono', monospace; } `; diff --git a/frontend/src/container/LogsExplorerChart/index.tsx b/frontend/src/container/LogsExplorerChart/index.tsx index ec329907f3..2f909bea25 100644 --- a/frontend/src/container/LogsExplorerChart/index.tsx +++ b/frontend/src/container/LogsExplorerChart/index.tsx @@ -1,9 +1,17 @@ import Graph from 'components/Graph'; import Spinner from 'components/Spinner'; +import { QueryParams } from 'constants/query'; import { themeColors } from 'constants/theme'; +import useUrlQuery from 'hooks/useUrlQuery'; import getChartData, { GetChartDataProps } from 'lib/getChartData'; +import GetMinMax from 'lib/getMinMax'; import { colors } from 'lib/getRandomColor'; -import { memo, useCallback, useMemo } from 'react'; +import getTimeString from 'lib/getTimeString'; +import history from 'lib/history'; +import { memo, useCallback, useEffect, useMemo } from 'react'; +import { useDispatch } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { UpdateTimeInterval } from 'store/actions'; import { LogsExplorerChartProps } from './LogsExplorerChart.interfaces'; import { CardStyled } from './LogsExplorerChart.styled'; @@ -14,6 +22,9 @@ function LogsExplorerChart({ isLabelEnabled = true, className, }: LogsExplorerChartProps): JSX.Element { + const dispatch = 
useDispatch(); + const urlQuery = useUrlQuery(); + const location = useLocation(); const handleCreateDatasets: Required['createDataset'] = useCallback( (element, index, allLabels) => ({ data: element, @@ -28,6 +39,52 @@ function LogsExplorerChart({ [isLabelEnabled], ); + const onDragSelect = useCallback( + (start: number, end: number): void => { + const startTimestamp = Math.trunc(start); + const endTimestamp = Math.trunc(end); + + if (startTimestamp !== endTimestamp) { + dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); + } + + const { maxTime, minTime } = GetMinMax('custom', [ + startTimestamp, + endTimestamp, + ]); + + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + }, + [dispatch, location.pathname, urlQuery], + ); + + const handleBackNavigation = (): void => { + const searchParams = new URLSearchParams(window.location.search); + const startTime = searchParams.get(QueryParams.startTime); + const endTime = searchParams.get(QueryParams.endTime); + + if (startTime && endTime && startTime !== endTime) { + dispatch( + UpdateTimeInterval('custom', [ + parseInt(getTimeString(startTime), 10), + parseInt(getTimeString(endTime), 10), + ]), + ); + } + }; + + useEffect(() => { + window.addEventListener('popstate', handleBackNavigation); + + return (): void => { + window.removeEventListener('popstate', handleBackNavigation); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const graphData = useMemo( () => getChartData({ @@ -50,8 +107,8 @@ function LogsExplorerChart({ name="logsExplorerChart" data={graphData.data} type="bar" - containerHeight="100%" animate + onDragSelect={onDragSelect} /> )} diff --git a/frontend/src/container/LogsExplorerContext/index.tsx b/frontend/src/container/LogsExplorerContext/index.tsx index 31c7202a8d..d62cdb274b 100644 --- 
a/frontend/src/container/LogsExplorerContext/index.tsx +++ b/frontend/src/container/LogsExplorerContext/index.tsx @@ -1,6 +1,5 @@ import { EditFilled } from '@ant-design/icons'; -import { Typography } from 'antd'; -import Modal from 'antd/es/modal/Modal'; +import { Modal, Typography } from 'antd'; import RawLogView from 'components/Logs/RawLogView'; import LogsContextList from 'container/LogsContextList'; import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config'; diff --git a/frontend/src/container/LogsExplorerContext/types.ts b/frontend/src/container/LogsExplorerContext/types.ts index 343171a740..bb5c26769f 100644 --- a/frontend/src/container/LogsExplorerContext/types.ts +++ b/frontend/src/container/LogsExplorerContext/types.ts @@ -1,6 +1,7 @@ +import { MouseEventHandler } from 'react'; import { ILog } from 'types/api/logs/log'; export interface LogsExplorerContextProps { log: ILog; - onClose: VoidFunction; + onClose: MouseEventHandler; } diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.styles.scss b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.styles.scss new file mode 100644 index 0000000000..6b0593def3 --- /dev/null +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.styles.scss @@ -0,0 +1,4 @@ +.logs-table-row { + cursor: pointer; + position: relative; +} diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx new file mode 100644 index 0000000000..de28412078 --- /dev/null +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx @@ -0,0 +1,95 @@ +import './TableRow.styles.scss'; + +import { ColumnsType } from 'antd/es/table'; +import LogLinesActionButtons from 'components/Logs/LogLinesActionButtons/LogLinesActionButtons'; +import { ColumnTypeRender } from 'components/Logs/TableView/types'; +import { useCopyLogLink } from 
'hooks/logs/useCopyLogLink'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { + cloneElement, + MouseEventHandler, + ReactElement, + ReactNode, + useCallback, + useMemo, +} from 'react'; +import { ILog } from 'types/api/logs/log'; + +import { TableCellStyled } from './styles'; + +interface TableRowProps { + tableColumns: ColumnsType>; + index: number; + log: Record; + handleSetActiveContextLog: (log: ILog) => void; + logs: ILog[]; + hasActions: boolean; +} + +export default function TableRow({ + tableColumns, + index, + log, + handleSetActiveContextLog, + logs, + hasActions, +}: TableRowProps): JSX.Element { + const isDarkMode = useIsDarkMode(); + + const currentLog = useMemo(() => logs.find(({ id }) => id === log.id), [ + logs, + log.id, + ]); + + const { onLogCopy, isLogsExplorerPage } = useCopyLogLink(currentLog?.id); + + const handleShowContext: MouseEventHandler = useCallback( + (event) => { + event.preventDefault(); + event.stopPropagation(); + if (!handleSetActiveContextLog || !currentLog) return; + + handleSetActiveContextLog(currentLog); + }, + [currentLog, handleSetActiveContextLog], + ); + + return ( + <> + {tableColumns.map((column) => { + if (!column.render) return
; + + const element: ColumnTypeRender> = column.render( + log[column.key as keyof Record], + log, + index, + ); + + const elementWithChildren = element as Exclude< + ColumnTypeRender>, + ReactNode + >; + + const children = elementWithChildren.children as ReactElement; + const props = elementWithChildren.props as Record; + + return ( + + {cloneElement(children, props)} + + ); + })} + {hasActions && isLogsExplorerPage && ( + + )} + + ); +} diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/config.ts b/frontend/src/container/LogsExplorerList/InfinityTableView/config.ts index 3478cd149e..ec16ba1024 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/config.ts +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/config.ts @@ -3,4 +3,5 @@ import { CSSProperties } from 'react'; export const infinityDefaultStyles: CSSProperties = { width: '100%', overflowX: 'scroll', + marginTop: '15px', }; diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx index 3db6fdb408..e1e7ca514d 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx @@ -1,22 +1,13 @@ import LogDetail from 'components/LogDetail'; -import { ColumnTypeRender } from 'components/Logs/TableView/types'; +import { VIEW_TYPES } from 'components/LogDetail/constants'; import { useTableView } from 'components/Logs/TableView/useTableView'; import { LOCALSTORAGE } from 'constants/localStorage'; -import LogsExplorerContext from 'container/LogsExplorerContext'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; import { useIsDarkMode } from 'hooks/useDarkMode'; import useDragColumns from 'hooks/useDragColumns'; import { getDraggedColumns } from 'hooks/useDragColumns/utils'; -import { - cloneElement, - forwardRef, - memo, - 
ReactElement, - ReactNode, - useCallback, - useMemo, -} from 'react'; +import { forwardRef, memo, useCallback, useMemo } from 'react'; import { TableComponents, TableVirtuoso, @@ -26,11 +17,8 @@ import { ILog } from 'types/api/logs/log'; import { infinityDefaultStyles } from './config'; import { LogsCustomTable } from './LogsCustomTable'; -import { - TableCellStyled, - TableHeaderCellStyled, - TableRowStyled, -} from './styles'; +import { TableHeaderCellStyled, TableRowStyled } from './styles'; +import TableRow from './TableRow'; import { InfinityTableProps } from './types'; // eslint-disable-next-line react/function-component-definition @@ -64,6 +52,7 @@ const InfinityTable = forwardRef( activeLog: activeContextLog, onSetActiveLog: handleSetActiveContextLog, onClearActiveLog: handleClearActiveContextLog, + onAddToQuery: handleAddToQuery, } = useActiveLog(); const { activeLog, @@ -76,6 +65,8 @@ const InfinityTable = forwardRef( ...tableViewProps, onClickExpand: onSetActiveLog, onOpenLogsContext: handleSetActiveContextLog, + activeLog, + activeContextLog, }); const { draggedColumns, onDragColumns } = useDragColumns< Record @@ -96,37 +87,16 @@ const InfinityTable = forwardRef( const itemContent = useCallback( (index: number, log: Record): JSX.Element => ( - <> - {tableColumns.map((column) => { - if (!column.render) return ; - - const element: ColumnTypeRender> = column.render( - log[column.key as keyof Record], - log, - index, - ); - - const elementWithChildren = element as Exclude< - ColumnTypeRender>, - ReactNode - >; - - const children = elementWithChildren.children as ReactElement; - const props = elementWithChildren.props as Record; - - return ( - - {cloneElement(children, props)} - - ); - })} - + ), - [tableColumns, isDarkMode], + [handleSetActiveContextLog, tableColumns, tableViewProps.logs], ); const tableHeader = useCallback( @@ -137,13 +107,14 @@ const InfinityTable = forwardRef( return ( - {column.title as string} + {(column.title as string).replace(/^\w/, 
(c) => c.toUpperCase())} ); })} @@ -152,6 +123,12 @@ const InfinityTable = forwardRef( [tableColumns, isDarkMode], ); + const handleClickExpand = (index: number): void => { + if (!onSetActiveLog) return; + + onSetActiveLog(tableViewProps.logs[index]); + }; + return ( <> ( {...(infitiyTableProps?.onEndReached ? { endReached: infitiyTableProps.onEndReached } : {})} + onClick={(event: any): void => { + handleClickExpand(event.target.parentElement.parentElement.dataset.index); + }} /> {activeContextLog && ( - )} ` padding: 0.5rem; - border-inline-end: 1px solid rgba(253, 253, 253, 0.12); - border-top: 1px solid rgba(253, 253, 253, 0.12); background-color: ${(props): string => - props.$isDarkMode ? themeColors.black : themeColors.whiteCream}; + props.$isDarkMode ? 'inherit' : themeColors.whiteCream}; color: ${(props): string => props.$isDarkMode ? themeColors.white : themeColors.bckgGrey}; `; +// handle the light theme here export const TableRowStyled = styled.tr<{ $isActiveLog: boolean; $isDarkMode: boolean; @@ -36,34 +30,39 @@ export const TableRowStyled = styled.tr<{ ${({ $isActiveLog }): string => getActiveLogBackground($isActiveLog)} } + cursor: pointer; + position: relative; + + .log-line-action-buttons { + display: none; + } + &:hover { ${TableCellStyled} { ${({ $isActiveLog, $isDarkMode }): string => $isActiveLog ? getActiveLogBackground() : `background-color: ${ - !$isDarkMode ? themeColors.lightgrey : themeColors.bckgGrey - };`} + !$isDarkMode ? 'var(--bg-vanilla-200)' : 'rgba(171, 189, 255, 0.04)' + }`} + } + .log-line-action-buttons { + display: flex; } } `; export const TableHeaderCellStyled = styled.th` padding: 0.5rem; - border-inline-end: 1px solid rgba(253, 253, 253, 0.12); - background-color: ${(props): string => - !props.$isDarkMode ? themeColors.whiteCream : themeColors.bckgGrey}; - + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 18px; + letter-spacing: -0.07px; + background: ${(props): string => (props.$isDarkMode ? 
'#0b0c0d' : '#fdfdfd')}; + ${({ $isTimestamp }): string => ($isTimestamp ? 'padding-left: 24px;' : '')} ${({ $isDragColumn }): string => ($isDragColumn ? 'cursor: col-resize;' : '')} color: ${(props): string => - props.$isDarkMode ? themeColors.white : themeColors.bckgGrey}; - - &:first-child { - border-start-start-radius: 2px; - } - &:last-child { - border-start-end-radius: 2px; - border-inline-end: none; - } + props.$isDarkMode ? 'var(--bg-vanilla-100, #fff)' : themeColors.bckgGrey}; `; diff --git a/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts b/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts index ba68c67eb8..e0e9c57353 100644 --- a/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts +++ b/frontend/src/container/LogsExplorerList/LogsExplorerList.interfaces.ts @@ -3,7 +3,10 @@ import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; export type LogsExplorerListProps = { isLoading: boolean; + isFetching: boolean; currentStagedQueryData: IBuilderQuery | null; logs: ILog[]; onEndReached: (index: number) => void; + isError: boolean; + isFilterApplied: boolean; }; diff --git a/frontend/src/container/LogsExplorerList/LogsExplorerList.style.scss b/frontend/src/container/LogsExplorerList/LogsExplorerList.style.scss new file mode 100644 index 0000000000..a6033da1ff --- /dev/null +++ b/frontend/src/container/LogsExplorerList/LogsExplorerList.style.scss @@ -0,0 +1,8 @@ +.logs-list-view-container { + font-family: 'Space Mono', monospace; + font-size: 14px; + font-weight: 400; + line-height: 18px; + letter-spacing: -0.005em; + text-align: left; +} diff --git a/frontend/src/container/LogsExplorerList/index.tsx b/frontend/src/container/LogsExplorerList/index.tsx index 361f3a49b6..c864aa355a 100644 --- a/frontend/src/container/LogsExplorerList/index.tsx +++ b/frontend/src/container/LogsExplorerList/index.tsx @@ -1,24 +1,28 @@ -import { Card, Typography } from 'antd'; +import 
'./LogsExplorerList.style.scss'; + +import { Card } from 'antd'; import LogDetail from 'components/LogDetail'; +import { VIEW_TYPES } from 'components/LogDetail/constants'; // components import ListLogView from 'components/Logs/ListLogView'; import RawLogView from 'components/Logs/RawLogView'; import Spinner from 'components/Spinner'; import { CARD_BODY_STYLE } from 'constants/card'; import { LOCALSTORAGE } from 'constants/localStorage'; -import ExplorerControlPanel from 'container/ExplorerControlPanel'; -import { Heading } from 'container/LogsTable/styles'; +import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch'; +import LogsError from 'container/LogsError/LogsError'; +import { LogsLoading } from 'container/LogsLoading/LogsLoading'; import { useOptionsMenu } from 'container/OptionsMenu'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; -import useFontFaceObserver from 'hooks/useFontObserver'; import { memo, useCallback, useEffect, useMemo, useRef } from 'react'; import { Virtuoso, VirtuosoHandle } from 'react-virtuoso'; // interfaces import { ILog } from 'types/api/logs/log'; import { DataSource, StringOperators } from 'types/common/queryBuilder'; +import NoLogs from '../NoLogs/NoLogs'; import InfinityTableView from './InfinityTableView'; import { LogsExplorerListProps } from './LogsExplorerList.interfaces'; import { InfinityWrapperStyled } from './styles'; @@ -27,12 +31,14 @@ import { convertKeysToColumnFields } from './utils'; function Footer(): JSX.Element { return ; } - function LogsExplorerList({ isLoading, + isFetching, currentStagedQueryData, logs, onEndReached, + isError, + isFilterApplied, }: LogsExplorerListProps): JSX.Element { const ref = useRef(null); const { initialDataSource } = useQueryBuilder(); @@ -46,7 +52,7 @@ function LogsExplorerList({ onSetActiveLog, } = useActiveLog(); - const { options, 
config } = useOptionsMenu({ + const { options } = useOptionsMenu({ storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS, dataSource: initialDataSource || DataSource.METRICS, aggregateOperator: @@ -58,19 +64,6 @@ function LogsExplorerList({ [logs, activeLogId], ); - useFontFaceObserver( - [ - { - family: 'Fira Code', - weight: '300', - }, - ], - options.format === 'raw', - { - timeout: 5000, - }, - ); - const selectedFields = useMemo( () => convertKeysToColumnFields(options.selectColumns), [options], @@ -96,10 +89,12 @@ function LogsExplorerList({ selectedFields={selectedFields} onAddToQuery={onAddToQuery} onSetActiveLog={onSetActiveLog} + activeLog={activeLog} /> ); }, [ + activeLog, onAddToQuery, onSetActiveLog, options.format, @@ -142,7 +137,10 @@ function LogsExplorerList({ } return ( - + - +
+ {(isLoading || (isFetching && logs.length === 0)) && } - {options.format !== 'table' && ( - - Event - + {!isLoading && + !isFetching && + !isError && + !isFilterApplied && + logs.length === 0 && } + + {!isLoading && + !isFetching && + logs.length === 0 && + !isError && + isFilterApplied && } + + {isError && !isLoading && !isFetching && } + + {!isLoading && !isError && logs.length > 0 && ( + <> + {renderContent} + + + )} - - {!isLoading && logs.length === 0 && ( - No logs lines found - )} - - {renderContent} - - - +
); } diff --git a/frontend/src/container/LogsExplorerTable/LogsExplorerTable.interfaces.ts b/frontend/src/container/LogsExplorerTable/LogsExplorerTable.interfaces.ts index f2e4c3e7d6..22c1dfc00d 100644 --- a/frontend/src/container/LogsExplorerTable/LogsExplorerTable.interfaces.ts +++ b/frontend/src/container/LogsExplorerTable/LogsExplorerTable.interfaces.ts @@ -3,4 +3,5 @@ import { QueryDataV3 } from 'types/api/widgets/getQuery'; export type LogsExplorerTableProps = { data: QueryDataV3[]; isLoading: boolean; + isError: boolean; }; diff --git a/frontend/src/container/LogsExplorerTable/LogsExplorerTable.styles.scss b/frontend/src/container/LogsExplorerTable/LogsExplorerTable.styles.scss new file mode 100644 index 0000000000..3f1781502c --- /dev/null +++ b/frontend/src/container/LogsExplorerTable/LogsExplorerTable.styles.scss @@ -0,0 +1,58 @@ +.logs-table { + .ant-table { + background: unset; + border: none; + } + + .ant-table-thead { + .ant-table-cell { + background: unset !important; + border-bottom: unset !important; + color: var(--bg-vanilla-400) !important; + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 500; + line-height: 20px; + letter-spacing: -0.07px; + } + + .ant-table-cell::before { + background-color: unset !important; + } + } + + .ant-table-row { + color: var(--bg-vanilla-400) !important; + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 400 !important; + line-height: 20px; + letter-spacing: -0.07px; + + .ant-table-cell-row-hover { + background: rgba(171, 189, 255, 0.04) !important; + } + } +} + +.lightMode { + .ant-table { + color: var(--bg-slate-400) !important; + } + + .ant-table-thead { + .ant-table-cell { + color: var(--bg-slate-400) !important; + } + } + + .ant-table-row { + color: var(--bg-slate-400) !important; + + .ant-table-cell-row-hover { + background: var(--bg-vanilla-300) !important; + } + } +} diff --git a/frontend/src/container/LogsExplorerTable/index.tsx 
b/frontend/src/container/LogsExplorerTable/index.tsx index bbcb2aa99b..13883d3a62 100644 --- a/frontend/src/container/LogsExplorerTable/index.tsx +++ b/frontend/src/container/LogsExplorerTable/index.tsx @@ -1,4 +1,8 @@ +import './LogsExplorerTable.styles.scss'; + import { initialQueriesMap } from 'constants/queryBuilder'; +import LogsError from 'container/LogsError/LogsError'; +import { LogsLoading } from 'container/LogsLoading/LogsLoading'; import { QueryTable } from 'container/QueryTable'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { memo } from 'react'; @@ -8,14 +12,24 @@ import { LogsExplorerTableProps } from './LogsExplorerTable.interfaces'; function LogsExplorerTable({ data, isLoading, + isError, }: LogsExplorerTableProps): JSX.Element { const { stagedQuery } = useQueryBuilder(); + if (isLoading) { + return ; + } + + if (isError) { + return ; + } + return ( ); } diff --git a/frontend/src/container/LogsExplorerViews/LogsExplorerViews.styles.scss b/frontend/src/container/LogsExplorerViews/LogsExplorerViews.styles.scss new file mode 100644 index 0000000000..501ae4d0d3 --- /dev/null +++ b/frontend/src/container/LogsExplorerViews/LogsExplorerViews.styles.scss @@ -0,0 +1,141 @@ +.logs-explorer-views-container { + margin-bottom: 24px; + + .logs-explorer-views-types { + .views-tabs-container { + padding: 8px 16px; + border: 1px solid var(--text-slate-400); + border-left: none; + border-right: none; + + display: flex; + align-items: center; + justify-content: space-between; + + .views-tabs { + color: var(--text-vanilla-400); + + .view-title { + display: flex; + gap: var(--margin-2); + align-items: center; + justify-content: center; + font-size: var(--font-size-xs); + font-style: normal; + font-weight: var(--font-weight-normal); + } + + .ant-btn { + box-shadow: none; + } + + .tab { + border: 1px solid var(--bg-slate-400); + width: 114px; + } + + .tab::before { + background: var(--bg-slate-400); + } + + .selected_view { + background: 
var(--bg-slate-300); + color: var(--text-vanilla-100); + border: 1px solid var(--bg-slate-400); + } + + .selected_view::before { + background: var(--bg-slate-400); + } + + .ant-radio-button-wrapper { + min-width: 120px; + text-align: center; + + font-style: normal; + font-weight: 400; + font-size: 12px; + } + } + + .tab-options { + display: flex; + gap: 8px; + align-items: center; + + .ant-btn { + border: 1px solid var(--bg-slate-400); + } + + .format-options-container { + position: relative; + } + } + } + + .logs-actions-container { + display: flex; + justify-content: center; + align-items: center; + gap: 8px; + + .ant-btn { + border: none; + } + } + + .logs-explorer-views-type-content { + .ant-card { + border: none !important; + } + + .query-table { + .ant-table { + table { + min-width: 99% !important; + } + } + } + } + } + + .ant-card-body { + background-color: var(--bg-ink-500); + } + + .logs-histogram { + margin-bottom: 0px; + } +} + +.lightMode { + .logs-explorer-views-container { + .ant-card-body { + background-color: var(--bg-vanilla-100); + } + + .views-tabs-container { + border: 1px solid var(--text-vanilla-300); + + .views-tabs { + .tab { + border: 1px solid var(--bg-vanilla-300); + } + + .tab::before { + background: var(--bg-vanilla-300); + } + + .selected_view { + background: white; + color: var(--text-robin-400); + border: 1px solid var(--bg-robin-400); + } + + .selected_view::before { + background: var(--bg-robin-400); + } + } + } + } +} diff --git a/frontend/src/container/LogsExplorerViews/index.tsx b/frontend/src/container/LogsExplorerViews/index.tsx index bcc67ddd6a..c37e621108 100644 --- a/frontend/src/container/LogsExplorerViews/index.tsx +++ b/frontend/src/container/LogsExplorerViews/index.tsx @@ -1,5 +1,9 @@ -import { Tabs, TabsProps } from 'antd'; -import TabLabel from 'components/TabLabel'; +/* eslint-disable sonarjs/cognitive-complexity */ +import './LogsExplorerViews.styles.scss'; + +import { Button } from 'antd'; +import 
LogsFormatOptionsMenu from 'components/LogsFormatOptionsMenu/LogsFormatOptionsMenu'; +import { LOCALSTORAGE } from 'constants/localStorage'; import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes'; import { QueryParams } from 'constants/query'; import { @@ -9,23 +13,29 @@ import { PANEL_TYPES, } from 'constants/queryBuilder'; import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config'; -import ExportPanel from 'container/ExportPanel'; +import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions'; import GoToTop from 'container/GoToTop'; import LogsExplorerChart from 'container/LogsExplorerChart'; import LogsExplorerList from 'container/LogsExplorerList'; import LogsExplorerTable from 'container/LogsExplorerTable'; +import { useOptionsMenu } from 'container/OptionsMenu'; import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView'; import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard'; import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils'; import { LogTimeRange } from 'hooks/logs/types'; import { useCopyLogLink } from 'hooks/logs/useCopyLogLink'; import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange'; +import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import useAxiosError from 'hooks/useAxiosError'; +import useClickOutside from 'hooks/useClickOutside'; import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; import { useNotifications } from 'hooks/useNotifications'; import useUrlQueryData from 'hooks/useUrlQueryData'; import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData'; +import { defaultTo, isEmpty } from 'lodash-es'; +import { Sliders } from 'lucide-react'; +import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils'; import { memo, useCallback, useEffect, useMemo, useRef, useState } from 
'react'; import { useSelector } from 'react-redux'; import { useHistory } from 'react-router-dom'; @@ -38,18 +48,30 @@ import { Query, TagFilter, } from 'types/api/queryBuilder/queryBuilderData'; -import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder'; +import { + DataSource, + LogsAggregatorOperator, + StringOperators, +} from 'types/common/queryBuilder'; import { GlobalReducer } from 'types/reducer/globalTime'; import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink'; import { v4 } from 'uuid'; -import { ActionsWrapper } from './LogsExplorerViews.styled'; - -function LogsExplorerViews(): JSX.Element { +function LogsExplorerViews({ + selectedView, + showHistogram, +}: { + selectedView: SELECTED_VIEWS; + showHistogram: boolean; +}): JSX.Element { const { notifications } = useNotifications(); const history = useHistory(); + // this is to respect the panel type present in the URL rather than defaulting it to list always. + const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.LIST); + const { activeLogId, timeRange, onTimeRangeChange } = useCopyLogLink(); + const { queryData: pageSize } = useUrlQueryData( QueryParams.pageSize, DEFAULT_PER_PAGE_VALUE, @@ -63,18 +85,25 @@ function LogsExplorerViews(): JSX.Element { // Context const { + initialDataSource, currentQuery, stagedQuery, panelType, updateAllQueriesOperators, + handleSetConfig, } = useQueryBuilder(); + const [selectedPanelType, setSelectedPanelType] = useState( + panelType || PANEL_TYPES.LIST, + ); + const { handleExplorerTabChange } = useHandleExplorerTabChange(); // State const [page, setPage] = useState(1); const [logs, setLogs] = useState([]); const [requestData, setRequestData] = useState(null); + const [showFormatMenuItems, setShowFormatMenuItems] = useState(false); const handleAxisError = useAxiosError(); @@ -147,6 +176,15 @@ function LogsExplorerViews(): JSX.Element { [currentQuery, updateAllQueriesOperators], ); + const handleModeChange 
= (panelType: PANEL_TYPES): void => { + if (selectedView === SELECTED_VIEWS.SEARCH) { + handleSetConfig(panelType, DataSource.LOGS); + } + + setShowFormatMenuItems(false); + handleExplorerTabChange(panelType); + }; + const { data: listChartData, isFetching: isFetchingListChartData, @@ -155,7 +193,7 @@ function LogsExplorerViews(): JSX.Element { enabled: !!listChartQuery && panelType === PANEL_TYPES.LIST, }); - const { data, isFetching, isError } = useGetExplorerQueryRange( + const { data, isLoading, isFetching, isError } = useGetExplorerQueryRange( requestData, panelType, { @@ -327,12 +365,37 @@ function LogsExplorerViews(): JSX.Element { ); useEffect(() => { - const shouldChangeView = isMultipleQueries || isGroupByExist; + const shouldChangeView = + (isMultipleQueries || isGroupByExist) && + selectedView !== SELECTED_VIEWS.SEARCH; - if (panelType === PANEL_TYPES.LIST && shouldChangeView) { + if (selectedPanelType === PANEL_TYPES.LIST && shouldChangeView) { handleExplorerTabChange(PANEL_TYPES.TIME_SERIES); + setSelectedPanelType(PANEL_TYPES.TIME_SERIES); } - }, [panelType, isMultipleQueries, isGroupByExist, handleExplorerTabChange]); + + if (panelType) { + setSelectedPanelType(panelType); + } + }, [ + isMultipleQueries, + isGroupByExist, + selectedPanelType, + selectedView, + handleExplorerTabChange, + panelType, + ]); + + useEffect(() => { + if ( + selectedView && + selectedView === SELECTED_VIEWS.SEARCH && + handleSetConfig + ) { + handleSetConfig(defaultTo(panelTypes, PANEL_TYPES.LIST), DataSource.LOGS); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); useEffect(() => { const currentParams = data?.params as Omit; @@ -372,10 +435,6 @@ function LogsExplorerViews(): JSX.Element { setPage(1); setRequestData(newRequestData); currentMinTimeRef.current = minTime; - - if (!activeLogId) { - onTimeRangeChange(null); - } } }, [ stagedQuery, @@ -388,58 +447,14 @@ function LogsExplorerViews(): JSX.Element { activeLogId, onTimeRangeChange, panelType, + 
selectedView, ]); - const tabsItems: TabsProps['items'] = useMemo( - () => [ - { - label: ( - - ), - key: PANEL_TYPES.LIST, - disabled: isMultipleQueries || isGroupByExist, - children: ( - - ), - }, - { - label: , - key: PANEL_TYPES.TIME_SERIES, - children: ( - - ), - }, - { - label: 'Table', - key: PANEL_TYPES.TABLE, - children: ( - - ), - }, - ], - [ - isMultipleQueries, - isGroupByExist, - isFetching, - listQuery, - logs, - handleEndReached, - data, - isError, - ], - ); + const { options, config } = useOptionsMenu({ + storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS, + dataSource: initialDataSource || DataSource.METRICS, + aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP, + }); const chartData = useMemo(() => { if (!stagedQuery) return []; @@ -466,31 +481,156 @@ function LogsExplorerViews(): JSX.Element { return isGroupByExist ? data.payload.data.result : firstPayloadQueryArray; }, [stagedQuery, panelType, data, listChartData, listQuery]); + const formatItems = [ + { + key: 'raw', + label: 'Raw', + data: { + title: 'max lines per row', + }, + }, + { + key: 'list', + label: 'Default', + }, + { + key: 'table', + label: 'Column', + data: { + title: 'columns', + }, + }, + ]; + + const handleToggleShowFormatOptions = (): void => + setShowFormatMenuItems(!showFormatMenuItems); + + const menuRef = useRef(null); + + useClickOutside({ + ref: menuRef, + onClickOutside: () => { + if (showFormatMenuItems) { + setShowFormatMenuItems(false); + } + }, + }); + return ( - <> - - {stagedQuery && ( - - - +
+ {showHistogram && ( + )} - + +
+
+ + + + + +
+ {selectedPanelType === PANEL_TYPES.LIST && ( +
+
+
+
+ )} +
+
+ +
+ {selectedPanelType === PANEL_TYPES.LIST && ( + + )} + + {selectedPanelType === PANEL_TYPES.TIME_SERIES && ( + + )} + + {selectedPanelType === PANEL_TYPES.TABLE && ( + + )} +
+
- + + +
); } diff --git a/frontend/src/container/LogsLoading/LogsLoading.styles.scss b/frontend/src/container/LogsLoading/LogsLoading.styles.scss new file mode 100644 index 0000000000..43ac407006 --- /dev/null +++ b/frontend/src/container/LogsLoading/LogsLoading.styles.scss @@ -0,0 +1,19 @@ +.loading-logs { + padding: 24px 0; + height: 240px; + + display: flex; + justify-content: center; + align-items: flex-start; + + .loading-logs-content { + display: flex; + align-items: flex-start; + flex-direction: column; + + .loading-gif { + height: 72px; + margin-left: -24px; + } + } +} diff --git a/frontend/src/container/LogsLoading/LogsLoading.tsx b/frontend/src/container/LogsLoading/LogsLoading.tsx new file mode 100644 index 0000000000..1710cd9f57 --- /dev/null +++ b/frontend/src/container/LogsLoading/LogsLoading.tsx @@ -0,0 +1,22 @@ +import './LogsLoading.styles.scss'; + +import { Typography } from 'antd'; + +export function LogsLoading(): JSX.Element { + return ( +
+
+ wait-icon + + + Just a bit of patience, just a little bit’s enough ⎯ we’re getting your + logs! + +
+
+ ); +} diff --git a/frontend/src/container/LogsSearchFilter/utils.ts b/frontend/src/container/LogsSearchFilter/utils.ts index 71c0011289..390a3c14b0 100644 --- a/frontend/src/container/LogsSearchFilter/utils.ts +++ b/frontend/src/container/LogsSearchFilter/utils.ts @@ -1,8 +1,9 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { GetMinMaxPayload } from 'lib/getMinMax'; export const getGlobalTime = ( - selectedTime: Time, + selectedTime: Time | TimeV2, globalTime: GetMinMaxPayload, ): GetMinMaxPayload | undefined => { if (selectedTime === 'custom') { diff --git a/frontend/src/container/LogsTable/index.tsx b/frontend/src/container/LogsTable/index.tsx index 73ad2c7eb9..c87d4232f6 100644 --- a/frontend/src/container/LogsTable/index.tsx +++ b/frontend/src/container/LogsTable/index.tsx @@ -2,6 +2,7 @@ import './logsTable.styles.scss'; import { Card, Typography } from 'antd'; import LogDetail from 'components/LogDetail'; +import { VIEW_TYPES } from 'components/LogDetail/constants'; // components import ListLogView from 'components/Logs/ListLogView'; import RawLogView from 'components/Logs/RawLogView'; @@ -9,7 +10,6 @@ import LogsTableView from 'components/Logs/TableView'; import Spinner from 'components/Spinner'; import { CARD_BODY_STYLE } from 'constants/card'; import { useActiveLog } from 'hooks/logs/useActiveLog'; -import useFontFaceObserver from 'hooks/useFontObserver'; import { memo, useCallback, useMemo } from 'react'; import { useSelector } from 'react-redux'; import { Virtuoso } from 'react-virtuoso'; @@ -37,19 +37,6 @@ function LogsTable(props: LogsTableProps): JSX.Element { onSetActiveLog, } = useActiveLog(); - useFontFaceObserver( - [ - { - family: 'Fira Code', - weight: '300', - }, - ], - viewMode === 'raw', - { - timeout: 5000, - }, - ); - const { logs, fields: { selected }, @@ -132,6 +119,7 @@ function LogsTable(props: LogsTableProps): JSX.Element { 
{renderContent} - {' '} User Details diff --git a/frontend/src/container/MySettings/index.tsx b/frontend/src/container/MySettings/index.tsx index e3945c4d12..4bc3ffdb3a 100644 --- a/frontend/src/container/MySettings/index.tsx +++ b/frontend/src/container/MySettings/index.tsx @@ -1,13 +1,44 @@ import './MySettings.styles.scss'; -import { Button, Space } from 'antd'; +import { Button, Radio, RadioChangeEvent, Space, Typography } from 'antd'; import { Logout } from 'api/utils'; -import { LogOut } from 'lucide-react'; +import useThemeMode, { useIsDarkMode } from 'hooks/useDarkMode'; +import { LogOut, Moon, Sun } from 'lucide-react'; +import { useState } from 'react'; import Password from './Password'; import UserInfo from './UserInfo'; function MySettings(): JSX.Element { + const isDarkMode = useIsDarkMode(); + const { toggleTheme } = useThemeMode(); + + const themeOptions = [ + { + label: ( +
+ Dark{' '} +
+ ), + value: 'dark', + }, + { + label: ( +
+ Light{' '} +
+ ), + value: 'light', + }, + ]; + + const [theme, setTheme] = useState(isDarkMode ? 'dark' : 'light'); + + const handleThemeChange = ({ target: { value } }: RadioChangeEvent): void => { + setTheme(value); + toggleTheme(); + }; + return ( - +
+ + {' '} + Theme{' '} + + +
- +
+ +
+ +
+ +
([]); - const { role } = useSelector((state) => state.app); - useEffect(() => { if (variables) { const tableRowData = []; @@ -52,40 +54,6 @@ function DashboardVariableSelection(): JSX.Element | null { setUpdate(!update); }; - const updateMutation = useUpdateDashboard(); - const { notifications } = useNotifications(); - - const updateVariables = ( - name: string, - updatedVariablesData: Dashboard['data']['variables'], - ): void => { - if (!selectedDashboard) { - return; - } - - updateMutation.mutateAsync( - { - ...selectedDashboard, - data: { - ...selectedDashboard.data, - variables: updatedVariablesData, - }, - }, - { - onSuccess: (updatedDashboard) => { - if (updatedDashboard.payload) { - setSelectedDashboard(updatedDashboard.payload); - } - }, - onError: () => { - notifications.error({ - message: `Error updating ${name} variable`, - }); - }, - }, - ); - }; - const onValueUpdate = ( name: string, id: string, @@ -105,12 +73,22 @@ function DashboardVariableSelection(): JSX.Element | null { return variableCopy; }, ); + updateLocalStorageDashboardVariables(name, value, allSelected); const variables = convertVariablesToDbFormat(newVariablesArr); - if (role !== 'VIEWER' && selectedDashboard) { - updateVariables(name, variables); + if (selectedDashboard) { + setSelectedDashboard({ + ...selectedDashboard, + data: { + ...selectedDashboard?.data, + variables: { + ...variables, + }, + }, + }); } + onVarChanged(name); setUpdate(!update); diff --git a/frontend/src/container/NewExplorerCTA/config.ts b/frontend/src/container/NewExplorerCTA/config.ts index 886f044e57..e5ccc110d3 100644 --- a/frontend/src/container/NewExplorerCTA/config.ts +++ b/frontend/src/container/NewExplorerCTA/config.ts @@ -4,7 +4,7 @@ export const RIBBON_STYLES = { top: '-0.75rem', }; -export const buttonText = { +export const buttonText: Record = { [ROUTES.LOGS_EXPLORER]: 'Switch to Old Logs Explorer', [ROUTES.TRACE]: 'Try new Traces Explorer', [ROUTES.OLD_LOGS_EXPLORER]: 'Switch to New Logs Explorer', 
diff --git a/frontend/src/container/NewExplorerCTA/index.tsx b/frontend/src/container/NewExplorerCTA/index.tsx index 5b6d4532e2..5b6e485193 100644 --- a/frontend/src/container/NewExplorerCTA/index.tsx +++ b/frontend/src/container/NewExplorerCTA/index.tsx @@ -36,7 +36,6 @@ function NewExplorerCTA(): JSX.Element | null { danger data-testid="newExplorerCTA" type="primary" - size="small" > {buttonText[location.pathname]} diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.styles.scss b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.styles.scss new file mode 100644 index 0000000000..019344bbe0 --- /dev/null +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.styles.scss @@ -0,0 +1,7 @@ +.query-header-container { + .action-btn { + display: flex; + align-items: center; + justify-content: center; + } +} diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx index b56b53694a..9725446c7f 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/QueryHeader.tsx @@ -1,3 +1,5 @@ +import './QueryHeader.styles.scss'; + import { DeleteOutlined, DownOutlined, @@ -29,13 +31,14 @@ function QueryHeader({ }: IQueryHeaderProps): JSX.Element { const [collapse, setCollapse] = useState(false); return ( - - + + @@ -43,6 +46,7 @@ function QueryHeader({ type="default" icon={collapse ? 
: } onClick={(): void => setCollapse(!collapse)} + className="action-btn" /> @@ -52,6 +56,7 @@ function QueryHeader({ danger icon={} onClick={onDelete} + className="action-btn" /> )} diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx index 522b21c803..f12b150bd3 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/QueryBuilder/clickHouse/query.tsx @@ -1,7 +1,9 @@ +import MEditor, { Monaco } from '@monaco-editor/react'; +import { Color } from '@signozhq/design-tokens'; import { Input } from 'antd'; -import MonacoEditor from 'components/Editor'; import { LEGEND } from 'constants/global'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useIsDarkMode } from 'hooks/useDarkMode'; import { ChangeEvent, useCallback } from 'react'; import { IClickHouseQuery } from 'types/api/queryBuilder/queryBuilderData'; import { EQueryType } from 'types/common/dashboard'; @@ -51,8 +53,10 @@ function ClickHouseQueryBuilder({ }, [handleSetQueryItemData, queryData, queryIndex]); const handleUpdateEditor = useCallback( - (value: string) => { - handleUpdateQuery('query', value); + (value: string | undefined) => { + if (value !== undefined) { + handleUpdateQuery('query', value); + } }, [handleUpdateQuery], ); @@ -69,6 +73,28 @@ function ClickHouseQueryBuilder({ [handleUpdateQuery], ); + const isDarkMode = useIsDarkMode(); + + function setEditorTheme(monaco: Monaco): void { + monaco.editor.defineTheme('my-theme', { + base: 'vs-dark', + inherit: true, + rules: [ + { token: 'string.key.json', foreground: Color.BG_VANILLA_400 }, + { token: 'string.value.json', foreground: Color.BG_ROBIN_400 }, + ], + colors: { + 'editor.background': Color.BG_INK_300, + }, + // fontFamily: 'SF Mono', + fontFamily: 
'Space Mono', + fontSize: 20, + fontWeight: 'normal', + lineHeight: 18, + letterSpacing: -0.06, + }); + } + return ( - ( @@ -111,6 +115,7 @@ function QuerySection({ const handleQueryCategoryChange = (qCategory: string): void => { const currentQueryType = qCategory as EQueryType; + setCurrentTab(qCategory as EQueryType); featureResponse.refetch().then(() => { handleStageQuery({ ...currentQuery, queryType: currentQueryType }); @@ -132,7 +137,13 @@ function QuerySection({ const items = [ { key: EQueryType.QUERY_BUILDER, - label: 'Query Builder', + label: ( + + + + ), tab: Query Builder, children: ( @@ -140,39 +151,55 @@ function QuerySection({ }, { key: EQueryType.CLICKHOUSE, - label: 'ClickHouse Query', + label: ( + + + + ), tab: ClickHouse Query, children: , }, { key: EQueryType.PROM, - label: 'PromQL', + label: ( + + + + ), tab: PromQL, children: , }, ]; return ( - - - - - } - items={items} - /> +
+ + + + + } + items={items} + /> +
); } diff --git a/frontend/src/container/NewWidget/LeftContainer/QuerySection/styles.ts b/frontend/src/container/NewWidget/LeftContainer/QuerySection/styles.ts index 3a0d73e1ab..3d97d169d8 100644 --- a/frontend/src/container/NewWidget/LeftContainer/QuerySection/styles.ts +++ b/frontend/src/container/NewWidget/LeftContainer/QuerySection/styles.ts @@ -20,8 +20,8 @@ export const QueryButton = styled(Button)` export const QueryWrapper = styled.div` width: 100%; - margin: 1rem 0; - padding: 1rem 0.5rem; + margin: 0; + padding: 0.5rem 0; display: flex; flex-direction: column; `; diff --git a/frontend/src/container/NewWidget/LeftContainer/styles.ts b/frontend/src/container/NewWidget/LeftContainer/styles.ts index 4d1988a4e1..c6dc1ba756 100644 --- a/frontend/src/container/NewWidget/LeftContainer/styles.ts +++ b/frontend/src/container/NewWidget/LeftContainer/styles.ts @@ -6,4 +6,8 @@ export const QueryContainer = styled(Card)` margin-top: 1rem; min-height: 23.5%; } + + .ant-card-body { + padding: 12px; + } `; diff --git a/frontend/src/container/NoLogs/NoLogs.styles.scss b/frontend/src/container/NoLogs/NoLogs.styles.scss new file mode 100644 index 0000000000..32d7309b28 --- /dev/null +++ b/frontend/src/container/NoLogs/NoLogs.styles.scss @@ -0,0 +1,44 @@ +.no-logs-container { + height: 240px; + display: flex; + justify-content: center; + align-items: center; + flex-direction: column; + + // border: 1px solid #1d212d; + border-radius: 3px; + + .no-logs-container-content { + display: flex; + flex-direction: column; + gap: 8px; + + .eyes-emoji { + height: 32px; + width: 32px; + } + + .no-logs-text { + font-size: 14px; + font-weight: 500; + + .sub-text { + font-weight: 400; + color: #c0c1c3; + } + } + + .send-logs-link { + display: flex; + align-items: center; + gap: 8px; + + color: #7190f9; + font-size: 14px; + font-style: normal; + font-weight: 500; + line-height: 18px; /* 128.571% */ + letter-spacing: -0.07px; + } + } +} diff --git a/frontend/src/container/NoLogs/NoLogs.tsx 
b/frontend/src/container/NoLogs/NoLogs.tsx new file mode 100644 index 0000000000..df934b7bcc --- /dev/null +++ b/frontend/src/container/NoLogs/NoLogs.tsx @@ -0,0 +1,24 @@ +import './NoLogs.styles.scss'; + +import { Typography } from 'antd'; +import { ArrowUpRight } from 'lucide-react'; + +export default function NoLogs(): JSX.Element { + return ( +
+
+ eyes emoji + + No logs yet.{' '} + + When we receive logs, they would show up here + + + + + Sending Logs to SigNoz + +
+
+ ); +} diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-installOtelCollector.md new file mode 100644 index 0000000000..946b7fbdbf --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-installOtelCollector.md @@ -0,0 +1,24 @@ +## Install otel-collector in your Kubernetes infra +  + +Add the SigNoz Helm Chart repository +```bash +helm repo add signoz https://charts.signoz.io +``` +  + +If the chart is already present, update the chart to the latest using: +```bash +helm repo update +``` +  + +Install the Kubernetes Infrastructure chart provided by SigNoz +```bash +helm install my-release signoz/k8s-infra \ +--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \ +--set otelInsecure=false \ +--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \ +--set global.clusterName= +``` +- Replace `` with the name of the Kubernetes cluster or a unique identifier of the cluster. 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-instrumentApplication.md new file mode 100644 index 0000000000..a4e0f2c5a6 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-instrumentApplication.md @@ -0,0 +1,57 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: + {:opentelemetry_exporter, + %{endpoints: ["http://localhost:4318"]} + } + } +``` + diff --git 
a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-runApplication.md new file mode 100644 index 0000000000..0dabd95519 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-runApplication.md @@ -0,0 +1,6 @@ +### Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. + +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..d8e7f3e14e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +Follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + 
:opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: { + :opentelemetry_exporter, + %{ + endpoints: ["https://ingest.{{REGION}}.signoz.cloud:443"], + headers: [ + {"signoz-access-token", {{SIGNOZ_ACCESS_TOKEN}} } + ] + } + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..0dabd95519 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-runApplication.md @@ -0,0 +1,6 @@ +### Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. 
+ +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..a659f36474 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + 
resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a4e0f2c5a6 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-instrumentApplication.md @@ -0,0 +1,57 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 
0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: + {:opentelemetry_exporter, + %{endpoints: ["http://localhost:4318"]} + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-runApplication.md new file mode 100644 index 0000000000..c8584acac5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-runApplication.md @@ -0,0 +1,29 @@ +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Elixir application + +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. + +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..d8e7f3e14e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +Follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an 
application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: { + :opentelemetry_exporter, + %{ + endpoints: ["https://ingest.{{REGION}}.signoz.cloud:443"], + headers: [ + {"signoz-access-token", {{SIGNOZ_ACCESS_TOKEN}} } + ] + } + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..0dabd95519 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-runApplication.md @@ -0,0 +1,6 @@ +### Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. 
+ +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..cbabb8077b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + 
resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a4e0f2c5a6 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-instrumentApplication.md @@ -0,0 +1,57 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 
0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: + {:opentelemetry_exporter, + %{endpoints: ["http://localhost:4318"]} + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-runApplication.md new file mode 100644 index 0000000000..5202f436f5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-runApplication.md @@ -0,0 +1,28 @@ +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. + +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..d8e7f3e14e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +Follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application 
called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: { + :opentelemetry_exporter, + %{ + endpoints: ["https://ingest.{{REGION}}.signoz.cloud:443"], + headers: [ + {"signoz-access-token", {{SIGNOZ_ACCESS_TOKEN}} } + ] + } + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..0dabd95519 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-runApplication.md @@ -0,0 +1,6 @@ +### Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. 
+ +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..843e86a411 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +### Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + 
resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a4e0f2c5a6 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-instrumentApplication.md @@ -0,0 +1,57 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 
0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: + {:opentelemetry_exporter, + %{endpoints: ["http://localhost:4318"]} + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-runApplication.md new file mode 100644 index 0000000000..5202f436f5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-runApplication.md @@ -0,0 +1,28 @@ +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. + +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..d8e7f3e14e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-instrumentApplication.md @@ -0,0 +1,62 @@ +  + +Follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application 
called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: { + :opentelemetry_exporter, + %{ + endpoints: ["https://ingest.{{REGION}}.signoz.cloud:443"], + headers: [ + {"signoz-access-token", {{SIGNOZ_ACCESS_TOKEN}} } + ] + } + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..0dabd95519 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-runApplication.md @@ -0,0 +1,6 @@ +### Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. 
+ +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..3a780bb8de --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + 
resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..a4e0f2c5a6 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-instrumentApplication.md @@ -0,0 +1,57 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Elixir (Phoenix + Ecto) Application + +### Step 1: Add dependencies +Install dependencies related to OpenTelemetry by adding them to `mix.exs` file + +```bash + {:opentelemetry_exporter, "~> 1.6"}, + {:opentelemetry_api, "~> 1.2"}, + {:opentelemetry, "~> 1.3"}, + {:opentelemetry_semantic_conventions, "~> 0.2"}, + {:opentelemetry_cowboy, "~> 
0.2.1"}, + {:opentelemetry_phoenix, "~> 1.1"}, + {:opentelemetry_ecto, "~> 1.1"} +``` +  + +In your application start, usually the `application.ex` file, setup the telemetry handlers + +```bash + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:{{MYAPP}}, :repo]) +``` +  + +As an example, this is how you can setup the handlers in your application.ex file for an application called demo : + +```bash +# application.ex +@impl true +def start(_type, _args) do + :opentelemetry_cowboy.setup() + OpentelemetryPhoenix.setup(adapter: :cowboy2) + OpentelemetryEcto.setup([:demo, :repo]) + +end +``` + +  + +### Step 2: Configure Application +You need to configure your application to send telemetry data by adding the following config to your `runtime.exs` file: + +```bash +config :opentelemetry, :resource, service: %{name: "{{MYAPP}}"} + +config :opentelemetry, :processors, + otel_batch_processor: %{ + exporter: + {:opentelemetry_exporter, + %{endpoints: ["http://localhost:4318"]} + } + } +``` + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-runApplication.md new file mode 100644 index 0000000000..5202f436f5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-runApplication.md @@ -0,0 +1,28 @@ +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Elixir application +Once you are done instrumenting your Elixir (Phoenix + Ecto) application with OpenTelemetry, you should install the dependencies needed to run your application and run it as you normally would. + +  + +To see some examples for instrumented applications, you can checkout [this link](https://signoz.io/docs/instrumentation/elixir/#sample-examples) +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/Kubernetes/fastapi-kubernetes-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/Kubernetes/fastapi-kubernetes-instrumentApplication.md index f0420aa305..cf7b13bf14 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/Kubernetes/fastapi-kubernetes-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/Kubernetes/fastapi-kubernetes-instrumentApplication.md @@ -20,8 +20,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/QuickStart/fastapi-linuxamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/QuickStart/fastapi-linuxamd64-quickStart-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- 
a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/QuickStart/fastapi-linuxamd64-quickStart-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/QuickStart/fastapi-linuxamd64-quickStart-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/Recommended/fastapi-linuxamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/Recommended/fastapi-linuxamd64-recommended-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/Recommended/fastapi-linuxamd64-recommended-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxAMD64/Recommended/fastapi-linuxamd64-recommended-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/QuickStart/fastapi-linuxarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/QuickStart/fastapi-linuxarm64-quickStart-instrumentApplication.md 
index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/QuickStart/fastapi-linuxarm64-quickStart-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/QuickStart/fastapi-linuxarm64-quickStart-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/Recommended/fastapi-linuxarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/Recommended/fastapi-linuxarm64-recommended-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/Recommended/fastapi-linuxarm64-recommended-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/LinuxARM64/Recommended/fastapi-linuxarm64-recommended-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/QuickStart/fastapi-macosamd64-quickStart-instrumentApplication.md 
b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/QuickStart/fastapi-macosamd64-quickStart-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/QuickStart/fastapi-macosamd64-quickStart-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/QuickStart/fastapi-macosamd64-quickStart-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/Recommended/fastapi-macosamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/Recommended/fastapi-macosamd64-recommended-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/Recommended/fastapi-macosamd64-recommended-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsAMD64/Recommended/fastapi-macosamd64-recommended-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git 
a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/QuickStart/fastapi-macosarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/QuickStart/fastapi-macosarm64-quickStart-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/QuickStart/fastapi-macosarm64-quickStart-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/QuickStart/fastapi-macosarm64-quickStart-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/Recommended/fastapi-macosarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/Recommended/fastapi-macosarm64-recommended-instrumentApplication.md index 1e067b3e7c..6c501e75f7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/Recommended/fastapi-macosarm64-recommended-instrumentApplication.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Python/md-docs/FastAPI/MacOsARM64/Recommended/fastapi-macosarm64-recommended-instrumentApplication.md @@ -16,8 +16,8 @@ This will create and activate a virtual environment named `.venv` ### Step 2 : Install the OpenTelemetry dependencies ```bash -pip install opentelemetry-distro==0.38b0 -pip install opentelemetry-exporter-otlp==1.17.0 +pip install opentelemetry-distro==0.43b0 +pip install 
opentelemetry-exporter-otlp==1.22.0 ```   diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-installOtelCollector.md new file mode 100644 index 0000000000..946b7fbdbf --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-installOtelCollector.md @@ -0,0 +1,24 @@ +## Install otel-collector in your Kubernetes infra +  + +Add the SigNoz Helm Chart repository +```bash +helm repo add signoz https://charts.signoz.io +``` +  + +If the chart is already present, update the chart to the latest using: +```bash +helm repo update +``` +  + +Install the Kubernetes Infrastructure chart provided by SigNoz +```bash +helm install my-release signoz/k8s-infra \ +--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \ +--set otelInsecure=false \ +--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \ +--set global.clusterName=<CLUSTER_NAME> +``` +- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster. 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-instrumentApplication.md new file mode 100644 index 0000000000..067e09195f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-instrumentApplication.md @@ -0,0 +1,95 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Rust Application + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +``` +  + +Use the above crates in entry point of your Rust application, which is generally your `main.rs` file + +```rust +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. 
+ +```rust +fn init_tracer() -> Result<sdktrace::Tracer, TraceError> { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic().with_env()) + .with_trace_config( + sdktrace::config().with_resource(Resource::default()), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` +### Step 3: Add OpenTelemetry instrumentation + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. + +```rust + let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-runApplication.md new file mode 100644 index 0000000000..56b63a48ef --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-runApplication.md @@ -0,0 +1,7 @@ +### Running your Rust application + +Run the application using the below command: + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES=service.name={{MYAPP}} cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..efedfba93e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-instrumentApplication.md @@ -0,0 +1,133 @@ +  + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +dotenv = "0.15.0" +``` +  + +Use the above crates in entry point of your Rust application, which is generally your `main.rs` file + +```rust +use dotenv::dotenv; +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; 
+use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer and create env file +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result<sdktrace::Tracer, TraceError> { + let signoz_access_token = std::env::var("SIGNOZ_ACCESS_TOKEN").expect("SIGNOZ_ACCESS_TOKEN not set"); + let mut metadata = MetadataMap::new(); + metadata.insert( + "signoz-access-token", + MetadataValue::from_str(&signoz_access_token).unwrap(), + ); + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter( + opentelemetry_otlp::new_exporter() + .tonic() + .with_metadata(metadata) + .with_endpoint(std::env::var("SIGNOZ_ENDPOINT").expect("SIGNOZ_ENDPOINT not set")), + ) + .with_trace_config( + sdktrace::config().with_resource(Resource::new(vec![ + KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + std::env::var("APP_NAME").expect("APP_NAME not set"), + ), + ])), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` + +After adding the above function in your `main.rs` file, create an `.env` file in root of your app. The structure could look like this : + +```bash +project_root/ +|-- Cargo.toml +|-- src/ +| |-- main.rs +|-- .env +``` + +In your environment file, paste the below variables which will be used in the next steps. + +```rust +PORT=3000 // If it is a web app pass port or else you can ignore this variable +APP_NAME={{MYAPP}} +SIGNOZ_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443/v1/traces +SIGNOZ_ACCESS_TOKEN={{SIGNOZ_INGESTION_KEY}} +``` + +### Step 3: Add OpenTelemetry instrumentation + + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. 
+ +```rust +dotenv().ok(); +let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..e296dc0bd7 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Rust application + +Since your variables are set in the `.env` file, you can run your Rust application using the below command: + +```bash +cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..a659f36474 --- /dev/null 
+++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. 
+ timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..45243b63fd --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-instrumentApplication.md @@ -0,0 +1,95 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Rust Application + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +``` +  + +Use the above crates in entry 
point of your Rust application, which is generally your `main.rs` file + +```rust +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic().with_env()) + .with_trace_config( + sdktrace::config().with_resource(Resource::default()), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` +### Step 3: Add OpenTelemetry instrumentation + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. + +```rust + let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. 
After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-runApplication.md new file mode 100644 index 0000000000..3b5f23e602 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Rust application + +Run the application using the below command: + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES=service.name={{MYAPP}} cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..fabf62ed5b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-instrumentApplication.md @@ -0,0 +1,133 @@ +  + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +dotenv = "0.15.0" +``` +  + +Use the above crates in entry point of your Rust application, which is generally your `main.rs` file + +```rust +use dotenv::dotenv; +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` 
+  + +### Step 2: Initialize tracer and create env file +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + let signoz_access_token = std::env::var("SIGNOZ_ACCESS_TOKEN").expect("SIGNOZ_ACCESS_TOKEN not set"); + let mut metadata = MetadataMap::new(); + metadata.insert( + "signoz-access-token", + MetadataValue::from_str(&signoz_access_token).unwrap(), + ); + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter( + opentelemetry_otlp::new_exporter() + .tonic() + .with_metadata(metadata) + .with_endpoint(std::env::var("SIGNOZ_ENDPOINT").expect("SIGNOZ_ENDPOINT not set")), + ) + .with_trace_config( + sdktrace::config().with_resource(Resource::new(vec![ + KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + std::env::var("APP_NAME").expect("APP_NAME not set"), + ), + ])), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` + +After adding the above function in your `main.rs` file, create an `.env` file in root of your app. The structure could look like this : + +```bash +project_root/ +|-- Cargo.toml +|-- src/ +| |-- main.rs +|-- .env +``` + +In your environment file, paste the below variables which will be used in the next steps. + +```rust +PORT=3000 // If it is a web app pass port or else you can ignore this variable +APP_NAME={{MYAPP}} +SIGNOZ_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443/v1/traces +SIGNOZ_ACCESS_TOKEN={{SIGNOZ_INGESTION_KEY}} +``` + +### Step 3: Add OpenTelemetry instrumentation + + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. 
+ +```rust +dotenv().ok(); +let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..e296dc0bd7 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Rust application + +Since your variables are set in the `.env` file, you can run your Rust application using the below command: + +```bash +cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..cbabb8077b --- /dev/null 
+++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. 
+ timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..45243b63fd --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-instrumentApplication.md @@ -0,0 +1,95 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Rust Application + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +``` +  + +Use the above crates in entry 
point of your Rust application, which is generally your `main.rs` file + +```rust +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic().with_env()) + .with_trace_config( + sdktrace::config().with_resource(Resource::default()), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` +### Step 3: Add OpenTelemetry instrumentation + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. + +```rust + let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. 
After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-runApplication.md new file mode 100644 index 0000000000..3b5f23e602 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Rust application + +Run the application using the below command: + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES=service.name={{MYAPP}} cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..efedfba93e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-instrumentApplication.md @@ -0,0 +1,133 @@ +  + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +dotenv = "0.15.0" +``` +  + +Use the above crates in entry point of your Rust application, which is generally your `main.rs` file + +```rust +use dotenv::dotenv; +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` 
+  + +### Step 2: Initialize tracer and create env file +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + let signoz_access_token = std::env::var("SIGNOZ_ACCESS_TOKEN").expect("SIGNOZ_ACCESS_TOKEN not set"); + let mut metadata = MetadataMap::new(); + metadata.insert( + "signoz-access-token", + MetadataValue::from_str(&signoz_access_token).unwrap(), + ); + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter( + opentelemetry_otlp::new_exporter() + .tonic() + .with_metadata(metadata) + .with_endpoint(std::env::var("SIGNOZ_ENDPOINT").expect("SIGNOZ_ENDPOINT not set")), + ) + .with_trace_config( + sdktrace::config().with_resource(Resource::new(vec![ + KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + std::env::var("APP_NAME").expect("APP_NAME not set"), + ), + ])), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` + +After adding the above function in your `main.rs` file, create an `.env` file in root of your app. The structure could look like this : + +```bash +project_root/ +|-- Cargo.toml +|-- src/ +| |-- main.rs +|-- .env +``` + +In your environment file, paste the below variables which will be used in the next steps. + +```rust +PORT=3000 // If it is a web app pass port or else you can ignore this variable +APP_NAME={{MYAPP}} +SIGNOZ_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443/v1/traces +SIGNOZ_ACCESS_TOKEN={{SIGNOZ_INGESTION_KEY}} +``` + +### Step 3: Add OpenTelemetry instrumentation + + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. 
+ +```rust +dotenv().ok(); +let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..e296dc0bd7 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Rust application + +Since your variables are set in the `.env` file, you can run your Rust application using the below command: + +```bash +cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..843e86a411 --- /dev/null 
+++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +### Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. 
+ timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..067e09195f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-instrumentApplication.md @@ -0,0 +1,95 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Rust Application + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +``` +  + +Use the above crates in entry 
point of your Rust application, which is generally your `main.rs` file + +```rust +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic().with_env()) + .with_trace_config( + sdktrace::config().with_resource(Resource::default()), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` +### Step 3: Add OpenTelemetry instrumentation + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. + +```rust + let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. 
After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-runApplication.md new file mode 100644 index 0000000000..3b5f23e602 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Rust application + +Run the application using the below command: + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES=service.name={{MYAPP}} cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..efedfba93e --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-instrumentApplication.md @@ -0,0 +1,133 @@ +  + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +dotenv = "0.15.0" +``` +  + +Use the above crates in entry point of your Rust application, which is generally your `main.rs` file + +```rust +use dotenv::dotenv; +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +```
+  + +### Step 2: Initialize tracer and create env file +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + let signoz_access_token = std::env::var("SIGNOZ_ACCESS_TOKEN").expect("SIGNOZ_ACCESS_TOKEN not set"); + let mut metadata = MetadataMap::new(); + metadata.insert( + "signoz-access-token", + MetadataValue::from_str(&signoz_access_token).unwrap(), + ); + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter( + opentelemetry_otlp::new_exporter() + .tonic() + .with_metadata(metadata) + .with_endpoint(std::env::var("SIGNOZ_ENDPOINT").expect("SIGNOZ_ENDPOINT not set")), + ) + .with_trace_config( + sdktrace::config().with_resource(Resource::new(vec![ + KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + std::env::var("APP_NAME").expect("APP_NAME not set"), + ), + ])), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` + +After adding the above function in your `main.rs` file, create an `.env` file in root of your app. The structure could look like this : + +```bash +project_root/ +|-- Cargo.toml +|-- src/ +| |-- main.rs +|-- .env +``` + +In your environment file, paste the below variables which will be used in the next steps. + +```rust +PORT=3000 // If it is a web app pass port or else you can ignore this variable +APP_NAME={{MYAPP}} +SIGNOZ_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443/v1/traces +SIGNOZ_ACCESS_TOKEN={{SIGNOZ_INGESTION_KEY}} +``` + +### Step 3: Add OpenTelemetry instrumentation + + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. 
+ +```rust +dotenv().ok(); +let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..e296dc0bd7 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Rust application + +Since your variables are set in the `.env` file, you can run your Rust application using the below command: + +```bash +cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..3a780bb8de --- /dev/null 
+++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. 
+ timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..067e09195f --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-instrumentApplication.md @@ -0,0 +1,95 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Rust Application + +### Step 1: Add dependencies +Add these crates just below the `[dependencies]` section of your `cargo.toml` file + +```rust +opentelemetry = { version = "0.18.0", features = ["rt-tokio", "metrics", "trace"] } +opentelemetry-otlp = { version = "0.11.0", features = ["trace", "metrics"] } +opentelemetry-semantic-conventions = { version = "0.10.0" } +opentelemetry-proto = { version = "0.1.0"} +tokio = { version = "1", features = ["full"] } +tonic = { version = "0.8.2", features = ["tls-roots"] } +``` +  + +Use the above crates in entry 
point of your Rust application, which is generally your `main.rs` file + +```rust +use opentelemetry::global::shutdown_tracer_provider; +use opentelemetry::sdk::Resource; +use opentelemetry::trace::TraceError; +use opentelemetry::{ + global, sdk::trace as sdktrace, + trace::{TraceContextExt, Tracer}, + Context, Key, KeyValue, +}; +use opentelemetry_otlp::WithExportConfig; +use tonic::metadata::{MetadataMap, MetadataValue}; +``` +  + +### Step 2: Initialize tracer +Add `init_tracer` function to your `main.rs` file. It initializes an OpenTelemetry tracer with the OpenTelemetry OTLP exporter which is sending data to SigNoz Cloud. + +```rust +fn init_tracer() -> Result { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic().with_env()) + .with_trace_config( + sdktrace::config().with_resource(Resource::default()), + ) + .install_batch(opentelemetry::runtime::Tokio) +} +``` +### Step 3: Add OpenTelemetry instrumentation + +Call the `init_tracer` function inside `main()` at starting so that as soon as your rust application starts, tracer will be available globally. + +```rust + let _ = init_tracer(); +``` + +Modify your `main()` function from + +```rust +fn main(){ + //rest of the code +} +``` + +  + +to + +```rust +#[tokio::main] +async fn main() { + //rest of the code +} +``` + +Add the below code block within a function or a section of your code where you're setting up and using the tracer for distributed tracing. 
After adding the below code block you can send traces to SigNoz Cloud + +```rust + let tracer = global::tracer("global_tracer"); + let _cx = Context::new(); + + tracer.in_span("operation", |cx| { + let span = cx.span(); + span.set_attribute(Key::new("KEY").string("value")); + + span.add_event( + format!("Operations"), + vec![ + Key::new("SigNoz is").string("working!"), + ], + ); + }); + shutdown_tracer_provider() +``` + +The above code block will create a span named operation which sets an attribute and an event to it saying "SigNoz is working!". diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-runApplication.md new file mode 100644 index 0000000000..3b5f23e602 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Rust application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" 
> otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Rust application + +Run the application using the below command: + +```bash +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES=service.name={{MYAPP}} cargo run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-installOtelCollector.md new file mode 100644 index 0000000000..946b7fbdbf --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-installOtelCollector.md @@ -0,0 +1,24 @@ +## Install otel-collector in your Kubernetes infra +  + +Add the SigNoz Helm Chart repository +```bash +helm repo add signoz https://charts.signoz.io +``` +  + +If the chart is already present, update the chart to the latest using: +```bash +helm repo update +``` +  + +Install the Kubernetes Infrastructure chart provided by SigNoz +```bash +helm install my-release signoz/k8s-infra \ +--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \ +--set otelInsecure=false \ +--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \ +--set global.clusterName=<CLUSTER_NAME> +``` +- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster.
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-instrumentApplication.md new file mode 100644 index 0000000000..c723c96975 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-instrumentApplication.md @@ -0,0 +1,68 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Swift Application + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file : + +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10)) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: , port: 4317) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let 
spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` +- - The default value for this is `http://localhost:4317` + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-runApplication.md new file mode 100644 index 0000000000..b33c3a72a8 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-runApplication.md @@ -0,0 +1,7 @@ +### Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..a75a7b00a7 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-instrumentApplication.md @@ -0,0 +1,65 @@ +  + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. 
Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. + +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("signoz-access-token", {{SIGNOZ_INGESTION_KEY}})]) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: "https://ingest.{{REGION}}.signoz.cloud:443", port: 443) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: 
Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..2c5e5d0b1b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..a659f36474 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 
0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-instrumentApplication.md new file 
mode 100644 index 0000000000..a3fe109cc4 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-instrumentApplication.md @@ -0,0 +1,70 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Swift Application + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. 
+ +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10)) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: <host>, port: 4317) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` +- `<host>` - The endpoint where Otel Collector is running. For ex -> "localhost" + + + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs.
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-runApplication.md new file mode 100644 index 0000000000..5efc5859db --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Swift application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..48ad191a6b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-instrumentApplication.md @@ -0,0 +1,64 @@ +  + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. 
Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. + +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("signoz-access-token", {{SIGNOZ_INGESTION_KEY}})]) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: "https://ingest.{{REGION}}.signoz.cloud:443", port: 443) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: 
Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. To read more about spans, you can follow [this link](https://opentelemetry.io/docs/concepts/signals/traces/#spans) diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..2c5e5d0b1b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..cbabb8077b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder 
otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-instrumentApplication.md 
b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..06361a2a29 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-instrumentApplication.md @@ -0,0 +1,68 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Swift Application + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. 
+ +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10)) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: "localhost", port: 4317) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` +- `"localhost"` - The endpoint where the Otel Collector is running. Change it if your collector runs on a different host. + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-runApplication.md new file mode 100644 index 0000000000..5efc5859db --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Swift application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..8bbb5019b5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-instrumentApplication.md @@ -0,0 +1,64 @@ +  + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. 
Add these dependencies in the `Package.swift` file of your project or if you are using Xcode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import the below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. + +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("signoz-access-token", "{{SIGNOZ_INGESTION_KEY}}")]) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: "ingest.{{REGION}}.signoz.cloud", port: 443) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: 
Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-runApplication.md new file mode 100644 index 0000000000..2c5e5d0b1b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..843e86a411 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +### Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + 
protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-instrumentApplication.md 
b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..5a3b8936ba --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-instrumentApplication.md @@ -0,0 +1,69 @@ +  + +After setting up the Otel collector agent, follow the steps below to instrument your Swift Application + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. 
+ +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10)) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: , port: 4317) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` +- - The endpoint where Otel Collector is running. For ex -> "localhost" + + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. 
diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-runApplication.md new file mode 100644 index 0000000000..5efc5859db --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Swift application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-instrumentApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-instrumentApplication.md new file mode 100644 index 0000000000..8bbb5019b5 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-instrumentApplication.md @@ -0,0 +1,64 @@ +  + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. 
Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. + +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("signoz-access-token", {{SIGNOZ_INGESTION_KEY}})]) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: "https://ingest.{{REGION}}.signoz.cloud:443", port: 443) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: 
Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-runApplication.md new file mode 100644 index 0000000000..2c5e5d0b1b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-runApplication.md @@ -0,0 +1,7 @@ +### Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-installOtelCollector.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-installOtelCollector.md new file mode 100644 index 0000000000..3a780bb8de --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-installOtelCollector.md @@ -0,0 +1,96 @@ +## Setup OpenTelemetry Binary as an agent +  + +### Step 1: Download otel-collector tar.gz +```bash +wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz +``` +  + +### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder +```bash +mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib +``` +  + +### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it +```bash +receivers: + otlp: + protocols: 
+ grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + hostmetrics: + collection_interval: 60s + scrapers: + cpu: {} + disk: {} + load: {} + filesystem: {} + memory: {} + network: {} + paging: {} + process: + mute_process_name_error: true + mute_process_exe_error: true + mute_process_io_error: true + processes: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + - job_name: otel-collector-binary + static_configs: + - targets: + # - localhost:8888 +processors: + batch: + send_batch_size: 1000 + timeout: 10s + # Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md + resourcedetection: + detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure. + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + timeout: 2s + system: + hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback +extensions: + health_check: {} + zpages: {} +exporters: + otlp: + endpoint: "ingest.{{REGION}}.signoz.cloud:443" + tls: + insecure: false + headers: + "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}" + logging: + verbosity: normal +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + metrics/internal: + receivers: [prometheus, hostmetrics] + processors: [resourcedetection, batch] + exporters: [otlp] + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] +``` + + diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-instrumentApplication.md 
b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-instrumentApplication.md new file mode 100644 index 0000000000..623782393b --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-instrumentApplication.md @@ -0,0 +1,66 @@ +  + +### Step 1: Add dependencies + +To configure your Swift application to send data you need to initialize OpenTelemetry. Add these dependency in `Package.swift` file of your project or if you are using XCode then you need to add this [dependency](https://github.com/open-telemetry/opentelemetry-swift) and then import these below dependencies in the main file. + +```swift +import Foundation +import GRPC +import NIO +import NIOSSL +import OpenTelemetryApi +import OpenTelemetryProtocolExporterCommon +import OpenTelemetryProtocolExporterGrpc +import OpenTelemetrySdk +import ResourceExtension +import SignPostIntegration +import StdoutExporter +import ZipkinExporter +``` + +  + +### Step 2: Initialize tracer +Initialize the tracer using the code block below in the `main.swift` file inside the main function or you can create another function for initializing the tracer and call it in some other block of code. 
+ +```swift +var resources = DefaultResources().get() + +let instrumentationScopeName = "{{MYAPP}}" +let instrumentationScopeVersion = "semver:0.1.0" + +let otlpConfiguration: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10)) + +let grpcChannel = ClientConnection.usingPlatformAppropriateTLS(for: MultiThreadedEventLoopGroup(numberOfThreads:1)).connect(host: , port: 4317) + +let otlpTraceExporter = OtlpTraceExporter(channel: grpcChannel, + config: otlpConfiguration) +let stdoutExporter = StdoutExporter() + +let spanExporter = MultiSpanExporter(spanExporters: [otlpTraceExporter, stdoutExporter]) + +let spanProcessor = SimpleSpanProcessor(spanExporter: spanExporter) +OpenTelemetry.registerTracerProvider(tracerProvider: + TracerProviderBuilder() + .add(spanProcessor: spanProcessor) + .build() +) +``` +- - The endpoint where Otel Collector is running. For ex -> "localhost" + + +### Step 3: Add OpenTelemetry instrumentation + +```swift +func doWork() { + let childSpan = tracer.spanBuilder(spanName: "doWork").setSpanKind(spanKind: .client).startSpan() + childSpan.setAttribute(key: sampleKey, value: sampleValue) + Thread.sleep(forTimeInterval: Double.random(in: 0 ..< 10) / 100) + childSpan.end() +} +``` + +  + +If you call this `doWork` function, it will add a trace with span name "doWork" and attributes with key-value pair. You can modify this function according to your needs. 
\ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-runApplication.md b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-runApplication.md new file mode 100644 index 0000000000..5efc5859db --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-runApplication.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Swift application, you can run it using the below commands + +  + +### Step 1: Run OTel Collector + Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step + +```bash +./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid +``` +  + +#### (Optional Step): View last 50 lines of `otelcol` logs +```bash +tail -f -n 50 otelcol-output.log +``` + +#### (Optional Step): Stop `otelcol` +```bash +kill "$(< otel-pid)" +``` +  + +### Step 2: Running your Swift application + +Run the application using the below command: + +```bash +swift run +``` \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx index ca168fbf88..785e73d610 100644 --- a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx @@ -115,6 +115,36 @@ export default function ConnectionStatus(): JSX.Element { imgClassName="supported-language-img" /> ); + case 'rust': + return ( +
+ ); + case 'elixir': + return ( +
+ ); + case 'swift': + return ( +
+ ); default: return <> ; diff --git a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx index 994234eca8..b7ebbcfde8 100644 --- a/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/LogsConnectionStatus/LogsConnectionStatus.tsx @@ -89,6 +89,8 @@ export default function LogsConnectionStatus(): JSX.Element { refetchInterval: pollingInterval, enabled: true, }, + {}, + false, ); const verifyLogsData = ( diff --git a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts index aebb865ceb..7bf505f30d 100644 --- a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts +++ b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts @@ -41,6 +41,38 @@ import APM_dotnet_macOsARM64_quickStart_runApplication from '../Modules/APM/Dotn import APM_dotnet_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Dotnet/md-docs/MacOsARM64/Recommended/dotnet-macosarm64-recommended-installOtelCollector.md'; import APM_dotnet_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Dotnet/md-docs/MacOsARM64/Recommended/dotnet-macosarm64-recommended-instrumentApplication.md'; import APM_dotnet_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Dotnet/md-docs/MacOsARM64/Recommended/dotnet-macosarm64-recommended-runApplication.md'; +// Elixir-Kubernetes +import APM_elixir_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-installOtelCollector.md'; +import APM_elixir_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-instrumentApplication.md'; +import 
APM_elixir_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Elixir/md-docs/Kubernetes/elixir-kubernetes-runApplication.md'; +// Elixir-LinuxAMD64-quickstart +import APM_elixir_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-instrumentApplication.md'; +import APM_elixir_linuxAMD64_quickStart_runApplication from '../Modules/APM/Elixir/md-docs/LinuxAMD64/QuickStart/elixir-linuxamd64-quickStart-runApplication.md'; +// Elixir-LinuxAMD64-recommended +import APM_elixir_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-installOtelCollector.md'; +import APM_elixir_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-instrumentApplication.md'; +import APM_elixir_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Elixir/md-docs/LinuxAMD64/Recommended/elixir-linuxamd64-recommended-runApplication.md'; +// Elixir-LinuxARM64-quickstart +import APM_elixir_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-instrumentApplication.md'; +import APM_elixir_linuxARM64_quickStart_runApplication from '../Modules/APM/Elixir/md-docs/LinuxARM64/QuickStart/elixir-linuxarm64-quickStart-runApplication.md'; +// Elixir-LinuxARM64-recommended +import APM_elixir_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-installOtelCollector.md'; +import APM_elixir_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-instrumentApplication.md'; +import APM_elixir_linuxARM64_recommendedSteps_runApplication from 
'../Modules/APM/Elixir/md-docs/LinuxARM64/Recommended/elixir-linuxarm64-recommended-runApplication.md'; +// Elixir-MacOsAMD64-quickstart +import APM_elixir_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-instrumentApplication.md'; +import APM_elixir_macOsAMD64_quickStart_runApplication from '../Modules/APM/Elixir/md-docs/MacOsAMD64/QuickStart/elixir-macosamd64-quickStart-runApplication.md'; +// Elixir-MacOsAMD64-recommended +import APM_elixir_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-installOtelCollector.md'; +import APM_elixir_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-instrumentApplication.md'; +import APM_elixir_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Elixir/md-docs/MacOsAMD64/Recommended/elixir-macosamd64-recommended-runApplication.md'; +// Elixir-MacOsARM64-quickstart +import APM_elixir_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-instrumentApplication.md'; +import APM_elixir_macOsARM64_quickStart_runApplication from '../Modules/APM/Elixir/md-docs/MacOsARM64/QuickStart/elixir-macosarm64-quickStart-runApplication.md'; +// Elixir-MacOsARM64-recommended +import APM_elixir_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-installOtelCollector.md'; +import APM_elixir_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-instrumentApplication.md'; +import APM_elixir_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Elixir/md-docs/MacOsARM64/Recommended/elixir-macosarm64-recommended-runApplication.md'; import 
APM_go_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/GoLang/md-docs/Kubernetes/golang-kubernetes-installOtelCollector.md'; import APM_go_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/GoLang/md-docs/Kubernetes/golang-kubernetes-instrumentApplication.md'; import APM_go_kubernetes_recommendedSteps_runApplication from '../Modules/APM/GoLang/md-docs/Kubernetes/golang-kubernetes-runApplication.md'; @@ -577,9 +609,73 @@ import APM_rails_macOsARM64_quickStart_runApplication from '../Modules/APM/RubyO import APM_rails_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/RubyOnRails/md-docs/MacOsARM64/Recommended/ror-macosarm64-recommended-installOtelCollector.md'; import APM_rails_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/RubyOnRails/md-docs/MacOsARM64/Recommended/ror-macosarm64-recommended-instrumentApplication.md'; import APM_rails_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/RubyOnRails/md-docs/MacOsARM64/Recommended/ror-macosarm64-recommended-runApplication.md'; +// Rust-Kubernetes +import APM_rust_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-installOtelCollector.md'; +import APM_rust_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-instrumentApplication.md'; +import APM_rust_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Rust/md-docs/Kubernetes/rust-kubernetes-runApplication.md'; +// Rust-LinuxAMD64-quickstart +import APM_rust_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-instrumentApplication.md'; +import APM_rust_linuxAMD64_quickStart_runApplication from '../Modules/APM/Rust/md-docs/LinuxAMD64/QuickStart/rust-linuxamd64-quickStart-runApplication.md'; +// Rust-LinuxAMD64-recommended +import 
APM_rust_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-installOtelCollector.md'; +import APM_rust_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-instrumentApplication.md'; +import APM_rust_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Rust/md-docs/LinuxAMD64/Recommended/rust-linuxamd64-recommended-runApplication.md'; +// Rust-LinuxARM64-quickstart +import APM_rust_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-instrumentApplication.md'; +import APM_rust_linuxARM64_quickStart_runApplication from '../Modules/APM/Rust/md-docs/LinuxARM64/QuickStart/rust-linuxarm64-quickStart-runApplication.md'; +// Rust-LinuxARM64-recommended +import APM_rust_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-installOtelCollector.md'; +import APM_rust_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-instrumentApplication.md'; +import APM_rust_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Rust/md-docs/LinuxARM64/Recommended/rust-linuxarm64-recommended-runApplication.md'; +// Rust-MacOsAMD64-quickstart +import APM_rust_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-instrumentApplication.md'; +import APM_rust_macOsAMD64_quickStart_runApplication from '../Modules/APM/Rust/md-docs/MacOsAMD64/QuickStart/rust-macosamd64-quickStart-runApplication.md'; +// Rust-MacOsAMD64-recommended +import APM_rust_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-installOtelCollector.md'; +import 
APM_rust_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-instrumentApplication.md'; +import APM_rust_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Rust/md-docs/MacOsAMD64/Recommended/rust-macosamd64-recommended-runApplication.md'; +// Rust-MacOsARM64-quickstart +import APM_rust_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-instrumentApplication.md'; +import APM_rust_macOsARM64_quickStart_runApplication from '../Modules/APM/Rust/md-docs/MacOsARM64/QuickStart/rust-macosarm64-quickStart-runApplication.md'; +// Rust-MacOsARM64-recommended +import APM_rust_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-installOtelCollector.md'; +import APM_rust_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-instrumentApplication.md'; +import APM_rust_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Rust/md-docs/MacOsARM64/Recommended/rust-macosarm64-recommended-runApplication.md'; +// Swift-Kubernetes +import APM_swift_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-installOtelCollector.md'; +import APM_swift_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-instrumentApplication.md'; +import APM_swift_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Swift/md-docs/Kubernetes/swift-kubernetes-runApplication.md'; +// Swift-LinuxAMD64-quickstart +import APM_swift_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-instrumentApplication.md'; +import APM_swift_linuxAMD64_quickStart_runApplication from 
'../Modules/APM/Swift/md-docs/LinuxAMD64/QuickStart/swift-linuxamd64-quickStart-runApplication.md'; +// Swift-LinuxAMD64-recommended +import APM_swift_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-installOtelCollector.md'; +import APM_swift_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-instrumentApplication.md'; +import APM_swift_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Swift/md-docs/LinuxAMD64/Recommended/swift-linuxamd64-recommended-runApplication.md'; +// Swift-LinuxARM64-quickstart +import APM_swift_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-instrumentApplication.md'; +import APM_swift_linuxARM64_quickStart_runApplication from '../Modules/APM/Swift/md-docs/LinuxARM64/QuickStart/swift-linuxarm64-quickStart-runApplication.md'; +// Swift-LinuxARM64-recommended +import APM_swift_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-installOtelCollector.md'; +import APM_swift_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-instrumentApplication.md'; +import APM_swift_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Swift/md-docs/LinuxARM64/Recommended/swift-linuxarm64-recommended-runApplication.md'; +// Swift-MacOsAMD64-quickstart +import APM_swift_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-instrumentApplication.md'; +import APM_swift_macOsAMD64_quickStart_runApplication from '../Modules/APM/Swift/md-docs/MacOsAMD64/QuickStart/swift-macosamd64-quickStart-runApplication.md'; +// Swift-MacOsAMD64-recommended +import 
APM_swift_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-installOtelCollector.md'; +import APM_swift_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-instrumentApplication.md'; +import APM_swift_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Swift/md-docs/MacOsAMD64/Recommended/swift-macosamd64-recommended-runApplication.md'; +// Swift-MacOsARM64-quickstart +import APM_swift_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-instrumentApplication.md'; +import APM_swift_macOsARM64_quickStart_runApplication from '../Modules/APM/Swift/md-docs/MacOsARM64/QuickStart/swift-macosarm64-quickStart-runApplication.md'; +// Swift-MacOsARM64-recommended +import APM_swift_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-installOtelCollector.md'; +import APM_swift_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-instrumentApplication.md'; +import APM_swift_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Swift/md-docs/MacOsARM64/Recommended/swift-macosarm64-recommended-runApplication.md'; export const ApmDocFilePaths = { - // APM + // Aust /// //// Java Start @@ -1311,44 +1407,143 @@ export const ApmDocFilePaths = { /// //// .NET Start - // ROR-Kubernetes + // dotnet-Kubernetes APM_dotnet_kubernetes_recommendedSteps_setupOtelCollector, APM_dotnet_kubernetes_recommendedSteps_instrumentApplication, APM_dotnet_kubernetes_recommendedSteps_runApplication, - // ROR-LinuxAMD64-quickstart + // dotnet-LinuxAMD64-quickstart APM_dotnet_linuxAMD64_quickStart_instrumentApplication, APM_dotnet_linuxAMD64_quickStart_runApplication, - // 
ROR-LinuxAMD64-recommended + // dotnet-LinuxAMD64-recommended APM_dotnet_linuxAMD64_recommendedSteps_setupOtelCollector, APM_dotnet_linuxAMD64_recommendedSteps_instrumentApplication, APM_dotnet_linuxAMD64_recommendedSteps_runApplication, - // ROR-LinuxARM64-quickstart + // dotnet-LinuxARM64-quickstart APM_dotnet_linuxARM64_quickStart_instrumentApplication, APM_dotnet_linuxARM64_quickStart_runApplication, - // ROR-LinuxARM64-recommended + // dotnet-LinuxARM64-recommended APM_dotnet_linuxARM64_recommendedSteps_setupOtelCollector, APM_dotnet_linuxARM64_recommendedSteps_instrumentApplication, APM_dotnet_linuxARM64_recommendedSteps_runApplication, - // ROR-MacOsAMD64-quickstart + // dotnet-MacOsAMD64-quickstart APM_dotnet_macOsAMD64_quickStart_instrumentApplication, APM_dotnet_macOsAMD64_quickStart_runApplication, - // ROR-MacOsAMD64-recommended + // dotnet-MacOsAMD64-recommended APM_dotnet_macOsAMD64_recommendedSteps_setupOtelCollector, APM_dotnet_macOsAMD64_recommendedSteps_instrumentApplication, APM_dotnet_macOsAMD64_recommendedSteps_runApplication, - // ROR-MacOsARM64-quickstart + // dotnet-MacOsARM64-quickstart APM_dotnet_macOsARM64_quickStart_instrumentApplication, APM_dotnet_macOsARM64_quickStart_runApplication, - // ROR-MacOsARM64-recommended + // dotnet-MacOsARM64-recommended APM_dotnet_macOsARM64_recommendedSteps_setupOtelCollector, APM_dotnet_macOsARM64_recommendedSteps_instrumentApplication, APM_dotnet_macOsARM64_recommendedSteps_runApplication, + + // Rust + APM_rust_kubernetes_recommendedSteps_setupOtelCollector, + APM_rust_kubernetes_recommendedSteps_instrumentApplication, + APM_rust_kubernetes_recommendedSteps_runApplication, + + APM_rust_linuxAMD64_quickStart_instrumentApplication, + APM_rust_linuxAMD64_quickStart_runApplication, + + APM_rust_linuxAMD64_recommendedSteps_setupOtelCollector, + APM_rust_linuxAMD64_recommendedSteps_instrumentApplication, + APM_rust_linuxAMD64_recommendedSteps_runApplication, + + 
APM_rust_linuxARM64_quickStart_instrumentApplication, + APM_rust_linuxARM64_quickStart_runApplication, + + APM_rust_linuxARM64_recommendedSteps_setupOtelCollector, + APM_rust_linuxARM64_recommendedSteps_instrumentApplication, + APM_rust_linuxARM64_recommendedSteps_runApplication, + + APM_rust_macOsAMD64_quickStart_instrumentApplication, + APM_rust_macOsAMD64_quickStart_runApplication, + + APM_rust_macOsAMD64_recommendedSteps_setupOtelCollector, + APM_rust_macOsAMD64_recommendedSteps_instrumentApplication, + APM_rust_macOsAMD64_recommendedSteps_runApplication, + + APM_rust_macOsARM64_quickStart_instrumentApplication, + APM_rust_macOsARM64_quickStart_runApplication, + + APM_rust_macOsARM64_recommendedSteps_setupOtelCollector, + APM_rust_macOsARM64_recommendedSteps_instrumentApplication, + APM_rust_macOsARM64_recommendedSteps_runApplication, + + // Elixir + APM_elixir_kubernetes_recommendedSteps_setupOtelCollector, + APM_elixir_kubernetes_recommendedSteps_instrumentApplication, + APM_elixir_kubernetes_recommendedSteps_runApplication, + + APM_elixir_linuxAMD64_quickStart_instrumentApplication, + APM_elixir_linuxAMD64_quickStart_runApplication, + + APM_elixir_linuxAMD64_recommendedSteps_setupOtelCollector, + APM_elixir_linuxAMD64_recommendedSteps_instrumentApplication, + APM_elixir_linuxAMD64_recommendedSteps_runApplication, + + APM_elixir_linuxARM64_quickStart_instrumentApplication, + APM_elixir_linuxARM64_quickStart_runApplication, + + APM_elixir_linuxARM64_recommendedSteps_setupOtelCollector, + APM_elixir_linuxARM64_recommendedSteps_instrumentApplication, + APM_elixir_linuxARM64_recommendedSteps_runApplication, + + APM_elixir_macOsAMD64_quickStart_instrumentApplication, + APM_elixir_macOsAMD64_quickStart_runApplication, + + APM_elixir_macOsAMD64_recommendedSteps_setupOtelCollector, + APM_elixir_macOsAMD64_recommendedSteps_instrumentApplication, + APM_elixir_macOsAMD64_recommendedSteps_runApplication, + + APM_elixir_macOsARM64_quickStart_instrumentApplication, + 
APM_elixir_macOsARM64_quickStart_runApplication, + + APM_elixir_macOsARM64_recommendedSteps_setupOtelCollector, + APM_elixir_macOsARM64_recommendedSteps_instrumentApplication, + APM_elixir_macOsARM64_recommendedSteps_runApplication, + + // Swift + APM_swift_kubernetes_recommendedSteps_setupOtelCollector, + APM_swift_kubernetes_recommendedSteps_instrumentApplication, + APM_swift_kubernetes_recommendedSteps_runApplication, + + APM_swift_linuxAMD64_quickStart_instrumentApplication, + APM_swift_linuxAMD64_quickStart_runApplication, + + APM_swift_linuxAMD64_recommendedSteps_setupOtelCollector, + APM_swift_linuxAMD64_recommendedSteps_instrumentApplication, + APM_swift_linuxAMD64_recommendedSteps_runApplication, + + APM_swift_linuxARM64_quickStart_instrumentApplication, + APM_swift_linuxARM64_quickStart_runApplication, + + APM_swift_linuxARM64_recommendedSteps_setupOtelCollector, + APM_swift_linuxARM64_recommendedSteps_instrumentApplication, + APM_swift_linuxARM64_recommendedSteps_runApplication, + + APM_swift_macOsAMD64_quickStart_instrumentApplication, + APM_swift_macOsAMD64_quickStart_runApplication, + + APM_swift_macOsAMD64_recommendedSteps_setupOtelCollector, + APM_swift_macOsAMD64_recommendedSteps_instrumentApplication, + APM_swift_macOsAMD64_recommendedSteps_runApplication, + + APM_swift_macOsARM64_quickStart_instrumentApplication, + APM_swift_macOsARM64_quickStart_runApplication, + + APM_swift_macOsARM64_recommendedSteps_setupOtelCollector, + APM_swift_macOsARM64_recommendedSteps_instrumentApplication, + APM_swift_macOsARM64_recommendedSteps_runApplication, }; diff --git a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts index 08a674d19a..5b46b02727 100644 --- a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts +++ b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts @@ -107,6 +107,21 @@ const supportedLanguages = [ id: 'dotnet', imgURL: 
`Logos/dotnet.png`, }, + { + name: 'rust', + id: 'rust', + imgURL: `Logos/rust.png`, + }, + { + name: 'elixir', + id: 'elixir', + imgURL: `Logos/elixir.png`, + }, + { + name: 'swift', + id: 'swift', + imgURL: `Logos/swift.png`, + }, ]; export const defaultLogsType = { @@ -222,7 +237,10 @@ export const getSupportedFrameworks = ({ if ( (moduleID === ModulesMap.APM && dataSourceName === 'go') || (moduleID === ModulesMap.APM && dataSourceName === 'rails') || - (moduleID === ModulesMap.APM && dataSourceName === '.NET') + (moduleID === ModulesMap.APM && dataSourceName === '.NET') || + (moduleID === ModulesMap.APM && dataSourceName === 'rust') || + (moduleID === ModulesMap.APM && dataSourceName === 'elixir') || + (moduleID === ModulesMap.APM && dataSourceName === 'swift') ) { return []; } @@ -248,7 +266,10 @@ export const hasFrameworks = ({ moduleID === ModulesMap.InfrastructureMonitoring || (moduleID === ModulesMap.APM && dataSourceName === 'go') || (moduleID === ModulesMap.APM && dataSourceName === 'rails') || - (moduleID === ModulesMap.APM && dataSourceName === '.NET') + (moduleID === ModulesMap.APM && dataSourceName === '.NET') || + (moduleID === ModulesMap.APM && dataSourceName === 'rust') || + (moduleID === ModulesMap.APM && dataSourceName === 'elixir') || + (moduleID === ModulesMap.APM && dataSourceName === 'swift') ) { return false; } diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx index 44a30ede05..39d4d221a8 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx @@ -277,6 +277,7 @@ function AuthDomains(): JSX.Element { rowKey={(record: AuthDomain): string => record.name + v4()} dataSource={!SSOFlag ? 
notEntripriseData : []} tableLayout="fixed" + bordered /> ); @@ -327,6 +328,7 @@ function AuthDomains(): JSX.Element { loading={isLoading} tableLayout="fixed" rowKey={(record: AuthDomain): string => record.name + v4()} + bordered /> diff --git a/frontend/src/container/OrganizationSettings/Members/index.tsx b/frontend/src/container/OrganizationSettings/Members/index.tsx index eccda855a0..85e30c978b 100644 --- a/frontend/src/container/OrganizationSettings/Members/index.tsx +++ b/frontend/src/container/OrganizationSettings/Members/index.tsx @@ -231,7 +231,7 @@ function UserFunction({ function Members(): JSX.Element { const { org } = useSelector((state) => state.app); - const { status, data } = useQuery({ + const { status, data, isLoading } = useQuery({ queryFn: () => getOrgUser({ orgId: (org || [])[0].id, @@ -308,13 +308,19 @@ function Members(): JSX.Element { return ( - Members + + Members{' '} + {!isLoading && dataSource && ( +
({dataSource.length})
+ )} +
); diff --git a/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx b/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx index 871ce95564..7395102d4c 100644 --- a/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx +++ b/frontend/src/container/OrganizationSettings/PendingInvitesContainer/index.tsx @@ -271,7 +271,12 @@ function PendingInvitesContainer(): JSX.Element { - {t('pending_invites')} + + {t('pending_invites')} + {getPendingInvitesResponse.status !== 'loading' && dataSource && ( +
({dataSource.length})
+ )} +
@@ -294,6 +299,7 @@ function PendingInvitesContainer(): JSX.Element { dataSource={dataSource} pagination={false} loading={getPendingInvitesResponse.status === 'loading'} + bordered /> diff --git a/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/LogsList/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/LogsList/index.tsx index 19a60cb91f..f7d3af3a88 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/LogsList/index.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/Preview/components/LogsList/index.tsx @@ -2,6 +2,7 @@ import './styles.scss'; import { ExpandAltOutlined } from '@ant-design/icons'; import LogDetail from 'components/LogDetail'; +import { VIEW_TYPES } from 'components/LogDetail/constants'; import dayjs from 'dayjs'; import { useActiveLog } from 'hooks/logs/useActiveLog'; import { ILog } from 'types/api/logs/log'; @@ -36,6 +37,7 @@ function LogsList({ logs }: LogsListProps): JSX.Element { ))} (config && config.queryVariant === 'static' && config.initialDataSource) || @@ -64,70 +67,132 @@ export const QueryBuilder = memo(function QueryBuilder({ [currentQuery], ); + const handleScrollIntoView = ( + entityType: string, + entityName: string, + ): void => { + const selectedEntity = document.getElementById( + `qb-${entityType}-${entityName}`, + ); + + if (selectedEntity) { + selectedEntity.scrollIntoView({ + behavior: 'smooth', + block: 'start', + inline: 'nearest', + }); + } + }; + return ( - -
- - {currentQuery.builder.queryData.map((query, index) => ( - - + +
+ + + + + + + + + +
+ +
+ + + {' '} + + + + + {currentQuery.builder.queryData.map((query, index) => ( + + + + ))} + {currentQuery.builder.queryFormulas.map((formula, index) => { + const isAllMetricDataSource = currentQuery.builder.queryData.every( + (query) => query.dataSource === DataSource.METRICS, + ); + + const query = + currentQuery.builder.queryData[index] || + currentQuery.builder.queryData[0]; + + return ( + + + + ); + })} + + + + - ))} - {currentQuery.builder.queryFormulas.map((formula, index) => { - const isAllMetricDataSource = currentQuery.builder.queryData.every( - (query) => query.dataSource === DataSource.METRICS, - ); - - const query = - currentQuery.builder.queryData[index] || - currentQuery.builder.queryData[0]; - - return ( - - - - ); - })} + - - - - - - - - - {actions} - + + {currentQuery.builder.queryData.map((query) => ( + + ))} + + {currentQuery.builder.queryFormulas.map((formula) => ( + + ))} ); diff --git a/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styled.ts b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styled.ts index 8f23dd0e3c..cbc5f16d56 100644 --- a/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styled.ts +++ b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styled.ts @@ -1,4 +1,5 @@ import { MinusSquareOutlined, PlusSquareOutlined } from '@ant-design/icons'; +import { Color } from '@signozhq/design-tokens'; import { Col, Typography } from 'antd'; import styled, { css } from 'styled-components'; @@ -19,6 +20,7 @@ export const StyledInner = styled(Col)` width: fit-content; display: flex; align-items: center; + gap: 8px; margin-bottom: 0.875rem; min-height: 1.375rem; cursor: pointer; @@ -31,4 +33,5 @@ export const StyledInner = styled(Col)` export const StyledLink = styled(Typography.Link)` pointer-events: none; + color: ${Color.BG_ROBIN_400} !important; `; 
diff --git a/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styles.scss b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styles.scss new file mode 100644 index 0000000000..e859e1900b --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.styles.scss @@ -0,0 +1,15 @@ +.filter-toggler { + margin-right: 8px; +} + +.additinal-filters-container { + .action-btn { + background: #4E74F8; + width: 16px; + height: 16px; + border-radius: 3px; + display: flex; + justify-content: center; + align-items: center; + } +} \ No newline at end of file diff --git a/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.tsx b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.tsx index 470a8b0726..086cabd94f 100644 --- a/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.tsx +++ b/frontend/src/container/QueryBuilder/components/AdditionalFiltersToggler/AdditionalFiltersToggler.tsx @@ -1,15 +1,14 @@ +import './AdditionalFiltersToggler.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; import { Col, Row, Typography } from 'antd'; +import { Minus, Plus } from 'lucide-react'; import { Fragment, memo, ReactNode, useState } from 'react'; // ** Types import { AdditionalFiltersProps } from './AdditionalFiltersToggler.interfaces'; // ** Styles -import { - StyledIconClose, - StyledIconOpen, - StyledInner, - StyledLink, -} from './AdditionalFiltersToggler.styled'; +import { StyledInner, StyledLink } from './AdditionalFiltersToggler.styled'; export const AdditionalFiltersToggler = memo(function AdditionalFiltersToggler({ children, @@ -42,10 +41,19 @@ export const AdditionalFiltersToggler = memo(function AdditionalFiltersToggler({ }); return ( - + - - {isOpenedFilters ? 
: } + + {isOpenedFilters ? ( + + + + ) : ( + + + + )} + {!isOpenedFilters && ( Add conditions for {filtersTexts} )} diff --git a/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.styled.ts b/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.styled.ts index bcf0d2d04c..bc2975a5d3 100644 --- a/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.styled.ts +++ b/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.styled.ts @@ -1,4 +1,3 @@ -import { themeColors } from 'constants/theme'; import styled from 'styled-components'; interface Props { @@ -12,8 +11,4 @@ export const StyledLabel = styled.div` white-space: nowrap; align-items: center; border-radius: 0.125rem; - border: ${({ isDarkMode }): string => - `1px solid ${ - isDarkMode ? themeColors.borderDarkGrey : themeColors.borderLightGrey - }`}; `; diff --git a/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.tsx b/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.tsx index 9d1c17514d..57d09938d6 100644 --- a/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.tsx +++ b/frontend/src/container/QueryBuilder/components/FilterLabel/FilterLabel.tsx @@ -14,7 +14,13 @@ export const FilterLabel = memo(function FilterLabel({ return ( - {label} + + {label} + ); }); diff --git a/frontend/src/container/QueryBuilder/components/Formula/Formula.styles.scss b/frontend/src/container/QueryBuilder/components/Formula/Formula.styles.scss new file mode 100644 index 0000000000..f4f20148ec --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/Formula/Formula.styles.scss @@ -0,0 +1,4 @@ +.formula-container { + padding: 0 24px; + width: 100%; +} diff --git a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx index dd39a5b1af..5f25edb29b 100644 --- 
a/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx +++ b/frontend/src/container/QueryBuilder/components/Formula/Formula.tsx @@ -1,22 +1,21 @@ +import './Formula.styles.scss'; + import { Col, Input, Row } from 'antd'; import { LEGEND } from 'constants/global'; // ** Components -import { - FilterLabel, - ListItemWrapper, - ListMarker, -} from 'container/QueryBuilder/components'; +import { FilterLabel } from 'container/QueryBuilder/components'; import HavingFilter from 'container/QueryBuilder/filters/Formula/Having/HavingFilter'; import LimitFilter from 'container/QueryBuilder/filters/Formula/Limit/Limit'; import OrderByFilter from 'container/QueryBuilder/filters/Formula/OrderBy/OrderByFilter'; // ** Hooks import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations'; -import { ChangeEvent, useCallback, useMemo } from 'react'; +import { ChangeEvent, useCallback, useMemo, useState } from 'react'; import { IBuilderFormula } from 'types/api/queryBuilder/queryBuilderData'; import { getFormatedLegend } from 'utils/getFormatedLegend'; import { AdditionalFiltersToggler } from '../AdditionalFiltersToggler'; +import QBEntityOptions from '../QBEntityOptions/QBEntityOptions'; // ** Types import { FormulaProps } from './Formula.interfaces'; @@ -42,6 +41,8 @@ export function Formula({ formula, }); + const [isCollapse, setIsCollapsed] = useState(false); + const handleDelete = useCallback(() => { removeQueryBuilderEntityByIndex('queryFormulas', index); }, [index, removeQueryBuilderEntityByIndex]); @@ -55,6 +56,10 @@ export function Formula({ handleSetFormulaData(index, newFormula); }, [index, formula, handleSetFormulaData]); + const handleToggleCollapseFormula = (): void => { + setIsCollapsed(!isCollapse); + }; + const handleChange = useCallback( (e: ChangeEvent) => { const { name, value } = e.target; @@ -138,44 +143,50 @@ export function Formula({ ); return ( - - - - - - - - 
- - - {isAdditionalFilterEnable && ( - - - - {renderAdditionalFilters} - - - + + + + {!isCollapse && ( + + + + + + + + {isAdditionalFilterEnable && ( + + + + {renderAdditionalFilters} + + + + )} + )} - + ); } diff --git a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss new file mode 100644 index 0000000000..06fb0ae2fb --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.styles.scss @@ -0,0 +1,81 @@ +.qb-entity-options { + display: flex; + justify-content: space-between; + margin: 4px 0; + + .left-col-items { + display: flex; + align-items: center; + gap: 12px; + + .title { + font-weight: 500; + font-style: italic; + .entityType { + text-transform: capitalize; + } + } + } + + .options { + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + border-radius: 2px; + margin-left: -51px; + + border-radius: 2px; + + .periscope-btn { + border: 1px solid var(--bg-slate-200); + background: var(--bg-ink-200); + min-width: 32px; + } + + .query-name { + font-size: 12px; + font-style: normal; + font-weight: 400; + // line-height: 18px; + + color: var(--bg-sakura-400) !important; + + border: 1px solid rgba(242, 71, 105, 0.2) !important; + background: rgba(242, 71, 105, 0.1) !important; + + &:hover { + border: 1px solid rgba(242, 71, 105, 0.4) !important; + color: var(--bg-sakura-400) !important; + } + } + + .formula-name { + font-size: 12px; + font-style: normal; + font-weight: 400; + // line-height: 18px; + + color: var(--bg-sienna-400) !important; + + border: 1px solid rgba(189, 153, 121, 0.2) !important; + background: rgba(189, 153, 121, 0.1); + + &:hover { + border: 1px solid rgba(189, 153, 121, 0.4) !important; + color: var(--bg-sienna-400) !important; + } + } + } +} + +.lightMode { + .qb-entity-options { + .options { + border-color: var(--bg-vanilla-300); + + .periscope-btn { + border-color: 
var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + color: var(--bg-ink-200); + } + } + } +} diff --git a/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx new file mode 100644 index 0000000000..1319cf0a19 --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/QBEntityOptions/QBEntityOptions.tsx @@ -0,0 +1,74 @@ +import './QBEntityOptions.styles.scss'; + +import { Button, Col } from 'antd'; +import cx from 'classnames'; +import { ChevronDown, ChevronRight, Eye, EyeOff, Trash2 } from 'lucide-react'; + +interface QBEntityOptionsProps { + isCollapsed: boolean; + entityType: string; + entityData: any; + onDelete: () => void; + onToggleVisibility: () => void; + onCollapseEntity: () => void; + showDeleteButton: boolean; +} + +export default function QBEntityOptions({ + isCollapsed, + entityType, + entityData, + onDelete, + onToggleVisibility, + onCollapseEntity, + showDeleteButton, +}: QBEntityOptionsProps): JSX.Element { + return ( + +
+
+
+ + + + + + +
+ + {isCollapsed && ( +
+ {entityType} -{' '} + {entityData.queryName} +
+ )} +
+ + {showDeleteButton && ( + + )} +
+ + ); +} diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.styles.scss b/frontend/src/container/QueryBuilder/components/Query/Query.styles.scss new file mode 100644 index 0000000000..b7ddccbc52 --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/Query/Query.styles.scss @@ -0,0 +1,8 @@ +.qb-search-container { + display: block; + position: relative; +} + +.qb-container { + padding: 0 24px; +} diff --git a/frontend/src/container/QueryBuilder/components/Query/Query.tsx b/frontend/src/container/QueryBuilder/components/Query/Query.tsx index 1c1196d85c..76f411ba43 100644 --- a/frontend/src/container/QueryBuilder/components/Query/Query.tsx +++ b/frontend/src/container/QueryBuilder/components/Query/Query.tsx @@ -1,13 +1,14 @@ +import './Query.styles.scss'; + import { Col, Input, Row } from 'antd'; // ** Constants import { PANEL_TYPES } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; // ** Components import { AdditionalFiltersToggler, DataSourceDropdown, FilterLabel, - ListItemWrapper, - ListMarker, } from 'container/QueryBuilder/components'; import { AggregatorFilter, @@ -23,30 +24,42 @@ import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearc import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations'; // ** Hooks -import { ChangeEvent, memo, ReactNode, useCallback } from 'react'; +import { + ChangeEvent, + memo, + ReactNode, + useCallback, + useMemo, + useState, +} from 'react'; +import { useLocation } from 'react-use'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { transformToUpperCase } from 'utils/transformToUpperCase'; +import QBEntityOptions from '../QBEntityOptions/QBEntityOptions'; // ** Types import { QueryProps } from './Query.interfaces'; export const Query = memo(function Query({ index, - isAvailableToDisable, queryVariant, query, filterConfigs, 
queryComponents, }: QueryProps): JSX.Element { - const { panelType } = useQueryBuilder(); + const { panelType, currentQuery } = useQueryBuilder(); + const { pathname } = useLocation(); + + const [isCollapse, setIsCollapsed] = useState(false); + const { operators, isMetricsDataSource, isTracePanelType, listOfAdditionalFilters, handleChangeAggregatorAttribute, - handleChangeDataSource, handleChangeQueryData, + handleChangeDataSource, handleChangeOperator, handleDeleteQuery, } = useQueryOperations({ index, query, filterConfigs }); @@ -111,6 +124,10 @@ export const Query = memo(function Query({ [handleChangeQueryData], ); + const handleToggleCollapsQuery = (): void => { + setIsCollapsed(!isCollapse); + }; + const renderOrderByFilter = useCallback((): ReactNode => { if (queryComponents?.renderOrderBy) { return queryComponents.renderOrderBy({ @@ -146,6 +163,12 @@ export const Query = memo(function Query({ ], ); + const isExplorerPage = useMemo( + () => + pathname === ROUTES.LOGS_EXPLORER || pathname === ROUTES.TRACES_EXPLORER, + [pathname], + ); + const renderAdditionalFilters = useCallback((): ReactNode => { switch (panelType) { case PANEL_TYPES.TIME_SERIES: { @@ -257,31 +280,74 @@ export const Query = memo(function Query({ ]); return ( - - - - - + + 1} + /> + + {!isCollapse && ( + + + + {!isExplorerPage && ( + + {queryVariant === 'dropdown' ? ( + + ) : ( + + )} + + )} + + {isMetricsDataSource && ( + + + + + + + + + + + )} + + + {isMetricsDataSource && ( + + + + )} + + + + + + - - {queryVariant === 'dropdown' ? ( - - ) : ( - - )} - - {isMetricsDataSource && ( - + {!isMetricsDataSource && ( + )} - + - {isMetricsDataSource && ( - - - - )} - - + + + {panelType === PANEL_TYPES.VALUE ? ( + + ) : ( + + )} + - - - {!isMetricsDataSource && ( - - - - + {!isTracePanelType && ( + + + + {renderAdditionalFilters()} + + - - + - - - - )} - - - - - - - {panelType === PANEL_TYPES.VALUE ? 
( - - ) : ( - - )} - - - - {!isTracePanelType && ( - - - - {renderAdditionalFilters()} - - - )} - {panelType !== PANEL_TYPES.LIST && panelType !== PANEL_TYPES.TRACE && ( - - + )} )} - + ); }); diff --git a/frontend/src/container/QueryBuilder/components/ToolbarActions/LeftToolbarActions.tsx b/frontend/src/container/QueryBuilder/components/ToolbarActions/LeftToolbarActions.tsx new file mode 100644 index 0000000000..7485c47563 --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/ToolbarActions/LeftToolbarActions.tsx @@ -0,0 +1,85 @@ +import './ToolbarActions.styles.scss'; + +import { Button, Switch, Tooltip, Typography } from 'antd'; +import cx from 'classnames'; +import { Atom, MousePointerSquare, Terminal } from 'lucide-react'; +import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils'; + +interface LeftToolbarActionsProps { + items: any; + selectedView: string; + onToggleHistrogramVisibility: () => void; + onChangeSelectedView: (view: SELECTED_VIEWS) => void; + showHistogram: boolean; +} + +const activeTab = 'active-tab'; +const actionBtn = 'action-btn'; +export const queryBuilder = 'query-builder'; + +export default function LeftToolbarActions({ + items, + selectedView, + onToggleHistrogramVisibility, + onChangeSelectedView, + showHistogram, +}: LeftToolbarActionsProps): JSX.Element { + const { clickhouse, search, queryBuilder: QB } = items; + + return ( +
+
+ + + + + + + + {clickhouse?.show && ( + + )} +
+ +
+ Histogram + +
+
+ ); +} diff --git a/frontend/src/container/QueryBuilder/components/ToolbarActions/RightToolbarActions.tsx b/frontend/src/container/QueryBuilder/components/ToolbarActions/RightToolbarActions.tsx new file mode 100644 index 0000000000..aea205d569 --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/ToolbarActions/RightToolbarActions.tsx @@ -0,0 +1,38 @@ +import './ToolbarActions.styles.scss'; + +import { Button } from 'antd'; +import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts'; +import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; +import { Play } from 'lucide-react'; +import { useEffect } from 'react'; + +interface RightToolbarActionsProps { + onStageRunQuery: () => void; +} + +export default function RightToolbarActions({ + onStageRunQuery, +}: RightToolbarActionsProps): JSX.Element { + const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys(); + + useEffect(() => { + registerShortcut(LogsExplorerShortcuts.StageAndRunQuery, onStageRunQuery); + + return (): void => { + deregisterShortcut(LogsExplorerShortcuts.StageAndRunQuery); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [onStageRunQuery]); + return ( +
+ +
+ ); +} diff --git a/frontend/src/container/QueryBuilder/components/ToolbarActions/ToolbarActions.styles.scss b/frontend/src/container/QueryBuilder/components/ToolbarActions/ToolbarActions.styles.scss new file mode 100644 index 0000000000..a29f031e37 --- /dev/null +++ b/frontend/src/container/QueryBuilder/components/ToolbarActions/ToolbarActions.styles.scss @@ -0,0 +1,71 @@ +.left-toolbar { + display: flex; + align-items: center; + + .left-toolbar-query-actions { + display: flex; + border-radius: 2px; + border: 1px solid var(--bg-slate-400, #1d212d); + background: var(--bg-ink-300, #16181d); + flex-direction: row; + + .prom-ql-icon { + height: 14px; + width: 14px; + } + + .ant-btn { + display: flex; + align-items: center; + justify-content: center; + border: none; + padding: 9px; + box-shadow: none; + border-radius: 0; + + &.active-tab { + background-color: #1d212d; + } + + &:disabled { + background-color: #121317; + opacity: 0.6; + } + } + .action-btn + .action-btn { + border-left: 1px solid var(--bg-slate-400, #1d212d); + } + } + + .histogram-view-controller { + display: flex; + align-items: center; + padding-left: 8px; + gap: 8px; + } +} + +.right-toolbar { + display: flex; + align-items: center; + background-color: var(--bg-robin-600); +} + +.lightMode { + .left-toolbar { + .left-toolbar-query-actions { + border-color: var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + + .ant-btn { + border-color: var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + color: var(--bg-ink-200); + + &.active-tab { + background-color: var(--bg-robin-100); + } + } + } + } +} diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss new file mode 100644 index 0000000000..8fd979fa8e --- /dev/null +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/QueryBuilderSearch.styles.scss @@ -0,0 +1,11 @@ +.lightMode 
{ + .query-builder-search { + .ant-select-dropdown { + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + } + + .ant-select-item-option-active { + background-color: var(--bg-vanilla-200) !important; + } + } +} diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx index 975c79a4a8..46d535737a 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx @@ -1,11 +1,18 @@ +import './QueryBuilderSearch.styles.scss'; + import { Select, Spin, Tag, Tooltip } from 'antd'; import { OPERATORS } from 'constants/queryBuilder'; +import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts'; import { getDataTypes } from 'container/LogDetailedView/utils'; +import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; import { useAutoComplete, WhereClauseConfig, } from 'hooks/queryBuilder/useAutoComplete'; import { useFetchKeysAndValues } from 'hooks/queryBuilder/useFetchKeysAndValues'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { isEqual } from 'lodash-es'; +import type { BaseSelectRef } from 'rc-select'; import { KeyboardEvent, ReactElement, @@ -13,6 +20,8 @@ import { useCallback, useEffect, useMemo, + useRef, + useState, } from 'react'; import { BaseAutocompleteData, @@ -44,6 +53,7 @@ function QueryBuilderSearch({ whereClauseConfig, className, placeholder, + suffixIcon, }: QueryBuilderSearchProps): JSX.Element { const { updateTag, @@ -60,12 +70,18 @@ function QueryBuilderSearch({ searchKey, } = useAutoComplete(query, whereClauseConfig); + const [isOpen, setIsOpen] = useState(false); + const selectRef = useRef(null); const { sourceKeys, handleRemoveSourceKey } = useFetchKeysAndValues( searchValue, query, searchKey, ); + const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys(); + + const { 
handleRunQuery, currentQuery } = useQueryBuilder(); + const onTagRender = ({ value, closable, @@ -116,6 +132,13 @@ function QueryBuilderSearch({ const onInputKeyDownHandler = (event: KeyboardEvent): void => { if (isMulti || event.key === 'Backspace') handleKeyDown(event); if (isExistsNotExistsOperator(searchValue)) handleKeyDown(event); + + if ((event.ctrlKey || event.metaKey) && event.key === 'Enter') { + event.preventDefault(); + event.stopPropagation(); + handleRunQuery(); + setIsOpen(false); + } }; const handleDeselect = useCallback( @@ -179,42 +202,77 @@ function QueryBuilderSearch({ }); onChange(initialTagFilters); - /* eslint-disable react-hooks/exhaustive-deps */ + // eslint-disable-next-line react-hooks/exhaustive-deps }, [sourceKeys]); + const isLastQuery = useMemo( + () => + isEqual( + currentQuery.builder.queryData[currentQuery.builder.queryData.length - 1], + query, + ), + [currentQuery, query], + ); + + useEffect(() => { + if (isLastQuery) { + registerShortcut(LogsExplorerShortcuts.FocusTheSearchBar, () => { + // set timeout is needed here else the select treats the hotkey as input value + setTimeout(() => { + selectRef.current?.focus(); + }, 0); + }); + } + + return (): void => + deregisterShortcut(LogsExplorerShortcuts.FocusTheSearchBar); + }, [deregisterShortcut, isLastQuery, registerShortcut]); + return ( - + + ); } @@ -224,12 +282,14 @@ interface QueryBuilderSearchProps { whereClauseConfig?: WhereClauseConfig; className?: string; placeholder?: string; + suffixIcon?: React.ReactNode; } QueryBuilderSearch.defaultProps = { whereClauseConfig: undefined, className: '', placeholder: PLACEHOLDER, + suffixIcon: undefined, }; export interface CustomTagProps { diff --git a/frontend/src/container/ServiceApplication/types.ts b/frontend/src/container/ServiceApplication/types.ts index 6ff6b7c87a..0717538cb8 100644 --- a/frontend/src/container/ServiceApplication/types.ts +++ b/frontend/src/container/ServiceApplication/types.ts @@ -1,5 +1,6 @@ import { 
ServiceDataProps } from 'api/metrics/getTopLevelOperations'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults'; import { UseQueryResult } from 'react-query'; import { SuccessResponse } from 'types/api'; @@ -24,7 +25,7 @@ export interface GetQueryRangeRequestDataProps { topLevelOperations: [keyof ServiceDataProps, string[]][]; maxTime: number; minTime: number; - globalSelectedInterval: Time; + globalSelectedInterval: Time | TimeV2; } export interface GetServiceListFromQueryProps { diff --git a/frontend/src/container/SideNav/NavItem/NavItem.tsx b/frontend/src/container/SideNav/NavItem/NavItem.tsx index 301acc402e..0ec6127da1 100644 --- a/frontend/src/container/SideNav/NavItem/NavItem.tsx +++ b/frontend/src/container/SideNav/NavItem/NavItem.tsx @@ -1,5 +1,8 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ import './NavItem.styles.scss'; +import { Tooltip } from 'antd'; import cx from 'classnames'; import { SidebarItem } from '../sideNav.types'; @@ -18,14 +21,15 @@ export default function NavItem({ const { label, icon } = item; return ( - // eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions -
-
-
-
{icon}
+ +
+
+
+
{icon}
- {!isCollapsed &&
{label}
} + {!isCollapsed &&
{label}
} +
-
+
); } diff --git a/frontend/src/container/SideNav/SideNav.styles.scss b/frontend/src/container/SideNav/SideNav.styles.scss index 379a7bba93..2cc32e12f3 100644 --- a/frontend/src/container/SideNav/SideNav.styles.scss +++ b/frontend/src/container/SideNav/SideNav.styles.scss @@ -1,20 +1,19 @@ -@import '@signozhq/design-tokens'; - .sideNav { flex: 0 0 240px; max-width: 240px; min-width: 240px; width: 240px; - border-right: 1px solid $bg-slate-400; + border-right: 1px solid var(--bg-slate-400); padding-bottom: 48px; transition: all 0.3s, background 0s, border 0s; position: relative; + z-index: 1; .brand { display: flex; align-items: center; gap: 12px; - padding: $padding-4; + padding: var(--padding-4); .brand-logo { display: flex; @@ -24,7 +23,7 @@ cursor: pointer; img { - height: $font-size-xl; + height: var(--font-size-xl); } .brand-logo-name { @@ -45,13 +44,13 @@ padding: 0; color: rgba(255, 255, 255, 0.85); font-size: 8px; - font-weight: $font-weight-medium; + font-weight: var(--font-weight-medium); letter-spacing: 0.6px; padding: 4px 8px; text-transform: uppercase; white-space: nowrap; - background: $bg-slate-400; - border: 1px solid $bg-slate-400; + background: var(--bg-slate-400); + border: 1px solid var(--bg-slate-400); border-radius: 20px; opacity: 1; transition: all 0.2s; @@ -73,7 +72,7 @@ width: 100%; height: 36px; - border: 1px solid $bg-slate-400; + border: 1px solid var(--bg-slate-400); border-radius: 2px; box-shadow: none !important; @@ -81,7 +80,7 @@ } .secondary-nav-items { - border-top: 1px solid $bg-slate-400; + border-top: 1px solid var(--bg-slate-400); padding: 8px 0; max-width: 100%; position: fixed; @@ -105,8 +104,8 @@ transition: display 0.3s; svg { - fill: $bg-vanilla-300; - color: $bg-slate-300; + fill: var(--bg-vanilla-400); + color: var(--bg-slate-300); } } } @@ -141,30 +140,30 @@ .lightMode { .sideNav { - background: $bg-vanilla-300; - border-right: 1px solid $bg-vanilla-400; + background: var(--bg-vanilla-300); + border-right: 1px solid 
var(--bg-vanilla-400); .get-started-nav-items { .get-started-btn { - border: 1px solid $bg-vanilla-400; + border: 1px solid var(--bg-vanilla-400); } } .brand { .brand-logo { .brand-logo-name { - color: $bg-slate-400; + color: var(--bg-slate-400); } } } .secondary-nav-items { - border-top: 1px solid $bg-vanilla-400; + border-top: 1px solid var(--bg-vanilla-400); .collapse-expand-handlers { svg { - color: $bg-slate-300; - fill: $bg-vanilla-300; + color: var(--bg-slate-300); + fill: var(--bg-vanilla-400); } } } diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index b7a06ff0e5..a98d9eacd9 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -3,14 +3,12 @@ import './SideNav.styles.scss'; import { Button } from 'antd'; -import getLocalStorageKey from 'api/browser/localstorage/get'; import cx from 'classnames'; -import { IS_SIDEBAR_COLLAPSED } from 'constants/app'; import { FeatureKeys } from 'constants/features'; import ROUTES from 'constants/routes'; -import { ToggleButton } from 'container/Header/styles'; +import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts'; +import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; import useComponentPermission from 'hooks/useComponentPermission'; -import useThemeMode, { useIsDarkMode } from 'hooks/useDarkMode'; import { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS } from 'hooks/useLicense'; import history from 'lib/history'; import { @@ -21,17 +19,10 @@ import { RocketIcon, UserCircle, } from 'lucide-react'; -import { - useCallback, - useEffect, - useLayoutEffect, - useMemo, - useState, -} from 'react'; +import { useCallback, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { useDispatch, useSelector } from 'react-redux'; +import { useSelector } from 'react-redux'; import { useLocation } from 'react-router-dom'; -import { sideBarCollapse } from 
'store/actions'; import { AppState } from 'store/reducers'; import { License } from 'types/api/licenses/def'; import AppReducer from 'types/reducer/app'; @@ -44,6 +35,7 @@ import defaultMenuItems, { helpSupportMenuItem, inviteMemberMenuItem, manageLicenseMenuItem, + shortcutMenuItem, slackSupportMenuItem, trySignozCloudMenuItem, } from './menuItems'; @@ -51,18 +43,24 @@ import NavItem from './NavItem/NavItem'; import { SecondaryMenuItemKey } from './sideNav.types'; import { getActiveMenuKeyFromPath } from './sideNav.utils'; +interface UserManagementMenuItems { + key: string; + label: string; + icon: JSX.Element; +} + function SideNav({ licenseData, isFetching, + onCollapse, + collapsed, }: { licenseData: any; isFetching: boolean; + onCollapse: () => void; + collapsed: boolean; }): JSX.Element { - const dispatch = useDispatch(); const [menuItems, setMenuItems] = useState(defaultMenuItems); - const [collapsed, setCollapsed] = useState( - getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true', - ); const { pathname, search } = useLocation(); const { @@ -82,9 +80,9 @@ function SideNav({ icon: , }; - const [userManagementMenuItems, setUserManagementMenuItems] = useState([ - manageLicenseMenuItem, - ]); + const [userManagementMenuItems, setUserManagementMenuItems] = useState< + UserManagementMenuItems[] + >([manageLicenseMenuItem]); const onClickSlackHandler = (): void => { window.open('https://signoz.io/slack', '_blank'); @@ -98,6 +96,8 @@ function SideNav({ const [inviteMembers] = useComponentPermission(['invite_members'], role); + const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys(); + useEffect(() => { if (inviteMembers) { const updatedUserManagementMenuItems = [ @@ -148,14 +148,6 @@ function SideNav({ const { t } = useTranslation(''); - const onCollapse = useCallback(() => { - setCollapsed((collapsed) => !collapsed); - }, []); - - useLayoutEffect(() => { - dispatch(sideBarCollapse(collapsed)); - }, [collapsed, dispatch]); - const isLicenseActive = 
licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status === LICENSE_PLAN_STATUS.VALID; @@ -172,6 +164,10 @@ function SideNav({ ); }; + const onClickShortcuts = (): void => { + history.push(`/shortcuts`); + }; + const onClickGetStarted = (): void => { history.push(`/get-started`); }; @@ -194,9 +190,6 @@ function SideNav({ pathname, ]); - const isDarkMode = useIsDarkMode(); - const { toggleTheme } = useThemeMode(); - const isCloudUserVal = isCloudUser(); useEffect(() => { @@ -262,6 +255,42 @@ function SideNav({ ? ROUTES.ORG_SETTINGS : ROUTES.SETTINGS; + useEffect(() => { + registerShortcut(GlobalShortcuts.SidebarCollapse, onCollapse); + + registerShortcut(GlobalShortcuts.NavigateToServices, () => + onClickHandler(ROUTES.APPLICATION), + ); + registerShortcut(GlobalShortcuts.NavigateToTraces, () => + onClickHandler(ROUTES.TRACE), + ); + + registerShortcut(GlobalShortcuts.NavigateToLogs, () => + onClickHandler(ROUTES.LOGS), + ); + + registerShortcut(GlobalShortcuts.NavigateToDashboards, () => + onClickHandler(ROUTES.ALL_DASHBOARD), + ); + + registerShortcut(GlobalShortcuts.NavigateToAlerts, () => + onClickHandler(ROUTES.LIST_ALL_ALERT), + ); + registerShortcut(GlobalShortcuts.NavigateToExceptions, () => + onClickHandler(ROUTES.ALL_ERROR), + ); + + return (): void => { + deregisterShortcut(GlobalShortcuts.SidebarCollapse); + deregisterShortcut(GlobalShortcuts.NavigateToServices); + deregisterShortcut(GlobalShortcuts.NavigateToTraces); + deregisterShortcut(GlobalShortcuts.NavigateToLogs); + deregisterShortcut(GlobalShortcuts.NavigateToDashboards); + deregisterShortcut(GlobalShortcuts.NavigateToAlerts); + deregisterShortcut(GlobalShortcuts.NavigateToExceptions); + }; + }, [deregisterShortcut, onClickHandler, onCollapse, registerShortcut]); + return (
@@ -278,18 +307,8 @@ function SideNav({ {!collapsed && SigNoz }
- {!collapsed && ( - <> - {!isFetching &&
{licenseTag}
} - - - + {!collapsed && licenseTag && ( +
{licenseTag}
)}
@@ -322,6 +341,14 @@ function SideNav({
+ + {licenseData && !isLicenseActive && ( , }; +export const shortcutMenuItem = { + key: ROUTES.SHORTCUTS, + label: 'Keyboard Shortcuts', + icon: , +}; + export const slackSupportMenuItem = { key: SecondaryMenuItemKey.Slack, label: 'Slack Support', @@ -110,7 +117,7 @@ const menuItems: SidebarItem[] = [ ]; /** Mapping of some newly added routes and their corresponding active sidebar menu key */ -export const NEW_ROUTES_MENU_ITEM_KEY_MAP = { +export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record = { [ROUTES.TRACES_EXPLORER]: ROUTES.TRACE, [ROUTES.TRACE_EXPLORER]: ROUTES.TRACE, [ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER, diff --git a/frontend/src/container/SideNav/sideNav.utils.ts b/frontend/src/container/SideNav/sideNav.utils.ts index 3c455ddab2..77aee24147 100644 --- a/frontend/src/container/SideNav/sideNav.utils.ts +++ b/frontend/src/container/SideNav/sideNav.utils.ts @@ -7,5 +7,5 @@ export const getActiveMenuKeyFromPath = (pathname: string): string => { const baseRoute = `/${basePath}`; - return NEW_ROUTES_MENU_ITEM_KEY_MAP[baseRoute] || baseRoute; + return (NEW_ROUTES_MENU_ITEM_KEY_MAP[baseRoute] as string) || baseRoute; }; diff --git a/frontend/src/container/TimeSeriesView/TimeSeriesView.styles.scss b/frontend/src/container/TimeSeriesView/TimeSeriesView.styles.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx index 767bf16d34..8bac60b425 100644 --- a/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx +++ b/frontend/src/container/TimeSeriesView/TimeSeriesView.tsx @@ -1,8 +1,14 @@ -import Spinner from 'components/Spinner'; +import './TimeSeriesView.styles.scss'; + import Uplot from 'components/Uplot'; +import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch'; +import LogsError from 'container/LogsError/LogsError'; +import { LogsLoading } from 'container/LogsLoading/LogsLoading'; +import NoLogs from 
'container/NoLogs/NoLogs'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions'; import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; +import { isEmpty } from 'lodash-es'; import { useEffect, useMemo, useRef, useState } from 'react'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; @@ -11,13 +17,14 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { GlobalReducer } from 'types/reducer/globalTime'; import { getTimeRange } from 'utils/getTimeRange'; -import { Container, ErrorText } from './styles'; +import { Container } from './styles'; function TimeSeriesView({ data, isLoading, isError, yAxisUnit, + isFilterApplied, }: TimeSeriesViewProps): JSX.Element { const graphRef = useRef(null); @@ -66,16 +73,33 @@ function TimeSeriesView({ return ( - {isLoading && } - {isError && {data?.error || 'Something went wrong'}} + {isError && }
- {!isLoading && !isError && chartData && chartOptions && ( - - )} + {isLoading && } + + {chartData && + chartData[0] && + chartData[0]?.length === 0 && + !isLoading && + !isError && + isFilterApplied && } + + {chartData && + chartData[0] && + chartData[0]?.length === 0 && + !isLoading && + !isError && + !isFilterApplied && } + + {!isLoading && + !isError && + chartData && + !isEmpty(chartData?.[0]) && + chartOptions && }
); @@ -86,6 +110,7 @@ interface TimeSeriesViewProps { yAxisUnit?: string; isLoading: boolean; isError: boolean; + isFilterApplied: boolean; } TimeSeriesView.defaultProps = { diff --git a/frontend/src/container/TimeSeriesView/index.tsx b/frontend/src/container/TimeSeriesView/index.tsx index a0618b9960..b2c6b3f01f 100644 --- a/frontend/src/container/TimeSeriesView/index.tsx +++ b/frontend/src/container/TimeSeriesView/index.tsx @@ -68,6 +68,8 @@ function TimeSeriesViewContainer({ return ( +
{leftActions}
+
+ {showOldCTA && } + +
+
{rightActions}
+
+ ); +} + +Toolbar.defaultProps = { + leftActions:
, + rightActions:
, + showOldCTA: false, +}; diff --git a/frontend/src/container/TopNav/AutoRefresh/config.ts b/frontend/src/container/TopNav/AutoRefresh/config.ts index dfd3134e0e..64aaca6c64 100644 --- a/frontend/src/container/TopNav/AutoRefresh/config.ts +++ b/frontend/src/container/TopNav/AutoRefresh/config.ts @@ -1,6 +1,7 @@ import GetMinMax, { GetMinMaxPayload } from 'lib/getMinMax'; import { Time } from '../DateTimeSelection/config'; +import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; export const options: IOptions[] = [ { @@ -67,7 +68,7 @@ export interface IOptions { } export const getMinMax = ( - selectedTime: Time, + selectedTime: Time | TimeV2, minTime: number, maxTime: number, ): GetMinMaxPayload => diff --git a/frontend/src/container/TopNav/AutoRefresh/index.tsx b/frontend/src/container/TopNav/AutoRefresh/index.tsx index 11a22e700f..d11b2c248b 100644 --- a/frontend/src/container/TopNav/AutoRefresh/index.tsx +++ b/frontend/src/container/TopNav/AutoRefresh/index.tsx @@ -31,7 +31,10 @@ import { popupContainer } from 'utils/selectPopupContainer'; import { getMinMax, options } from './config'; import { ButtonContainer, Container } from './styles'; -function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element { +function AutoRefresh({ + disabled = false, + showAutoRefreshBtnPrimary = true, +}: AutoRefreshProps): JSX.Element { const globalTime = useSelector( (state) => state.globalTime, ); @@ -176,7 +179,10 @@ function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element { } > - + @@ -185,10 +191,12 @@ function AutoRefresh({ disabled = false }: AutoRefreshProps): JSX.Element { interface AutoRefreshProps { disabled?: boolean; + showAutoRefreshBtnPrimary?: boolean; } AutoRefresh.defaultProps = { disabled: false, + showAutoRefreshBtnPrimary: true, }; export default AutoRefresh; diff --git a/frontend/src/container/TopNav/AutoRefreshV2/AutoRefreshV2.styles.scss b/frontend/src/container/TopNav/AutoRefreshV2/AutoRefreshV2.styles.scss new file 
mode 100644 index 0000000000..4b0953a767 --- /dev/null +++ b/frontend/src/container/TopNav/AutoRefreshV2/AutoRefreshV2.styles.scss @@ -0,0 +1,79 @@ +.auto-refresh-root { + .ant-popover-inner { + min-width: 187px; + min-height: 386px; + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + padding: 0; + } + .ant-popover-arrow { + display: none; + } +} +.auto-refresh-menu { + display: flex; + flex-direction: column; +} + +.auto-refresh-checkbox { + padding: 12px 14px; + border-bottom: 1px solid var(--bg-slate-400); + color: #f9f9fa; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; +} + +.refresh-interval-text { + padding: 12px 14px 8px 14px; + color: #52575c; + font-size: 11px; + font-style: normal; + font-weight: 500; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + margin-bottom: 0px !important; +} + +.refresh-interval-btns { + padding-left: 14px !important; + color: var(--text-vanilla-400); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + justify-content: space-between; +} + +.lightMode { + .auto-refresh-root { + .ant-popover-inner { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + } + } + + .auto-refresh-checkbox { + border-bottom: 1px solid var(--bg-vanilla-400); + color: var(--bg-slate-400); + } + + .refresh-interval-text { + color: var(--bg-vanilla-400); + } + + .refresh-interval-btns { + color: var(--text-slate-400); + } +} diff --git a/frontend/src/container/TopNav/AutoRefreshV2/config.ts b/frontend/src/container/TopNav/AutoRefreshV2/config.ts new file mode 100644 index 0000000000..a84f932fbc --- /dev/null +++ 
b/frontend/src/container/TopNav/AutoRefreshV2/config.ts @@ -0,0 +1,77 @@ +import GetMinMax, { GetMinMaxPayload } from 'lib/getMinMax'; + +import { Time } from '../DateTimeSelection/config'; +import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; + +export const options: IOptions[] = [ + { + label: 'off', + key: 'off', + value: 0, + }, + { + label: '5 seconds', + key: '5s', + value: 5000, + }, + { + label: '10 seconds', + key: '10s', + value: 10000, + }, + { + label: '30 seconds', + key: '30s', + value: 30000, + }, + { + label: '1 minute', + key: '1m', + value: 60000, + }, + { + label: '5 minutes', + key: '5m', + value: 300000, + }, + { + label: '10 minutes', + key: '10m', + value: 600000, + }, + { + label: '30 minutes', + key: '30m', + value: 1800000, + }, + { + label: '1 hour', + key: '1h', + value: 3600000, + }, + { + label: '2 hours', + key: '2h', + value: 7200000, + }, + { + label: '1 day', + key: '1d', + value: 86400000, + }, +]; + +export interface IOptions { + label: string; + key: string; + value: number; +} + +export const getMinMax = ( + selectedTime: Time | TimeV2, + minTime: number, + maxTime: number, +): GetMinMaxPayload => + selectedTime !== 'custom' + ? 
GetMinMax(selectedTime) + : GetMinMax(selectedTime, [minTime, maxTime]); diff --git a/frontend/src/container/TopNav/AutoRefreshV2/index.tsx b/frontend/src/container/TopNav/AutoRefreshV2/index.tsx new file mode 100644 index 0000000000..63652bff7a --- /dev/null +++ b/frontend/src/container/TopNav/AutoRefreshV2/index.tsx @@ -0,0 +1,201 @@ +import './AutoRefreshV2.styles.scss'; + +import { CaretDownFilled } from '@ant-design/icons'; +import { Button, Checkbox, Popover, Typography } from 'antd'; +import { CheckboxChangeEvent } from 'antd/lib/checkbox'; +import get from 'api/browser/localstorage/get'; +import set from 'api/browser/localstorage/set'; +import { DASHBOARD_TIME_IN_DURATION } from 'constants/app'; +import useUrlQuery from 'hooks/useUrlQuery'; +import _omit from 'lodash-es/omit'; +import { Check } from 'lucide-react'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { useInterval } from 'react-use'; +import { Dispatch } from 'redux'; +import { AppState } from 'store/reducers'; +import AppActions from 'types/actions'; +import { + UPDATE_AUTO_REFRESH_INTERVAL, + UPDATE_TIME_INTERVAL, +} from 'types/actions/globalTime'; +import { GlobalReducer } from 'types/reducer/globalTime'; +import { popupContainer } from 'utils/selectPopupContainer'; + +import { getMinMax, options } from './config'; +import { ButtonContainer } from './styles'; + +function AutoRefresh({ + disabled = false, + showAutoRefreshBtnPrimary = true, +}: AutoRefreshProps): JSX.Element { + const globalTime = useSelector( + (state) => state.globalTime, + ); + const { pathname } = useLocation(); + + const isDisabled = useMemo( + () => + disabled || + globalTime.isAutoRefreshDisabled || + globalTime.selectedTime === 'custom', + [globalTime.isAutoRefreshDisabled, disabled, globalTime.selectedTime], + ); + + const localStorageData = JSON.parse(get(DASHBOARD_TIME_IN_DURATION) 
|| '{}'); + + const localStorageValue = useMemo(() => localStorageData[pathname], [ + pathname, + localStorageData, + ]); + + const [isAutoRefreshEnabled, setIsAutoRefreshfreshEnabled] = useState( + Boolean(localStorageValue), + ); + + const dispatch = useDispatch>(); + + useEffect(() => { + const isAutoRefreshEnabled = Boolean(localStorageValue); + dispatch({ + type: UPDATE_AUTO_REFRESH_INTERVAL, + payload: localStorageValue, + }); + setIsAutoRefreshfreshEnabled(isAutoRefreshEnabled); + }, [localStorageValue, dispatch]); + + const params = useUrlQuery(); + + const [selectedOption, setSelectedOption] = useState( + localStorageValue || options[0].key, + ); + + useEffect(() => { + setSelectedOption(localStorageValue || options[0].key); + }, [localStorageValue, params]); + + const getOption = useMemo( + () => options.find((option) => option.key === selectedOption), + [selectedOption], + ); + + useInterval(() => { + const selectedValue = getOption?.value; + + if (isDisabled || !isAutoRefreshEnabled) { + return; + } + + if (selectedOption !== 'off' && selectedValue) { + const { maxTime, minTime } = getMinMax( + globalTime.selectedTime, + globalTime.minTime, + globalTime.maxTime, + ); + + dispatch({ + type: UPDATE_TIME_INTERVAL, + payload: { + maxTime, + minTime, + selectedTime: globalTime.selectedTime, + }, + }); + } + }, getOption?.value || 0); + + const onChangeHandler = useCallback( + (selectedValue: string) => { + setSelectedOption(selectedValue); + params.set(DASHBOARD_TIME_IN_DURATION, selectedValue); + set( + DASHBOARD_TIME_IN_DURATION, + JSON.stringify({ ...localStorageData, [pathname]: selectedValue }), + ); + setIsAutoRefreshfreshEnabled(true); + }, + [params, pathname, localStorageData], + ); + + const onChangeAutoRefreshHandler = useCallback( + (event: CheckboxChangeEvent) => { + const { checked } = event.target; + if (!checked) { + // remove the path from localstorage + set( + DASHBOARD_TIME_IN_DURATION, + JSON.stringify(_omit(localStorageData, pathname)), 
+ ); + } + setIsAutoRefreshfreshEnabled(checked); + }, + [localStorageData, pathname], + ); + + if (globalTime.selectedTime === 'custom') { + // eslint-disable-next-line react/jsx-no-useless-fragment + return <>; + } + + return ( + + + Auto Refresh + + + Refresh Interval + + {options + .filter((e) => e.label !== 'off') + .map((option) => ( + + ))} +
+ } + > + + + + + ); +} + +interface AutoRefreshProps { + disabled?: boolean; + showAutoRefreshBtnPrimary?: boolean; +} + +AutoRefresh.defaultProps = { + disabled: false, + showAutoRefreshBtnPrimary: true, +}; + +export default AutoRefresh; diff --git a/frontend/src/container/TopNav/AutoRefreshV2/styles.ts b/frontend/src/container/TopNav/AutoRefreshV2/styles.ts new file mode 100644 index 0000000000..9672a346e7 --- /dev/null +++ b/frontend/src/container/TopNav/AutoRefreshV2/styles.ts @@ -0,0 +1,13 @@ +import { Button } from 'antd'; +import styled from 'styled-components'; + +export const Container = styled.div` + min-width: 8rem; +`; + +export const ButtonContainer = styled(Button)` + &&& { + padding-left: 0.5rem; + padding-right: 0.5rem; + } +`; diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx index d5e4941142..4ab1e945d4 100644 --- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx +++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx @@ -2,7 +2,7 @@ import { Breadcrumb } from 'antd'; import ROUTES from 'constants/routes'; import { Link, RouteComponentProps, withRouter } from 'react-router-dom'; -const breadcrumbNameMap = { +const breadcrumbNameMap: Record = { [ROUTES.APPLICATION]: 'Services', [ROUTES.TRACE]: 'Traces', [ROUTES.TRACES_EXPLORER]: 'Traces Explorer', @@ -20,7 +20,6 @@ const breadcrumbNameMap = { [ROUTES.ERROR_DETAIL]: 'Exceptions', [ROUTES.LIST_ALL_ALERT]: 'Alerts', [ROUTES.ALL_DASHBOARD]: 'Dashboard', - [ROUTES.LOGS]: 'Logs', [ROUTES.LOGS_EXPLORER]: 'Logs Explorer', [ROUTES.OLD_LOGS_EXPLORER]: 'Old Logs Explorer', [ROUTES.LIVE_LOGS]: 'Live View', diff --git a/frontend/src/container/TopNav/CustomDateTimeModal/index.tsx b/frontend/src/container/TopNav/CustomDateTimeModal/index.tsx index 904bcd5fd0..5c5292d3ac 100644 --- a/frontend/src/container/TopNav/CustomDateTimeModal/index.tsx +++ b/frontend/src/container/TopNav/CustomDateTimeModal/index.tsx @@ -44,7 +44,7 @@ function 
CustomDateTimeModal({ disabledDate={disabledDate} allowClear onOk={onModalOkHandler} - showTime + onCalendarChange={onModalOkHandler} /> ); diff --git a/frontend/src/container/TopNav/DateTimeSelection/index.tsx b/frontend/src/container/TopNav/DateTimeSelection/index.tsx index 12e47e9f73..614e977a12 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/index.tsx +++ b/frontend/src/container/TopNav/DateTimeSelection/index.tsx @@ -28,6 +28,7 @@ import { GlobalReducer } from 'types/reducer/globalTime'; import AutoRefresh from '../AutoRefresh'; import CustomDateTimeModal, { DateTimeRangeType } from '../CustomDateTimeModal'; +import { Time as TimeV2 } from '../DateTimeSelectionV2/config'; import { getDefaultOption, getOptions, @@ -46,6 +47,7 @@ function DateTimeSelection({ const [formSelector] = Form.useForm(); const [hasSelectedTimeError, setHasSelectedTimeError] = useState(false); + const [isOpen, setIsOpen] = useState(false); const urlQuery = useUrlQuery(); const searchStartTime = urlQuery.get('startTime'); @@ -120,7 +122,7 @@ function DateTimeSelection({ const getInputLabel = ( startTime?: Dayjs, endTime?: Dayjs, - timeInterval: Time = '15min', + timeInterval: Time | TimeV2 = '15min', ): string | Time => { if (startTime && endTime && timeInterval === 'custom') { const format = 'YYYY/MM/DD HH:mm'; @@ -168,7 +170,7 @@ function DateTimeSelection({ return defaultSelectedOption; }; - const updateLocalStorageForRoutes = (value: Time | string): void => { + const updateLocalStorageForRoutes = (value: Time | TimeV2 | string): void => { const preRoutes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION); if (preRoutes !== null) { const preRoutesObject = JSON.parse(preRoutes); @@ -223,7 +225,7 @@ function DateTimeSelection({ [location.pathname], ); - const onSelectHandler = (value: Time): void => { + const onSelectHandler = (value: Time | TimeV2): void => { if (value !== 'custom') { updateTimeInterval(value); updateLocalStorageForRoutes(value); @@ -346,6 +348,8 
@@ function DateTimeSelection({ > { onSelectHandler(value as Time); }} @@ -402,7 +406,7 @@ function DateTimeSelection({ interface DispatchProps { updateTimeInterval: ( - interval: Time, + interval: Time | TimeV2, dateTimeRange?: [number, number], ) => (dispatch: Dispatch) => void; globalTimeLoading: () => void; diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss new file mode 100644 index 0000000000..73688ad109 --- /dev/null +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/DateTimeSelectionV2.styles.scss @@ -0,0 +1,231 @@ +.date-time-selector { + display: flex; + align-items: center; + + .date-time-input-element { + display: flex; + align-items: center; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + box-shadow: none !important; + background: var(--bg-ink-300); + display: flex; + min-width: 192px; + height: 34px; + padding: 6px 6px 6px 8px; + flex-shrink: 0; + text-align: start; + margin-right: 8px; + justify-content: space-between; + align-items: center; + + .date-time-input-content { + display: flex; + align-items: center; + } + .time-btn { + margin-right: 8px; + } + .down-arrow { + margin-left: 6px; + } + } + + .refresh-actions { + display: flex; + flex-direction: row; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + margin-left: 8px; + + .refresh-btn { + border-right: 1px solid var(--bg-slate-400); + } + .ant-btn { + display: flex; + padding: 4px 8px; + align-items: center; + box-shadow: none; + border: none; + + &.active-tab { + background-color: #1d212d; + } + } + } +} + +.date-time-root { + .ant-popover-inner { + width: 532px; + min-height: 334px; + border-radius: 4px !important; + border: 1px solid var(--bg-slate-400); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2) !important; + padding: 0px !important; + border-radius: 4px; + background: linear-gradient( + 
139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ) !important; + backdrop-filter: blur(20px); + } + .ant-popover-arrow { + display: none; + } +} + +.date-time-popover { + display: flex; + + .date-time-options { + display: flex; + width: 224px; + flex-direction: column; + border-right: 1px solid #1d212d; + + .data-time-live { + border-bottom: 1px solid #1d212d; + text-align: start; + padding: 13.5px 14px; + height: 44px; + color: var(--bg-vanilla-400, #c0c1c3); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; + border-bottom: 1px solid var(--bg-slate-400, #1d212d); + } + + .active { + background-color: rgba(171, 189, 255, 0.04) !important; + } + + .data-time-live:hover { + &.ant-btn-text { + background-color: rgba(171, 189, 255, 0.04) !important; + } + } + + .date-time-options-btn { + text-align: start; + padding: 8px 13px; + height: 37px; + color: var(--bg-vanilla-400, #c0c1c3); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; + } + .date-time-options-btn:hover { + &.ant-btn-text { + background-color: rgba(171, 189, 255, 0.04) !important; + } + } + } + + .relative-date-time { + width: 307px; + display: flex; + flex-direction: column; + gap: 35px; + padding: 13px 14px; + + .relative-date-time-section { + display: flex; + gap: 6px; + flex-flow: wrap; + } + .time-heading { + text-align: left; + color: var(--bg-slate-200); + font-size: 11px; + font-style: normal; + font-weight: 500; + line-height: 18px; + letter-spacing: 0.88px; + padding-bottom: 8px; + } + + .time-btns { + color: var(--bg-vanilla-400); + background-color: #23262e; + font-size: 14px; + line-height: 17px; + letter-spacing: 0.04em; + text-align: left; + border-radius: 2px; + padding: 4px 8px; + } + } +} + +.lightMode { + .date-time-input-element { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + } + + .refresh-actions { + border: 
1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + + .refresh-btn { + border-right: 1px solid var(--bg-vanilla-300); + } + } + + .date-time-root { + .ant-popover-inner { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100) !important; + } + } + + .date-time-popover { + .date-time-options { + border-right: 1px solid var(--bg-vanilla-400); + + .data-time-live { + border-bottom: 1px solid var(--bg-vanilla-400); + color: var(--bg-slate-400); + border-bottom: 1px solid var(--bg-vanilla-400); + } + + .date-time-options-btn { + color: var(--bg-slate-400); + } + + .active { + background-color: var(--bg-vanilla-300) !important; + } + + .data-time-live:hover { + &.ant-btn-text { + background-color: var(--bg-vanilla-300) !important; + } + } + + .date-time-options-btn:hover { + &.ant-btn-text { + background-color: var(--bg-vanilla-300) !important; + } + } + } + + .relative-date-time { + .time-heading { + color: var(--bg-vanilla-400); + } + + .time-btns { + background-color: var(--bg-vanilla-300); + color: var(--bg-slate-400); + } + } + } +} diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/Refresh.tsx b/frontend/src/container/TopNav/DateTimeSelectionV2/Refresh.tsx new file mode 100644 index 0000000000..6e608b1189 --- /dev/null +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/Refresh.tsx @@ -0,0 +1,36 @@ +import { useEffect, useState } from 'react'; + +import { RefreshTextContainer, Typography } from './styles'; + +function RefreshText({ + onLastRefreshHandler, + refreshButtonHidden, +}: RefreshTextProps): JSX.Element { + const [refreshText, setRefreshText] = useState(''); + + // this is to update the refresh text + useEffect(() => { + const interval = setInterval(() => { + const text = onLastRefreshHandler(); + if (refreshText !== text) { + setRefreshText(text); + } + }, 2000); + return (): void => { + clearInterval(interval); + }; + }, [onLastRefreshHandler, refreshText]); + + return ( + + {refreshText} + + ); 
+} + +interface RefreshTextProps { + onLastRefreshHandler: () => string; + refreshButtonHidden: boolean; +} + +export default RefreshText; diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts new file mode 100644 index 0000000000..053b8d3a1f --- /dev/null +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/config.ts @@ -0,0 +1,149 @@ +/* eslint-disable sonarjs/no-duplicate-string */ +import ROUTES from 'constants/routes'; + +type FiveMin = '5min'; +type TenMin = '10min'; +type FifteenMin = '15min'; +type ThirtyMin = '30min'; +type FortyFiveMin = '45min'; +type OneMin = '1min'; +type ThreeHour = '3hr'; +type SixHour = '6hr'; +type OneHour = '1hr'; +type FourHour = '4hr'; +type TwelveHour = '12hr'; +type OneDay = '1day'; +type ThreeDay = '3days'; +type FourDay = '4days'; +type TenDay = '10days'; +type OneWeek = '1week'; +type TwoWeek = '2weeks'; +type SixWeek = '6weeks'; +type TwoMonths = '2months'; +type Custom = 'custom'; + +export type Time = + | FiveMin + | TenMin + | FifteenMin + | ThirtyMin + | OneMin + | ThreeHour + | FourHour + | SixHour + | OneHour + | Custom + | OneWeek + | SixWeek + | OneDay + | FourDay + | ThreeDay + | FortyFiveMin + | TwelveHour + | TenDay + | TwoWeek + | TwoMonths; + +export const Options: Option[] = [ + { value: '5min', label: 'Last 5 minutes' }, + { value: '15min', label: 'Last 15 minutes' }, + { value: '30min', label: 'Last 30 minutes' }, + { value: '1hr', label: 'Last 1 hour' }, + { value: '6hr', label: 'Last 6 hours' }, + { value: '1day', label: 'Last 1 day' }, + { value: '3days', label: 'Last 3 days' }, + { value: '1week', label: 'Last 1 week' }, + { value: 'custom', label: 'Custom...' 
}, +]; + +export interface Option { + value: Time; + label: string; +} + +export const RelativeDurationOptions: Option[] = [ + { value: '5min', label: 'Last 5 minutes' }, + { value: '15min', label: 'Last 15 minutes' }, + { value: '30min', label: 'Last 30 minutes' }, + { value: '1hr', label: 'Last 1 hour' }, + { value: '6hr', label: 'Last 6 hour' }, + { value: '1day', label: 'Last 1 day' }, + { value: '3days', label: 'Last 3 days' }, + { value: '1week', label: 'Last 1 week' }, +]; + +export const RelativeDurationSuggestionOptions: Option[] = [ + { value: '3hr', label: '3h' }, + { value: '4days', label: '4d' }, + { value: '6weeks', label: '6w' }, + { value: '12hr', label: '12 hours' }, + { value: '10days', label: '10d' }, + { value: '2weeks', label: '2 weeks' }, + { value: '2months', label: 'Last 2 months' }, + { value: '1day', label: 'today' }, +]; +export const FixedDurationSuggestionOptions: Option[] = [ + { value: '45min', label: '45m' }, + { value: '12hr', label: '12 hours' }, + { value: '10days', label: '10d' }, + { value: '2weeks', label: '2 weeks' }, + { value: '2months', label: 'Last 2 months' }, + { value: '1day', label: 'today' }, +]; + +export const getDefaultOption = (route: string): Time => { + if (route === ROUTES.SERVICE_MAP) { + return RelativeDurationOptions[2].value; + } + if (route === ROUTES.APPLICATION) { + return Options[2].value; + } + return Options[2].value; +}; + +export const getOptions = (routes: string): Option[] => { + if (routes === ROUTES.SERVICE_MAP) { + return RelativeDurationOptions; + } + return Options; +}; + +export const routesToHideBreadCrumbs = [ROUTES.SUPPORT, ROUTES.ALL_DASHBOARD]; + +export const routesToSkip = [ + ROUTES.SETTINGS, + ROUTES.LIST_ALL_ALERT, + ROUTES.TRACE_DETAIL, + ROUTES.ALL_CHANNELS, + ROUTES.USAGE_EXPLORER, + ROUTES.GET_STARTED, + ROUTES.VERSION, + ROUTES.ALL_DASHBOARD, + ROUTES.ORG_SETTINGS, + ROUTES.INGESTION_SETTINGS, + ROUTES.ERROR_DETAIL, + ROUTES.LOGS_PIPELINES, + ROUTES.BILLING, + ROUTES.SUPPORT, 
+ ROUTES.WORKSPACE_LOCKED, + ROUTES.LOGS, + ROUTES.MY_SETTINGS, + ROUTES.LIST_LICENSES, + ROUTES.LOGS_SAVE_VIEWS, + ROUTES.LOGS_PIPELINES, + ROUTES.TRACES_EXPLORER, + ROUTES.TRACES_SAVE_VIEWS, + ROUTES.SHORTCUTS, +]; + +export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS]; + +export interface LocalStorageTimeRange { + localstorageStartTime: string | null; + localstorageEndTime: string | null; +} + +export interface TimeRange { + startTime: string; + endTime: string; +} diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx new file mode 100644 index 0000000000..157397f026 --- /dev/null +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/index.tsx @@ -0,0 +1,490 @@ +import './DateTimeSelectionV2.styles.scss'; + +import { SyncOutlined } from '@ant-design/icons'; +import { Button } from 'antd'; +import getLocalStorageKey from 'api/browser/localstorage/get'; +import setLocalStorageKey from 'api/browser/localstorage/set'; +import CustomTimePicker from 'components/CustomTimePicker/CustomTimePicker'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import { QueryParams } from 'constants/query'; +import { + initialQueryBuilderFormValuesMap, + PANEL_TYPES, +} from 'constants/queryBuilder'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import ROUTES from 'constants/routes'; +import { + constructCompositeQuery, + defaultLiveQueryDataConfig, +} from 'container/LiveLogs/constants'; +import { QueryHistoryState } from 'container/LiveLogs/types'; +import dayjs, { Dayjs } from 'dayjs'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; +import useUrlQuery from 'hooks/useUrlQuery'; +import GetMinMax from 'lib/getMinMax'; +import getTimeString from 'lib/getTimeString'; +import history from 'lib/history'; +import { isObject } from 'lodash-es'; +import { useCallback, 
useEffect, useMemo, useState } from 'react'; +import { useQueryClient } from 'react-query'; +import { connect, useSelector } from 'react-redux'; +import { RouteComponentProps, withRouter } from 'react-router-dom'; +import { bindActionCreators, Dispatch } from 'redux'; +import { ThunkDispatch } from 'redux-thunk'; +import { GlobalTimeLoading, UpdateTimeInterval } from 'store/actions'; +import { AppState } from 'store/reducers'; +import AppActions from 'types/actions'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import AutoRefresh from '../AutoRefreshV2'; +import { DateTimeRangeType } from '../CustomDateTimeModal'; +import { + getDefaultOption, + getOptions, + LocalStorageTimeRange, + Time, + TimeRange, +} from './config'; +import RefreshText from './Refresh'; +import { Form, FormContainer, FormItem } from './styles'; + +function DateTimeSelection({ + showAutoRefresh, + location, + updateTimeInterval, + globalTimeLoading, +}: Props): JSX.Element { + const [formSelector] = Form.useForm(); + + const [hasSelectedTimeError, setHasSelectedTimeError] = useState(false); + const [isOpen, setIsOpen] = useState(false); + + const urlQuery = useUrlQuery(); + const searchStartTime = urlQuery.get('startTime'); + const searchEndTime = urlQuery.get('endTime'); + const queryClient = useQueryClient(); + + const { + localstorageStartTime, + localstorageEndTime, + } = ((): LocalStorageTimeRange => { + const routes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION); + + if (routes !== null) { + const routesObject = JSON.parse(routes || '{}'); + const selectedTime = routesObject[location.pathname]; + + if (selectedTime) { + let parsedSelectedTime: TimeRange; + try { + parsedSelectedTime = JSON.parse(selectedTime); + } catch { + parsedSelectedTime = selectedTime; + } + + if (isObject(parsedSelectedTime)) { + return { + 
localstorageStartTime: parsedSelectedTime.startTime, + localstorageEndTime: parsedSelectedTime.endTime, + }; + } + return { localstorageStartTime: null, localstorageEndTime: null }; + } + } + return { localstorageStartTime: null, localstorageEndTime: null }; + })(); + + const getTime = useCallback((): [number, number] | undefined => { + if (searchEndTime && searchStartTime) { + const startDate = dayjs( + new Date(parseInt(getTimeString(searchStartTime), 10)), + ); + const endDate = dayjs(new Date(parseInt(getTimeString(searchEndTime), 10))); + + return [startDate.toDate().getTime() || 0, endDate.toDate().getTime() || 0]; + } + if (localstorageStartTime && localstorageEndTime) { + const startDate = dayjs(localstorageStartTime); + const endDate = dayjs(localstorageEndTime); + + return [startDate.toDate().getTime() || 0, endDate.toDate().getTime() || 0]; + } + return undefined; + }, [ + localstorageEndTime, + localstorageStartTime, + searchEndTime, + searchStartTime, + ]); + + const [options, setOptions] = useState(getOptions(location.pathname)); + const [refreshButtonHidden, setRefreshButtonHidden] = useState(false); + const [customDateTimeVisible, setCustomDTPickerVisible] = useState( + false, + ); + + const { stagedQuery, initQueryBuilderData, panelType } = useQueryBuilder(); + + const handleGoLive = useCallback(() => { + if (!stagedQuery) return; + + setIsOpen(false); + let queryHistoryState: QueryHistoryState | null = null; + + const compositeQuery = constructCompositeQuery({ + query: stagedQuery, + initialQueryData: initialQueryBuilderFormValuesMap.logs, + customQueryData: defaultLiveQueryDataConfig, + }); + + const isListView = + panelType === PANEL_TYPES.LIST && stagedQuery.builder.queryData[0]; + + if (isListView) { + const [graphQuery, listQuery] = queryClient.getQueriesData< + SuccessResponse | ErrorResponse + >({ + queryKey: REACT_QUERY_KEY.GET_QUERY_RANGE, + active: true, + }); + + queryHistoryState = { + graphQueryPayload: + graphQuery && graphQuery[1] + 
? graphQuery[1].payload?.data.result || [] + : [], + listQueryPayload: + listQuery && listQuery[1] + ? listQuery[1].payload?.data.newResult.data.result || [] + : [], + }; + } + + const JSONCompositeQuery = encodeURIComponent(JSON.stringify(compositeQuery)); + + const path = `${ROUTES.LIVE_LOGS}?${QueryParams.compositeQuery}=${JSONCompositeQuery}`; + + history.push(path, queryHistoryState); + }, [panelType, queryClient, stagedQuery]); + + const { maxTime, minTime, selectedTime } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + + const getInputLabel = ( + startTime?: Dayjs, + endTime?: Dayjs, + timeInterval: Time = '15min', + ): string | Time => { + if (startTime && endTime && timeInterval === 'custom') { + const format = 'DD/MM/YYYY HH:mm'; + + const startString = startTime.format(format); + const endString = endTime.format(format); + + return `${startString} - ${endString}`; + } + + return timeInterval; + }; + + useEffect(() => { + if (selectedTime === 'custom') { + setRefreshButtonHidden(true); + setCustomDTPickerVisible(true); + } else { + setRefreshButtonHidden(false); + setCustomDTPickerVisible(false); + } + }, [selectedTime]); + + const getDefaultTime = (pathName: string): Time => { + const defaultSelectedOption = getDefaultOption(pathName); + + const routes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION); + + if (routes !== null) { + const routesObject = JSON.parse(routes || '{}'); + const selectedTime = routesObject[pathName]; + + if (selectedTime) { + let parsedSelectedTime: TimeRange; + try { + parsedSelectedTime = JSON.parse(selectedTime); + } catch { + parsedSelectedTime = selectedTime; + } + if (isObject(parsedSelectedTime)) { + return 'custom'; + } + + return selectedTime; + } + } + + return defaultSelectedOption; + }; + + const updateLocalStorageForRoutes = (value: Time | string): void => { + const preRoutes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION); + if (preRoutes !== null) { + const 
preRoutesObject = JSON.parse(preRoutes); + + const preRoute = { + ...preRoutesObject, + }; + preRoute[location.pathname] = value; + + setLocalStorageKey( + LOCALSTORAGE.METRICS_TIME_IN_DURATION, + JSON.stringify(preRoute), + ); + } + }; + + const onLastRefreshHandler = useCallback(() => { + const currentTime = dayjs(); + + const lastRefresh = dayjs( + selectedTime === 'custom' ? minTime / 1000000 : maxTime / 1000000, + ); + + const secondsDiff = currentTime.diff(lastRefresh, 'seconds'); + + const minutedDiff = currentTime.diff(lastRefresh, 'minutes'); + const hoursDiff = currentTime.diff(lastRefresh, 'hours'); + const daysDiff = currentTime.diff(lastRefresh, 'days'); + const monthsDiff = currentTime.diff(lastRefresh, 'months'); + + if (monthsDiff > 0) { + return `Refreshed ${monthsDiff} months ago`; + } + + if (daysDiff > 0) { + return `Refreshed ${daysDiff} days ago`; + } + + if (hoursDiff > 0) { + return `Refreshed ${hoursDiff} hrs ago`; + } + + if (minutedDiff > 0) { + return `Refreshed ${minutedDiff} mins ago`; + } + + return `Refreshed ${secondsDiff} sec ago`; + }, [maxTime, minTime, selectedTime]); + + const isLogsExplorerPage = useMemo( + () => location.pathname === ROUTES.LOGS_EXPLORER, + [location.pathname], + ); + + const onSelectHandler = (value: Time): void => { + if (value !== 'custom') { + setIsOpen(false); + updateTimeInterval(value); + updateLocalStorageForRoutes(value); + if (refreshButtonHidden) { + setRefreshButtonHidden(false); + } + } else { + setRefreshButtonHidden(true); + setCustomDTPickerVisible(true); + } + + const { maxTime, minTime } = GetMinMax(value, getTime()); + + if (!isLogsExplorerPage) { + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); + } + + if (!stagedQuery) { + return; + } + // the second boolean param directs the qb about the time change so to merge the query 
and retain the current state + initQueryBuilderData(updateStepInterval(stagedQuery, maxTime, minTime), true); + }; + + const onRefreshHandler = (): void => { + onSelectHandler(selectedTime); + onLastRefreshHandler(); + }; + + const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => { + if (dateTimeRange !== null) { + const [startTimeMoment, endTimeMoment] = dateTimeRange; + if (startTimeMoment && endTimeMoment) { + setCustomDTPickerVisible(false); + startTimeMoment.startOf('day').toString(); + updateTimeInterval('custom', [ + startTimeMoment.startOf('day').toDate().getTime(), + endTimeMoment.endOf('day').toDate().getTime(), + ]); + setLocalStorageKey('startTime', startTimeMoment.toString()); + setLocalStorageKey('endTime', endTimeMoment.toString()); + updateLocalStorageForRoutes( + JSON.stringify({ startTime: startTimeMoment, endTime: endTimeMoment }), + ); + + if (!isLogsExplorerPage) { + urlQuery.set( + QueryParams.startTime, + startTimeMoment?.toDate().getTime().toString(), + ); + urlQuery.set( + QueryParams.endTime, + endTimeMoment?.toDate().getTime().toString(), + ); + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); + } + } + } + }; + + // this is triggred when we change the routes and based on that we are changing the default options + useEffect(() => { + const metricsTimeDuration = getLocalStorageKey( + LOCALSTORAGE.METRICS_TIME_IN_DURATION, + ); + + if (metricsTimeDuration === null) { + setLocalStorageKey( + LOCALSTORAGE.METRICS_TIME_IN_DURATION, + JSON.stringify({}), + ); + } + + const currentRoute = location.pathname; + const time = getDefaultTime(currentRoute); + + const currentOptions = getOptions(currentRoute); + setOptions(currentOptions); + + const getCustomOrIntervalTime = (time: Time): Time => { + if (searchEndTime !== null && searchStartTime !== null) { + return 'custom'; + } + if ( + (localstorageEndTime === null || localstorageStartTime === null) && + time === 'custom' + ) { + 
return getDefaultOption(currentRoute); + } + + return time; + }; + + const updatedTime = getCustomOrIntervalTime(time); + + const [preStartTime = 0, preEndTime = 0] = getTime() || []; + + setRefreshButtonHidden(updatedTime === 'custom'); + + updateTimeInterval(updatedTime, [preStartTime, preEndTime]); + + if (updatedTime !== 'custom') { + const { minTime, maxTime } = GetMinMax(updatedTime); + urlQuery.set(QueryParams.startTime, minTime.toString()); + urlQuery.set(QueryParams.endTime, maxTime.toString()); + } else { + urlQuery.set(QueryParams.startTime, preStartTime.toString()); + urlQuery.set(QueryParams.endTime, preEndTime.toString()); + } + const generatedUrl = `${location.pathname}?${urlQuery.toString()}`; + history.replace(generatedUrl); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.pathname, updateTimeInterval, globalTimeLoading]); + + return ( +
+ {!hasSelectedTimeError && !refreshButtonHidden && ( + + )} +
+ + { + onSelectHandler(value as Time); + }} + onError={(hasError: boolean): void => { + setHasSelectedTimeError(hasError); + }} + selectedTime={selectedTime} + onValidCustomDateChange={(dateTime): void => + onCustomDateHandler(dateTime as DateTimeRangeType) + } + selectedValue={getInputLabel( + dayjs(minTime / 1000000), + dayjs(maxTime / 1000000), + selectedTime, + )} + data-testid="dropDown" + items={options} + newPopover + handleGoLive={handleGoLive} + onCustomDateHandler={onCustomDateHandler} + customDateTimeVisible={customDateTimeVisible} + setCustomDTPickerVisible={setCustomDTPickerVisible} + /> + + {showAutoRefresh && selectedTime !== 'custom' && ( +
+
+ )} +
+ +
+ ); +} + +interface DateTimeSelectionV2Props { + showAutoRefresh: boolean; +} +interface DispatchProps { + updateTimeInterval: ( + interval: Time, + dateTimeRange?: [number, number], + ) => (dispatch: Dispatch) => void; + globalTimeLoading: () => void; +} + +const mapDispatchToProps = ( + dispatch: ThunkDispatch, +): DispatchProps => ({ + updateTimeInterval: bindActionCreators(UpdateTimeInterval, dispatch), + globalTimeLoading: bindActionCreators(GlobalTimeLoading, dispatch), +}); + +type Props = DateTimeSelectionV2Props & DispatchProps & RouteComponentProps; + +export default connect(null, mapDispatchToProps)(withRouter(DateTimeSelection)); diff --git a/frontend/src/container/TopNav/DateTimeSelectionV2/styles.ts b/frontend/src/container/TopNav/DateTimeSelectionV2/styles.ts new file mode 100644 index 0000000000..d35d5e764d --- /dev/null +++ b/frontend/src/container/TopNav/DateTimeSelectionV2/styles.ts @@ -0,0 +1,34 @@ +import { Form as FormComponent, Typography as TypographyComponent } from 'antd'; +import styled from 'styled-components'; + +export const Form = styled(FormComponent)` + &&& { + justify-content: flex-end; + } +`; + +export const Typography = styled(TypographyComponent)` + &&& { + text-align: right; + } +`; + +export const FormItem = styled(Form.Item)` + &&& { + margin: 0; + } +`; + +interface Props { + refreshButtonHidden: boolean; +} + +export const RefreshTextContainer = styled.div` + padding-right: 8px; + visibility: ${({ refreshButtonHidden }): string => + refreshButtonHidden ? 
'hidden' : 'visible'}; +`; + +export const FormContainer = styled.div` + display: flex; +`; diff --git a/frontend/src/container/TopNav/index.tsx b/frontend/src/container/TopNav/index.tsx index 3b2667eaf6..5277908240 100644 --- a/frontend/src/container/TopNav/index.tsx +++ b/frontend/src/container/TopNav/index.tsx @@ -4,8 +4,8 @@ import { useMemo } from 'react'; import { matchPath, useHistory } from 'react-router-dom'; import NewExplorerCTA from '../NewExplorerCTA'; -import DateTimeSelector from './DateTimeSelection'; -import { routesToDisable, routesToSkip } from './DateTimeSelection/config'; +import DateTimeSelector from './DateTimeSelectionV2'; +import { routesToDisable, routesToSkip } from './DateTimeSelectionV2/config'; function TopNav(): JSX.Element | null { const { location } = useHistory(); @@ -31,26 +31,24 @@ function TopNav(): JSX.Element | null { [location.pathname], ); - if (isSignUpPage || isDisabled) { + if (isSignUpPage || isDisabled || isRouteToSkip) { return null; } - return ( - - {!isRouteToSkip && ( -
- - - -
- -
-
-
- - )} + return !isRouteToSkip ? ( + + + + + +
+ +
+
+
+ - ); + ) : null; } export default TopNav; diff --git a/frontend/src/container/Trace/Search/styles.ts b/frontend/src/container/Trace/Search/styles.ts index 5ade20596d..cfae21ceb2 100644 --- a/frontend/src/container/Trace/Search/styles.ts +++ b/frontend/src/container/Trace/Search/styles.ts @@ -7,6 +7,7 @@ export const Container = styled.div` display: flex; position: relative; width: 100%; + margin-top: 1rem; `; export const SearchComponent = styled(Search)` diff --git a/frontend/src/hooks/dashboard/useDashboardFromLocalStorage.tsx b/frontend/src/hooks/dashboard/useDashboardFromLocalStorage.tsx new file mode 100644 index 0000000000..965f1d70c5 --- /dev/null +++ b/frontend/src/hooks/dashboard/useDashboardFromLocalStorage.tsx @@ -0,0 +1,100 @@ +import getLocalStorageKey from 'api/browser/localstorage/get'; +import setLocalStorageKey from 'api/browser/localstorage/set'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import { defaultTo } from 'lodash-es'; +import { useEffect, useState } from 'react'; +import { IDashboardVariable } from 'types/api/dashboard/getAll'; + +interface LocalStoreDashboardVariables { + [id: string]: { + selectedValue: IDashboardVariable['selectedValue']; + allSelected: boolean; + }; +} +interface DashboardLocalStorageVariables { + [id: string]: LocalStoreDashboardVariables; +} + +interface UseDashboardVariablesFromLocalStorageReturn { + currentDashboard: LocalStoreDashboardVariables; + updateLocalStorageDashboardVariables: ( + id: string, + selectedValue: IDashboardVariable['selectedValue'], + allSelected: boolean, + ) => void; +} + +export const useDashboardVariablesFromLocalStorage = ( + dashboardId: string, +): UseDashboardVariablesFromLocalStorageReturn => { + const [ + allDashboards, + setAllDashboards, + ] = useState({}); + + const [ + currentDashboard, + setCurrentDashboard, + ] = useState({}); + + useEffect(() => { + const localStoreDashboardVariablesString = getLocalStorageKey( + LOCALSTORAGE.DASHBOARD_VARIABLES, + ); + let 
localStoreDashboardVariables: DashboardLocalStorageVariables = {}; + if (localStoreDashboardVariablesString === null) { + try { + const serialzedData = JSON.stringify({ + [dashboardId]: {}, + }); + + setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serialzedData); + } catch { + console.error('Failed to seralise the data'); + } + } else { + try { + localStoreDashboardVariables = JSON.parse( + localStoreDashboardVariablesString, + ); + } catch { + console.error('Failed to parse dashboards from local storage'); + localStoreDashboardVariables = {}; + } finally { + setAllDashboards(localStoreDashboardVariables); + } + } + setCurrentDashboard(defaultTo(localStoreDashboardVariables[dashboardId], {})); + }, [dashboardId]); + + const updateLocalStorageDashboardVariables = ( + id: string, + selectedValue: IDashboardVariable['selectedValue'], + allSelected: boolean, + ): void => { + const newCurrentDashboard = { + ...currentDashboard, + [id]: { selectedValue, allSelected }, + }; + + const newAllDashboards = { + ...allDashboards, + [dashboardId]: newCurrentDashboard, + }; + + try { + const serializedData = JSON.stringify(newAllDashboards); + setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serializedData); + } catch { + console.error('Failed to set dashboards in local storage'); + } + + setAllDashboards(newAllDashboards); + setCurrentDashboard(newCurrentDashboard); + }; + + return { + currentDashboard, + updateLocalStorageDashboardVariables, + }; +}; diff --git a/frontend/src/hooks/hotkeys/__tests__/useKeyboardHotkeys.test.tsx b/frontend/src/hooks/hotkeys/__tests__/useKeyboardHotkeys.test.tsx new file mode 100644 index 0000000000..11e26b06f1 --- /dev/null +++ b/frontend/src/hooks/hotkeys/__tests__/useKeyboardHotkeys.test.tsx @@ -0,0 +1,75 @@ +import { render } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; + +import { + KeyboardHotkeysProvider, + useKeyboardHotkeys, +} from '../useKeyboardHotkeys'; + +function 
TestComponentWithRegister({ + handleShortcut, +}: { + handleShortcut: () => void; +}): JSX.Element { + const { registerShortcut } = useKeyboardHotkeys(); + + registerShortcut('a', handleShortcut); + + return ( +
+ Test Component +
+ ); +} +function TestComponentWithDeRegister({ + handleShortcut, +}: { + handleShortcut: () => void; +}): JSX.Element { + const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys(); + + registerShortcut('b', handleShortcut); + + // Deregister the shortcut before triggering it + deregisterShortcut('b'); + + return ( +
+ Test Component +
+ ); +} + +describe('KeyboardHotkeysProvider', () => { + it('registers and triggers shortcuts correctly', async () => { + const handleShortcut = jest.fn(); + + render( + + + , + ); + + // Trigger the registered shortcut + await userEvent.keyboard('a'); + + // Assert that the handleShortcut function has been called + expect(handleShortcut).toHaveBeenCalled(); + }); + + it('deregisters shortcuts correctly', () => { + const handleShortcut = jest.fn(); + + render( + + + , + ); + + // Try to trigger the deregistered shortcut + userEvent.keyboard('b'); + + // Assert that the handleShortcut function has NOT been called + expect(handleShortcut).not.toHaveBeenCalled(); + }); +}); diff --git a/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx b/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx new file mode 100644 index 0000000000..ec1b861664 --- /dev/null +++ b/frontend/src/hooks/hotkeys/useKeyboardHotkeys.tsx @@ -0,0 +1,121 @@ +import { noop, unset } from 'lodash-es'; +import { + createContext, + useCallback, + useContext, + useEffect, + useMemo, + useRef, +} from 'react'; + +interface KeyboardHotkeysContextReturnValue { + /** + * @param keyCombination provide the string for which the subsequent callback should be triggered. 
Example 'ctrl+a' + * @param callback the callback that should be triggered when the above key combination is being pressed + * @returns void + */ + registerShortcut: (keyCombination: string, callback: () => void) => void; + + /** + * + * @param keyCombination provide the string for which we want to deregister the callback + * @returns void + */ + deregisterShortcut: (keyCombination: string) => void; +} + +const KeyboardHotkeysContext = createContext( + { + registerShortcut: noop, + deregisterShortcut: noop, + }, +); + +const IGNORE_INPUTS = ['input', 'textarea']; // Inputs in which hotkey events will be ignored + +const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => { + const context = useContext(KeyboardHotkeysContext); + if (!context) { + throw new Error( + 'useKeyboardHotkeys must be used within a KeyboardHotkeysProvider', + ); + } + + return context; +}; + +function KeyboardHotkeysProvider({ + children, +}: { + children: JSX.Element; +}): JSX.Element { + const shortcuts = useRef void>>({}); + + const handleKeyPress = (event: KeyboardEvent): void => { + const { key, ctrlKey, altKey, shiftKey, metaKey, target } = event; + + if (IGNORE_INPUTS.includes((target as HTMLElement).tagName.toLowerCase())) { + return; + } + + // https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/metaKey + const modifiers = { ctrlKey, altKey, shiftKey, metaKey }; + + let shortcutKey = `${key.toLowerCase()}`; + + const isAltKey = `${modifiers.altKey ? '+alt' : ''}`; + const isShiftKey = `${modifiers.shiftKey ? '+shift' : ''}`; + + // ctrl and cmd have the same functionality for mac and windows parity + const isMetaKey = `${modifiers.metaKey || modifiers.ctrlKey ? 
'+meta' : ''}`; + + shortcutKey = shortcutKey + isAltKey + isShiftKey + isMetaKey; + + if (shortcuts.current[shortcutKey]) { + shortcuts.current[shortcutKey](); + } + }; + + useEffect(() => { + document.addEventListener('keydown', handleKeyPress); + return (): void => { + document.removeEventListener('keydown', handleKeyPress); + }; + }, []); + + const registerShortcut = useCallback( + (keyCombination: string, callback: () => void): void => { + if (!shortcuts.current[keyCombination]) { + shortcuts.current[keyCombination] = callback; + } else { + throw new Error('This shortcut is already present in current scope'); + } + }, + [shortcuts], + ); + + const deregisterShortcut = useCallback( + (keyCombination: string): void => { + if (shortcuts.current[keyCombination]) { + unset(shortcuts.current, keyCombination); + } + }, + [shortcuts], + ); + + const contextValue = useMemo( + () => ({ + registerShortcut, + deregisterShortcut, + }), + [registerShortcut, deregisterShortcut], + ); + + return ( + + {children} + + ); +} + +export { KeyboardHotkeysProvider, useKeyboardHotkeys }; diff --git a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts index c874f5e6db..bd85d8f799 100644 --- a/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts +++ b/frontend/src/hooks/queryBuilder/useGetExplorerQueryRange.ts @@ -17,6 +17,7 @@ export const useGetExplorerQueryRange = ( panelType: PANEL_TYPES | null, options?: UseQueryOptions, Error>, params?: Record, + isDependentOnQB = true, ): UseQueryResult, Error> => { const { isEnabledQuery } = useQueryBuilder(); const { selectedTime: globalSelectedInterval, minTime, maxTime } = useSelector< @@ -32,11 +33,11 @@ export const useGetExplorerQueryRange = ( const isEnabled = useMemo(() => { if (!options) return isEnabledQuery; if (typeof options.enabled === 'boolean') { - return isEnabledQuery && options.enabled; + return options.enabled && (!isDependentOnQB || 
isEnabledQuery); } return isEnabledQuery; - }, [options, isEnabledQuery]); + }, [options, isEnabledQuery, isDependentOnQB]); return useGetQueryRange( { diff --git a/frontend/src/hooks/useClickOutside.tsx b/frontend/src/hooks/useClickOutside.tsx new file mode 100644 index 0000000000..d53415dbb6 --- /dev/null +++ b/frontend/src/hooks/useClickOutside.tsx @@ -0,0 +1,28 @@ +import { useEffect } from 'react'; + +type UseClickOutsideProps = { + ref: React.RefObject; + onClickOutside: () => void; +}; + +const useClickOutside = ({ + ref, + onClickOutside, +}: UseClickOutsideProps): void => { + const handleClickOutside = (event: MouseEvent): void => { + if (ref.current && !ref.current.contains(event.target as Node)) { + onClickOutside(); + } + }; + + useEffect(() => { + document.addEventListener('click', handleClickOutside); + + return (): void => { + document.removeEventListener('click', handleClickOutside); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ref, onClickOutside]); +}; + +export default useClickOutside; diff --git a/frontend/src/hooks/useDarkMode/index.tsx b/frontend/src/hooks/useDarkMode/index.tsx index baf0c21511..00df811704 100644 --- a/frontend/src/hooks/useDarkMode/index.tsx +++ b/frontend/src/hooks/useDarkMode/index.tsx @@ -82,6 +82,30 @@ export const useThemeConfig = (): ThemeConfig => { colorLink: '#4E74F8', colorPrimaryText: '#3F5ECC', }, + components: { + Dropdown: { + colorBgElevated: isDarkMode ? '#121317' : '#fff', + controlItemBgHover: isDarkMode ? '#1D212D' : '#fff', + colorText: isDarkMode ? '#C0C1C3' : '#121317', + fontSize: 12, + }, + Select: { + colorBgElevated: isDarkMode ? '#121317' : '#fff', + controlItemBgHover: isDarkMode ? '#1D212D' : '#fff', + boxShadowSecondary: isDarkMode + ? '4px 10px 16px 2px rgba(0, 0, 0, 0.30)' + : '#fff', + colorText: isDarkMode ? '#C0C1C3' : '#121317', + fontSize: 12, + }, + Button: { + paddingInline: 12, + fontSize: 12, + }, + Input: { + colorBorder: isDarkMode ? 
'#1D212D' : '#E9E9E9', + }, + }, }; }; diff --git a/frontend/src/hooks/useHandleExplorerTabChange.ts b/frontend/src/hooks/useHandleExplorerTabChange.ts index 415c4807c8..f3b3000cfc 100644 --- a/frontend/src/hooks/useHandleExplorerTabChange.ts +++ b/frontend/src/hooks/useHandleExplorerTabChange.ts @@ -1,5 +1,6 @@ import { QueryParams } from 'constants/query'; import { initialAutocompleteData, PANEL_TYPES } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; import { SIGNOZ_VALUE } from 'container/QueryBuilder/filters/OrderByFilter/constants'; import { useCallback } from 'react'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; @@ -12,6 +13,7 @@ export const useHandleExplorerTabChange = (): { handleExplorerTabChange: ( type: string, querySearchParameters?: ICurrentQueryData, + redirectToUrl?: typeof ROUTES[keyof typeof ROUTES], ) => void; } => { const { @@ -51,18 +53,34 @@ export const useHandleExplorerTabChange = (): { ); const handleExplorerTabChange = useCallback( - (type: string, currentQueryData?: ICurrentQueryData) => { + ( + type: string, + currentQueryData?: ICurrentQueryData, + redirectToUrl?: typeof ROUTES[keyof typeof ROUTES], + ) => { const newPanelType = type as PANEL_TYPES; if (newPanelType === panelType && !currentQueryData) return; const query = currentQueryData?.query || getUpdateQuery(newPanelType); - redirectWithQueryBuilderData(query, { - [QueryParams.panelTypes]: newPanelType, - [QueryParams.viewName]: currentQueryData?.name || viewName, - [QueryParams.viewKey]: currentQueryData?.uuid || viewKey, - }); + if (redirectToUrl) { + redirectWithQueryBuilderData( + query, + { + [QueryParams.panelTypes]: newPanelType, + [QueryParams.viewName]: currentQueryData?.name || viewName, + [QueryParams.viewKey]: currentQueryData?.uuid || viewKey, + }, + redirectToUrl, + ); + } else { + redirectWithQueryBuilderData(query, { + [QueryParams.panelTypes]: newPanelType, + [QueryParams.viewName]: currentQueryData?.name || viewName, + 
[QueryParams.viewKey]: currentQueryData?.uuid || viewKey, + }); + } }, [panelType, getUpdateQuery, redirectWithQueryBuilderData, viewName, viewKey], ); diff --git a/frontend/src/hooks/useQueryService.ts b/frontend/src/hooks/useQueryService.ts index 66460857a7..c13654c56b 100644 --- a/frontend/src/hooks/useQueryService.ts +++ b/frontend/src/hooks/useQueryService.ts @@ -1,6 +1,7 @@ import getService from 'api/metrics/getService'; import { AxiosError } from 'axios'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { QueryKey, useQuery, @@ -26,7 +27,7 @@ export const useQueryService = ({ interface UseQueryServiceProps { minTime: number; maxTime: number; - selectedTime: Time; + selectedTime: Time | TimeV2; selectedTags: Tags[]; options?: UseQueryOptions; } diff --git a/frontend/src/index.html.ejs b/frontend/src/index.html.ejs index 8d756463cd..8a4e407ec5 100644 --- a/frontend/src/index.html.ejs +++ b/frontend/src/index.html.ejs @@ -67,7 +67,7 @@ diff --git a/frontend/src/lib/dashboard/getQueryResults.ts b/frontend/src/lib/dashboard/getQueryResults.ts index 89ba08f891..ac012ce3a7 100644 --- a/frontend/src/lib/dashboard/getQueryResults.ts +++ b/frontend/src/lib/dashboard/getQueryResults.ts @@ -6,6 +6,7 @@ import { getMetricsQueryRange } from 'api/metrics/getQueryRange'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { Pagination } from 'hooks/queryPagination'; import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld'; import { isEmpty } from 'lodash-es'; @@ -61,7 +62,7 @@ export interface GetQueryResultsProps { query: Query; graphType: PANEL_TYPES; selectedTime: timePreferenceType; - globalSelectedInterval: Time; 
+ globalSelectedInterval: Time | TimeV2; variables?: Record; params?: Record; tableParams?: { diff --git a/frontend/src/lib/getMinMax.ts b/frontend/src/lib/getMinMax.ts index b10ac7a982..c52436063d 100644 --- a/frontend/src/lib/getMinMax.ts +++ b/frontend/src/lib/getMinMax.ts @@ -1,10 +1,11 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { GlobalReducer } from 'types/reducer/globalTime'; import getMinAgo from './getStartAndEndTime/getMinAgo'; const GetMinMax = ( - interval: Time, + interval: Time | TimeV2, dateTimeRange?: [number, number], // eslint-disable-next-line sonarjs/cognitive-complexity ): GetMinMaxPayload => { @@ -26,6 +27,9 @@ const GetMinMax = ( } else if (interval === '30min') { const minTimeAgo = getMinAgo({ minutes: 30 }).getTime(); minTime = minTimeAgo; + } else if (interval === '45min') { + const minTimeAgo = getMinAgo({ minutes: 45 }).getTime(); + minTime = minTimeAgo; } else if (interval === '5min') { const minTimeAgo = getMinAgo({ minutes: 5 }).getTime(); minTime = minTimeAgo; @@ -37,11 +41,31 @@ const GetMinMax = ( // three day = one day * 3 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 3 }).getTime(); minTime = minTimeAgo; + } else if (interval === '4days') { + // four day = one day * 4 + const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 4 }).getTime(); + minTime = minTimeAgo; + } else if (interval === '10days') { + // ten day = one day * 10 + const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 10 }).getTime(); + minTime = minTimeAgo; } else if (interval === '1week') { // one week = one day * 7 const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 7 }).getTime(); minTime = minTimeAgo; - } else if (['4hr', '6hr'].includes(interval)) { + } else if (interval === '2weeks') { + // two week = one day * 14 + const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 14 }).getTime(); + minTime = minTimeAgo; + } else if (interval === '6weeks') { + // six 
week = one day * 42 + const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 42 }).getTime(); + minTime = minTimeAgo; + } else if (interval === '2months') { + // two months = one day * 60 + const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 60 }).getTime(); + minTime = minTimeAgo; + } else if (['3hr', '4hr', '6hr', '12hr'].includes(interval)) { const h = parseInt(interval.replace('hr', ''), 10); const minTimeAgo = getMinAgo({ minutes: h * 60 }).getTime(); minTime = minTimeAgo; diff --git a/frontend/src/lib/getStartEndRangeTime.ts b/frontend/src/lib/getStartEndRangeTime.ts index 13a2d40031..7fd087fd54 100644 --- a/frontend/src/lib/getStartEndRangeTime.ts +++ b/frontend/src/lib/getStartEndRangeTime.ts @@ -1,6 +1,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import store from 'store'; import getMaxMinTime from './getMaxMinTime'; @@ -37,7 +38,7 @@ const getStartEndRangeTime = ({ interface GetStartEndRangeTimesProps { type?: timePreferenceType; graphType?: PANEL_TYPES | null; - interval?: Time; + interval?: Time | TimeV2; } interface GetStartEndRangeTimesPayload { diff --git a/frontend/src/lib/query/createTableColumnsFromQuery.ts b/frontend/src/lib/query/createTableColumnsFromQuery.ts index 3174c35125..d62e0a763f 100644 --- a/frontend/src/lib/query/createTableColumnsFromQuery.ts +++ b/frontend/src/lib/query/createTableColumnsFromQuery.ts @@ -7,6 +7,7 @@ import { import { FORMULA_REGEXP } from 'constants/regExp'; import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config'; import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces'; +import { isObject } from 'lodash-es'; import { ReactNode } from 'react'; import { IBuilderFormula, @@ -385,7 +386,11 @@ const fillDataFromList = ( Object.keys(listItem.data).forEach((label) 
=> { if (column.dataIndex === label) { if (listItem.data[label as ListItemKey] !== '') { - column.data.push(listItem.data[label as ListItemKey].toString()); + if (isObject(listItem.data[label as ListItemKey])) { + column.data.push(JSON.stringify(listItem.data[label as ListItemKey])); + } else { + column.data.push(listItem.data[label as ListItemKey].toString()); + } } else { column.data.push('N/A'); } diff --git a/frontend/src/pages/LogsExplorer/index.tsx b/frontend/src/pages/LogsExplorer/index.tsx index d18eee199b..8b91b955ea 100644 --- a/frontend/src/pages/LogsExplorer/index.tsx +++ b/frontend/src/pages/LogsExplorer/index.tsx @@ -2,26 +2,102 @@ import { Col, Row } from 'antd'; import ExplorerCard from 'components/ExplorerCard/ExplorerCard'; import LogExplorerQuerySection from 'container/LogExplorerQuerySection'; import LogsExplorerViews from 'container/LogsExplorerViews'; -import LogsTopNav from 'container/LogsTopNav'; +// import LogsTopNav from 'container/LogsTopNav'; +import LeftToolbarActions from 'container/QueryBuilder/components/ToolbarActions/LeftToolbarActions'; +import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions'; +import Toolbar from 'container/Toolbar/Toolbar'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; +import { useEffect, useMemo, useState } from 'react'; import { ErrorBoundary } from 'react-error-boundary'; import { DataSource } from 'types/common/queryBuilder'; import { WrapperStyled } from './styles'; +import { SELECTED_VIEWS } from './utils'; function LogsExplorer(): JSX.Element { + const [showHistogram, setShowHistogram] = useState(true); + const [selectedView, setSelectedView] = useState( + SELECTED_VIEWS.SEARCH, + ); + + const { handleRunQuery, currentQuery } = useQueryBuilder(); + + const handleToggleShowHistogram = (): void => { + setShowHistogram(!showHistogram); + }; + + const 
handleChangeSelectedView = (view: SELECTED_VIEWS): void => { + setSelectedView(view); + }; + + // Switch to query builder view if there are more than 1 queries + useEffect(() => { + if (currentQuery.builder.queryData.length > 1) { + handleChangeSelectedView(SELECTED_VIEWS.QUERY_BUILDER); + } + }, [currentQuery.builder.queryData.length]); + + const isMultipleQueries = useMemo( + () => + currentQuery.builder.queryData.length > 1 || + currentQuery.builder.queryFormulas.length > 0, + [currentQuery], + ); + + const toolbarViews = useMemo( + () => ({ + search: { + name: 'search', + label: 'Search', + disabled: isMultipleQueries, + show: true, + }, + queryBuilder: { + name: 'query-builder', + label: 'Query Builder', + disabled: false, + show: true, + }, + clickhouse: { + name: 'clickhouse', + label: 'Clickhouse', + disabled: false, + show: false, + }, + }), + [isMultipleQueries], + ); + return ( - + + } + rightActions={} + showOldCTA + /> + - +
- + - + diff --git a/frontend/src/pages/LogsExplorer/utils.ts b/frontend/src/pages/LogsExplorer/utils.ts index 13e8a29a4f..485aa22eef 100644 --- a/frontend/src/pages/LogsExplorer/utils.ts +++ b/frontend/src/pages/LogsExplorer/utils.ts @@ -10,3 +10,10 @@ export const prepareQueryWithDefaultTimestamp = (query: Query): Query => ({ })), }, }); + +// eslint-disable-next-line @typescript-eslint/naming-convention +export enum SELECTED_VIEWS { + SEARCH = 'search', + QUERY_BUILDER = 'query-builder', + CLICKHOUSE = 'clickhouse', +} diff --git a/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss b/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss new file mode 100644 index 0000000000..144264b532 --- /dev/null +++ b/frontend/src/pages/LogsModulePage/LogsModulePage.styles.scss @@ -0,0 +1,33 @@ +.logs-module-container { + // margin: 0 -1rem; // as we have added a margin of 0 1rem components container, have to adjust the margin with negative to style the logs explorer as we want + + // .ant-tabs-content-holder { + // margin: 0 -1rem; + // } + + .ant-tabs-nav { + padding: 0 16px; + margin-bottom: 0px; + + &::before { + border-bottom: 1px solid var(--bg-slate-400) !important; + } + } + + .tab-item { + display: flex; + justify-content: center; + align-items: center; + gap: 8px; + } +} + +.lightMode { + .logs-module-container { + .ant-tabs-nav { + &::before { + border-bottom: 1px solid var(--bg-vanilla-300) !important; + } + } + } +} \ No newline at end of file diff --git a/frontend/src/pages/LogsModulePage/LogsModulePage.tsx b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx index ecd8d2dcfc..6802d436cf 100644 --- a/frontend/src/pages/LogsModulePage/LogsModulePage.tsx +++ b/frontend/src/pages/LogsModulePage/LogsModulePage.tsx @@ -1,28 +1,20 @@ +import './LogsModulePage.styles.scss'; + import RouteTab from 'components/RouteTab'; -import ROUTES from 'constants/routes'; +import { TabRoutes } from 'components/RouteTab/types'; import history from 'lib/history'; 
-import LogsExplorer from 'pages/LogsExplorer'; -import Pipelines from 'pages/Pipelines'; import { useLocation } from 'react-use'; -export const logsExplorer = { - Component: LogsExplorer, - name: 'Explorer', - route: ROUTES.LOGS, - key: ROUTES.LOGS, -}; - -export const logsPipelines = { - Component: Pipelines, - name: 'Pipelines', - route: ROUTES.LOGS_PIPELINES, - key: ROUTES.LOGS_PIPELINES, -}; +import { logSaveView, logsExplorer, logsPipelines } from './constants'; export default function LogsModulePage(): JSX.Element { const { pathname } = useLocation(); - const routes = [logsExplorer, logsPipelines]; + const routes: TabRoutes[] = [logsExplorer, logsPipelines, logSaveView]; - return ; + return ( +
+ +
+ ); } diff --git a/frontend/src/pages/LogsModulePage/constants.tsx b/frontend/src/pages/LogsModulePage/constants.tsx new file mode 100644 index 0000000000..1dbf667fd4 --- /dev/null +++ b/frontend/src/pages/LogsModulePage/constants.tsx @@ -0,0 +1,39 @@ +import { TabRoutes } from 'components/RouteTab/types'; +import ROUTES from 'constants/routes'; +import { Compass, TowerControl, Workflow } from 'lucide-react'; +import LogsExplorer from 'pages/LogsExplorer'; +import Pipelines from 'pages/Pipelines'; +import SaveView from 'pages/SaveView'; + +export const logsExplorer: TabRoutes = { + Component: LogsExplorer, + name: ( +
+ Explorer +
+ ), + route: ROUTES.LOGS, + key: ROUTES.LOGS, +}; + +export const logsPipelines: TabRoutes = { + Component: Pipelines, + name: ( +
+ Pipelines +
+ ), + route: ROUTES.LOGS_PIPELINES, + key: ROUTES.LOGS_PIPELINES, +}; + +export const logSaveView: TabRoutes = { + Component: SaveView, + name: ( +
+ Views +
+ ), + route: ROUTES.LOGS_SAVE_VIEWS, + key: ROUTES.LOGS_SAVE_VIEWS, +}; diff --git a/frontend/src/pages/LogsModulePage/utils.ts b/frontend/src/pages/LogsModulePage/utils.ts new file mode 100644 index 0000000000..222c1c9ef6 --- /dev/null +++ b/frontend/src/pages/LogsModulePage/utils.ts @@ -0,0 +1,7 @@ +export const removeSourcePageFromPath = (path: string): string => { + const lastSlashIndex = path.lastIndexOf('/'); + if (lastSlashIndex !== -1) { + return path.substring(0, lastSlashIndex); + } + return path; +}; diff --git a/frontend/src/pages/Pipelines/Pipelines.styles.scss b/frontend/src/pages/Pipelines/Pipelines.styles.scss new file mode 100644 index 0000000000..8521aab75e --- /dev/null +++ b/frontend/src/pages/Pipelines/Pipelines.styles.scss @@ -0,0 +1,5 @@ +.pipeline-tabs { + .ant-tabs-content { + padding: 0 16px; + } +} diff --git a/frontend/src/pages/Pipelines/index.tsx b/frontend/src/pages/Pipelines/index.tsx index d646390dda..81b3b503f9 100644 --- a/frontend/src/pages/Pipelines/index.tsx +++ b/frontend/src/pages/Pipelines/index.tsx @@ -1,3 +1,5 @@ +import './Pipelines.styles.scss'; + import type { TabsProps } from 'antd'; import { Tabs } from 'antd'; import getPipeline from 'api/pipeline/get'; @@ -81,7 +83,11 @@ function Pipelines(): JSX.Element { return ( - + ); } diff --git a/frontend/src/pages/SaveView/SaveView.styles.scss b/frontend/src/pages/SaveView/SaveView.styles.scss new file mode 100644 index 0000000000..461914c1d0 --- /dev/null +++ b/frontend/src/pages/SaveView/SaveView.styles.scss @@ -0,0 +1,349 @@ +.save-view-container { + margin-top: 70px; + display: flex; + justify-content: center; + width: 100%; + + .save-view-content { + width: calc(100% - 30px); + max-width: 736px; + + + .title { + color: var(--bg-vanilla-100); + font-size: var(--font-size-lg); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 28px; /* 155.556% */ + letter-spacing: -0.09px; + } + + .subtitle { + color: var(---bg-vanilla-400); + font-size: 
var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + } + + .ant-input-affix-wrapper { + margin-top: 16px; + margin-bottom: 8px; + } + + .ant-table-row { + .ant-table-cell { + padding: 0; + border: none; + background: var(--bg-ink-500); + + } + .column-render { + margin: 8px 0 !important; + padding: 16px; + border-radius: 6px; + border: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + + .title-with-action { + display: flex; + justify-content: space-between; + align-items: center; + + .save-view-title { + display: flex; + align-items: center; + gap: 6px; + .dot { + min-height: 6px; + min-width: 6px; + border-radius: 50%; + } + .ant-typography { + color: var(--bg-vanilla-400); + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-medium); + line-height: 20px; + letter-spacing: -0.07px; + } + } + + .action-btn { + display: flex; + align-items: center; + gap: 20px; + cursor: pointer; + } + + } + .view-details { + margin-top: 8px; + display: flex; + align-items: center; + + .view-tag { + width: 14px; + height: 14px; + border-radius: 50px; + background: var(--bg-slate-300); + display: flex; + justify-content: center; + align-items: center; + + .tag-text { + color: var(--bg-vanilla-400); + leading-trim: both; + text-edge: cap; + font-size: 10px; + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: normal; + letter-spacing: -0.05px; + } + } + + .view-created-by { + margin-left: 8px; + } + + .view-created-at { + margin-left: 24px; + display: flex; + align-items: center; + .ant-typography { + margin-left: 6px; + color: var(--bg-vanilla-400); + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 18px; /* 128.571% */ + letter-spacing: -0.07px; + } + } + } + } + } + + .ant-pagination-item { + + display: flex; + justify-content: center; + 
align-items: center; + + > a { + color: var(--bg-vanilla-400); + font-variant-numeric: lining-nums tabular-nums slashed-zero; + font-feature-settings: 'dlig' on, 'salt' on, 'case' on, 'cpsp' on; + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 20px; /* 142.857% */ + } + + } + + .ant-pagination-item-active { + background-color: var(--bg-robin-500); + > a { + color: var(--bg-ink-500) !important; + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-medium); + line-height: 20px; + } + } + } +} + +.delete-view-modal { + width: calc(100% - 30px) !important; /* Adjust the 20px as needed */ + max-width: 384px; + .ant-modal-content { + padding: 0; + border-radius: 4px; + border: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.20); + + .ant-modal-header { + padding: 16px; + background: var(--bg-ink-400); + } + + .ant-modal-body { + padding: 0px 16px 28px 16px; + + .ant-typography { + color: var(--bg-vanilla-400); + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 20px; + letter-spacing: -0.07px; + } + + .save-view-input { + margin-top: 8px; + display: flex; + gap: 8px; + } + + .ant-color-picker-trigger { + padding: 6px; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + width: 32px; + height: 32px; + + .ant-color-picker-color-block { + border-radius: 50px; + width: 16px; + height: 16px; + flex-shrink: 0; + + .ant-color-picker-color-block-inner { + display: flex; + justify-content: center; + align-items: center; + } + } + } + + } + + .ant-modal-footer { + display: flex; + justify-content: flex-end; + padding: 16px 16px; + margin: 0; + + .cancel-btn { + display: flex; + align-items: center; + border: none; + border-radius: 2px; + background: var(--bg-slate-500); + } + + .delete-btn { + display: flex; + 
align-items: center; + border: none; + border-radius: 2px; + background: var(--bg-cherry-500); + margin-left: 12px; + } + + .delete-btn:hover { + color: var(--bg-vanilla-100); + background: var(--bg-cherry-600); + } + } + } + .title { + color: var(--bg-vanilla-100); + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-medium); + line-height: 20px; /* 142.857% */ + } +} + +.lightMode { + .save-view-container { + .save-view-content { + + .title { + color: var(--bg-ink-500); + } + + .ant-table-row { + .ant-table-cell { + background: var(--bg-vanilla-200); + } + + &:hover { + .ant-table-cell { + background: var(--bg-vanilla-200) !important; + } + } + + .column-render { + border: 1px solid var(--bg-vanilla-200); + background: var(--bg-vanilla-100); + + .title-with-action { + .save-view-title { + .ant-typography { + color: var(--bg-ink-500); + } + } + + .action-btn { + .ant-typography { + color: var(--bg-ink-500); + } + } + } + + .view-details { + .view-tag { + background: var(--bg-vanilla-200); + .tag-text { + color: var(--bg-ink-500); + } + } + + .view-created-by { + color: var(--bg-ink-500); + } + + .view-created-at { + .ant-typography { + color: var(--bg-ink-500); + } + } + } + } + } + } + } + + .delete-view-modal { + .ant-modal-content { + border: 1px solid var(--bg-vanilla-200); + background: var(--bg-vanilla-100); + + .ant-modal-header { + background: var(--bg-vanilla-100); + + .title { + color: var(--bg-ink-500); + } + } + + .ant-modal-body { + .ant-typography { + color: var(--bg-ink-500); + } + + .save-view-input { + .ant-input { + background: var(--bg-vanilla-200); + color: var(--bg-ink-500); + } + } + } + + .ant-modal-footer { + .cancel-btn { + background: var(--bg-vanilla-300); + color: var(--bg-ink-400); + } + } + } + } +} \ No newline at end of file diff --git a/frontend/src/pages/SaveView/constants.ts b/frontend/src/pages/SaveView/constants.ts new file mode 100644 index 0000000000..a6d55b1cb9 --- /dev/null +++ 
b/frontend/src/pages/SaveView/constants.ts @@ -0,0 +1,15 @@ +import ROUTES from 'constants/routes'; + +export const SOURCEPAGE_VS_ROUTES: { + [key: string]: Partial; +} = { + logs: ROUTES.LOGS_EXPLORER, + traces: ROUTES.TRACES_EXPLORER, +} as const; + +export const ROUTES_VS_SOURCEPAGE: { + [key: string]: string; +} = { + [ROUTES.LOGS_SAVE_VIEWS]: 'logs', + [ROUTES.TRACES_SAVE_VIEWS]: 'traces', +} as const; diff --git a/frontend/src/pages/SaveView/index.tsx b/frontend/src/pages/SaveView/index.tsx new file mode 100644 index 0000000000..afdf05686b --- /dev/null +++ b/frontend/src/pages/SaveView/index.tsx @@ -0,0 +1,357 @@ +import './SaveView.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { + Button, + ColorPicker, + Input, + Modal, + Table, + TableProps, + Typography, +} from 'antd'; +import { + getViewDetailsUsingViewKey, + showErrorNotification, +} from 'components/ExplorerCard/utils'; +import { getRandomColor } from 'container/ExplorerOptions/utils'; +import { useDeleteView } from 'hooks/saveViews/useDeleteView'; +import { useGetAllViews } from 'hooks/saveViews/useGetAllViews'; +import { useUpdateView } from 'hooks/saveViews/useUpdateView'; +import useErrorNotification from 'hooks/useErrorNotification'; +import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; +import { useNotifications } from 'hooks/useNotifications'; +import { + CalendarClock, + Check, + Compass, + PenLine, + Search, + Trash2, + X, +} from 'lucide-react'; +import { ChangeEvent, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useLocation } from 'react-router-dom'; +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { ViewProps } from 'types/api/saveViews/types'; +import { DataSource } from 'types/common/queryBuilder'; + +import { ROUTES_VS_SOURCEPAGE, SOURCEPAGE_VS_ROUTES } from './constants'; +import { deleteViewHandler } from './utils'; + +function SaveView(): 
JSX.Element { + const { pathname } = useLocation(); + const sourcepage = ROUTES_VS_SOURCEPAGE[pathname]; + const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false); + const [activeViewKey, setActiveViewKey] = useState(''); + const [newViewName, setNewViewName] = useState(''); + const [color, setColor] = useState(Color.BG_SIENNA_500); + const [isEditModalOpen, setIsEditModalOpen] = useState(false); + const [activeViewName, setActiveViewName] = useState(''); + const [ + activeCompositeQuery, + setActiveCompositeQuery, + ] = useState(null); + const [searchValue, setSearchValue] = useState(''); + const [dataSource, setDataSource] = useState([]); + const { t } = useTranslation(['explorer']); + + const hideDeleteViewModal = (): void => { + setIsDeleteModalOpen(false); + }; + + const handleDeleteModelOpen = (uuid: string, name: string): void => { + setActiveViewKey(uuid); + setActiveViewName(name); + setIsDeleteModalOpen(true); + }; + + const hideEditViewModal = (): void => { + setIsEditModalOpen(false); + }; + + const handleEditModelOpen = (view: ViewProps, color: string): void => { + setActiveViewKey(view.uuid); + setColor(color); + setActiveViewName(view.name); + setNewViewName(view.name); + setActiveCompositeQuery(view.compositeQuery); + setIsEditModalOpen(true); + }; + + const { notifications } = useNotifications(); + + const { + data: viewsData, + isLoading, + error, + isRefetching, + refetch: refetchAllView, + } = useGetAllViews(sourcepage as DataSource); + + useEffect(() => { + setDataSource(viewsData?.data.data || []); + }, [viewsData?.data.data]); + + useErrorNotification(error); + + const handleSearch = (e: ChangeEvent): void => { + setSearchValue(e.target.value); + const filteredData = viewsData?.data.data.filter((view) => + view.name.toLowerCase().includes(e.target.value.toLowerCase()), + ); + setDataSource(filteredData || []); + }; + + const clearSearch = (): void => { + setSearchValue(''); + }; + + const { + mutateAsync: deleteViewAsync, + 
isLoading: isDeleteLoading, + } = useDeleteView(activeViewKey); + + const onDeleteHandler = (): void => { + deleteViewHandler({ + deleteViewAsync, + notifications, + refetchAllView, + viewId: activeViewKey, + hideDeleteViewModal, + clearSearch, + }); + }; + + const { + mutateAsync: updateViewAsync, + isLoading: isViewUpdating, + } = useUpdateView({ + compositeQuery: activeCompositeQuery || ({} as ICompositeMetricQuery), + viewKey: activeViewKey, + extraData: JSON.stringify({ color }), + sourcePage: sourcepage || DataSource.LOGS, + viewName: newViewName, + }); + + const onUpdateQueryHandler = (): void => { + updateViewAsync( + { + compositeQuery: activeCompositeQuery || ({} as ICompositeMetricQuery), + viewKey: activeViewKey, + extraData: JSON.stringify({ color }), + sourcePage: sourcepage, + viewName: activeViewName, + }, + { + onSuccess: () => { + notifications.success({ + message: 'View Updated Successfully', + }); + hideEditViewModal(); + refetchAllView(); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + }, + ); + }; + + const { handleExplorerTabChange } = useHandleExplorerTabChange(); + + const handleRedirectQuery = (view: ViewProps): void => { + const currentViewDetails = getViewDetailsUsingViewKey( + view.uuid, + viewsData?.data.data, + ); + if (!currentViewDetails) return; + const { query, name, uuid, panelType: currentPanelType } = currentViewDetails; + + if (sourcepage) { + handleExplorerTabChange( + currentPanelType, + { + query, + name, + uuid, + }, + SOURCEPAGE_VS_ROUTES[sourcepage], + ); + } + }; + + const columns: TableProps['columns'] = [ + { + title: 'Save View', + key: 'view', + render: (view: ViewProps): JSX.Element => { + const extraData = view.extraData !== '' ? 
JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + + const timeOptions: Intl.DateTimeFormatOptions = { + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hour12: false, + }; + const formattedTime = new Date(view.createdAt).toLocaleTimeString( + 'en-US', + timeOptions, + ); + + const dateOptions: Intl.DateTimeFormatOptions = { + month: 'short', + day: 'numeric', + year: 'numeric', + }; + + const formattedDate = new Date(view.createdAt).toLocaleDateString( + 'en-US', + dateOptions, + ); + + // Combine time and date + const formattedDateAndTime = `${formattedTime} ⎯ ${formattedDate}`; + return ( +
+
+
+ {' '} + {view.name} +
+ +
+ handleEditModelOpen(view, bgColor)} + /> + handleRedirectQuery(view)} /> + handleDeleteModelOpen(view.uuid, view.name)} + /> +
+
+
+
+ + {view.createdBy.substring(0, 1).toUpperCase()} + +
+ + {view.createdBy} + +
+ + {formattedDateAndTime} +
+
+
+ ); + }, + }, + ]; + + return ( +
+
+ Views + + Manage your saved views for logs. + + } + value={searchValue} + onChange={handleSearch} + /> + +
EmptyEmpty
+ + + Delete view} + open={isDeleteModalOpen} + closable={false} + onCancel={hideDeleteViewModal} + footer={[ + , + , + ]} + > + + {t('delete_confirm_message', { + viewName: activeViewName, + })} + + + + Edit view details} + open={isEditModalOpen} + closable={false} + onCancel={hideEditViewModal} + footer={[ + , + ]} + > + Label +
+ setColor(hex)} + /> + setNewViewName(e.target.value)} + /> +
+
+ + ); +} + +export default SaveView; diff --git a/frontend/src/pages/SaveView/utils.ts b/frontend/src/pages/SaveView/utils.ts new file mode 100644 index 0000000000..fff54a6ceb --- /dev/null +++ b/frontend/src/pages/SaveView/utils.ts @@ -0,0 +1,37 @@ +import { NotificationInstance } from 'antd/es/notification/interface'; +import { MenuItemLabelGeneratorProps } from 'components/ExplorerCard/types'; +import { showErrorNotification } from 'components/ExplorerCard/utils'; +import { UseMutateAsyncFunction } from 'react-query'; +import { DeleteViewPayloadProps } from 'types/api/saveViews/types'; + +type DeleteViewProps = { + deleteViewAsync: UseMutateAsyncFunction; + refetchAllView: MenuItemLabelGeneratorProps['refetchAllView']; + notifications: NotificationInstance; + viewId: string; + hideDeleteViewModal: () => void; + clearSearch: () => void; +}; + +export const deleteViewHandler = ({ + deleteViewAsync, + refetchAllView, + notifications, + viewId, + hideDeleteViewModal, + clearSearch, +}: DeleteViewProps): void => { + deleteViewAsync(viewId, { + onSuccess: () => { + hideDeleteViewModal(); + clearSearch(); + notifications.success({ + message: 'View Deleted Successfully', + }); + refetchAllView(); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + }); +}; diff --git a/frontend/src/pages/Settings/utils.ts b/frontend/src/pages/Settings/utils.ts index 89a4e7f3ef..e453ee56a5 100644 --- a/frontend/src/pages/Settings/utils.ts +++ b/frontend/src/pages/Settings/utils.ts @@ -5,7 +5,6 @@ import { isCloudUser } from 'utils/app'; import { alertChannels, generalSettings, - generalSettingsCloud, ingestionSettings, organizationSettings, } from './config'; @@ -23,11 +22,12 @@ export const getRoutes = ( if (isCloudUser()) { settings.push(...ingestionSettings(t)); settings.push(...alertChannels(t)); - settings.push(...generalSettingsCloud(t)); } else { settings.push(...alertChannels(t)); settings.push(...generalSettings(t)); } + 
settings.push(...generalSettings(t)); + return settings; }; diff --git a/frontend/src/pages/Shortcuts/Shortcuts.styles.scss b/frontend/src/pages/Shortcuts/Shortcuts.styles.scss new file mode 100644 index 0000000000..d4596d3912 --- /dev/null +++ b/frontend/src/pages/Shortcuts/Shortcuts.styles.scss @@ -0,0 +1,23 @@ +.keyboard-shortcuts { + display: flex; + flex-direction: column; + margin-top: 1rem; + padding: 1rem; + gap: 50px; + + .shortcut-section { + display: flex; + flex-direction: column; + gap: 20px; + + .shortcut-section-heading { + font-weight: 600; + font-size: 22px; + line-height: 1.3636363636363635; + } + + .shortcut-section-table { + width: 70%; + } + } +} diff --git a/frontend/src/pages/Shortcuts/Shortcuts.tsx b/frontend/src/pages/Shortcuts/Shortcuts.tsx new file mode 100644 index 0000000000..34aadf802a --- /dev/null +++ b/frontend/src/pages/Shortcuts/Shortcuts.tsx @@ -0,0 +1,36 @@ +import './Shortcuts.styles.scss'; + +import { Table, Typography } from 'antd'; + +import { ALL_SHORTCUTS, generateTableData, shortcutColumns } from './utils'; + +function Shortcuts(): JSX.Element { + function getShortcutTable(shortcutSection: string): JSX.Element { + const tableData = generateTableData(shortcutSection); + + return ( +
+ + {shortcutSection} + +
+ + ); + } + + return ( +
+ {Object.keys(ALL_SHORTCUTS).map((shortcutSection) => + getShortcutTable(shortcutSection), + )} +
+ ); +} + +export default Shortcuts; diff --git a/frontend/src/pages/Shortcuts/index.ts b/frontend/src/pages/Shortcuts/index.ts new file mode 100644 index 0000000000..0d8d720408 --- /dev/null +++ b/frontend/src/pages/Shortcuts/index.ts @@ -0,0 +1,3 @@ +import Shortcuts from './Shortcuts'; + +export default Shortcuts; diff --git a/frontend/src/pages/Shortcuts/utils.ts b/frontend/src/pages/Shortcuts/utils.ts new file mode 100644 index 0000000000..21dfa28767 --- /dev/null +++ b/frontend/src/pages/Shortcuts/utils.ts @@ -0,0 +1,62 @@ +import { TableProps } from 'antd'; +import { + GlobalShortcuts, + GlobalShortcutsDescription, + GlobalShortcutsName, +} from 'constants/shortcuts/globalShortcuts'; +import { + LogsExplorerShortcuts, + LogsExplorerShortcutsDescription, + LogsExplorerShortcutsName, +} from 'constants/shortcuts/logsExplorerShortcuts'; + +// eslint-disable-next-line @typescript-eslint/naming-convention +export const ALL_SHORTCUTS: Record> = { + 'Global Shortcuts': GlobalShortcuts, + 'Logs Explorer Shortcuts': LogsExplorerShortcuts, +}; + +export const ALL_SHORTCUTS_LABEL: Record> = { + 'Global Shortcuts': GlobalShortcutsName, + 'Logs Explorer Shortcuts': LogsExplorerShortcutsName, +}; + +export const ALL_SHORTCUTS_DESCRIPTION: Record< + string, + Record +> = { + 'Global Shortcuts': GlobalShortcutsDescription, + 'Logs Explorer Shortcuts': LogsExplorerShortcutsDescription, +}; + +export const shortcutColumns = [ + { + title: 'Keyboard Shortcut', + dataIndex: 'shortcutKey', + key: 'shortcutKey', + width: '30%', + }, + { + title: 'Description', + dataIndex: 'shortcutDescription', + key: 'shortcutDescription', + }, +]; + +interface ShortcutRow { + shortcutKey: string; + shortcutDescription: string; +} + +export function generateTableData( + shortcutSection: string, +): TableProps['dataSource'] { + const shortcuts = ALL_SHORTCUTS[shortcutSection]; + const shortcutsDescription = ALL_SHORTCUTS_DESCRIPTION[shortcutSection]; + const shortcutsLabel = 
ALL_SHORTCUTS_LABEL[shortcutSection]; + return Object.keys(shortcuts).map((shortcutName) => ({ + key: `${shortcuts[shortcutName]} ${shortcutName}`, + shortcutKey: shortcutsLabel[shortcutName], + shortcutDescription: shortcutsDescription[shortcutName], + })); +} diff --git a/frontend/src/pages/TracesExplorer/TracesExplorer.styles.scss b/frontend/src/pages/TracesExplorer/TracesExplorer.styles.scss new file mode 100644 index 0000000000..ae985deb74 --- /dev/null +++ b/frontend/src/pages/TracesExplorer/TracesExplorer.styles.scss @@ -0,0 +1,7 @@ +.trace-explorer-run-query { + display: flex; + flex-direction: row-reverse; + align-items: center; + margin: 1rem 0 0.5rem 0; + gap: 8px; +} diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx index 3fbdca1a83..22d4b71d2e 100644 --- a/frontend/src/pages/TracesExplorer/index.tsx +++ b/frontend/src/pages/TracesExplorer/index.tsx @@ -1,9 +1,14 @@ +import './TracesExplorer.styles.scss'; + import { Tabs } from 'antd'; import axios from 'axios'; import ExplorerCard from 'components/ExplorerCard/ExplorerCard'; import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes'; import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; +import ExplorerOptions from 'container/ExplorerOptions/ExplorerOptions'; import ExportPanel from 'container/ExportPanel'; +import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions'; +import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2'; import QuerySection from 'container/TracesExplorer/QuerySection'; import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard'; import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils'; @@ -31,6 +36,8 @@ function TracesExplorer(): JSX.Element { currentQuery, panelType, updateAllQueriesOperators, + handleRunQuery, + stagedQuery, } = useQueryBuilder(); const currentPanelType = useGetPanelTypesQueryParam(); @@ -177,6 
+184,10 @@ function TracesExplorer(): JSX.Element { return ( <> +
+ + +
@@ -197,6 +208,13 @@ function TracesExplorer(): JSX.Element { onChange={handleExplorerTabChange} /> +
); diff --git a/frontend/src/pages/TracesModulePage/TracesModulePage.styles.scss b/frontend/src/pages/TracesModulePage/TracesModulePage.styles.scss new file mode 100644 index 0000000000..1808fabecd --- /dev/null +++ b/frontend/src/pages/TracesModulePage/TracesModulePage.styles.scss @@ -0,0 +1,17 @@ +.traces-module-container { + .ant-tabs-nav { + padding: 0 16px; + margin-bottom: 0px; + + &::before { + border-bottom: none !important; + } + } + + .tab-item { + display: flex; + justify-content: center; + align-items: center; + gap: 8px; + } +} diff --git a/frontend/src/pages/TracesModulePage/TracesModulePage.tsx b/frontend/src/pages/TracesModulePage/TracesModulePage.tsx new file mode 100644 index 0000000000..f04b090311 --- /dev/null +++ b/frontend/src/pages/TracesModulePage/TracesModulePage.tsx @@ -0,0 +1,22 @@ +import './TracesModulePage.styles.scss'; + +import RouteTab from 'components/RouteTab'; +import { TabRoutes } from 'components/RouteTab/types'; +import history from 'lib/history'; +import { useLocation } from 'react-router-dom'; + +import { tracesExplorer, tracesSaveView } from './constants'; + +function TracesModulePage(): JSX.Element { + const { pathname } = useLocation(); + + const routes: TabRoutes[] = [tracesExplorer, tracesSaveView]; + + return ( +
+ ; +
+ ); +} + +export default TracesModulePage; diff --git a/frontend/src/pages/TracesModulePage/constants.tsx b/frontend/src/pages/TracesModulePage/constants.tsx new file mode 100644 index 0000000000..24d2047d01 --- /dev/null +++ b/frontend/src/pages/TracesModulePage/constants.tsx @@ -0,0 +1,27 @@ +import { TabRoutes } from 'components/RouteTab/types'; +import ROUTES from 'constants/routes'; +import { Compass, TowerControl } from 'lucide-react'; +import SaveView from 'pages/SaveView'; +import TracesExplorer from 'pages/TracesExplorer'; + +export const tracesExplorer: TabRoutes = { + Component: TracesExplorer, + name: ( +
+ Explorer +
+ ), + route: ROUTES.TRACES_EXPLORER, + key: ROUTES.TRACES_EXPLORER, +}; + +export const tracesSaveView: TabRoutes = { + Component: SaveView, + name: ( +
+ Views +
+ ), + route: ROUTES.TRACES_SAVE_VIEWS, + key: ROUTES.TRACES_SAVE_VIEWS, +}; diff --git a/frontend/src/pages/TracesModulePage/index.tsx b/frontend/src/pages/TracesModulePage/index.tsx new file mode 100644 index 0000000000..5b2064dd0a --- /dev/null +++ b/frontend/src/pages/TracesModulePage/index.tsx @@ -0,0 +1,3 @@ +import TracesModulePage from './TracesModulePage'; + +export default TracesModulePage; diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx index eab934c69c..4dcc055235 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx @@ -87,21 +87,18 @@ export default function WorkspaceBlocked(): JSX.Element { const handleExtendTrial = (): void => { trackEvent('Workspace Blocked: User Clicked Extend Trial'); - const recipient = 'cloud-support@signoz.io'; - const subject = 'Extend SigNoz Cloud Trial'; - const body = `I'd like to request an extension for SigNoz Cloud for my account. Please find my account details below - - SigNoz URL: - Admin Email: - `; - - // Create the mailto link - const mailtoLink = `mailto:${recipient}?subject=${encodeURIComponent( - subject, - )}&body=${encodeURIComponent(body)}`; - - // Open the default email client - window.location.href = mailtoLink; + notifications.info({ + message: 'Extend Trial', + description: ( + + If you have a specific reason why you were not able to finish your PoC in + the trial period, please write to us on + cloud-support@signoz.io + with the reason. Sometimes we can extend trial by a few days on a case by + case basis + + ), + }); }; return ( @@ -152,7 +149,7 @@ export default function WorkspaceBlocked(): JSX.Element {
Got Questions? - Contact Us + Contact Us
diff --git a/frontend/src/periscope.scss b/frontend/src/periscope.scss new file mode 100644 index 0000000000..026301e2f4 --- /dev/null +++ b/frontend/src/periscope.scss @@ -0,0 +1,35 @@ +.periscope-btn-group { + display: inline-flex; + + .ant-btn { + border-radius: 0; + } +} + +.periscope-btn { + display: flex; + justify-content: center; + align-items: center; + + padding: 6px; + + border: 1px solid var(--bg-slate-400, #1d212d); + background: var(--bg-ink-400, #121317); + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + color: var(--bg-vanilla-400, #c0c1c3); + + &.ghost { + box-shadow: none; + border: none; + } + + cursor: pointer; +} + +.lightMode { + .periscope-btn { + border-color: var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + color: var(--bg-ink-200); + } +} diff --git a/frontend/src/providers/Dashboard/Dashboard.tsx b/frontend/src/providers/Dashboard/Dashboard.tsx index 20c54d2eba..a7fa94c044 100644 --- a/frontend/src/providers/Dashboard/Dashboard.tsx +++ b/frontend/src/providers/Dashboard/Dashboard.tsx @@ -6,6 +6,7 @@ import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; import ROUTES from 'constants/routes'; import { getMinMax } from 'container/TopNav/AutoRefresh/config'; import dayjs, { Dayjs } from 'dayjs'; +import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashboardFromLocalStorage'; import useAxiosError from 'hooks/useAxiosError'; import useTabVisibility from 'hooks/useTabFocus'; import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout'; @@ -95,6 +96,10 @@ export function DashboardProvider({ const [selectedDashboard, setSelectedDashboard] = useState(); + const { currentDashboard } = useDashboardVariablesFromLocalStorage( + dashboardId, + ); + const updatedTimeRef = useRef(null); // Using ref to store the updated time const modalRef = useRef(null); @@ -103,11 +108,34 @@ export function DashboardProvider({ const { t } = useTranslation(['dashboard']); const dashboardRef = useRef(); + const 
mergeDBWithLocalStorage = ( + data: Dashboard, + localStorageVariables: any, + ): Dashboard => { + const updatedData = data; + if (data && localStorageVariables) { + const updatedVariables = data.data.variables; + Object.keys(data.data.variables).forEach((variable) => { + const variableData = data.data.variables[variable]; + const updatedVariable = { + ...data.data.variables[variable], + ...localStorageVariables[variableData.name as any], + }; + + updatedVariables[variable] = updatedVariable; + }); + updatedData.data.variables = updatedVariables; + } + return updatedData; + }; // As we do not have order and ID's in the variables object, we have to process variables to add order and ID if they do not exist in the variables object // eslint-disable-next-line sonarjs/cognitive-complexity const transformDashboardVariables = (data: Dashboard): Dashboard => { if (data && data.data && data.data.variables) { - const clonedDashboardData = JSON.parse(JSON.stringify(data)); + const clonedDashboardData = mergeDBWithLocalStorage( + JSON.parse(JSON.stringify(data)), + currentDashboard, + ); const { variables } = clonedDashboardData.data; const existingOrders: Set = new Set(); diff --git a/frontend/src/providers/QueryBuilder.tsx b/frontend/src/providers/QueryBuilder.tsx index bc0521fcbd..e7177a0536 100644 --- a/frontend/src/providers/QueryBuilder.tsx +++ b/frontend/src/providers/QueryBuilder.tsx @@ -15,6 +15,7 @@ import { MAX_QUERIES, PANEL_TYPES, } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam'; import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval'; import useUrlQuery from 'hooks/useUrlQuery'; @@ -22,6 +23,7 @@ import { createIdFromObjectFields } from 'lib/createIdFromObjectFields'; import { createNewBuilderItemName } from 'lib/newQueryBuilder/createNewBuilderItemName'; import { getOperatorsBySourceAndPanelType } from 
'lib/newQueryBuilder/getOperatorsBySourceAndPanelType'; import { replaceIncorrectObjectFields } from 'lib/replaceIncorrectObjectFields'; +import { merge } from 'lodash-es'; import { createContext, PropsWithChildren, @@ -195,7 +197,7 @@ export function QueryBuilderProvider({ ); const initQueryBuilderData = useCallback( - (query: Query): void => { + (query: Query, timeUpdated?: boolean): void => { const { queryType: newQueryType, ...queryState } = prepareQueryBuilderData( query, ); @@ -210,10 +212,12 @@ export function QueryBuilderProvider({ const nextQuery: Query = { ...newQueryState, queryType: type }; setStagedQuery(nextQuery); - setCurrentQuery(newQueryState); + setCurrentQuery( + timeUpdated ? merge(currentQuery, newQueryState) : newQueryState, + ); setQueryType(type); }, - [prepareQueryBuilderData], + [prepareQueryBuilderData, currentQuery], ); const updateAllQueriesOperators = useCallback( @@ -464,7 +468,11 @@ export function QueryBuilderProvider({ ); const redirectWithQueryBuilderData = useCallback( - (query: Partial, searchParams?: Record) => { + ( + query: Partial, + searchParams?: Record, + redirectingUrl?: typeof ROUTES[keyof typeof ROUTES], + ) => { const queryType = !query.queryType || !Object.values(EQueryType).includes(query.queryType) ? EQueryType.QUERY_BUILDER @@ -519,7 +527,9 @@ export function QueryBuilderProvider({ ); } - const generatedUrl = `${location.pathname}?${urlQuery}`; + const generatedUrl = redirectingUrl + ? 
`${redirectingUrl}?${urlQuery}` + : `${location.pathname}?${urlQuery}`; history.replace(generatedUrl); }, diff --git a/frontend/src/store/actions/global.ts b/frontend/src/store/actions/global.ts index ab04490f94..19c3be2b7b 100644 --- a/frontend/src/store/actions/global.ts +++ b/frontend/src/store/actions/global.ts @@ -1,11 +1,12 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import GetMinMax from 'lib/getMinMax'; import { Dispatch } from 'redux'; import AppActions from 'types/actions'; import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime'; export const UpdateTimeInterval = ( - interval: Time, + interval: Time | TimeV2, dateTimeRange: [number, number] = [0, 0], ): ((dispatch: Dispatch) => void) => ( dispatch: Dispatch, diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss index 71ce16d17d..2cb3ed419c 100644 --- a/frontend/src/styles.scss +++ b/frontend/src/styles.scss @@ -1,4 +1,6 @@ -@import '@signozhq/design-tokens'; +@import '@signozhq/design-tokens/dist/style.css'; + +@import './periscope.scss'; #root, html, @@ -136,3 +138,89 @@ body { align-items: center; gap: 8px; } + +// ================================================================= +// AntD style overrides +.ant-dropdown-menu { + margin-top: 2px !important; + min-width: 160px; + + border-radius: 4px; + padding: 16px; + border: 1px solid var(--bg-slate-400, #1d212d); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + + .ant-dropdown-menu-item-group-title { + font-size: 11px; + font-weight: 500; + line-height: 18px; + letter-spacing: 0.08em; + text-align: left; + } + + .ant-dropdown-menu-item-group { + .ant-dropdown-menu-item-group-list { + margin: 0 !important; + + .ant-dropdown-menu-item { + font-size: 13px; + font-weight: 400; + line-height: 17px; 
+ letter-spacing: 0.01em; + text-align: left; + } + } + } +} + +// https://github.com/ant-design/ant-design/issues/41307 +.ant-picker-panels > *:first-child button.ant-picker-header-next-btn { + visibility: visible !important; +} + +.ant-picker-panels > *:first-child button.ant-picker-header-super-next-btn { + visibility: visible !important; +} + +.ant-picker-panels > *:last-child:not(:only-child) { + display: none !important; +} + +.ant-picker-panel-container, +.ant-picker-footer-extra > div { + flex-wrap: wrap !important; +} + +.ant-picker-range-arrow { + display: none !important; +} + +.ant-picker-panel-container { + background: none !important; + box-shadow: none !important; +} + +.ant-tooltip { + --antd-arrow-background-color: var(--bg-slate-400, #1d212d); + + .ant-tooltip-inner { + background-color: var(--bg-slate-400, #1d212d); + } + .flexBtn { + display: flex; + align-items: center; + gap: 8px; + } +} + +.members-count { + display: inline-block; + margin-left: 8px; + margin-right: 8px; +} diff --git a/frontend/src/types/actions/globalTime.ts b/frontend/src/types/actions/globalTime.ts index 4a3b5a730c..858a7c78a0 100644 --- a/frontend/src/types/actions/globalTime.ts +++ b/frontend/src/types/actions/globalTime.ts @@ -1,4 +1,5 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { ResetIdStartAndEnd, SetSearchQueryString } from './logs'; @@ -13,7 +14,7 @@ export type GlobalTime = { }; interface UpdateTime extends GlobalTime { - selectedTime: Time; + selectedTime: Time | TimeV2; } interface UpdateTimeInterval { diff --git a/frontend/src/types/common/queryBuilder.ts b/frontend/src/types/common/queryBuilder.ts index 6916d7fed2..e8dad50083 100644 --- a/frontend/src/types/common/queryBuilder.ts +++ b/frontend/src/types/common/queryBuilder.ts @@ -1,4 +1,5 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; import { Format 
} from 'container/NewWidget/RightContainer/types'; import { IBuilderFormula, @@ -187,6 +188,7 @@ export type QueryBuilderContextType = { redirectWithQueryBuilderData: ( query: Query, searchParams?: Record, + redirectToUrl?: typeof ROUTES[keyof typeof ROUTES], ) => void; handleRunQuery: () => void; resetQuery: (newCurrentQuery?: QueryState) => void; @@ -204,7 +206,7 @@ export type QueryBuilderContextType = { index: number, ) => QueryBuilderData[T][number], ) => Query; - initQueryBuilderData: (query: Query) => void; + initQueryBuilderData: (query: Query, timeUpdated?: boolean) => void; isStagedQueryUpdated: ( viewData: ViewProps[] | undefined, viewKey: string, diff --git a/frontend/src/types/reducer/globalTime.ts b/frontend/src/types/reducer/globalTime.ts index 94bb17eb73..cd7fac1ea8 100644 --- a/frontend/src/types/reducer/globalTime.ts +++ b/frontend/src/types/reducer/globalTime.ts @@ -1,11 +1,12 @@ import { Time } from 'container/TopNav/DateTimeSelection/config'; +import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config'; import { GlobalTime } from 'types/actions/globalTime'; export interface GlobalReducer { maxTime: GlobalTime['maxTime']; minTime: GlobalTime['minTime']; loading: boolean; - selectedTime: Time; + selectedTime: Time | TimeV2; isAutoRefreshDisabled: boolean; selectedAutoRefreshInterval: string; } diff --git a/frontend/src/utils/app.ts b/frontend/src/utils/app.ts index 0ab9e6fca7..d0b859d108 100644 --- a/frontend/src/utils/app.ts +++ b/frontend/src/utils/app.ts @@ -15,6 +15,8 @@ export function extractDomain(email: string): string { export const isCloudUser = (): boolean => { const { hostname } = window.location; + return true; + return hostname?.endsWith('signoz.cloud'); }; diff --git a/frontend/src/utils/getUserOS.ts b/frontend/src/utils/getUserOS.ts new file mode 100644 index 0000000000..164b2b8234 --- /dev/null +++ b/frontend/src/utils/getUserOS.ts @@ -0,0 +1,14 @@ +export enum UserOperatingSystem { + WINDOWS = 'Windows', + MACOS 
= 'Mac OS', +} + +export function getUserOperatingSystem(): UserOperatingSystem { + // https://developer.mozilla.org/en-US/docs/Web/API/Navigator/userAgent + if (window.navigator.userAgent.indexOf(UserOperatingSystem.WINDOWS) !== -1) { + return UserOperatingSystem.WINDOWS; + } + + // default return is MacOS + return UserOperatingSystem.MACOS; +} diff --git a/frontend/src/utils/logs.ts b/frontend/src/utils/logs.ts index bfe79a3177..10f6625f31 100644 --- a/frontend/src/utils/logs.ts +++ b/frontend/src/utils/logs.ts @@ -1,22 +1,31 @@ import { orange } from '@ant-design/colors'; -import { themeColors } from 'constants/theme'; -import getAlphaColor from 'utils/getAlphaColor'; +import { Color } from '@signozhq/design-tokens'; export const getDefaultLogBackground = ( isReadOnly?: boolean, isDarkMode?: boolean, ): string => { if (isReadOnly) return ''; + // TODO handle the light mode here return `&:hover { background-color: ${ - isDarkMode - ? getAlphaColor(themeColors.white)[10] - : getAlphaColor(themeColors.black)[10] + isDarkMode ? 
'rgba(171, 189, 255, 0.04)' : 'var(--bg-vanilla-200)' }; }`; }; -export const getActiveLogBackground = (isActiveLog = true): string => { - if (!isActiveLog) return ''; +export const getActiveLogBackground = ( + isActiveLog = true, + isDarkMode = true, +): string => { + if (!isActiveLog) return ``; + if (isDarkMode) return `background-color: ${Color.BG_SLATE_200};`; + return `background-color: ${Color.BG_VANILLA_300}; color: ${Color.TEXT_SLATE_400}`; +}; + +export const getHightLightedLogBackground = ( + isHighlightedLog = true, +): string => { + if (!isHighlightedLog) return ''; return `background-color: ${orange[3]};`; }; diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts index 91372d237b..5f6e59b14a 100644 --- a/frontend/src/utils/permission/index.ts +++ b/frontend/src/utils/permission/index.ts @@ -86,6 +86,9 @@ export const routePermission: Record = { BILLING: ['ADMIN', 'EDITOR', 'VIEWER'], SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'], SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'], + LOGS_SAVE_VIEWS: ['ADMIN', 'EDITOR', 'VIEWER'], + TRACES_SAVE_VIEWS: ['ADMIN', 'EDITOR', 'VIEWER'], LOGS_BASE: [], OLD_LOGS_EXPLORER: [], + SHORTCUTS: ['ADMIN', 'EDITOR', 'VIEWER'], }; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index a81ac69961..7603747f46 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -11,7 +11,11 @@ "esModuleInterop": true, "skipLibCheck": true, "forceConsistentCasingInFileNames": true, - "lib": ["dom", "dom.iterable", "esnext"], + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], "allowSyntheticDefaultImports": true, "noFallthroughCasesInSwitch": true, "moduleResolution": "node", @@ -20,13 +24,25 @@ "noEmit": true, "baseUrl": "./src", "downlevelIteration": true, - "plugins": [{ "name": "typescript-plugin-css-modules" }], - "types": ["node", "jest"], + "plugins": [ + { + "name": "typescript-plugin-css-modules" + } + ], + "types": [ + "node", + "jest" + ], }, - "exclude": 
["node_modules"], + "exclude": [ + "node_modules", + "./src/container/OnboardingContainer/constants/*.ts" + ], "include": [ "./src", - "./src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", + "./src/**/*.ts", + "src/**/*.tsx", + "src/**/*.d.ts", "./babel.config.js", "./jest.config.ts", "./.eslintrc.js", @@ -42,4 +58,4 @@ "./tests/**.ts", "./**/*.d.ts" ] -} +} \ No newline at end of file diff --git a/frontend/yarn.lock b/frontend/yarn.lock index fe33785fb5..80ef34ec02 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -748,6 +748,11 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== +"@babel/parser@^7.23.6": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" + integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz" @@ -2062,6 +2067,13 @@ dependencies: regenerator-runtime "^0.13.11" +"@babel/runtime@^7.13.10": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d" + integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/runtime@^7.14.6": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.15.tgz#38f46494ccf6cf020bd4eed7124b425e83e523b8" @@ -2434,6 +2446,33 @@ minimatch "^3.0.4" strip-json-comments "^3.1.1" +"@floating-ui/core@^1.4.2": + 
version "1.5.2" + resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.5.2.tgz#53a0f7a98c550e63134d504f26804f6b83dbc071" + integrity sha512-Ii3MrfY/GAIN3OhXNzpCKaLxHQfJF9qvwq/kEJYdqDxeIHa01K8sldugal6TmeeXl+WMvhv9cnVzUTaFFJF09A== + dependencies: + "@floating-ui/utils" "^0.1.3" + +"@floating-ui/dom@^1.5.1": + version "1.5.3" + resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.5.3.tgz#54e50efcb432c06c23cd33de2b575102005436fa" + integrity sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA== + dependencies: + "@floating-ui/core" "^1.4.2" + "@floating-ui/utils" "^0.1.3" + +"@floating-ui/react-dom@^2.0.0": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-2.0.4.tgz#b076fafbdfeb881e1d86ae748b7ff95150e9f3ec" + integrity sha512-CF8k2rgKeh/49UrnIBs4BdxPUV6vize/Db1d/YbCLyp9GiVZ0BEwf5AiDSxJRCr6yOkGqTFHtmrULxkEfYZ7dQ== + dependencies: + "@floating-ui/dom" "^1.5.1" + +"@floating-ui/utils@^0.1.3": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.1.6.tgz#22958c042e10b67463997bd6ea7115fe28cbcaf9" + integrity sha512-OfX7E2oUDYxtBvsuS4e/jSn4Q9Qb6DzgeYtsAdkPZ47znpoNsMgZw0+tVijiv3uGNR6dgNlty6r9rzIzHjtd/A== + "@grafana/data@^9.5.2": version "9.5.7" resolved "https://registry.yarnpkg.com/@grafana/data/-/data-9.5.7.tgz#99df8c3917caf1f12a7f42258579b73b99f1aaa9" @@ -2893,6 +2932,48 @@ "@types/mdx" "^2.0.0" "@types/react" ">=16" +"@microsoft/api-extractor-model@7.28.3": + version "7.28.3" + resolved "https://registry.yarnpkg.com/@microsoft/api-extractor-model/-/api-extractor-model-7.28.3.tgz#f6a213e41a2274d5195366b646954daee39e8493" + integrity sha512-wT/kB2oDbdZXITyDh2SQLzaWwTOFbV326fP0pUwNW00WeliARs0qjmXBWmGWardEzp2U3/axkO3Lboqun6vrig== + dependencies: + "@microsoft/tsdoc" "0.14.2" + "@microsoft/tsdoc-config" "~0.16.1" + "@rushstack/node-core-library" "3.62.0" + +"@microsoft/api-extractor@7.39.0": + version "7.39.0" + resolved 
"https://registry.yarnpkg.com/@microsoft/api-extractor/-/api-extractor-7.39.0.tgz#41c25f7f522e8b9376debda07364ff234e602eff" + integrity sha512-PuXxzadgnvp+wdeZFPonssRAj/EW4Gm4s75TXzPk09h3wJ8RS3x7typf95B4vwZRrPTQBGopdUl+/vHvlPdAcg== + dependencies: + "@microsoft/api-extractor-model" "7.28.3" + "@microsoft/tsdoc" "0.14.2" + "@microsoft/tsdoc-config" "~0.16.1" + "@rushstack/node-core-library" "3.62.0" + "@rushstack/rig-package" "0.5.1" + "@rushstack/ts-command-line" "4.17.1" + colors "~1.2.1" + lodash "~4.17.15" + resolve "~1.22.1" + semver "~7.5.4" + source-map "~0.6.1" + typescript "5.3.3" + +"@microsoft/tsdoc-config@~0.16.1": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@microsoft/tsdoc-config/-/tsdoc-config-0.16.2.tgz#b786bb4ead00d54f53839a458ce626c8548d3adf" + integrity sha512-OGiIzzoBLgWWR0UdRJX98oYO+XKGf7tiK4Zk6tQ/E4IJqGCe7dvkTvgDZV5cFJUzLGDOjeAXrnZoA6QkVySuxw== + dependencies: + "@microsoft/tsdoc" "0.14.2" + ajv "~6.12.6" + jju "~1.4.0" + resolve "~1.19.0" + +"@microsoft/tsdoc@0.14.2": + version "0.14.2" + resolved "https://registry.yarnpkg.com/@microsoft/tsdoc/-/tsdoc-0.14.2.tgz#c3ec604a0b54b9a9b87e9735dfc59e1a5da6a5fb" + integrity sha512-9b8mPpKrfeGRuhFH5iO1iwCLeIIsV6+H1sRfxbkoGXIyQE2BTsPd9zqSqQJ+pv5sJ/hT5M1zvOFL02MnEezFug== + "@monaco-editor/loader@^1.3.3": version "1.3.3" resolved "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.3.3.tgz" @@ -2975,6 +3056,234 @@ resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz" integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== +"@radix-ui/primitive@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.1.tgz#e46f9958b35d10e9f6dc71c497305c22e3e55dbd" + integrity sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-arrow@1.0.3": + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz#c24f7968996ed934d57fe6cde5d6ec7266e1d25d" + integrity sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-primitive" "1.0.3" + +"@radix-ui/react-collection@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-collection/-/react-collection-1.0.3.tgz#9595a66e09026187524a36c6e7e9c7d286469159" + integrity sha512-3SzW+0PW7yBBoQlT8wNcGtaxaD0XSu0uLUFgrtHY08Acx05TaHaOmVLR73c0j/cqpDy53KBMO7s0dx2wmOIDIA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-slot" "1.0.2" + +"@radix-ui/react-compose-refs@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz#7ed868b66946aa6030e580b1ffca386dd4d21989" + integrity sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-context@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.0.1.tgz#fe46e67c96b240de59187dcb7a1a50ce3e2ec00c" + integrity sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-direction@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-direction/-/react-direction-1.0.1.tgz#9cb61bf2ccf568f3421422d182637b7f47596c9b" + integrity sha512-RXcvnXgyvYvBEOhCBuddKecVkoMiI10Jcm5cTI7abJRAHYfFxeu+FBQs/DvdxSYucxR5mna0dNsL6QFlds5TMA== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-dismissable-layer@1.0.5": + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz#3f98425b82b9068dfbab5db5fff3df6ebf48b9d4" + integrity sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-escape-keydown" "1.0.3" + +"@radix-ui/react-id@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.0.1.tgz#73cdc181f650e4df24f0b6a5b7aa426b912c88c0" + integrity sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-use-layout-effect" "1.0.1" + +"@radix-ui/react-popper@1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-popper/-/react-popper-1.1.3.tgz#24c03f527e7ac348fabf18c89795d85d21b00b42" + integrity sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w== + dependencies: + "@babel/runtime" "^7.13.10" + "@floating-ui/react-dom" "^2.0.0" + "@radix-ui/react-arrow" "1.0.3" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-layout-effect" "1.0.1" + "@radix-ui/react-use-rect" "1.0.1" + "@radix-ui/react-use-size" "1.0.1" + "@radix-ui/rect" "1.0.1" + +"@radix-ui/react-portal@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-portal/-/react-portal-1.0.4.tgz#df4bfd353db3b1e84e639e9c63a5f2565fb00e15" + integrity sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-primitive" "1.0.3" + +"@radix-ui/react-presence@1.0.1": 
+ version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-1.0.1.tgz#491990ba913b8e2a5db1b06b203cb24b5cdef9ba" + integrity sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-use-layout-effect" "1.0.1" + +"@radix-ui/react-primitive@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz#d49ea0f3f0b2fe3ab1cb5667eb03e8b843b914d0" + integrity sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-slot" "1.0.2" + +"@radix-ui/react-roving-focus@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz#e90c4a6a5f6ac09d3b8c1f5b5e81aab2f0db1974" + integrity sha512-2mUg5Mgcu001VkGy+FfzZyzbmuUWzgWkj3rvv4yu+mLw03+mTzbxZHvfcGyFp2b8EkQeMkpRQ5FiA2Vr2O6TeQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-collection" "1.0.3" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-direction" "1.0.1" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-controllable-state" "1.0.1" + +"@radix-ui/react-slot@1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.2.tgz#a9ff4423eade67f501ffb32ec22064bc9d3099ab" + integrity sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "1.0.1" + +"@radix-ui/react-tabs@1.0.4": + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/@radix-ui/react-tabs/-/react-tabs-1.0.4.tgz#993608eec55a5d1deddd446fa9978d2bc1053da2" + integrity sha512-egZfYY/+wRNCflXNHx+dePvnz9FbmssDTJBtgRfDY7e8SE5oIo3Py2eCB1ckAbh1Q7cQ/6yJZThJ++sgbxibog== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-direction" "1.0.1" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-roving-focus" "1.0.4" + "@radix-ui/react-use-controllable-state" "1.0.1" + +"@radix-ui/react-tooltip@1.0.7": + version "1.0.7" + resolved "https://registry.yarnpkg.com/@radix-ui/react-tooltip/-/react-tooltip-1.0.7.tgz#8f55070f852e7e7450cc1d9210b793d2e5a7686e" + integrity sha512-lPh5iKNFVQ/jav/j6ZrWq3blfDJ0OH9R6FlNUHPMqdLuQ9vwDgFsRxvl8b7Asuy5c8xmoojHUxKHQSOAvMHxyw== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-dismissable-layer" "1.0.5" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-popper" "1.1.3" + "@radix-ui/react-portal" "1.0.4" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-slot" "1.0.2" + "@radix-ui/react-use-controllable-state" "1.0.1" + "@radix-ui/react-visually-hidden" "1.0.3" + +"@radix-ui/react-use-callback-ref@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz#f4bb1f27f2023c984e6534317ebc411fc181107a" + integrity sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-use-controllable-state@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz#ecd2ced34e6330caf89a82854aa2f77e07440286" + integrity 
sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-use-callback-ref" "1.0.1" + +"@radix-ui/react-use-escape-keydown@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz#217b840c250541609c66f67ed7bab2b733620755" + integrity sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-use-callback-ref" "1.0.1" + +"@radix-ui/react-use-layout-effect@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz#be8c7bc809b0c8934acf6657b577daf948a75399" + integrity sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ== + dependencies: + "@babel/runtime" "^7.13.10" + +"@radix-ui/react-use-rect@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz#fde50b3bb9fd08f4a1cd204572e5943c244fcec2" + integrity sha512-Cq5DLuSiuYVKNU8orzJMbl15TXilTnJKUCltMVQg53BQOF1/C5toAaGrowkgksdBQ9H+SRL23g0HDmg9tvmxXw== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/rect" "1.0.1" + +"@radix-ui/react-use-size@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz#1c5f5fea940a7d7ade77694bb98116fb49f870b2" + integrity sha512-ibay+VqrgcaI6veAojjofPATwledXiSmX+C0KrBk/xgpX9rBzPV3OsfwlhQdUOFbh+LKQorLYT+xTXW9V8yd0g== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-use-layout-effect" "1.0.1" + +"@radix-ui/react-visually-hidden@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.3.tgz#51aed9dd0fe5abcad7dee2a234ad36106a6984ac" + integrity 
sha512-D4w41yN5YRKtu464TLnByKzMDG/JlMPHtfZgQAu9v6mNakUqGUI9vUrfQKz8NK41VMm/xbZbh76NUTVtIYqOMA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-primitive" "1.0.3" + +"@radix-ui/rect@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/rect/-/rect-1.0.1.tgz#bf8e7d947671996da2e30f4904ece343bc4a883f" + integrity sha512-fyrgCaedtvMg9NK3en0pnOYJdtfwxUcNolezkNPUsoX57X8oQk+NkqcvzHXD2uKNij6GXmWU9NDru2IWjrO4BQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@rc-component/color-picker@~1.4.1": version "1.4.1" resolved "https://registry.yarnpkg.com/@rc-component/color-picker/-/color-picker-1.4.1.tgz#dcab0b660e9c4ed63a7582db68ed4a77c862cb93" @@ -3065,6 +3374,46 @@ resolved "https://registry.yarnpkg.com/@react-dnd/shallowequal/-/shallowequal-4.0.2.tgz#d1b4befa423f692fa4abf1c79209702e7d8ae4b4" integrity sha512-/RVXdLvJxLg4QKvMoM5WlwNR9ViO9z8B/qPcc+C0Sa/teJY7QG7kJ441DwzOjMYEY7GmU4dj5EcGHIkKZiQZCA== +"@rollup/pluginutils@^5.1.0": + version "5.1.0" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.0.tgz#7e53eddc8c7f483a4ad0b94afb1f7f5fd3c771e0" + integrity sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g== + dependencies: + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^2.3.1" + +"@rushstack/node-core-library@3.62.0": + version "3.62.0" + resolved "https://registry.yarnpkg.com/@rushstack/node-core-library/-/node-core-library-3.62.0.tgz#a30a44a740b522944165f0faa6644134eb95be1d" + integrity sha512-88aJn2h8UpSvdwuDXBv1/v1heM6GnBf3RjEy6ZPP7UnzHNCqOHA2Ut+ScYUbXcqIdfew9JlTAe3g+cnX9xQ/Aw== + dependencies: + colors "~1.2.1" + fs-extra "~7.0.1" + import-lazy "~4.0.0" + jju "~1.4.0" + resolve "~1.22.1" + semver "~7.5.4" + z-schema "~5.0.2" + +"@rushstack/rig-package@0.5.1": + version "0.5.1" + resolved "https://registry.yarnpkg.com/@rushstack/rig-package/-/rig-package-0.5.1.tgz#6c9c283cc96b5bb1eae9875946d974ac5429bb21" + integrity 
sha512-pXRYSe29TjRw7rqxD4WS3HN/sRSbfr+tJs4a9uuaSIBAITbUggygdhuG0VrO0EO+QqH91GhYMN4S6KRtOEmGVA== + dependencies: + resolve "~1.22.1" + strip-json-comments "~3.1.1" + +"@rushstack/ts-command-line@4.17.1": + version "4.17.1" + resolved "https://registry.yarnpkg.com/@rushstack/ts-command-line/-/ts-command-line-4.17.1.tgz#c78db928ce5b93f2e98fd9e14c24f3f3876e57f1" + integrity sha512-2jweO1O57BYP5qdBGl6apJLB+aRIn5ccIRTPDyULh0KMwVzFqWtw6IZWt1qtUoZD/pD2RNkIOosH6Cq45rIYeg== + dependencies: + "@types/argparse" "1.0.38" + argparse "~1.0.9" + colors "~1.2.1" + string-argv "~0.3.1" + "@sideway/address@^4.1.3": version "4.1.4" resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz" @@ -3082,12 +3431,13 @@ resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== -"@signozhq/design-tokens@0.0.6": - version "0.0.6" - resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-0.0.6.tgz#42449052dca644c4d52448f9c2c521d39e535720" - integrity sha512-i+aG0YCuYL2KVUtRFj3qgAVDU6GbKmTdFXpqCqLUQp8diKMWH5Svzzxj4B14Q6+yE79+wbm1iZ0Nr6nYgkBA8Q== +"@signozhq/design-tokens@0.0.8": + version "0.0.8" + resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-0.0.8.tgz#368dc92cfe01d0cd893df140445c5d9dfd944a88" + integrity sha512-YUxQw6w7iyUMTBxj82nFZQNRsg7Boej3YM6K5bYfDMQg0MqvWQCWsP7EkyLHu/TiyOZwZWb++vzXG6m+YJX9bw== dependencies: style-dictionary "3.8.0" + vite-plugin-dts "^3.6.4" "@sinclair/typebox@^0.25.16": version "0.25.24" @@ -3207,6 +3557,11 @@ dependencies: "@types/estree" "*" +"@types/argparse@1.0.38": + version "1.0.38" + resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9" + integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA== + "@types/aria-query@^5.0.1": version "5.0.1" resolved 
"https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.1.tgz" @@ -4019,6 +4374,67 @@ resolved "https://registry.npmjs.org/@ungap/custom-elements/-/custom-elements-1.2.0.tgz" integrity sha512-zdSuu79stAwVUtzkQU9B5jhGh2LavtkeX4kxd2jtMJmZt7QqRJ1KJW5bukt/vUOaUs3z674GHd+nqYm0bu0Gyg== +"@volar/language-core@1.11.1", "@volar/language-core@~1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@volar/language-core/-/language-core-1.11.1.tgz#ecdf12ea8dc35fb8549e517991abcbf449a5ad4f" + integrity sha512-dOcNn3i9GgZAcJt43wuaEykSluAuOkQgzni1cuxLxTV0nJKanQztp7FxyswdRILaKH+P2XZMPRp2S4MV/pElCw== + dependencies: + "@volar/source-map" "1.11.1" + +"@volar/source-map@1.11.1", "@volar/source-map@~1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@volar/source-map/-/source-map-1.11.1.tgz#535b0328d9e2b7a91dff846cab4058e191f4452f" + integrity sha512-hJnOnwZ4+WT5iupLRnuzbULZ42L7BWWPMmruzwtLhJfpDVoZLjNBxHDi2sY2bgZXCKlpU5XcsMFoYrsQmPhfZg== + dependencies: + muggle-string "^0.3.1" + +"@volar/typescript@~1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@volar/typescript/-/typescript-1.11.1.tgz#ba86c6f326d88e249c7f5cfe4b765be3946fd627" + integrity sha512-iU+t2mas/4lYierSnoFOeRFQUhAEMgsFuQxoxvwn5EdQopw43j+J27a4lt9LMInx1gLJBC6qL14WYGlgymaSMQ== + dependencies: + "@volar/language-core" "1.11.1" + path-browserify "^1.0.1" + +"@vue/compiler-core@3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@vue/compiler-core/-/compiler-core-3.4.4.tgz#ba1ca008e95f118449cab79bdab3f7506bab2892" + integrity sha512-U5AdCN+6skzh2bSJrkMj2KZsVkUpgK8/XlxjSRYQZhNPcvt9/kmgIMpFEiTyK+Dz5E1J+8o8//BEIX+bakgVSw== + dependencies: + "@babel/parser" "^7.23.6" + "@vue/shared" "3.4.4" + entities "^4.5.0" + estree-walker "^2.0.2" + source-map-js "^1.0.2" + +"@vue/compiler-dom@^3.3.0": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@vue/compiler-dom/-/compiler-dom-3.4.4.tgz#a11bba8af691b58700c479ce893b02bf71bb089a" + integrity 
sha512-iSwkdDULCN+Vr8z6uwdlL044GJ/nUmECxP9vu7MzEs4Qma0FwDLYvnvRcyO0ZITuu3Os4FptGUDnhi1kOLSaGw== + dependencies: + "@vue/compiler-core" "3.4.4" + "@vue/shared" "3.4.4" + +"@vue/language-core@1.8.27", "@vue/language-core@^1.8.26": + version "1.8.27" + resolved "https://registry.yarnpkg.com/@vue/language-core/-/language-core-1.8.27.tgz#2ca6892cb524e024a44e554e4c55d7a23e72263f" + integrity sha512-L8Kc27VdQserNaCUNiSFdDl9LWT24ly8Hpwf1ECy3aFb9m6bDhBGQYOujDm21N7EW3moKIOKEanQwe1q5BK+mA== + dependencies: + "@volar/language-core" "~1.11.1" + "@volar/source-map" "~1.11.1" + "@vue/compiler-dom" "^3.3.0" + "@vue/shared" "^3.3.0" + computeds "^0.0.1" + minimatch "^9.0.3" + muggle-string "^0.3.1" + path-browserify "^1.0.1" + vue-template-compiler "^2.7.14" + +"@vue/shared@3.4.4", "@vue/shared@^3.3.0": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@vue/shared/-/shared-3.4.4.tgz#26e4e62a5fbfb39c25e9e54d21eeb852f1c83a7a" + integrity sha512-abSgiVRhfjfl3JALR/cSuBl74hGJ3SePgf1mKzodf1eMWLwHZbfEGxT2cNJSsNiw44jEgrO7bNkhchaWA7RwNw== + "@webassemblyjs/ast@1.11.5", "@webassemblyjs/ast@^1.11.5": version "1.11.5" resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.5.tgz" @@ -4332,7 +4748,7 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" -ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.12.6: +ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.12.6, ajv@~6.12.6: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -4503,7 +4919,7 @@ arg@^4.1.0: resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== -argparse@^1.0.7: +argparse@^1.0.7, argparse@~1.0.9: version "1.0.10" resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" integrity 
sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== @@ -5246,6 +5662,13 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" @@ -5755,6 +6178,11 @@ colorette@^2.0.10, colorette@^2.0.14, colorette@^2.0.16: resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== +colors@~1.2.1: + version "1.2.5" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.2.5.tgz#89c7ad9a374bc030df8013241f68136ed8835afc" + integrity sha512-erNRLao/Y3Fv54qUa0LBB+//Uf3YwMUmdJinN20yMXm9zdKKqH9wt7R9IIVZ+K7ShzfpLV/Zg8+VyrBJYB4lpg== + combined-stream@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" @@ -5772,6 +6200,11 @@ comma-separated-tokens@^2.0.0: resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== +commander@^10.0.0: + version "10.0.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" + integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== + commander@^2.20.0, commander@^2.20.3: version "2.20.3" resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" @@ -5861,6 
+6294,11 @@ compute-scroll-into-view@^3.0.2: resolved "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.0.3.tgz" integrity sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A== +computeds@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/computeds/-/computeds-0.0.1.tgz#215b08a4ba3e08a11ff6eee5d6d8d7166a97ce2e" + integrity sha512-7CEBgcMjVmitjYo5q8JTJVra6X5mQ20uTThdK+0kR7UEaDrAWEQcRiBtWJzga4eRpP6afNwwLsX2SET2JhVB1Q== + concat-map@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" @@ -6462,6 +6900,11 @@ dayjs@^1.10.7, dayjs@^1.11.1: resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.7.tgz" integrity sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ== +de-indent@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d" + integrity sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg== + debug@2.6.9, debug@4, debug@4.3.4, debug@^3.2.6, debug@^3.2.7, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@ngokevin/debug#noTimestamp: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" @@ -6895,7 +7338,7 @@ entities@^2.0.0, entities@^2.2.0: resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -entities@^4.2.0: +entities@^4.2.0, entities@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== @@ -7434,6 +7877,11 @@ estree-util-visit@^1.0.0: 
"@types/estree-jsx" "^1.0.0" "@types/unist" "^2.0.0" +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + estree-walker@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d" @@ -7898,6 +8346,15 @@ fs-extra@^10.0.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@~7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" + integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + fs-monkey@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz" @@ -7923,6 +8380,11 @@ function-bind@^1.1.1: resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz" @@ -8200,6 +8662,13 @@ hasha@^5.0.0: is-stream "^2.0.0" type-fest "^0.8.0" +hasown@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== + 
dependencies: + function-bind "^1.1.2" + hast-util-from-parse5@^7.0.0: version "7.1.2" resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz#aecfef73e3ceafdfa4550716443e4eb7b02e22b0" @@ -8662,6 +9131,11 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" +import-lazy@~4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153" + integrity sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw== + import-local@^3.0.2: version "3.1.0" resolved "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz" @@ -8875,6 +9349,13 @@ is-ci@^3.0.1: dependencies: ci-info "^3.2.0" +is-core-module@^2.1.0: + version "2.13.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== + dependencies: + hasown "^2.0.0" + is-core-module@^2.11.0, is-core-module@^2.5.0, is-core-module@^2.9.0: version "2.12.0" resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.0.tgz" @@ -9791,6 +10272,11 @@ jest@^27.5.1: import-local "^3.0.2" jest-cli "^27.5.1" +jju@~1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/jju/-/jju-1.4.0.tgz#a3abe2718af241a2b2904f84a625970f389ae32a" + integrity sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA== + joi@^17.3.0: version "17.9.2" resolved "https://registry.npmjs.org/joi/-/joi-17.9.2.tgz" @@ -9927,6 +10413,13 @@ jsonc-parser@^3.0.0: resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== +jsonfile@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== + optionalDependencies: + graceful-fs "^4.1.6" + jsonfile@^6.0.1: version "6.1.0" resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" @@ -9986,6 +10479,11 @@ klona@^2.0.4: resolved "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz" integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== +kolorist@^1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/kolorist/-/kolorist-1.8.0.tgz#edddbbbc7894bc13302cdf740af6374d4a04743c" + integrity sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ== + language-subtag-registry@~0.3.2: version "0.3.22" resolved "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz" @@ -10207,7 +10705,12 @@ lodash.flattendeep@^4.4.0: resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz" integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ== -lodash.isequal@^4.0.0: +lodash.get@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== + +lodash.isequal@^4.0.0, lodash.isequal@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ== @@ -10237,7 +10740,7 @@ lodash.uniq@^4.5.0: resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" integrity 
sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: +lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.15: version "4.17.21" resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -11086,6 +11589,13 @@ minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: dependencies: brace-expansion "^1.1.7" +minimatch@^9.0.3: + version "9.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== + dependencies: + brace-expansion "^2.0.1" + minimist-options@4.1.0: version "4.1.0" resolved "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz" @@ -11164,6 +11674,11 @@ msw@1.3.2: type-fest "^2.19.0" yargs "^17.3.1" +muggle-string@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/muggle-string/-/muggle-string-0.3.1.tgz#e524312eb1728c63dd0b2ac49e3282e6ed85963a" + integrity sha512-ckmWDJjphvd/FvZawgygcUeQCxzvohjFO5RxTjj4eq8kw359gFF3E1brjfI+viLMxss5JrHTDRHZvu2/tuy0Qg== + multicast-dns@^7.2.5: version "7.2.5" resolved "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz" @@ -11845,6 +12360,11 @@ pascal-case@^3.1.2: no-case "^3.0.4" tslib "^2.0.3" +path-browserify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" + integrity sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g== + path-case@^3.0.4: version "3.0.4" resolved 
"https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f" @@ -11873,7 +12393,7 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.7: +path-parse@^1.0.6, path-parse@^1.0.7: version "1.0.7" resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -13657,6 +14177,23 @@ resolve@^2.0.0-next.4: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +resolve@~1.19.0: + version "1.19.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.19.0.tgz#1af5bf630409734a067cae29318aac7fa29a267c" + integrity sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg== + dependencies: + is-core-module "^2.1.0" + path-parse "^1.0.6" + +resolve@~1.22.1: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" @@ -13879,7 +14416,7 @@ selfsigned@^2.1.1: dependencies: node-forge "^1" -"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: +"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, 
semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4, semver@~7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== @@ -14343,6 +14880,11 @@ string-argv@^0.3.1: resolved "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz" integrity sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg== +string-argv@~0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6" + integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q== + string-convert@^0.2.0: version "0.2.1" resolved "https://registry.npmjs.org/string-convert/-/string-convert-0.2.1.tgz" @@ -14473,7 +15015,7 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1, strip-json-comments@~3.1.1: version "3.1.1" resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== @@ -15093,6 +15635,11 @@ typescript-plugin-css-modules@5.0.1: stylus "^0.59.0" tsconfig-paths "^4.1.2" +typescript@5.3.3: + version "5.3.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" + integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== + typescript@^4.0.5, typescript@^4.4.3: version "4.9.5" resolved "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz" @@ -15211,6 +15758,11 @@ unist-util-visit@^4.0.0, unist-util-visit@^4.1.0, unist-util-visit@^4.1.2, unist 
unist-util-is "^5.0.0" unist-util-visit-parents "^5.1.1" +universalify@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + universalify@^0.2.0: version "0.2.0" resolved "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz" @@ -15364,6 +15916,11 @@ validate-npm-package-license@^3.0.1: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" +validator@^13.7.0: + version "13.11.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.11.0.tgz#23ab3fd59290c61248364eabf4067f04955fbb1b" + integrity sha512-Ii+sehpSfZy+At5nPdnyMhx78fEoPDkR2XW/zimHEL3MyGJQOCQ7WeP20jPYRz7ZCpcKLB21NxuXHF3bxjStBQ== + value-equal@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz" @@ -15400,11 +15957,40 @@ vfile@^5.0.0: unist-util-stringify-position "^3.0.0" vfile-message "^3.0.0" +vite-plugin-dts@^3.6.4: + version "3.7.0" + resolved "https://registry.yarnpkg.com/vite-plugin-dts/-/vite-plugin-dts-3.7.0.tgz#654ee7c38c0cdd4589b9bc198a264f34172bd870" + integrity sha512-np1uPaYzu98AtPReB8zkMnbjwcNHOABsLhqVOf81b3ol9b5M2wPcAVs8oqPnOpr6Us+7yDXVauwkxsk5+ldmRA== + dependencies: + "@microsoft/api-extractor" "7.39.0" + "@rollup/pluginutils" "^5.1.0" + "@vue/language-core" "^1.8.26" + debug "^4.3.4" + kolorist "^1.8.0" + vue-tsc "^1.8.26" + void-elements@3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz" integrity sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w== +vue-template-compiler@^2.7.14: + version "2.7.16" + resolved "https://registry.yarnpkg.com/vue-template-compiler/-/vue-template-compiler-2.7.16.tgz#c81b2d47753264c77ac03b9966a46637482bb03b" + integrity 
sha512-AYbUWAJHLGGQM7+cNTELw+KsOG9nl2CnSv467WobS5Cv9uk3wFcnr1Etsz2sEIHEZvw1U+o9mRlEO6QbZvUPGQ== + dependencies: + de-indent "^1.0.2" + he "^1.2.0" + +vue-tsc@^1.8.26: + version "1.8.27" + resolved "https://registry.yarnpkg.com/vue-tsc/-/vue-tsc-1.8.27.tgz#feb2bb1eef9be28017bb9e95e2bbd1ebdd48481c" + integrity sha512-WesKCAZCRAbmmhuGl3+VrdWItEvfoFIPXOvUJkjULi+x+6G/Dy69yO3TBRJDr9eUlmsNAwVmxsNZxvHKzbkKdg== + dependencies: + "@volar/typescript" "~1.11.1" + "@vue/language-core" "1.8.27" + semver "^7.5.4" + w3c-hr-time@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz" @@ -16073,6 +16659,17 @@ yocto-queue@^1.0.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== +z-schema@~5.0.2: + version "5.0.6" + resolved "https://registry.yarnpkg.com/z-schema/-/z-schema-5.0.6.tgz#46d6a687b15e4a4369e18d6cb1c7b8618fc256c5" + integrity sha512-+XR1GhnWklYdfr8YaZv/iu+vY+ux7V5DS5zH1DQf6bO5ufrt/5cgNhVO5qyhsjFXvsqQb/f08DWE9b6uPscyAg== + dependencies: + lodash.get "^4.4.2" + lodash.isequal "^4.5.0" + validator "^13.7.0" + optionalDependencies: + commander "^10.0.0" + zwitch@^2.0.0, zwitch@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7" diff --git a/go.mod b/go.mod index d7d824f47d..ec4d7506ff 100644 --- a/go.mod +++ b/go.mod @@ -1,11 +1,11 @@ module go.signoz.io/signoz -go 1.21 +go 1.21.3 require ( github.com/ClickHouse/clickhouse-go/v2 v2.15.0 - github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb - github.com/SigNoz/signoz-otel-collector v0.88.11 + github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd + github.com/SigNoz/signoz-otel-collector v0.88.12 github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 github.com/SigNoz/zap_otlp/zap_otlp_sync 
v0.0.0-20230822164844-1b861a431974 github.com/antonmedv/expr v1.15.3 @@ -19,7 +19,7 @@ require ( github.com/go-redis/redis/v8 v8.11.5 github.com/go-redis/redismock/v8 v8.11.5 github.com/golang-jwt/jwt v3.2.2+incompatible - github.com/google/uuid v1.3.1 + github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.1 github.com/gorilla/mux v1.8.0 github.com/gosimple/slug v1.10.0 @@ -61,13 +61,13 @@ require ( go.opentelemetry.io/collector/processor v0.88.0 go.opentelemetry.io/collector/receiver v0.88.0 go.opentelemetry.io/collector/service v0.88.0 - go.opentelemetry.io/otel v1.19.0 - go.opentelemetry.io/otel/sdk v1.19.0 + go.opentelemetry.io/otel v1.22.0 + go.opentelemetry.io/otel/sdk v1.22.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.26.0 golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 - golang.org/x/net v0.17.0 + golang.org/x/net v0.19.0 golang.org/x/oauth2 v0.13.0 google.golang.org/grpc v1.59.0 google.golang.org/protobuf v1.31.0 @@ -100,9 +100,9 @@ require ( github.com/felixge/httpsnoop v1.0.3 // indirect github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect github.com/go-faster/city v1.0.1 // indirect - github.com/go-faster/errors v0.6.1 // indirect + github.com/go-faster/errors v0.7.1 // indirect github.com/go-logfmt/logfmt v0.6.0 // indirect - github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.2.6 // indirect github.com/gogo/protobuf v1.3.2 // indirect @@ -122,7 +122,7 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/jpillora/backoff v1.0.0 // indirect github.com/jtolds/gls v4.20.0+incompatible // indirect - github.com/klauspost/compress v1.17.1 // indirect + github.com/klauspost/compress v1.17.5 // indirect github.com/klauspost/cpuid v1.2.3 // indirect github.com/knadh/koanf/v2 v2.0.1 // indirect github.com/kylelemons/godebug v1.1.0 // indirect @@ -142,7 +142,7 @@ require ( 
github.com/oklog/ulid v1.3.1 // indirect github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.88.0 // indirect github.com/paulmach/orb v0.10.0 // indirect - github.com/pierrec/lz4/v4 v4.1.18 // indirect + github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect @@ -157,7 +157,7 @@ require ( github.com/shirou/gopsutil/v3 v3.23.9 // indirect github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/shopspring/decimal v1.3.1 // indirect - github.com/sirupsen/logrus v1.9.2 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/smarty/assertions v1.15.0 // indirect github.com/spf13/cobra v1.7.0 // indirect github.com/spf13/pflag v1.0.5 // indirect @@ -183,14 +183,14 @@ require ( go.opentelemetry.io/otel/exporters/prometheus v0.42.0 // indirect go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0 // indirect go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 // indirect - go.opentelemetry.io/otel/metric v1.19.0 // indirect + go.opentelemetry.io/otel/metric v1.22.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.19.0 // indirect - go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.opentelemetry.io/otel/trace v1.22.0 // indirect go.opentelemetry.io/proto/otlp v1.0.0 // indirect go.uber.org/atomic v1.11.0 // indirect go.uber.org/goleak v1.2.1 // indirect - golang.org/x/sync v0.4.0 // indirect - golang.org/x/sys v0.15.0 // indirect + golang.org/x/sync v0.6.0 // indirect + golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/time v0.3.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect diff --git a/go.sum b/go.sum index f65f071c40..ae300b0f17 100644 --- a/go.sum +++ b/go.sum @@ -96,10 +96,12 @@ github.com/OneOfOne/xxhash v1.2.2 
h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFmafKx32bynV6QrzViL/s+ZDvQxH1E4= github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA= +github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8= +github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY= github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww= -github.com/SigNoz/signoz-otel-collector v0.88.11 h1:w9IVcXg5T+o37c0HVtBfxdKxkPYyiGX1YaOrCexpjrc= -github.com/SigNoz/signoz-otel-collector v0.88.11/go.mod h1:2ddO2lcb/4kMONIJXwfXxegRsi7FYPSWtXTgji3qsp8= +github.com/SigNoz/signoz-otel-collector v0.88.12 h1:UwkVi1o2NY9gRgCLBtWVKr+UDxb4FaTs63Sb20qgf8w= +github.com/SigNoz/signoz-otel-collector v0.88.12/go.mod h1:RH9OEjni6tkh9RgN/meSPxv3kykjcFscqMwJgbUAXmo= github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc= github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo= github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY= @@ -240,8 +242,8 @@ github.com/go-co-op/gocron v1.30.1 h1:tjWUvJl5KrcwpkEkSXFSQFr4F9h5SfV/m4+RX0cV2f github.com/go-co-op/gocron v1.30.1/go.mod h1:39f6KNSGVOU1LO/ZOoZfcSxwlsJDQOKSu8erN0SH48Y= github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw= github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw= -github.com/go-faster/errors v0.6.1 h1:nNIPOBkprlKzkThvS/0YaX8Zs9KewLCOSFQS5BU06FI= -github.com/go-faster/errors v0.6.1/go.mod 
h1:5MGV2/2T9yvlrbhe9pD9LO5Z/2zCSq2T8j+Jpi2LAyY= +github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg= +github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -262,8 +264,8 @@ github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= -github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= @@ -383,8 +385,8 @@ github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= 
-github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -518,8 +520,8 @@ github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8 github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2gPS5g= -github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.5 h1:d4vBd+7CHydUqpFBgUEKkSdtSugf9YFmSkvUYPquI5E= +github.com/klauspost/compress v1.17.5/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid v1.2.3 h1:CCtW0xUnWGVINKvE/WWOYKdsPV6mawAtvQuSl8guwQs= github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs= @@ -653,8 +655,8 @@ github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransfor github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.88.0/go.mod h1:Vhb+pyxTKFjAoLaaJCiYHbJS6o56vQEvnJDhh/ws6yY= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod 
h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc4 h1:oOxKUJWnFC4YGHCCMNql1x4YaDfYBTS5Y4x/Cgeo1E0= -github.com/opencontainers/image-spec v1.1.0-rc4/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= +github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/ovh/go-ovh v1.4.1 h1:VBGa5wMyQtTP7Zb+w97zRCh9sLtM/2YKRyy+MEJmWaM= @@ -670,8 +672,8 @@ github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAv github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= -github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= +github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -770,8 +772,8 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.5.0/go.mod 
h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y= -github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY= github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= @@ -890,8 +892,8 @@ go.opentelemetry.io/contrib/propagators/b3 v1.20.0 h1:Yty9Vs4F3D6/liF1o6FNt0PvN8 go.opentelemetry.io/contrib/propagators/b3 v1.20.0/go.mod h1:On4VgbkqYL18kbJlWsa18+cMNe6rYpBnPi1ARI/BrsU= go.opentelemetry.io/contrib/zpages v0.45.0 h1:jIwHHGoWzJoZdbIUtWdErjL85Gni6BignnAFqDtMRL4= go.opentelemetry.io/contrib/zpages v0.45.0/go.mod h1:4mIdA5hqH6hEx9sZgV50qKfQO8aIYolUZboHmz+G7vw= -go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= -go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= +go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y= +go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI= go.opentelemetry.io/otel/bridge/opencensus v0.42.0 h1:QvC+bcZkWMphWPiVqRQygMj6M0/3TOuJEO+erRA7kI8= go.opentelemetry.io/otel/bridge/opencensus v0.42.0/go.mod h1:XJojP7g5DqYdiyArix/H9i1XzPPlIUc9dGLKtF9copI= go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU= @@ -912,14 +914,14 @@ go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0 h1:4jJuoeOo9W6hZn go.opentelemetry.io/otel/exporters/stdout/stdoutmetric 
v0.42.0/go.mod h1:/MtYTE1SfC2QIcE0bDot6fIX+h+WvXjgTqgn9P0LNPE= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 h1:Nw7Dv4lwvGrI68+wULbcq7su9K2cebeCUrDjVrUJHxM= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0/go.mod h1:1MsF6Y7gTqosgoZvHlzcaaM8DIMNZgJh87ykokoNH7Y= -go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= -go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= -go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= -go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= +go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg= +go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY= +go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw= +go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc= go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k= go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY= -go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= -go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= +go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0= +go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= @@ -1043,8 +1045,8 @@ golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net 
v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1081,8 +1083,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= -golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1178,8 +1180,8 @@ 
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 3f11a4823a..4eef78acf0 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -71,6 +71,7 @@ const ( signozSampleLocalTableName = "samples_v2" signozSampleTableName = "distributed_samples_v2" signozTSTableName = "distributed_time_series_v2" + signozTSTableNameV41Day = "distributed_time_series_v4_1day" minTimespanForProgressiveSearch = time.Hour minTimespanForProgressiveSearchMargin = time.Minute @@ -3287,7 +3288,7 @@ func (r *ClickHouseReader) FetchTemporality(ctx context.Context, metricNames []s metricNameToTemporality := make(map[string]map[v3.Temporality]bool) - query := fmt.Sprintf(`SELECT DISTINCT metric_name, temporality FROM %s.%s WHERE metric_name IN $1`, signozMetricDBName, signozTSTableName) + query := fmt.Sprintf(`SELECT DISTINCT metric_name, temporality FROM %s.%s WHERE metric_name IN $1`, signozMetricDBName, signozTSTableNameV41Day) rows, err := r.db.Query(ctx, 
query, metricNames) if err != nil { @@ -3309,15 +3310,6 @@ func (r *ClickHouseReader) FetchTemporality(ctx context.Context, metricNames []s return metricNameToTemporality, nil } -// func sum(array []tsByMetricName) uint64 { -// var result uint64 -// result = 0 -// for _, v := range array { -// result += v.count -// } -// return result -// } - func (r *ClickHouseReader) GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error) { queryStr := fmt.Sprintf("SELECT count() as count from %s.%s group by metric_name order by count desc;", signozMetricDBName, signozTSTableName) @@ -3960,7 +3952,7 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req var rows driver.Rows var response v3.AggregateAttributeResponse - query = fmt.Sprintf("SELECT DISTINCT(metric_name) from %s.%s WHERE metric_name ILIKE $1", signozMetricDBName, signozTSTableName) + query = fmt.Sprintf("SELECT DISTINCT metric_name, type from %s.%s WHERE metric_name ILIKE $1", signozMetricDBName, signozTSTableNameV41Day) if req.Limit != 0 { query = query + fmt.Sprintf(" LIMIT %d;", req.Limit) } @@ -3972,15 +3964,16 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req } defer rows.Close() - var metricName string + var metricName, typ string for rows.Next() { - if err := rows.Scan(&metricName); err != nil { + if err := rows.Scan(&metricName, &typ); err != nil { return nil, fmt.Errorf("error while scanning rows: %s", err.Error()) } + // unlike traces/logs `tag`/`resource` type, the `Type` will be metric type key := v3.AttributeKey{ Key: metricName, DataType: v3.AttributeKeyDataTypeFloat64, - Type: v3.AttributeKeyTypeUnspecified, + Type: v3.AttributeKeyType(typ), IsColumn: true, } response.AttributeKeys = append(response.AttributeKeys, key) @@ -4111,6 +4104,68 @@ func (r *ClickHouseReader) GetLatencyMetricMetadata(ctx context.Context, metricN }, nil } +func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, serviceName 
string) (*v3.MetricMetadataResponse, error) { + // Note: metric metadata should be accessible regardless of the time range selection + // our standard retention period is 30 days, so we are querying the table v4_1_day to reduce the + // amount of data scanned + query := fmt.Sprintf("SELECT DISTINCT temporality, description, type, unit, is_monotonic from %s.%s WHERE metric_name=$1", signozMetricDBName, signozTSTableNameV41Day) + rows, err := r.db.Query(ctx, query, metricName) + if err != nil { + zap.S().Error(err) + return nil, fmt.Errorf("error while fetching metric metadata: %s", err.Error()) + } + defer rows.Close() + + var deltaExists, isMonotonic bool + var temporality, description, metricType, unit string + for rows.Next() { + if err := rows.Scan(&temporality, &description, &metricType, &unit, &isMonotonic); err != nil { + return nil, fmt.Errorf("error while scanning rows: %s", err.Error()) + } + if temporality == string(v3.Delta) { + deltaExists = true + } + } + + query = fmt.Sprintf("SELECT DISTINCT(JSONExtractString(labels, 'le')) as le from %s.%s WHERE metric_name=$1 AND type = 'Histogram' AND JSONExtractString(labels, 'service_name') = $2 ORDER BY le", signozMetricDBName, signozTSTableNameV41Day) + rows, err = r.db.Query(ctx, query, metricName, serviceName) + if err != nil { + zap.S().Error(err) + return nil, fmt.Errorf("error while executing query: %s", err.Error()) + } + defer rows.Close() + + var leFloat64 []float64 + for rows.Next() { + var leStr string + if err := rows.Scan(&leStr); err != nil { + return nil, fmt.Errorf("error while scanning rows: %s", err.Error()) + } + le, err := strconv.ParseFloat(leStr, 64) + // ignore the error and continue if the value is not a float + // ideally this should not happen but we have seen ClickHouse + // returning empty string for some values + if err != nil { + zap.S().Error("error while parsing le value: ", err) + continue + } + if math.IsInf(le, 0) { + continue + } + leFloat64 = append(leFloat64, le) + } + + 
return &v3.MetricMetadataResponse{ + Delta: deltaExists, + Le: leFloat64, + Description: description, + Unit: unit, + Type: metricType, + IsMonotonic: isMonotonic, + Temporality: temporality, + }, nil +} + func isColumn(tableStatement, attrType, field, datType string) bool { // value of attrType will be `resource` or `tag`, if `tag` change it to `attribute` name := utils.GetClickhouseColumnName(attrType, datType, field) diff --git a/pkg/query-service/app/formula.go b/pkg/query-service/app/formula.go new file mode 100644 index 0000000000..657a7bcad9 --- /dev/null +++ b/pkg/query-service/app/formula.go @@ -0,0 +1,261 @@ +package app + +import ( + "fmt" + "math" + "sort" + + "github.com/SigNoz/govaluate" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" +) + +// Define the ExpressionEvalFunc type +type ExpressionEvalFunc func(*govaluate.EvaluableExpression, map[string]float64) float64 + +// Helper function to check if one label set is a subset of another +func isSubset(super, sub map[string]string) bool { + for k, v := range sub { + if val, ok := super[k]; !ok || val != v { + return false + } + } + return true +} + +// Function to find unique label sets +func findUniqueLabelSets(results []*v3.Result) []map[string]string { + allLabelSets := make([]map[string]string, 0) + // The size of the `results` small, It is the number of queries in the request + for _, result := range results { + // The size of the `result.Series` slice is usually small, It is the number of series in the query result. + // We will limit the number of series in the query result to order of 100-1000. 
+ for _, series := range result.Series { + allLabelSets = append(allLabelSets, series.Labels) + } + } + + // sort the label sets by the number of labels in descending order + sort.Slice(allLabelSets, func(i, j int) bool { + return len(allLabelSets[i]) > len(allLabelSets[j]) + }) + + uniqueSets := make([]map[string]string, 0) + + for _, labelSet := range allLabelSets { + // If the label set is not a subset of any of the unique label sets, add it to the unique label sets + isUnique := true + for _, uniqueLabelSet := range uniqueSets { + if isSubset(uniqueLabelSet, labelSet) { + isUnique = false + break + } + } + if isUnique { + uniqueSets = append(uniqueSets, labelSet) + } + } + + return uniqueSets +} + +// Function to join series on timestamp and calculate new values +func joinAndCalculate(results []*v3.Result, uniqueLabelSet map[string]string, expression *govaluate.EvaluableExpression) (*v3.Series, error) { + + uniqueTimestamps := make(map[int64]struct{}) + // map[queryName]map[timestamp]value + seriesMap := make(map[string]map[int64]float64) + for _, result := range results { + var matchingSeries *v3.Series + // We try to find a series that matches the label set from the current query result + for _, series := range result.Series { + if isSubset(uniqueLabelSet, series.Labels) { + matchingSeries = series + break + } + } + + // Prepare the seriesMap for quick lookup during evaluation + // seriesMap[queryName][timestamp]value contains the value of the series with the given queryName at the given timestamp + if matchingSeries != nil { + for _, point := range matchingSeries.Points { + if _, ok := seriesMap[result.QueryName]; !ok { + seriesMap[result.QueryName] = make(map[int64]float64) + } + seriesMap[result.QueryName][point.Timestamp] = point.Value + uniqueTimestamps[point.Timestamp] = struct{}{} + } + } + } + + vars := expression.Vars() + var doesNotHaveAllVars bool + for _, v := range vars { + if _, ok := seriesMap[v]; !ok { + doesNotHaveAllVars = true + break + } + 
} + + // There is no series that matches the label set from all queries + // TODO: Does the lack of a series from one query mean that the result should be nil? + // Or should we interpret the series as having a value of 0 at all timestamps? + // The current behaviour with ClickHouse is to show no data + if doesNotHaveAllVars { + return nil, nil + } + + resultSeries := &v3.Series{ + Labels: uniqueLabelSet, + } + timestamps := make([]int64, 0) + for timestamp := range uniqueTimestamps { + timestamps = append(timestamps, timestamp) + } + sort.Slice(timestamps, func(i, j int) bool { + return timestamps[i] < timestamps[j] + }) + + for _, timestamp := range timestamps { + values := make(map[string]interface{}) + for queryName, series := range seriesMap { + values[queryName] = series[timestamp] + } + newValue, err := expression.Evaluate(values) + if err != nil { + return nil, err + } + + val, ok := newValue.(float64) + if !ok { + return nil, fmt.Errorf("expected float64, got %T", newValue) + } + + resultSeries.Points = append(resultSeries.Points, v3.Point{ + Timestamp: timestamp, + Value: val, + }) + } + return resultSeries, nil +} + +// Main function to process the Results +// A series can be "join"ed with other series if they have the same label set or one is a subset of the other. +// 1. Find all unique label sets +// 2. For each unique label set, find a series that matches the label set from each query result +// 3. Join the series on timestamp and calculate the new values +// 4. 
Return the new series +func processResults(results []*v3.Result, expression *govaluate.EvaluableExpression) (*v3.Result, error) { + uniqueLabelSets := findUniqueLabelSets(results) + newSeries := make([]*v3.Series, 0) + + for _, labelSet := range uniqueLabelSets { + series, err := joinAndCalculate(results, labelSet, expression) + if err != nil { + return nil, err + } + if series != nil { + labelsArray := make([]map[string]string, 0) + for k, v := range series.Labels { + labelsArray = append(labelsArray, map[string]string{k: v}) + } + series.LabelsArray = labelsArray + newSeries = append(newSeries, series) + } + } + + return &v3.Result{ + Series: newSeries, + }, nil +} + +var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians"} + +func evalFuncs() map[string]govaluate.ExpressionFunction { + GoValuateFuncs := make(map[string]govaluate.ExpressionFunction) + // Returns e to the power of the given argument. + GoValuateFuncs["exp"] = func(args ...interface{}) (interface{}, error) { + return math.Exp(args[0].(float64)), nil + } + // Returns the natural logarithm of the given argument. + GoValuateFuncs["log"] = func(args ...interface{}) (interface{}, error) { + return math.Log(args[0].(float64)), nil + } + // Returns the natural logarithm of the given argument. + GoValuateFuncs["ln"] = func(args ...interface{}) (interface{}, error) { + return math.Log(args[0].(float64)), nil + } + // Returns the base 2 exponential of the given argument. + GoValuateFuncs["exp2"] = func(args ...interface{}) (interface{}, error) { + return math.Exp2(args[0].(float64)), nil + } + // Returns the base 2 logarithm of the given argument. + GoValuateFuncs["log2"] = func(args ...interface{}) (interface{}, error) { + return math.Log2(args[0].(float64)), nil + } + // Returns the base 10 exponential of the given argument. 
+ GoValuateFuncs["exp10"] = func(args ...interface{}) (interface{}, error) { + return math.Pow10(int(args[0].(float64))), nil + } + // Returns the base 10 logarithm of the given argument. + GoValuateFuncs["log10"] = func(args ...interface{}) (interface{}, error) { + return math.Log10(args[0].(float64)), nil + } + // Returns the square root of the given argument. + GoValuateFuncs["sqrt"] = func(args ...interface{}) (interface{}, error) { + return math.Sqrt(args[0].(float64)), nil + } + // Returns the cube root of the given argument. + GoValuateFuncs["cbrt"] = func(args ...interface{}) (interface{}, error) { + return math.Cbrt(args[0].(float64)), nil + } + // Returns the error function of the given argument. + GoValuateFuncs["erf"] = func(args ...interface{}) (interface{}, error) { + return math.Erf(args[0].(float64)), nil + } + // Returns the complementary error function of the given argument. + GoValuateFuncs["erfc"] = func(args ...interface{}) (interface{}, error) { + return math.Erfc(args[0].(float64)), nil + } + // Returns the natural logarithm of the absolute value of the gamma function of the given argument. + GoValuateFuncs["lgamma"] = func(args ...interface{}) (interface{}, error) { + v, _ := math.Lgamma(args[0].(float64)) + return v, nil + } + // Returns the gamma function of the given argument. + GoValuateFuncs["tgamma"] = func(args ...interface{}) (interface{}, error) { + return math.Gamma(args[0].(float64)), nil + } + // Returns the sine of the given argument. + GoValuateFuncs["sin"] = func(args ...interface{}) (interface{}, error) { + return math.Sin(args[0].(float64)), nil + } + // Returns the cosine of the given argument. + GoValuateFuncs["cos"] = func(args ...interface{}) (interface{}, error) { + return math.Cos(args[0].(float64)), nil + } + // Returns the tangent of the given argument. 
+ GoValuateFuncs["tan"] = func(args ...interface{}) (interface{}, error) { + return math.Tan(args[0].(float64)), nil + } + // Returns the arcsine of the given argument. + GoValuateFuncs["asin"] = func(args ...interface{}) (interface{}, error) { + return math.Asin(args[0].(float64)), nil + } + // Returns the arccosine of the given argument. + GoValuateFuncs["acos"] = func(args ...interface{}) (interface{}, error) { + return math.Acos(args[0].(float64)), nil + } + // Returns the arctangent of the given argument. + GoValuateFuncs["atan"] = func(args ...interface{}) (interface{}, error) { + return math.Atan(args[0].(float64)), nil + } + // Returns the argument converted from radians to degrees. + GoValuateFuncs["degrees"] = func(args ...interface{}) (interface{}, error) { + return args[0].(float64) * 180 / math.Pi, nil + } + // Returns the argument converted from degrees to radians. + GoValuateFuncs["radians"] = func(args ...interface{}) (interface{}, error) { + return args[0].(float64) * math.Pi / 180, nil + } + return GoValuateFuncs +} diff --git a/pkg/query-service/app/formula_test.go b/pkg/query-service/app/formula_test.go new file mode 100644 index 0000000000..17a073ae32 --- /dev/null +++ b/pkg/query-service/app/formula_test.go @@ -0,0 +1,1478 @@ +package app + +import ( + "math" + "reflect" + "testing" + + "github.com/SigNoz/govaluate" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" +) + +func TestFindUniqueLabelSets(t *testing.T) { + tests := []struct { + name string + result []*v3.Result + want []map[string]string + }{ + { + name: "test1", + result: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + "operation": "GET /api", + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "redis", + }, + }, + }, + }, + }, + want: []map[string]string{ + { + "service_name": "frontend", + "operation": "GET /api", + }, + { + 
"service_name": "redis", + }, + }, + }, + { + name: "test2", + result: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + "operation": "GET /api", + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + }, + }, + }, + }, + { + QueryName: "C", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "operation": "PUT /api", + }, + }, + }, + }, + { + QueryName: "D", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + "http_status": "200", + }, + }, + }, + }, + }, + want: []map[string]string{ + { + "service_name": "frontend", + "operation": "GET /api", + }, + { + "service_name": "frontend", + "http_status": "200", + }, + { + "operation": "PUT /api", + }, + }, + }, + { + name: "empty result", + result: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{}, + }, + { + QueryName: "B", + Series: []*v3.Series{}, + }, + }, + want: []map[string]string{}, + }, + { + name: "results with overlapping labels", + result: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + "operation": "GET /api", + }, + }, + { + Labels: map[string]string{ + "service_name": "redis", + "operation": "GET /api", + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "redis", + }, + }, + { + Labels: map[string]string{ + "service_name": "frontend", + }, + }, + }, + }, + }, + want: []map[string]string{ + { + "service_name": "frontend", + "operation": "GET /api", + }, + { + "service_name": "redis", + "operation": "GET /api", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := findUniqueLabelSets(tt.result) + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("findUniqueLabelSets() = %v, want %v\n", got, tt.want) + } + }) + } +} + +func 
TestProcessResults(t *testing.T) { + tests := []struct { + name string + results []*v3.Result + want *v3.Result + }{ + { + name: "test1", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + "operation": "GET /api", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 30, + }, + { + Timestamp: 3, + Value: 40, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expression, err := govaluate.NewEvaluableExpression("A + B") + if err != nil { + t.Errorf("Error parsing expression: %v", err) + } + got, err := processResults(tt.results, expression) + if err != nil { + t.Errorf("Error processing results: %v", err) + } + if len(got.Series) != len(tt.want.Series) { + t.Errorf("processResults(): number of sereis - got = %v, want %v", len(got.Series), len(tt.want.Series)) + } + + for i := range got.Series { + if len(got.Series[i].Points) != len(tt.want.Series[i].Points) { + t.Errorf("processResults(): number of points - got = %v, want %v", got, tt.want) + } + for j := range got.Series[i].Points { + if got.Series[i].Points[j].Value != tt.want.Series[i].Points[j].Value { + t.Errorf("processResults(): got = %v, want %v", got.Series[i].Points[j].Value, tt.want.Series[i].Points[j].Value) + } + } + } + }) + } +} + +func TestProcessResultsErrorRate(t *testing.T) { + tests := []struct { + name string + results []*v3.Result + want *v3.Result + }{ + { + name: "test1", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + 
Timestamp: 2, + Value: 20, + }, + }, + }, + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + }, + }, + { + Labels: map[string]string{ + "service_name": "route", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 2, + }, + { + Timestamp: 2, + Value: 45, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 6, + }, + { + Timestamp: 2, + Value: 9, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 0.5, + }, + { + Timestamp: 2, + Value: 0.2, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expression, err := govaluate.NewEvaluableExpression("B/A") + if err != nil { + t.Errorf("Error parsing expression: %v", err) + } + got, err := processResults(tt.results, expression) + if err != nil { + t.Errorf("Error processing results: %v", err) + } + if len(got.Series) != len(tt.want.Series) { + t.Errorf("processResults(): number of sereis - got = %v, want %v", len(got.Series), len(tt.want.Series)) + } + + for i := range got.Series { + if len(got.Series[i].Points) != len(tt.want.Series[i].Points) { + t.Errorf("processResults(): number of points - got = %v, want %v", got, tt.want) + } + for j := range got.Series[i].Points { + if got.Series[i].Points[j].Value != tt.want.Series[i].Points[j].Value { + t.Errorf("processResults(): got = %v, want %v", got.Series[i].Points[j].Value, tt.want.Series[i].Points[j].Value) + } + } + } + }) + } +} + +func TestFormula(t *testing.T) { + tests := []struct { + name string + expression string + results []*v3.Result + want *v3.Result + }{ + { + name: "No group keys on the left side", + expression: "B/A", + 
results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + }, + }, + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{}, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "service_name": "frontend", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 2.2, + }, + { + Timestamp: 2, + Value: 3.25, + }, + }, + }, + { + Labels: map[string]string{ + "service_name": "redis", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 1.8333333333333333, + }, + { + Timestamp: 2, + Value: 1.4444444444444444, + }, + }, + }, + }, + }, + }, + { + name: "No group keys on the right side", + expression: "A/B", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{}, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + 
Timestamp: 1, + Value: 0.45454545454545453, + }, + { + Timestamp: 2, + Value: 0.3076923076923077, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 0.5454545454545454, + }, + { + Timestamp: 2, + Value: 0.6923076923076923, + }, + }, + }, + }, + }, + }, + { + name: "Group keys on both sides are the same", + expression: "A/B", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: float64(10) / 
float64(22), + }, + { + Timestamp: 2, + Value: 0.3076923076923077, + }, + { + Timestamp: 3, + Value: 0, + }, + { + Timestamp: 4, + Value: 1, + }, + { + Timestamp: 5, + Value: 1, + }, + { + Timestamp: 7, + Value: math.Inf(1), + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 0.5454545454545454, + }, + { + Timestamp: 2, + Value: 0.6923076923076923, + }, + { + Timestamp: 3, + Value: math.Inf(1), + }, + { + Timestamp: 4, + Value: 1, + }, + { + Timestamp: 5, + Value: 1, + }, + }, + }, + }, + }, + }, + { + name: "Group keys on both sides are same but different values", + expression: "A/B", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "not_running_chalamet", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "busy", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 4, + Value: 40, 
+ }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + }, + want: &v3.Result{}, + }, + { + name: "Group keys on both sides are overlapping but do not match exactly", + expression: "A/B", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "os.type": "linux", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + { + Labels: map[string]string{ + "os.type": "windows", + "state": "busy", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + }, + want: &v3.Result{}, + }, + { + name: "Group keys on the left side are a superset of the right side", + expression: "A/B", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 
5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "state": "running", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + { + Labels: map[string]string{ + "state": "busy", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: float64(10) / float64(22), + }, + { + Timestamp: 2, + Value: 0.3076923076923077, + }, + { + Timestamp: 3, + Value: 0, + }, + { + Timestamp: 4, + Value: 1, + }, + { + Timestamp: 5, + Value: 1, + }, + { + Timestamp: 7, + Value: math.Inf(1), + }, + }, + }, + }, + }, + }, + { + name: "Group keys are subsets, A is a subset of B and their result is a subset of C", + expression: "A/B + C", + results: []*v3.Result{ + { + QueryName: "A", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + 
Labels: map[string]string{ + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "B", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 22, + }, + { + Timestamp: 2, + Value: 65, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + { + QueryName: "C", + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10, + }, + { + Timestamp: 2, + Value: 20, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + { + Timestamp: 7, + Value: 70, + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12, + }, + { + Timestamp: 2, + Value: 45, + }, + { + Timestamp: 3, + Value: 30, + }, + { + Timestamp: 4, + Value: 40, + }, + { + Timestamp: 5, + Value: 50, + }, + }, + }, + }, + }, + }, + want: &v3.Result{ + Series: []*v3.Series{ + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-1", + "state": "running", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 10.45454545454545453, + }, + { + Timestamp: 2, + Value: 20.3076923076923077, + }, + { + Timestamp: 3, + Value: 0, + }, + { + 
Timestamp: 4, + Value: 41, + }, + { + Timestamp: 5, + Value: 51, + }, + { + Timestamp: 7, + Value: math.Inf(1), + }, + }, + }, + { + Labels: map[string]string{ + "host_name": "ip-10-420-69-2", + "state": "idle", + "os.type": "linux", + }, + Points: []v3.Point{ + { + Timestamp: 1, + Value: 12.5454545454545454, + }, + { + Timestamp: 2, + Value: 45.6923076923076923, + }, + { + Timestamp: 3, + Value: math.Inf(1), + }, + { + Timestamp: 4, + Value: 41, + }, + { + Timestamp: 5, + Value: 51, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expression, err := govaluate.NewEvaluableExpression(tt.expression) + if err != nil { + t.Errorf("Error parsing expression: %v", err) + } + got, err := processResults(tt.results, expression) + if err != nil { + t.Errorf("Error processing results: %v", err) + } + if len(got.Series) != len(tt.want.Series) { + t.Errorf("processResults(): number of series - got = %v, want %v", len(got.Series), len(tt.want.Series)) + } + + for i := range got.Series { + if len(got.Series[i].Points) != len(tt.want.Series[i].Points) { + t.Errorf("processResults(): number of points - got = %v, want %v", len(got.Series[i].Points), len(tt.want.Series[i].Points)) + } + for j := range got.Series[i].Points { + if got.Series[i].Points[j].Value != tt.want.Series[i].Points[j].Value { + t.Errorf("processResults(): got = %v, want %v", got.Series[i].Points[j].Value, tt.want.Series[i].Points[j].Value) + } + } + } + }) + } +} diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 57962fa3ac..784fdb940d 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -14,6 +14,7 @@ import ( "text/template" "time" + "github.com/SigNoz/govaluate" "github.com/gorilla/mux" jsoniter "github.com/json-iterator/go" _ "github.com/mattn/go-sqlite3" @@ -83,6 +84,12 @@ type APIHandler struct { preferDelta bool preferSpanMetrics bool + // temporalityMap is a map 
of metric name to temporality + // to avoid fetching temporality for the same metric multiple times + // querying the v4 table on low cardinal temporality column + // should be fast but we can still avoid the query if we have the data in memory + temporalityMap map[string]map[v3.Temporality]bool + maxIdleConns int maxOpenConns int dialTimeout time.Duration @@ -160,6 +167,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { skipConfig: opts.SkipConfig, preferDelta: opts.PerferDelta, preferSpanMetrics: opts.PreferSpanMetrics, + temporalityMap: make(map[string]map[v3.Temporality]bool), maxIdleConns: opts.MaxIdleConns, maxOpenConns: opts.MaxOpenConns, dialTimeout: opts.DialTimeout, @@ -334,6 +342,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMid func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *AuthMiddleware) { subRouter := router.PathPrefix("/api/v4").Subrouter() subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeV4)).Methods(http.MethodPost) + subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet) } func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) { @@ -376,10 +385,10 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) { router.HandleFunc("/api/v2/variables/query", am.ViewAccess(aH.queryDashboardVarsV2)).Methods(http.MethodPost) router.HandleFunc("/api/v1/explorer/views", am.ViewAccess(aH.getSavedViews)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/explorer/views", am.ViewAccess(aH.createSavedViews)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/explorer/views", am.EditAccess(aH.createSavedViews)).Methods(http.MethodPost) router.HandleFunc("/api/v1/explorer/views/{viewId}", am.ViewAccess(aH.getSavedView)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/explorer/views/{viewId}", am.ViewAccess(aH.updateSavedView)).Methods(http.MethodPut) - 
router.HandleFunc("/api/v1/explorer/views/{viewId}", am.ViewAccess(aH.deleteSavedView)).Methods(http.MethodDelete) + router.HandleFunc("/api/v1/explorer/views/{viewId}", am.EditAccess(aH.updateSavedView)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/explorer/views/{viewId}", am.EditAccess(aH.deleteSavedView)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/feedback", am.OpenAccess(aH.submitFeedback)).Methods(http.MethodPost) // router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet) @@ -536,7 +545,7 @@ func (aH *APIHandler) addTemporality(ctx context.Context, qp *v3.QueryRangeParam metricNameToTemporality := make(map[string]map[v3.Temporality]bool) if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 { for _, query := range qp.CompositeQuery.BuilderQueries { - if query.DataSource == v3.DataSourceMetrics { + if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" { metricNames = append(metricNames, query.AggregateAttribute.Key) if _, ok := metricNameToTemporality[query.AggregateAttribute.Key]; !ok { metricNameToTemporality[query.AggregateAttribute.Key] = make(map[v3.Temporality]bool) @@ -781,6 +790,58 @@ func (aH *APIHandler) QueryRangeMetricsV2(w http.ResponseWriter, r *http.Request aH.Respond(w, resp) } +// populateTemporality same as addTemporality but for v4 and better +func (aH *APIHandler) populateTemporality(ctx context.Context, qp *v3.QueryRangeParamsV3) error { + + missingTemporality := make([]string, 0) + metricNameToTemporality := make(map[string]map[v3.Temporality]bool) + if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 { + for _, query := range qp.CompositeQuery.BuilderQueries { + // if there is no temporality specified in the query but we have it in the map + // then use the value from the map + if query.Temporality == "" && aH.temporalityMap[query.AggregateAttribute.Key] != nil { + // We prefer delta if it is available + if 
aH.temporalityMap[query.AggregateAttribute.Key][v3.Delta] { + query.Temporality = v3.Delta + } else if aH.temporalityMap[query.AggregateAttribute.Key][v3.Cumulative] { + query.Temporality = v3.Cumulative + } else { + query.Temporality = v3.Unspecified + } + } + // we don't have temporality for this metric + if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" { + missingTemporality = append(missingTemporality, query.AggregateAttribute.Key) + } + if _, ok := metricNameToTemporality[query.AggregateAttribute.Key]; !ok { + metricNameToTemporality[query.AggregateAttribute.Key] = make(map[v3.Temporality]bool) + } + } + } + + nameToTemporality, err := aH.reader.FetchTemporality(ctx, missingTemporality) + if err != nil { + return err + } + + if qp.CompositeQuery != nil && len(qp.CompositeQuery.BuilderQueries) > 0 { + for name := range qp.CompositeQuery.BuilderQueries { + query := qp.CompositeQuery.BuilderQueries[name] + if query.DataSource == v3.DataSourceMetrics && query.Temporality == "" { + if nameToTemporality[query.AggregateAttribute.Key][v3.Delta] { + query.Temporality = v3.Delta + } else if nameToTemporality[query.AggregateAttribute.Key][v3.Cumulative] { + query.Temporality = v3.Cumulative + } else { + query.Temporality = v3.Unspecified + } + aH.temporalityMap[query.AggregateAttribute.Key] = nameToTemporality[query.AggregateAttribute.Key] + } + } + } + return nil +} + func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) { rules, err := aH.ruleManager.ListRuleStates(r.Context()) @@ -3178,7 +3239,7 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) { zap.S().Debug("done!") return case err := <-client.Error: - zap.S().Error("error occured!", err) + zap.S().Error("error occurred!", err) fmt.Fprintf(w, "event: error\ndata: %v\n\n", err.Error()) flusher.Flush() return @@ -3186,6 +3247,18 @@ func (aH *APIHandler) liveTailLogs(w http.ResponseWriter, r *http.Request) { } } +func (aH *APIHandler) 
getMetricMetadata(w http.ResponseWriter, r *http.Request) { + metricName := r.URL.Query().Get("metricName") + serviceName := r.URL.Query().Get("serviceName") + metricMetadata, err := aH.reader.GetMetricMetadata(r.Context(), metricName, serviceName) + if err != nil { + RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil) + return + } + + aH.WriteJSON(w, r, metricMetadata) +} + func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3, w http.ResponseWriter, r *http.Request) { var result []*v3.Result @@ -3223,7 +3296,13 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que } if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { - postProcessResult(result, queryRangeParams) + result, err = postProcessResult(result, queryRangeParams) + } + + if err != nil { + apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err} + RespondError(w, apiErrObj, errQuriesByName) + return } resp := v3.QueryRangeResponse{ @@ -3243,8 +3322,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) { } // add temporality for each metric - - temporalityErr := aH.addTemporality(r.Context(), queryRangeParams) + temporalityErr := aH.populateTemporality(r.Context(), queryRangeParams) if temporalityErr != nil { zap.S().Errorf("Error while adding temporality for metrics: %v", temporalityErr) RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil) @@ -3260,7 +3338,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) { // Much of this work can be done in the ClickHouse query, but we decided to do it here because: // 1. Effective use of caching // 2. 
Easier to add new functions -func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) { +func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParamsV3) ([]*v3.Result, error) { // Having clause is not part of the clickhouse query, so we need to apply it here // It's not included in the query because it doesn't work nicely with caching // With this change, if you have a query with a having clause, and then you change the having clause @@ -3281,6 +3359,42 @@ func postProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam applyReduceTo(result, queryRangeParams) // We apply the functions here it's easier to add new functions applyFunctions(result, queryRangeParams) + + for _, query := range queryRangeParams.CompositeQuery.BuilderQueries { + // The way we distinguish between a formula and a query is by checking if the expression + // is the same as the query name + // TODO(srikanthccv): Update the UI to send a flag to distinguish between a formula and a query + if query.Expression != query.QueryName { + expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, evalFuncs()) + // This shouldn't happen here, because it should have been caught earlier in validation + if err != nil { + zap.S().Errorf("error in expression: %s", err.Error()) + return nil, err + } + formulaResult, err := processResults(result, expression) + if err != nil { + zap.S().Errorf("error in expression: %s", err.Error()) + return nil, err + } + formulaResult.QueryName = query.QueryName + result = append(result, formulaResult) + } + } + // we are done with the formula calculations, only send the results for enabled queries + removeDisabledQueries := func(result []*v3.Result) []*v3.Result { + var newResult []*v3.Result + for _, res := range result { + if queryRangeParams.CompositeQuery.BuilderQueries[res.QueryName].Disabled { + continue + } + newResult = append(newResult, res) + } + return newResult + } + if 
queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { + result = removeDisabledQueries(result) + } + return result, nil } // applyFunctions applies functions for each query in the composite query diff --git a/pkg/query-service/app/logs/v3/query_builder.go b/pkg/query-service/app/logs/v3/query_builder.go index 6a29397dd6..7babe2cd0d 100644 --- a/pkg/query-service/app/logs/v3/query_builder.go +++ b/pkg/query-service/app/logs/v3/query_builder.go @@ -397,7 +397,10 @@ func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]st } else if panelType == v3.PanelTypeList { attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn} name := getClickhouseColumnName(attr) - orderBy = append(orderBy, fmt.Sprintf("`%s` %s", name, item.Order)) + if item.IsColumn { + name = "`" + name + "`" + } + orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order)) } } return orderBy diff --git a/pkg/query-service/app/logs/v3/query_builder_test.go b/pkg/query-service/app/logs/v3/query_builder_test.go index 1d91ae6cff..d8c5a141b2 100644 --- a/pkg/query-service/app/logs/v3/query_builder_test.go +++ b/pkg/query-service/app/logs/v3/query_builder_test.go @@ -1120,7 +1120,7 @@ var testOrderBy = []struct { {Key: "name"}, {Key: "bytes"}, }, - Result: "`name` asc,value asc,`bytes` asc,`attributes_string_value[indexOf(attributes_string_key, 'response_time')]` desc", + Result: "`name` asc,value asc,`bytes` asc,attributes_string_value[indexOf(attributes_string_key, 'response_time')] desc", }, } @@ -1435,7 +1435,7 @@ var testPrepLogsQueryLimitOffsetData = []struct { PageSize: 5, }, TableName: "logs", - ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as 
attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by `attributes_string_value[indexOf(attributes_string_key, 'method')]` desc LIMIT 1 OFFSET 0", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0", }, { Name: "Test limit greater than pageSize - order by custom", @@ -1456,7 +1456,7 @@ var testPrepLogsQueryLimitOffsetData = []struct { PageSize: 50, }, TableName: "logs", - ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as 
attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by `attributes_string_value[indexOf(attributes_string_key, 'method')]` desc LIMIT 50 OFFSET 50", + ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50", }, } diff --git a/pkg/query-service/app/metrics/v4/cumulative/table_test.go b/pkg/query-service/app/metrics/v4/cumulative/table_test.go index d562b5d93a..ebdaa51182 100644 --- a/pkg/query-service/app/metrics/v4/cumulative/table_test.go +++ b/pkg/query-service/app/metrics/v4/cumulative/table_test.go @@ -51,7 +51,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND 
temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative", @@ -93,7 +93,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN 
(SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( 
(service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go index 7dfa8fef87..9845096223 100644 --- a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go +++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go @@ -107,19 +107,19 @@ const ( func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) (string, error) { var subQuery string - timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq) + timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(start, end, mq) if err != nil { return "", err } - samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmpl := "SELECT fingerprint, %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as per_series_value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go index 91dd1c4a1e..9e31d0f0e7 100644 --- a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go +++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go @@ -66,7 +66,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as 
filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, { name: "test time aggregation = rate, temporality = cumulative", @@ -107,7 +107,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)", + expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)", }, } @@ -168,7 +168,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' 
AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative", @@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as 
value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/delta/table_test.go b/pkg/query-service/app/metrics/v4/delta/table_test.go index c7bce4268c..54a35e40e4 100644 --- a/pkg/query-service/app/metrics/v4/delta/table_test.go +++ b/pkg/query-service/app/metrics/v4/delta/table_test.go @@ -53,7 +53,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint 
FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta", @@ -95,7 +95,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 
1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go index 024371d328..0af2c91154 100644 --- a/pkg/query-service/app/metrics/v4/delta/time_series_test.go +++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go @@ -66,7 +66,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 
AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, { name: "test time aggregation = rate, temporality = delta", @@ -107,7 +107,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 
as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, } @@ -168,7 +168,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name 
= 'system_memory_usage' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta", @@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/delta/timeseries.go 
b/pkg/query-service/app/metrics/v4/delta/timeseries.go index 3d6999f425..03781dfcd1 100644 --- a/pkg/query-service/app/metrics/v4/delta/timeseries.go +++ b/pkg/query-service/app/metrics/v4/delta/timeseries.go @@ -14,19 +14,19 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery) var subQuery string - timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq) + timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(start, end, mq) if err != nil { return "", err } - samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmpl := "SELECT fingerprint, %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as per_series_value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + @@ -77,19 +77,19 @@ func prepareQueryOptimized(start, end, step int64, mq *v3.BuilderQuery) (string, var query string - timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(mq) + timeSeriesSubQuery, err := helpers.PrepareTimeseriesFilterQuery(start, end, mq) if err != nil { return "", err } - samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) + samplesTableFilter := fmt.Sprintf("metric_name = %s AND unix_milli >= %d AND unix_milli < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) // Select the aggregate value for interval queryTmpl := "SELECT %s" + - " toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," + + " toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL %d SECOND) as ts," + " %s as value" + - " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME + + " FROM " + constants.SIGNOZ_METRIC_DBNAME + "." 
+ constants.SIGNOZ_SAMPLES_V4_TABLENAME + " INNER JOIN" + " (%s) as filtered_time_series" + " USING fingerprint" + diff --git a/pkg/query-service/app/metrics/v4/helpers/clauses.go b/pkg/query-service/app/metrics/v4/helpers/clauses.go index 06f4b13cea..8714df51da 100644 --- a/pkg/query-service/app/metrics/v4/helpers/clauses.go +++ b/pkg/query-service/app/metrics/v4/helpers/clauses.go @@ -37,6 +37,17 @@ func GroupByAttributeKeyTags(tags ...v3.AttributeKey) string { return strings.Join(groupTags, ", ") } +func GroupByAttributeKeyTagsWithoutLe(tags ...v3.AttributeKey) string { + groupTags := []string{} + for _, tag := range tags { + if tag.Key != "le" { + groupTags = append(groupTags, tag.Key) + } + } + groupTags = append(groupTags, "ts") + return strings.Join(groupTags, ", ") +} + // OrderByAttributeKeyTags returns a string of comma separated tags for order by clause // if the order is not specified, it defaults to ASC func OrderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string { @@ -60,6 +71,29 @@ func OrderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string return strings.Join(orderBy, ", ") } +func OrderByAttributeKeyTagsWithoutLe(items []v3.OrderBy, tags []v3.AttributeKey) string { + var orderBy []string + for _, tag := range tags { + if tag.Key != "le" { + found := false + for _, item := range items { + if item.ColumnName == tag.Key { + found = true + orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order)) + break + } + } + if !found { + orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag.Key)) + } + } + } + + orderBy = append(orderBy, "ts ASC") + + return strings.Join(orderBy, ", ") +} + func SelectLabelsAny(tags []v3.AttributeKey) string { var selectLabelsAny []string for _, tag := range tags { diff --git a/pkg/query-service/app/metrics/v4/helpers/sub_query.go b/pkg/query-service/app/metrics/v4/helpers/sub_query.go index 97176e54bd..d4cd103719 100644 --- 
a/pkg/query-service/app/metrics/v4/helpers/sub_query.go +++ b/pkg/query-service/app/metrics/v4/helpers/sub_query.go @@ -3,14 +3,43 @@ package helpers import ( "fmt" "strings" + "time" "go.signoz.io/signoz/pkg/query-service/constants" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/utils" ) +var ( + sixHoursInMilliseconds = time.Hour.Milliseconds() * 6 + oneDayInMilliseconds = time.Hour.Milliseconds() * 24 +) + +// start and end are in milliseconds +func which(start, end int64) (int64, int64, string) { + // If time range is less than 6 hours, we need to use the `time_series_v4` table + // else if time range is less than 1 day and greater than 6 hours, we need to use the `time_series_v4_6hrs` table + // else we need to use the `time_series_v4_1day` table + var tableName string + if end-start <= sixHoursInMilliseconds { + // adjust the start time to nearest 1 hour + start = start - (start % (time.Hour.Milliseconds() * 1)) + tableName = constants.SIGNOZ_TIMESERIES_v4_LOCAL_TABLENAME + } else if end-start <= oneDayInMilliseconds { + // adjust the start time to nearest 6 hours + start = start - (start % (time.Hour.Milliseconds() * 6)) + tableName = constants.SIGNOZ_TIMESERIES_v4_6HRS_LOCAL_TABLENAME + } else { + // adjust the start time to nearest 1 day + start = start - (start % (time.Hour.Milliseconds() * 24)) + tableName = constants.SIGNOZ_TIMESERIES_v4_1DAY_LOCAL_TABLENAME + } + + return start, end, tableName +} + // PrepareTimeseriesFilterQuery builds the sub-query to be used for filtering timeseries based on the search criteria -func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) { +func PrepareTimeseriesFilterQuery(start, end int64, mq *v3.BuilderQuery) (string, error) { var conditions []string var fs *v3.FilterSet = mq.Filters var groupTags []v3.AttributeKey = mq.GroupBy @@ -18,6 +47,10 @@ func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) { conditions = append(conditions, 
fmt.Sprintf("metric_name = %s", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key))) conditions = append(conditions, fmt.Sprintf("temporality = '%s'", mq.Temporality)) + start, end, tableName := which(start, end) + + conditions = append(conditions, fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", start, end)) + if fs != nil && len(fs.Items) != 0 { for _, item := range fs.Items { toFormat := item.Value @@ -78,7 +111,7 @@ func PrepareTimeseriesFilterQuery(mq *v3.BuilderQuery) (string, error) { "SELECT DISTINCT %s FROM %s.%s WHERE %s", selectLabels, constants.SIGNOZ_METRIC_DBNAME, - constants.SIGNOZ_TIMESERIES_LOCAL_TABLENAME, + tableName, whereClause, ) diff --git a/pkg/query-service/app/metrics/v4/query_builder.go b/pkg/query-service/app/metrics/v4/query_builder.go index 6543d6483b..ae9ee9b69a 100644 --- a/pkg/query-service/app/metrics/v4/query_builder.go +++ b/pkg/query-service/app/metrics/v4/query_builder.go @@ -21,8 +21,10 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P start, end = common.AdjustedMetricTimeRange(start, end, mq.StepInterval, mq.TimeAggregation) - groupBy := helpers.GroupByAttributeKeyTags(mq.GroupBy...) 
- orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, mq.GroupBy) + if mq.ShiftBy != 0 { + start = start - mq.ShiftBy*1000 + end = end - mq.ShiftBy*1000 + } var quantile float64 @@ -33,11 +35,21 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P // and time aggregation to rate mq.TimeAggregation = v3.TimeAggregationRate mq.SpaceAggregation = v3.SpaceAggregationSum - mq.GroupBy = append(mq.GroupBy, v3.AttributeKey{ - Key: "le", - Type: v3.AttributeKeyTypeTag, - DataType: v3.AttributeKeyDataTypeString, - }) + // If le is not present in group by for quantile, add it + leFound := false + for _, groupBy := range mq.GroupBy { + if groupBy.Key == "le" { + leFound = true + break + } + } + if !leFound { + mq.GroupBy = append(mq.GroupBy, v3.AttributeKey{ + Key: "le", + Type: v3.AttributeKeyTypeTag, + DataType: v3.AttributeKeyDataTypeString, + }) + } } var query string @@ -60,6 +72,15 @@ func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.P return "", err } + groupByWithoutLe := []v3.AttributeKey{} + for _, groupBy := range mq.GroupBy { + if groupBy.Key != "le" { + groupByWithoutLe = append(groupByWithoutLe, groupBy) + } + } + groupBy := helpers.GroupByAttributeKeyTags(groupByWithoutLe...) 
+ orderBy := helpers.OrderByAttributeKeyTags(mq.OrderBy, groupByWithoutLe) + if quantile != 0 { query = fmt.Sprintf(`SELECT %s, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) as value FROM (%s) GROUP BY %s ORDER BY %s`, groupBy, quantile, query, groupBy, orderBy) } diff --git a/pkg/query-service/app/metrics/v4/query_builder_test.go b/pkg/query-service/app/metrics/v4/query_builder_test.go index b132b42ca0..15e948520c 100644 --- a/pkg/query-service/app/metrics/v4/query_builder_test.go +++ b/pkg/query-service/app/metrics/v4/query_builder_test.go @@ -33,7 +33,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Delta'", + expectedQueryContains: "SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Delta' AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with no filters and group by", @@ -58,7 +58,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative'", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with no filters and multiple group by", @@ -90,7 +90,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are 
not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative'", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with filters and multiple group by", @@ -138,13 +138,15 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']", }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { - query, err := helpers.PrepareTimeseriesFilterQuery(testCase.builderQuery) + // 1706432226000 - 2:27:06 PM (IST) + // 1706434026000 - 2:57:06 PM (IST) + query, err := helpers.PrepareTimeseriesFilterQuery(1706432226000, 1706434026000, testCase.builderQuery) assert.Nil(t, err) 
assert.Contains(t, query, testCase.expectedQueryContains) }) @@ -191,7 +193,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as 
per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by", @@ -224,12 +226,14 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT 
JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC", + expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Cumulative' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE 
isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, endpoint, ts), (service_name, endpoint) ) ORDER BY service_name ASC, endpoint ASC, ts ASC", }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { + // 1650991982000 - April 26, 2022 10:23:02 PM + // 1651078382000 - April 27, 2022 10:23:02 PM query, err := PrepareMetricQuery(1650991982000, 1651078382000, v3.QueryTypeBuilder, v3.PanelTypeGraph, testCase.builderQuery, metricsV3.Options{}) assert.Nil(t, err) assert.Contains(t, query, testCase.expectedQueryContains) @@ -262,7 +266,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name", @@ -288,7 +292,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) { 
TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND timestamp_ms >= 1650991920000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name = 'signoz_calls_total' AND temporality = 'Delta' AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_calls_total' AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC", }, } @@ -340,7 +344,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER 
rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value 
FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = cumulative, quantile = 0.99 without group by", @@ -370,7 +374,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE 
metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Cumulative' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER 
BY ts ASC", }, } @@ -422,7 +426,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY 
GROUPING SETS ( (service_name, le, ts), (service_name, le) ) ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = delta, quantile = 0.99 no group by", @@ -452,7 +456,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'signoz_latency_bucket' AND temporality = 'Delta' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name = 'signoz_latency_bucket' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY GROUPING SETS ( (le, ts), (le) ) 
ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", }, } @@ -490,7 +494,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) { SpaceAggregation: v3.SpaceAggregationSum, Disabled: false, }, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test gauge query with group by host_name", @@ -516,7 +520,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) { Expression: "A", Disabled: false, }, - expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM 
signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified') as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC", + expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name = 'system_cpu_usage' AND temporality = 'Unspecified' AND unix_milli >= 1650974400000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name = 'system_cpu_usage' AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (host_name, ts), (host_name) ) ORDER BY host_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/parser.go b/pkg/query-service/app/parser.go index 96905dc1d5..ad2a9fd8de 100644 --- a/pkg/query-service/app/parser.go +++ b/pkg/query-service/app/parser.go @@ -935,11 +935,10 @@ func validateExpressions(expressions []string, funcs map[string]govaluate.Expres for _, exp := range expressions { evalExp, err := govaluate.NewEvaluableExpressionWithFunctions(exp, funcs) if err != nil { - errs = append(errs, err) + errs = append(errs, fmt.Errorf("invalid expression %s: %v", exp, err)) continue } - variables := 
evalExp.Vars() - for _, v := range variables { + for _, v := range evalExp.Vars() { var hasVariable bool for _, q := range cq.BuilderQueries { if q.Expression == v { @@ -961,7 +960,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE // parse the request body if err := json.NewDecoder(r.Body).Decode(&queryRangeParams); err != nil { - return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("cannot parse the request body: %v", err)} } // validate the request body @@ -969,7 +968,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} } - // prepare the variables for the corrspnding query type + // prepare the variables for the corresponding query type formattedVars := make(map[string]interface{}) for name, value := range queryRangeParams.Variables { if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypePromQL { @@ -985,6 +984,41 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder { for _, query := range queryRangeParams.CompositeQuery.BuilderQueries { + // Formula query + if query.QueryName != query.Expression { + expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, evalFuncs()) + if err != nil { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} + } + + // get the group keys for the vars + groupKeys := make(map[string][]string) + for _, v := range expression.Vars() { + if varQuery, ok := queryRangeParams.CompositeQuery.BuilderQueries[v]; ok { + groupKeys[v] = []string{} + for _, key := range varQuery.GroupBy { + groupKeys[v] = append(groupKeys[v], key.Key) + } + } else { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("unknown variable %s", v)} + } + } + + params := make(map[string]interface{}) + 
for k, v := range groupKeys { + params[k] = v + } + + can, _, err := expression.CanJoin(params) + if err != nil { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} + } + + if !can { + return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("cannot join the given group keys")} + } + } + if query.Filters == nil || len(query.Filters.Items) == 0 { continue } @@ -1011,6 +1045,25 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE } } } + + var timeShiftBy int64 + if len(query.Functions) > 0 { + for idx := range query.Functions { + function := &query.Functions[idx] + if function.Name == v3.FunctionNameTimeShift { + // move the function to the beginning of the list + // so any other function can use the shifted time + var fns []v3.Function + fns = append(fns, *function) + fns = append(fns, query.Functions[:idx]...) + fns = append(fns, query.Functions[idx+1:]...) + query.Functions = fns + timeShiftBy = int64(function.Args[0].(float64)) + break + } + } + } + query.ShiftBy = timeShiftBy } } queryRangeParams.Variables = formattedVars diff --git a/pkg/query-service/app/parser_test.go b/pkg/query-service/app/parser_test.go index 65ea226909..8b172027a4 100644 --- a/pkg/query-service/app/parser_test.go +++ b/pkg/query-service/app/parser_test.go @@ -3,7 +3,6 @@ package app import ( "bytes" "encoding/json" - "fmt" "net/http" "net/http/httptest" "strings" @@ -749,7 +748,6 @@ func TestParseQueryRangeParamsDashboardVarsSubstitution(t *testing.T) { require.Error(t, apiErr) require.Contains(t, apiErr.Error(), tc.errMsg) } else { - fmt.Println(apiErr) require.Nil(t, apiErr) require.Equal(t, parsedQueryRangeParams.CompositeQuery.BuilderQueries["A"].Filters.Items[0].Value, tc.expectedValue[0]) require.Equal(t, parsedQueryRangeParams.CompositeQuery.BuilderQueries["A"].Filters.Items[1].Value, tc.expectedValue[1]) @@ -862,3 +860,368 @@ func TestParseQueryRangeParamsPromQLVars(t *testing.T) { }) } } + +func TestQueryRangeFormula(t 
*testing.T) { + reqCases := []struct { + desc string + compositeQuery v3.CompositeQuery + variables map[string]interface{} + expectErr bool + errMsg string + }{ + { + desc: "disjoint group by keys", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}}, + + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "operation_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "B/A", + }, + }, + }, + expectErr: true, + errMsg: "Group keys must match or be a subset of the other but found left: [operation_name], right: [service_name]", + }, + { + desc: "identical single group by key", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "B/A", + }, + }, + }, + expectErr: false, + }, + { + desc: "identical multiple group by keys", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: 
v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "B/A", + }, + }, + }, + expectErr: false, + }, + { + desc: "identical multiple group by keys with different order", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "operation_name"}, {Key: "service_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "B/A", + }, + }, + }, + expectErr: false, + }, + { + desc: "subset group by keys", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: 
"operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "A/B", + }, + }, + }, + expectErr: false, + }, + { + desc: "empty keys on one side", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "A/B", + }, + }, + }, + expectErr: false, + }, + { + desc: "empty keys on both sides", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "A/B", + }, + }, + }, + expectErr: false, + }, + { + desc: "multiple group by keys with partial overlap", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: 
map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "operation_name"}, {Key: "status_code"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "A/B", + }, + }, + }, + expectErr: true, + errMsg: "Group keys must match or be a subset of the other but found left: [service_name operation_name], right: [operation_name status_code]", + }, + { + desc: "Nested Expressions with Matching Keys - Testing expressions that involve operations (e.g., addition, division) with series whose keys match or are subsets.", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "B", + }, + "F1": { + QueryName: "F1", + Expression: "A + B", + }, + }, + }, + expectErr: false, + }, + { + desc: "Nested Expressions with Matching Keys - Testing expressions that involve operations (e.g., addition, division) with series whose keys match or are subsets.", + compositeQuery: v3.CompositeQuery{ + 
PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}}, + Expression: "A", + }, + "B": { + QueryName: "B", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}}, + Expression: "B", + }, + "C": { + QueryName: "C", + DataSource: v3.DataSourceMetrics, + AggregateOperator: v3.AggregateOperatorSum, + AggregateAttribute: v3.AttributeKey{Key: "signoz_calls_total"}, + GroupBy: []v3.AttributeKey{{Key: "service_name"}, {Key: "operation_name"}, {Key: "status_code"}}, + Expression: "C", + }, + "F1": { + QueryName: "F1", + Expression: "C/(A + B)", + }, + }, + }, + expectErr: false, + }, + { + desc: "Unknow variable in expression", + compositeQuery: v3.CompositeQuery{ + PanelType: v3.PanelTypeGraph, + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "F1": { + QueryName: "F1", + Expression: "A + B", + }, + }, + }, + expectErr: true, + errMsg: "unknown variable", + }, + } + + for _, tc := range reqCases { + t.Run(tc.desc, func(t *testing.T) { + + queryRangeParams := &v3.QueryRangeParamsV3{ + Start: time.Now().Add(-time.Hour).UnixMilli(), + End: time.Now().UnixMilli(), + Step: time.Minute.Microseconds(), + CompositeQuery: &tc.compositeQuery, + Variables: tc.variables, + } + + body := &bytes.Buffer{} + err := json.NewEncoder(body).Encode(queryRangeParams) + require.NoError(t, err) + req := httptest.NewRequest(http.MethodPost, "/api/v4/query_range", body) + + _, apiErr := ParseQueryRangeParams(req) + if tc.expectErr { + require.Error(t, apiErr) + require.Contains(t, apiErr.Error(), tc.errMsg) + } else { + 
if apiErr != nil { + if apiErr.Err != nil { + t.Fatalf("unexpected error for case: %s - %v", tc.desc, apiErr.Err) + } + } + require.Nil(t, apiErr) + } + }) + } +} diff --git a/pkg/query-service/app/querier/helper.go b/pkg/query-service/app/querier/helper.go index 5bb3cc81f8..addd9744e3 100644 --- a/pkg/query-service/app/querier/helper.go +++ b/pkg/query-service/app/querier/helper.go @@ -17,6 +17,61 @@ import ( "go.uber.org/zap" ) +func prepareLogsQuery(ctx context.Context, + start, + end int64, + builderQuery *v3.BuilderQuery, + params *v3.QueryRangeParamsV3, + preferRPM bool, +) (string, error) { + query := "" + + if params == nil || builderQuery == nil { + return query, fmt.Errorf("params and builderQuery cannot be nil") + } + + // for ts query with limit replace it as it is already formed + if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 { + limitQuery, err := logsV3.PrepareLogsQuery( + start, + end, + params.CompositeQuery.QueryType, + params.CompositeQuery.PanelType, + builderQuery, + logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM}, + ) + if err != nil { + return query, err + } + placeholderQuery, err := logsV3.PrepareLogsQuery( + start, + end, + params.CompositeQuery.QueryType, + params.CompositeQuery.PanelType, + builderQuery, + logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM}, + ) + if err != nil { + return query, err + } + query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1) + return query, err + } + + query, err := logsV3.PrepareLogsQuery( + start, + end, + params.CompositeQuery.QueryType, + params.CompositeQuery.PanelType, + builderQuery, + logsV3.Options{PreferRPM: preferRPM}, + ) + if err != nil { + return query, err + } + return query, err +} + func (q *querier) runBuilderQuery( ctx context.Context, builderQuery *v3.BuilderQuery, @@ -35,59 +90,88 @@ func (q *querier) runBuilderQuery( 
preferRPM = q.featureLookUp.CheckFeature(constants.PreferRPM) == nil } - // TODO: handle other data sources if builderQuery.DataSource == v3.DataSourceLogs { var query string var err error - // for ts query with limit replace it as it is already formed - if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 { - limitQuery, err := logsV3.PrepareLogsQuery( - params.Start, - params.End, - params.CompositeQuery.QueryType, - params.CompositeQuery.PanelType, - builderQuery, - logsV3.Options{GraphLimitQtype: constants.FirstQueryGraphLimit, PreferRPM: preferRPM}, - ) - if err != nil { - ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil} - return - } - placeholderQuery, err := logsV3.PrepareLogsQuery( - params.Start, - params.End, - params.CompositeQuery.QueryType, - params.CompositeQuery.PanelType, - builderQuery, - logsV3.Options{GraphLimitQtype: constants.SecondQueryGraphLimit, PreferRPM: preferRPM}, - ) - if err != nil { - ch <- channelResult{Err: err, Name: queryName, Query: placeholderQuery, Series: nil} - return - } - query = strings.Replace(placeholderQuery, "#LIMIT_PLACEHOLDER", limitQuery, 1) - } else { - query, err = logsV3.PrepareLogsQuery( - params.Start, - params.End, - params.CompositeQuery.QueryType, - params.CompositeQuery.PanelType, - builderQuery, - logsV3.Options{PreferRPM: preferRPM}, - ) + if _, ok := cacheKeys[queryName]; !ok { + query, err = prepareLogsQuery(ctx, params.Start, params.End, builderQuery, params, preferRPM) if err != nil { ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil} return } - } - - if err != nil { - ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil} + series, err := q.execClickHouseQuery(ctx, query) + ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series} return } - series, err := q.execClickHouseQuery(ctx, query) - ch <- channelResult{Err: err, Name: queryName, Query: 
query, Series: series} + + cacheKey := cacheKeys[queryName] + var cachedData []byte + if !params.NoCache && q.cache != nil { + var retrieveStatus status.RetrieveStatus + data, retrieveStatus, err := q.cache.Retrieve(cacheKey, true) + zap.S().Infof("cache retrieve status: %s", retrieveStatus.String()) + if err == nil { + cachedData = data + } + } + misses := q.findMissingTimeRanges(params.Start, params.End, params.Step, cachedData) + missedSeries := make([]*v3.Series, 0) + cachedSeries := make([]*v3.Series, 0) + for _, miss := range misses { + query, err = prepareLogsQuery(ctx, miss.start, miss.end, builderQuery, params, preferRPM) + if err != nil { + ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil} + return + } + series, err := q.execClickHouseQuery(ctx, query) + if err != nil { + ch <- channelResult{ + Err: err, + Name: queryName, + Query: query, + Series: nil, + } + return + } + missedSeries = append(missedSeries, series...) + } + if err := json.Unmarshal(cachedData, &cachedSeries); err != nil && cachedData != nil { + zap.S().Error("error unmarshalling cached data", zap.Error(err)) + } + mergedSeries := mergeSerieses(cachedSeries, missedSeries) + + var mergedSeriesData []byte + var marshallingErr error + missedSeriesLen := len(missedSeries) + if missedSeriesLen > 0 && !params.NoCache && q.cache != nil { + // caching the data + mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) + if marshallingErr != nil { + zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + } + } + + // response doesn't need everything + filterCachedPoints(mergedSeries, params.Start, params.End) + + ch <- channelResult{ + Err: nil, + Name: queryName, + Series: mergedSeries, + } + + // Cache the seriesList for future queries + if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil { + // caching the data + err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) + if err != nil { + zap.S().Error("error 
storing merged series", zap.Error(err)) + return + } + } + return + } if builderQuery.DataSource == v3.DataSourceTraces { @@ -202,20 +286,28 @@ func (q *querier) runBuilderQuery( zap.S().Error("error unmarshalling cached data", zap.Error(err)) } mergedSeries := mergeSerieses(cachedSeries, missedSeries) + var mergedSeriesData []byte + var marshallingErr error + missedSeriesLen := len(missedSeries) + if missedSeriesLen > 0 && !params.NoCache && q.cache != nil { + // caching the data + mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) + if marshallingErr != nil { + zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + } + } + // response doesn't need everything + filterCachedPoints(mergedSeries, params.Start, params.End) ch <- channelResult{ Err: nil, Name: queryName, Series: mergedSeries, } + // Cache the seriesList for future queries - if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { - mergedSeriesData, err := json.Marshal(mergedSeries) - if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) - return - } - err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) + if missedSeriesLen > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil { + err := q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { zap.S().Error("error storing merged series", zap.Error(err)) return @@ -284,18 +376,27 @@ func (q *querier) runBuilderExpression( } mergedSeries := mergeSerieses(cachedSeries, missedSeries) + var mergedSeriesData []byte + missedSeriesLen := len(missedSeries) + var marshallingErr error + if missedSeriesLen > 0 && !params.NoCache && q.cache != nil { + // caching the data + mergedSeriesData, marshallingErr = json.Marshal(mergedSeries) + if marshallingErr != nil { + zap.S().Error("error marshalling merged series", zap.Error(marshallingErr)) + } + } + + // response doesn't need everything + filterCachedPoints(mergedSeries, params.Start, params.End) ch <- channelResult{ 
Err: nil, Name: queryName, Series: mergedSeries, } + // Cache the seriesList for future queries - if len(missedSeries) > 0 && !params.NoCache && q.cache != nil { - mergedSeriesData, err := json.Marshal(mergedSeries) - if err != nil { - zap.S().Error("error marshalling merged series", zap.Error(err)) - return - } + if len(missedSeries) > 0 && !params.NoCache && q.cache != nil && marshallingErr == nil { err = q.cache.Store(cacheKey, mergedSeriesData, time.Hour) if err != nil { zap.S().Error("error storing merged series", zap.Error(err)) diff --git a/pkg/query-service/app/querier/querier.go b/pkg/query-service/app/querier/querier.go index 16ef778d20..103660f8bc 100644 --- a/pkg/query-service/app/querier/querier.go +++ b/pkg/query-service/app/querier/querier.go @@ -160,6 +160,8 @@ func findMissingTimeRanges(start, end, step int64, seriesList []*v3.Series, flux } } + // time.Now is used because here we are considering the case where data might not + // be fully ingested for last (fluxInterval) minutes endMillis := time.Now().UnixMilli() adjustStep := int64(math.Min(float64(step), 60)) roundedMillis := endMillis - (endMillis % (adjustStep * 1000)) @@ -241,6 +243,19 @@ func labelsToString(labels map[string]string) string { return fmt.Sprintf("{%s}", strings.Join(labelKVs, ",")) } +func filterCachedPoints(cachedSeries []*v3.Series, start, end int64) { + for _, c := range cachedSeries { + points := []v3.Point{} + for _, p := range c.Points { + if p.Timestamp < start || p.Timestamp > end { + continue + } + points = append(points, p) + } + c.Points = points + } +} + func mergeSerieses(cachedSeries, missedSeries []*v3.Series) []*v3.Series { // Merge the missed series with the cached series by timestamp mergedSeries := make([]*v3.Series, 0) diff --git a/pkg/query-service/app/querier/v2/querier.go b/pkg/query-service/app/querier/v2/querier.go index 86a472c064..50f19b89b1 100644 --- a/pkg/query-service/app/querier/v2/querier.go +++ b/pkg/query-service/app/querier/v2/querier.go @@ 
-276,14 +276,9 @@ func (q *querier) runBuilderQueries(ctx context.Context, params *v3.QueryRangePa var wg sync.WaitGroup for queryName, builderQuery := range params.CompositeQuery.BuilderQueries { - if builderQuery.Disabled { - continue - } - wg.Add(1) if queryName == builderQuery.Expression { + wg.Add(1) go q.runBuilderQuery(ctx, builderQuery, params, keys, cacheKeys, ch, &wg) - } else { - go q.runBuilderExpression(ctx, builderQuery, params, keys, cacheKeys, ch, &wg) } } diff --git a/pkg/query-service/app/queryBuilder/functions.go b/pkg/query-service/app/queryBuilder/functions.go index d71bfd0d54..a933406d23 100644 --- a/pkg/query-service/app/queryBuilder/functions.go +++ b/pkg/query-service/app/queryBuilder/functions.go @@ -281,6 +281,21 @@ func ApplyFunction(fn v3.Function, result *v3.Result) *v3.Result { return funcMedian5(result) case v3.FunctionNameMedian7: return funcMedian7(result) + case v3.FunctionNameTimeShift: + shift, ok := fn.Args[0].(float64) + if !ok { + return result + } + return funcTimeShift(result, shift) + } + return result +} + +func funcTimeShift(result *v3.Result, shift float64) *v3.Result { + for _, series := range result.Series { + for idx, point := range series.Points { + series.Points[idx].Timestamp = point.Timestamp + int64(shift)*1000 + } } return result } diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index 5b4ad4291f..988acfb458 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ b/pkg/query-service/app/queryBuilder/query_builder.go @@ -302,6 +302,16 @@ func isMetricExpression(expression *govaluate.EvaluableExpression, params *v3.Qu return true } +func isLogExpression(expression *govaluate.EvaluableExpression, params *v3.QueryRangeParamsV3) bool { + variables := unique(expression.Vars()) + for _, variable := range variables { + if params.CompositeQuery.BuilderQueries[variable].DataSource != v3.DataSourceLogs { + return false + } + } + return true 
+} + func (c *cacheKeyGenerator) GenerateKeys(params *v3.QueryRangeParamsV3) map[string]string { keys := make(map[string]string) @@ -320,7 +330,46 @@ func (c *cacheKeyGenerator) GenerateKeys(params *v3.QueryRangeParamsV3) map[stri // Build keys for each builder query for queryName, query := range params.CompositeQuery.BuilderQueries { - if query.Expression == queryName && query.DataSource == v3.DataSourceMetrics { + if query.Expression == queryName && query.DataSource == v3.DataSourceLogs { + var parts []string + + // We need to build uniqe cache query for BuilderQuery + parts = append(parts, fmt.Sprintf("source=%s", query.DataSource)) + parts = append(parts, fmt.Sprintf("step=%d", query.StepInterval)) + parts = append(parts, fmt.Sprintf("aggregate=%s", query.AggregateOperator)) + parts = append(parts, fmt.Sprintf("limit=%d", query.Limit)) + + if query.AggregateAttribute.Key != "" { + parts = append(parts, fmt.Sprintf("aggregateAttribute=%s", query.AggregateAttribute.CacheKey())) + } + + if query.Filters != nil && len(query.Filters.Items) > 0 { + for idx, filter := range query.Filters.Items { + parts = append(parts, fmt.Sprintf("filter-%d=%s", idx, filter.CacheKey())) + } + } + + if len(query.GroupBy) > 0 { + for idx, groupBy := range query.GroupBy { + parts = append(parts, fmt.Sprintf("groupBy-%d=%s", idx, groupBy.CacheKey())) + } + } + + if len(query.OrderBy) > 0 { + for idx, orderBy := range query.OrderBy { + parts = append(parts, fmt.Sprintf("orderBy-%d=%s", idx, orderBy.CacheKey())) + } + } + + if len(query.Having) > 0 { + for idx, having := range query.Having { + parts = append(parts, fmt.Sprintf("having-%d=%s", idx, having.CacheKey())) + } + } + + key := strings.Join(parts, "&") + keys[queryName] = key + } else if query.Expression == queryName && query.DataSource == v3.DataSourceMetrics { var parts []string // We need to build uniqe cache query for BuilderQuery @@ -361,7 +410,7 @@ func (c *cacheKeyGenerator) GenerateKeys(params *v3.QueryRangeParamsV3) 
map[stri if query.Expression != query.QueryName { expression, _ := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, EvalFuncs) - if !isMetricExpression(expression, params) { + if !isMetricExpression(expression, params) && !isLogExpression(expression, params) { continue } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 3e7b737ce2..e7a482d02f 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -203,12 +203,16 @@ var GroupByColMap = map[string]struct{}{ } const ( - SIGNOZ_METRIC_DBNAME = "signoz_metrics" - SIGNOZ_SAMPLES_TABLENAME = "distributed_samples_v2" - SIGNOZ_TIMESERIES_TABLENAME = "distributed_time_series_v2" - SIGNOZ_TRACE_DBNAME = "signoz_traces" - SIGNOZ_SPAN_INDEX_TABLENAME = "distributed_signoz_index_v2" - SIGNOZ_TIMESERIES_LOCAL_TABLENAME = "time_series_v2" + SIGNOZ_METRIC_DBNAME = "signoz_metrics" + SIGNOZ_SAMPLES_TABLENAME = "distributed_samples_v2" + SIGNOZ_SAMPLES_V4_TABLENAME = "distributed_samples_v4" + SIGNOZ_TIMESERIES_TABLENAME = "distributed_time_series_v2" + SIGNOZ_TRACE_DBNAME = "signoz_traces" + SIGNOZ_SPAN_INDEX_TABLENAME = "distributed_signoz_index_v2" + SIGNOZ_TIMESERIES_LOCAL_TABLENAME = "time_series_v2" + SIGNOZ_TIMESERIES_v4_LOCAL_TABLENAME = "time_series_v4" + SIGNOZ_TIMESERIES_v4_6HRS_LOCAL_TABLENAME = "time_series_v4_6hrs" + SIGNOZ_TIMESERIES_v4_1DAY_LOCAL_TABLENAME = "time_series_v4_1day" ) var TimeoutExcludedRoutes = map[string]bool{ @@ -352,3 +356,36 @@ const TIMESTAMP = "timestamp" const FirstQueryGraphLimit = "first_query_graph_limit" const SecondQueryGraphLimit = "second_query_graph_limit" + +var TracesListViewDefaultSelectedColumns = []v3.AttributeKey{ + { + Key: "serviceName", + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + IsColumn: true, + }, + { + Key: "name", + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + IsColumn: true, + }, + { + Key: 
"durationNano", + DataType: v3.AttributeKeyDataTypeArrayFloat64, + Type: v3.AttributeKeyTypeTag, + IsColumn: true, + }, + { + Key: "httpMethod", + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + IsColumn: true, + }, + { + Key: "responseStatusCode", + DataType: v3.AttributeKeyDataTypeString, + Type: v3.AttributeKeyTypeTag, + IsColumn: true, + }, +} diff --git a/pkg/query-service/interfaces/interface.go b/pkg/query-service/interfaces/interface.go index a75a2f5f30..e15b1db67e 100644 --- a/pkg/query-service/interfaces/interface.go +++ b/pkg/query-service/interfaces/interface.go @@ -99,6 +99,7 @@ type Reader interface { CheckClickHouse(ctx context.Context) error GetLatencyMetricMetadata(context.Context, string, string, bool) (*v3.LatencyMetricMetadataResponse, error) + GetMetricMetadata(context.Context, string, string) (*v3.MetricMetadataResponse, error) } type Querier interface { diff --git a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go index a960797ff3..010b0b41c1 100644 --- a/pkg/query-service/model/v3/v3.go +++ b/pkg/query-service/model/v3/v3.go @@ -463,6 +463,23 @@ const ( TimeAggregationIncrease TimeAggregation = "increase" ) +func (t TimeAggregation) Validate() error { + switch t { + case TimeAggregationAnyLast, + TimeAggregationSum, + TimeAggregationAvg, + TimeAggregationMin, + TimeAggregationMax, + TimeAggregationCount, + TimeAggregationCountDistinct, + TimeAggregationRate, + TimeAggregationIncrease: + return nil + default: + return fmt.Errorf("invalid time aggregation: %s", t) + } +} + func (t TimeAggregation) IsRateOperator() bool { switch t { case TimeAggregationRate, TimeAggregationIncrease: @@ -488,6 +505,24 @@ const ( SpaceAggregationPercentile99 SpaceAggregation = "percentile_99" ) +func (s SpaceAggregation) Validate() error { + switch s { + case SpaceAggregationSum, + SpaceAggregationAvg, + SpaceAggregationMin, + SpaceAggregationMax, + SpaceAggregationCount, + SpaceAggregationPercentile50, + 
SpaceAggregationPercentile75, + SpaceAggregationPercentile90, + SpaceAggregationPercentile95, + SpaceAggregationPercentile99: + return nil + default: + return fmt.Errorf("invalid space aggregation: %s", s) + } +} + func IsPercentileOperator(operator SpaceAggregation) bool { switch operator { case SpaceAggregationPercentile50, @@ -536,6 +571,7 @@ const ( FunctionNameMedian3 FunctionName = "median3" FunctionNameMedian5 FunctionName = "median5" FunctionNameMedian7 FunctionName = "median7" + FunctionNameTimeShift FunctionName = "timeShift" ) func (f FunctionName) Validate() error { @@ -553,7 +589,8 @@ func (f FunctionName) Validate() error { FunctionNameEWMA7, FunctionNameMedian3, FunctionNameMedian5, - FunctionNameMedian7: + FunctionNameMedian7, + FunctionNameTimeShift: return nil default: return fmt.Errorf("invalid function name: %s", f) @@ -587,6 +624,7 @@ type BuilderQuery struct { TimeAggregation TimeAggregation `json:"timeAggregation,omitempty"` SpaceAggregation SpaceAggregation `json:"spaceAggregation,omitempty"` Functions []Function `json:"functions,omitempty"` + ShiftBy int64 } func (b *BuilderQuery) Validate() error { @@ -604,10 +642,19 @@ func (b *BuilderQuery) Validate() error { return fmt.Errorf("data source is invalid: %w", err) } if b.DataSource == DataSourceMetrics { - if b.TimeAggregation == TimeAggregationUnspecified { + // if AggregateOperator is specified, then the request is using v3 payload + if b.AggregateOperator != "" { if err := b.AggregateOperator.Validate(); err != nil { return fmt.Errorf("aggregate operator is invalid: %w", err) } + } else { + if err := b.TimeAggregation.Validate(); err != nil { + return fmt.Errorf("time aggregation is invalid: %w", err) + } + + if err := b.SpaceAggregation.Validate(); err != nil { + return fmt.Errorf("space aggregation is invalid: %w", err) + } } } else { if err := b.AggregateOperator.Validate(); err != nil { @@ -661,6 +708,28 @@ func (b *BuilderQuery) Validate() error { if err := function.Name.Validate(); 
err != nil { return fmt.Errorf("function name is invalid: %w", err) } + if function.Name == FunctionNameTimeShift { + if len(function.Args) == 0 { + return fmt.Errorf("timeShiftBy param missing in query") + } + } else if function.Name == FunctionNameEWMA3 || + function.Name == FunctionNameEWMA5 || + function.Name == FunctionNameEWMA7 { + if len(function.Args) == 0 { + return fmt.Errorf("alpha param missing in query") + } + alpha := function.Args[0].(float64) + if alpha < 0 || alpha > 1 { + return fmt.Errorf("alpha param should be between 0 and 1") + } + } else if function.Name == FunctionNameCutOffMax || + function.Name == FunctionNameCutOffMin || + function.Name == FunctionNameClampMax || + function.Name == FunctionNameClampMin { + if len(function.Args) == 0 { + return fmt.Errorf("threshold param missing in query") + } + } } } @@ -748,6 +817,10 @@ type OrderBy struct { IsColumn bool `json:"-"` } +func (o OrderBy) CacheKey() string { + return fmt.Sprintf("%s-%s", o.ColumnName, o.Order) +} + // See HAVING_OPERATORS in queryBuilder.ts type HavingOperator string @@ -919,3 +992,13 @@ type LatencyMetricMetadataResponse struct { Delta bool `json:"delta"` Le []float64 `json:"le"` } + +type MetricMetadataResponse struct { + Delta bool `json:"delta"` + Le []float64 `json:"le"` + Description string `json:"description"` + Unit string `json:"unit"` + Type string `json:"type"` + IsMonotonic bool `json:"isMonotonic"` + Temporality string `json:"temporality"` +} diff --git a/pkg/query-service/pqlEngine/engine.go b/pkg/query-service/pqlEngine/engine.go index ac318bd990..99cd4ea6df 100644 --- a/pkg/query-service/pqlEngine/engine.go +++ b/pkg/query-service/pqlEngine/engine.go @@ -10,7 +10,6 @@ import ( "github.com/prometheus/common/promlog" plog "github.com/prometheus/common/promlog" pconfig "github.com/prometheus/prometheus/config" - plabels "github.com/prometheus/prometheus/model/labels" pql "github.com/prometheus/prometheus/promql" pstorage 
"github.com/prometheus/prometheus/storage" premote "github.com/prometheus/prometheus/storage/remote" @@ -89,8 +88,8 @@ func NewPqlEngine(config *pconfig.Config) (*PqlEngine, error) { }, nil } -func (p *PqlEngine) RunAlertQuery(ctx context.Context, qs string, t time.Time) (pql.Vector, error) { - q, err := p.engine.NewInstantQuery(ctx, p.fanoutStorage, nil, qs, t) +func (p *PqlEngine) RunAlertQuery(ctx context.Context, qs string, start, end time.Time, interval time.Duration) (pql.Matrix, error) { + q, err := p.engine.NewRangeQuery(ctx, p.fanoutStorage, nil, qs, start, end, interval) if err != nil { return nil, err } @@ -101,16 +100,26 @@ func (p *PqlEngine) RunAlertQuery(ctx context.Context, qs string, t time.Time) ( return nil, res.Err } - switch v := res.Value.(type) { + switch typ := res.Value.(type) { case pql.Vector: - return v, nil + series := make([]pql.Series, 0, len(typ)) + value := res.Value.(pql.Vector) + for _, smpl := range value { + series = append(series, pql.Series{ + Metric: smpl.Metric, + Floats: []pql.FPoint{{T: smpl.T, F: smpl.F}}, + }) + } + return series, nil case pql.Scalar: - return pql.Vector{pql.Sample{ - T: v.T, - F: v.V, - H: nil, - Metric: plabels.Labels{}, - }}, nil + value := res.Value.(pql.Scalar) + series := make([]pql.Series, 0, 1) + series = append(series, pql.Series{ + Floats: []pql.FPoint{{T: value.T, F: value.V}}, + }) + return series, nil + case pql.Matrix: + return res.Value.(pql.Matrix), nil default: return nil, fmt.Errorf("rule result is not a vector or scalar") } diff --git a/pkg/query-service/rules/apiParams.go b/pkg/query-service/rules/apiParams.go index 300eac330f..6000ec280f 100644 --- a/pkg/query-service/rules/apiParams.go +++ b/pkg/query-service/rules/apiParams.go @@ -246,3 +246,25 @@ type GettableRule struct { UpdatedAt *time.Time `json:"updateAt"` UpdatedBy *string `json:"updateBy"` } + +type timeRange struct { + Start int64 `json:"start"` + End int64 `json:"end"` + PageSize int64 `json:"pageSize"` +} + +type 
builderQuery struct { + QueryData []v3.BuilderQuery `json:"queryData"` + QueryFormulas []string `json:"queryFormulas"` +} + +type urlShareableCompositeQuery struct { + QueryType string `json:"queryType"` + Builder builderQuery `json:"builder"` +} + +type Options struct { + MaxLines int `json:"maxLines"` + Format string `json:"format"` + SelectColumns []v3.AttributeKey `json:"selectColumns"` +} diff --git a/pkg/query-service/rules/promRule.go b/pkg/query-service/rules/promRule.go index 94ace4137b..5607366e6b 100644 --- a/pkg/query-service/rules/promRule.go +++ b/pkg/query-service/rules/promRule.go @@ -3,6 +3,7 @@ package rules import ( "context" "fmt" + "math" "sync" "time" @@ -115,7 +116,9 @@ func (r *PromRule) targetVal() float64 { return 0 } - return *r.ruleCondition.Target + unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit)) + value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit())) + return value.F } func (r *PromRule) Type() RuleType { @@ -322,14 +325,7 @@ func (r *PromRule) getPqlQuery() (string, error) { if query == "" { return query, fmt.Errorf("a promquery needs to be set for this rule to function") } - if r.ruleCondition.Target != nil && r.ruleCondition.CompareOp != CompareOpNone { - unitConverter := converter.FromUnit(converter.Unit(r.ruleCondition.TargetUnit)) - value := unitConverter.Convert(converter.Value{F: *r.ruleCondition.Target, U: converter.Unit(r.ruleCondition.TargetUnit)}, converter.Unit(r.Unit())) - query = fmt.Sprintf("(%s) %s %f", query, ResolveCompareOp(r.ruleCondition.CompareOp), value.F) - return query, nil - } else { - return query, nil - } + return query, nil } } } @@ -337,8 +333,26 @@ func (r *PromRule) getPqlQuery() (string, error) { return "", fmt.Errorf("invalid promql rule query") } +func (r *PromRule) matchType() MatchType { + if r.ruleCondition == nil { + return AtleastOnce + } + return 
r.ruleCondition.MatchType +} + +func (r *PromRule) compareOp() CompareOp { + if r.ruleCondition == nil { + return ValueIsEq + } + return r.ruleCondition.CompareOp +} + func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (interface{}, error) { + start := ts.Add(-r.evalWindow) + end := ts + interval := 60 * time.Second // TODO(srikanthccv): this should be configurable + valueFormatter := formatter.FromUnit(r.Unit()) q, err := r.getPqlQuery() @@ -346,7 +360,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( return nil, err } zap.S().Info("rule:", r.Name(), "\t evaluating promql query: ", q) - res, err := queriers.PqlEngine.RunAlertQuery(ctx, q, ts) + res, err := queriers.PqlEngine.RunAlertQuery(ctx, q, start, end, interval) if err != nil { r.SetHealth(HealthBad) r.SetLastError(err) @@ -360,16 +374,25 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( var alerts = make(map[uint64]*Alert, len(res)) - for _, smpl := range res { - l := make(map[string]string, len(smpl.Metric)) - for _, lbl := range smpl.Metric { + for _, series := range res { + l := make(map[string]string, len(series.Metric)) + for _, lbl := range series.Metric { l[lbl.Name] = lbl.Value } + if len(series.Floats) == 0 { + continue + } + + alertSmpl, shouldAlert := r.shouldAlert(series) + if !shouldAlert { + continue + } + thresholdFormatter := formatter.FromUnit(r.ruleCondition.TargetUnit) threshold := thresholdFormatter.Format(r.targetVal(), r.ruleCondition.TargetUnit) - tmplData := AlertTemplateData(l, valueFormatter.Format(smpl.F, r.Unit()), threshold) + tmplData := AlertTemplateData(l, valueFormatter.Format(alertSmpl.F, r.Unit()), threshold) // Inject some convenience variables that are easier to remember for users // who are not used to Go's templating system. 
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}" @@ -392,7 +415,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( return result } - lb := plabels.NewBuilder(smpl.Metric).Del(plabels.MetricName) + lb := plabels.NewBuilder(alertSmpl.Metric).Del(plabels.MetricName) for _, l := range r.labels { lb.Set(l.Name, expand(l.Value)) @@ -425,7 +448,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( Annotations: annotations, ActiveAt: ts, State: StatePending, - Value: smpl.F, + Value: alertSmpl.F, GeneratorURL: r.GeneratorURL(), Receivers: r.preferredChannels, } @@ -473,6 +496,137 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) ( return len(r.active), nil } +func (r *PromRule) shouldAlert(series pql.Series) (pql.Sample, bool) { + var alertSmpl pql.Sample + var shouldAlert bool + switch r.matchType() { + case AtleastOnce: + // If any sample matches the condition, the rule is firing. + if r.compareOp() == ValueIsAbove { + for _, smpl := range series.Floats { + if smpl.F > r.targetVal() { + alertSmpl = pql.Sample{F: smpl.F, T: smpl.T, Metric: series.Metric} + shouldAlert = true + break + } + } + } else if r.compareOp() == ValueIsBelow { + for _, smpl := range series.Floats { + if smpl.F < r.targetVal() { + alertSmpl = pql.Sample{F: smpl.F, T: smpl.T, Metric: series.Metric} + shouldAlert = true + break + } + } + } else if r.compareOp() == ValueIsEq { + for _, smpl := range series.Floats { + if smpl.F == r.targetVal() { + alertSmpl = pql.Sample{F: smpl.F, T: smpl.T, Metric: series.Metric} + shouldAlert = true + break + } + } + } else if r.compareOp() == ValueIsNotEq { + for _, smpl := range series.Floats { + if smpl.F != r.targetVal() { + alertSmpl = pql.Sample{F: smpl.F, T: smpl.T, Metric: series.Metric} + shouldAlert = true + break + } + } + } + case AllTheTimes: + // If all samples match the condition, the rule is firing. 
+ shouldAlert = true + alertSmpl = pql.Sample{F: r.targetVal(), Metric: series.Metric} + if r.compareOp() == ValueIsAbove { + for _, smpl := range series.Floats { + if smpl.F <= r.targetVal() { + shouldAlert = false + break + } + } + } else if r.compareOp() == ValueIsBelow { + for _, smpl := range series.Floats { + if smpl.F >= r.targetVal() { + shouldAlert = false + break + } + } + } else if r.compareOp() == ValueIsEq { + for _, smpl := range series.Floats { + if smpl.F != r.targetVal() { + shouldAlert = false + break + } + } + } else if r.compareOp() == ValueIsNotEq { + for _, smpl := range series.Floats { + if smpl.F == r.targetVal() { + shouldAlert = false + break + } + } + } + case OnAverage: + // If the average of all samples matches the condition, the rule is firing. + var sum float64 + for _, smpl := range series.Floats { + if math.IsNaN(smpl.F) { + continue + } + sum += smpl.F + } + avg := sum / float64(len(series.Floats)) + alertSmpl = pql.Sample{F: avg, Metric: series.Metric} + if r.compareOp() == ValueIsAbove { + if avg > r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsBelow { + if avg < r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsEq { + if avg == r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsNotEq { + if avg != r.targetVal() { + shouldAlert = true + } + } + case InTotal: + // If the sum of all samples matches the condition, the rule is firing. 
+ var sum float64 + for _, smpl := range series.Floats { + if math.IsNaN(smpl.F) { + continue + } + sum += smpl.F + } + alertSmpl = pql.Sample{F: sum, Metric: series.Metric} + if r.compareOp() == ValueIsAbove { + if sum > r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsBelow { + if sum < r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsEq { + if sum == r.targetVal() { + shouldAlert = true + } + } else if r.compareOp() == ValueIsNotEq { + if sum != r.targetVal() { + shouldAlert = true + } + } + } + return alertSmpl, shouldAlert +} + func (r *PromRule) String() string { ar := PostableRule{ diff --git a/pkg/query-service/rules/promrule_test.go b/pkg/query-service/rules/promrule_test.go new file mode 100644 index 0000000000..ee843b9b64 --- /dev/null +++ b/pkg/query-service/rules/promrule_test.go @@ -0,0 +1,622 @@ +package rules + +import ( + "testing" + "time" + + pql "github.com/prometheus/prometheus/promql" + "github.com/stretchr/testify/assert" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" +) + +type testLogger struct { + t *testing.T +} + +func (l testLogger) Log(args ...interface{}) error { + l.t.Log(args...) 
+ return nil +} + +func TestPromRuleShouldAlert(t *testing.T) { + postableRule := PostableRule{ + Alert: "Test Rule", + AlertType: "METRIC_BASED_ALERT", + RuleType: RuleTypeProm, + EvalWindow: Duration(5 * time.Minute), + Frequency: Duration(1 * time.Minute), + RuleCondition: &RuleCondition{ + CompositeQuery: &v3.CompositeQuery{ + QueryType: v3.QueryTypePromQL, + PromQueries: map[string]*v3.PromQuery{ + "A": { + Query: "dummy_query", // This is not used in the test + }, + }, + }, + }, + } + + cases := []struct { + values pql.Series + expectAlert bool + compareOp string + matchType string + target float64 + }{ + // Test cases for Equals Always + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 1.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + }, + // Test cases for Equals Once + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 1.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: 
pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + }, + // Test cases for Greater Than Always + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "2", // Always + target: 1.5, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: false, + compareOp: "1", // Greater Than + matchType: "2", // Always + target: 4.5, + }, + // Test cases for Greater Than Once + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "1", // Once + target: 4.5, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 4.0}, + {F: 4.0}, + {F: 4.0}, + {F: 4.0}, + {F: 4.0}, + }, + }, + expectAlert: false, + compareOp: "1", // Greater Than + matchType: "1", // Once + target: 4.5, + }, + // Test cases for Not Equals Always + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 0.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + 
expectAlert: true, + compareOp: "4", // Not Equals + matchType: "2", // Always + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "2", // Always + target: 0.0, + }, + // Test cases for Not Equals Once + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + }, + }, + expectAlert: true, + compareOp: "4", // Not Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + {F: 0.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 0.0}, + {F: 0.0}, + {F: 1.0}, + {F: 0.0}, + {F: 1.0}, + }, + }, + expectAlert: true, + compareOp: "4", // Not Equals + matchType: "1", // Once + target: 0.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + {F: 1.0}, + }, + }, + expectAlert: true, + compareOp: "4", // Not Equals + matchType: "1", // Once + target: 0.0, + }, + // Test cases for Less Than Always + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 1.5}, + {F: 1.5}, + {F: 1.5}, + {F: 1.5}, + {F: 1.5}, + }, + }, + expectAlert: true, + compareOp: "2", // Less Than + matchType: "2", // Always + target: 4, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + }, + }, + expectAlert: false, + compareOp: "2", // Less Than + matchType: "2", // Always + target: 4, + }, + // Test cases for Less Than Once + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 2.5}, + }, + }, + expectAlert: true, + compareOp: "2", // Less Than + matchType: "1", // Once + target: 4, + }, + { + values: pql.Series{ + Floats: 
[]pql.FPoint{ + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + {F: 4.5}, + }, + }, + expectAlert: false, + compareOp: "2", // Less Than + matchType: "1", // Once + target: 4, + }, + // Test cases for OnAverage + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "3", // OnAverage + target: 6.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "3", // OnAverage + target: 4.5, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "4", // Not Equals + matchType: "3", // OnAverage + target: 4.5, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "3", // OnAverage + target: 6.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "3", // OnAverage + target: 4.5, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "2", // Less Than + matchType: "3", // OnAverage + target: 12.0, + }, + // Test cases for InTotal + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: true, + compareOp: "3", // Equals + matchType: "4", // InTotal + target: 30.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 4.0}, + {F: 6.0}, + {F: 8.0}, + {F: 2.0}, + }, + }, + expectAlert: false, + compareOp: "3", // Equals + matchType: "4", // 
InTotal + target: 20.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + }, + }, + expectAlert: true, + compareOp: "4", // Not Equals + matchType: "4", // InTotal + target: 9.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + }, + }, + expectAlert: false, + compareOp: "4", // Not Equals + matchType: "4", // InTotal + target: 10.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 10.0}, + }, + }, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "4", // InTotal + target: 10.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 10.0}, + }, + }, + expectAlert: false, + compareOp: "1", // Greater Than + matchType: "4", // InTotal + target: 20.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 10.0}, + }, + }, + expectAlert: true, + compareOp: "2", // Less Than + matchType: "4", // InTotal + target: 30.0, + }, + { + values: pql.Series{ + Floats: []pql.FPoint{ + {F: 10.0}, + {F: 10.0}, + }, + }, + expectAlert: false, + compareOp: "2", // Less Than + matchType: "4", // InTotal + target: 20.0, + }, + } + + for idx, c := range cases { + postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp) + postableRule.RuleCondition.MatchType = MatchType(c.matchType) + postableRule.RuleCondition.Target = &c.target + + rule, err := NewPromRule("69", &postableRule, testLogger{t}, PromRuleOpts{}) + if err != nil { + assert.NoError(t, err) + } + + _, shoulAlert := rule.shouldAlert(c.values) + assert.Equal(t, c.expectAlert, shoulAlert, "Test case %d", idx) + } +} diff --git a/pkg/query-service/rules/thresholdRule.go b/pkg/query-service/rules/thresholdRule.go index 895026ffa7..3c6cf2537b 100644 --- a/pkg/query-service/rules/thresholdRule.go +++ b/pkg/query-service/rules/thresholdRule.go @@ -3,8 +3,10 @@ package rules import ( "bytes" "context" + "encoding/json" "fmt" "math" + "net/url" "reflect" "regexp" "sort" @@ -60,6 +62,7 @@ type ThresholdRule struct 
{ queryBuilder *queryBuilder.QueryBuilder opts ThresholdRuleOpts + typ string } type ThresholdRuleOpts struct { @@ -98,6 +101,7 @@ func NewThresholdRule( health: HealthUnknown, active: map[uint64]*Alert{}, opts: opts, + typ: p.AlertType, } if int64(t.evalWindow) == 0 { @@ -625,6 +629,210 @@ func (r *ThresholdRule) prepareBuilderQueries(ts time.Time) (map[string]string, return runQueries, err } +// The following function is used to prepare the where clause for the query +// `lbls` contains the key value pairs of the labels from the result of the query +// We iterate over the where clause and replace the labels with the actual values +// There are two cases: +// 1. The label is present in the where clause +// 2. The label is not present in the where clause +// +// Example for case 2: +// Latency by serviceName without any filter +// In this case, for each service with latency > threshold we send a notification +// The expectation will be that clicking on the related traces for service A, will +// take us to the traces page with the filter serviceName=A +// So for all the missing labels in the where clause, we add them as key = value +// +// Example for case 1: +// Severity text IN (WARN, ERROR) +// In this case, the Severity text will appear in the `lbls` if it were part of the group +// by clause, in which case we replace it with the actual value for the notification +// i.e Severity text = WARN +// If the Severity text is not part of the group by clause, then we add it as it is +func (r *ThresholdRule) fetchFilters(selectedQuery string, lbls labels.Labels) []v3.FilterItem { + var filterItems []v3.FilterItem + + added := make(map[string]struct{}) + + if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder && + r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery] != nil && + r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters != nil { + + for _, item := range 
r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery].Filters.Items { + exists := false + for _, label := range lbls { + if item.Key.Key == label.Name { + // if the label is present in the where clause, replace it with key = value + filterItems = append(filterItems, v3.FilterItem{ + Key: item.Key, + Operator: v3.FilterOperatorEqual, + Value: label.Value, + }) + exists = true + added[label.Name] = struct{}{} + break + } + } + + if !exists { + // if the label is not present in the where clause, add it as it is + filterItems = append(filterItems, item) + } + } + } + + // add the labels which are not present in the where clause + for _, label := range lbls { + if _, ok := added[label.Name]; !ok { + filterItems = append(filterItems, v3.FilterItem{ + Key: v3.AttributeKey{Key: label.Name}, + Operator: v3.FilterOperatorEqual, + Value: label.Value, + }) + } + } + + return filterItems +} + +func (r *ThresholdRule) prepareLinksToLogs(ts time.Time, lbls labels.Labels) string { + selectedQuery := r.GetSelectedQuery() + + // TODO(srikanthccv): handle formula queries + if selectedQuery < "A" || selectedQuery > "Z" { + return "" + } + + // Logs list view expects time in milliseconds + tr := timeRange{ + Start: ts.Add(-time.Duration(r.evalWindow)).UnixMilli(), + End: ts.UnixMilli(), + PageSize: 100, + } + + options := Options{ + MaxLines: 2, + Format: "list", + SelectColumns: []v3.AttributeKey{}, + } + + period, _ := json.Marshal(tr) + urlEncodedTimeRange := url.QueryEscape(string(period)) + + filterItems := r.fetchFilters(selectedQuery, lbls) + urlData := urlShareableCompositeQuery{ + QueryType: string(v3.QueryTypeBuilder), + Builder: builderQuery{ + QueryData: []v3.BuilderQuery{ + { + DataSource: v3.DataSourceLogs, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + AggregateAttribute: v3.AttributeKey{}, + Filters: &v3.FilterSet{ + Items: filterItems, + Operator: "AND", + }, + Expression: "A", + Disabled: false, + Having: []v3.Having{}, + StepInterval: 60, + 
OrderBy: []v3.OrderBy{ + { + ColumnName: "timestamp", + Order: "desc", + }, + }, + }, + }, + QueryFormulas: make([]string, 0), + }, + } + + data, _ := json.Marshal(urlData) + compositeQuery := url.QueryEscape(url.QueryEscape(string(data))) + + optionsData, _ := json.Marshal(options) + urlEncodedOptions := url.QueryEscape(string(optionsData)) + + return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions) +} + +func (r *ThresholdRule) prepareLinksToTraces(ts time.Time, lbls labels.Labels) string { + selectedQuery := r.GetSelectedQuery() + + // TODO(srikanthccv): handle formula queries + if selectedQuery < "A" || selectedQuery > "Z" { + return "" + } + + // Traces list view expects time in nanoseconds + tr := timeRange{ + Start: ts.Add(-time.Duration(r.evalWindow)).UnixNano(), + End: ts.UnixNano(), + PageSize: 100, + } + + options := Options{ + MaxLines: 2, + Format: "list", + SelectColumns: constants.TracesListViewDefaultSelectedColumns, + } + + period, _ := json.Marshal(tr) + urlEncodedTimeRange := url.QueryEscape(string(period)) + + filterItems := r.fetchFilters(selectedQuery, lbls) + urlData := urlShareableCompositeQuery{ + QueryType: string(v3.QueryTypeBuilder), + Builder: builderQuery{ + QueryData: []v3.BuilderQuery{ + { + DataSource: v3.DataSourceTraces, + QueryName: "A", + AggregateOperator: v3.AggregateOperatorNoOp, + AggregateAttribute: v3.AttributeKey{}, + Filters: &v3.FilterSet{ + Items: filterItems, + Operator: "AND", + }, + Expression: "A", + Disabled: false, + Having: []v3.Having{}, + StepInterval: 60, + OrderBy: []v3.OrderBy{ + { + ColumnName: "timestamp", + Order: "desc", + }, + }, + }, + }, + QueryFormulas: make([]string, 0), + }, + } + + data, _ := json.Marshal(urlData) + // We need to double encode the composite query to remain compatible with the UI + compositeQuery := url.QueryEscape(url.QueryEscape(string(data))) + + optionsData, _ := 
json.Marshal(options) + urlEncodedOptions := url.QueryEscape(string(optionsData)) + + return fmt.Sprintf("compositeQuery=%s&timeRange=%s&startTime=%d&endTime=%d&options=%s", compositeQuery, urlEncodedTimeRange, tr.Start, tr.End, urlEncodedOptions) +} + +func (r *ThresholdRule) hostFromSource() string { + parsedUrl, err := url.Parse(r.source) + if err != nil { + return "" + } + if parsedUrl.Port() != "" { + return fmt.Sprintf("%s:%s", parsedUrl.Hostname(), parsedUrl.Port()) + } + return parsedUrl.Hostname() +} + func (r *ThresholdRule) prepareClickhouseQueries(ts time.Time) (map[string]string, error) { queries := make(map[string]string) @@ -668,7 +876,7 @@ func (r *ThresholdRule) prepareClickhouseQueries(ts time.Time) (map[string]strin func (r *ThresholdRule) GetSelectedQuery() string { - // The acutal query string is not relevant here + // The actual query string is not relevant here // we just need to know the selected query var queries map[string]string @@ -817,7 +1025,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie // who are not used to Go's templating system. 
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}" - // utility function to apply go template on labels and annots + // utility function to apply go template on labels and annotations expand := func(text string) string { tmpl := NewTemplateExpander( @@ -846,6 +1054,18 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie lb.Set(labels.AlertRuleIdLabel, r.ID()) lb.Set(labels.RuleSourceLabel, r.GeneratorURL()) + if r.typ == "TRACES_BASED_ALERT" { + link := r.prepareLinksToTraces(ts, smpl.Metric) + if link != "" && r.hostFromSource() != "" { + lb.Set("RelatedTraces", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)) + } + } else if r.typ == "LOGS_BASED_ALERT" { + link := r.prepareLinksToLogs(ts, smpl.Metric) + if link != "" && r.hostFromSource() != "" { + lb.Set("RelatedLogs", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)) + } + } + annotations := make(labels.Labels, 0, len(r.annotations)) for _, a := range r.annotations { annotations = append(annotations, labels.Label{Name: normalizeLabelName(a.Name), Value: expand(a.Value)}) diff --git a/pkg/query-service/rules/thresholdRule_test.go b/pkg/query-service/rules/thresholdRule_test.go index 81cf97af1e..2b39084bec 100644 --- a/pkg/query-service/rules/thresholdRule_test.go +++ b/pkg/query-service/rules/thresholdRule_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/assert" "go.signoz.io/signoz/pkg/query-service/featureManager" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" + "go.signoz.io/signoz/pkg/query-service/utils/labels" ) func TestThresholdRuleCombinations(t *testing.T) { @@ -335,3 +336,87 @@ func TestNormalizeLabelName(t *testing.T) { assert.Equal(t, c.expected, normalizeLabelName(c.labelName)) } } + +func TestPrepareLinksToLogs(t *testing.T) { + postableRule := PostableRule{ + Alert: "Tricky Condition Tests", + AlertType: "LOGS_BASED_ALERT", + RuleType: RuleTypeThreshold, + EvalWindow: Duration(5 * 
time.Minute), + Frequency: Duration(1 * time.Minute), + RuleCondition: &RuleCondition{ + CompositeQuery: &v3.CompositeQuery{ + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + StepInterval: 60, + AggregateAttribute: v3.AttributeKey{ + Key: "", + }, + AggregateOperator: v3.AggregateOperatorNoOp, + DataSource: v3.DataSourceLogs, + Expression: "A", + }, + }, + }, + CompareOp: "4", // Not Equals + MatchType: "1", // Once + Target: &[]float64{0.0}[0], + SelectedQuery: "A", + }, + } + fm := featureManager.StartManager() + + rule, err := NewThresholdRule("69", &postableRule, ThresholdRuleOpts{}, fm) + if err != nil { + assert.NoError(t, err) + } + + ts := time.UnixMilli(1705469040000) + + link := rule.prepareLinksToLogs(ts, labels.Labels{}) + assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468740000%2C%22end%22%3A1705469040000%2C%22pageSize%22%3A100%7D&startTime=1705468740000&endTime=1705469040000") +} + +func TestPrepareLinksToTraces(t *testing.T) { + postableRule := PostableRule{ + Alert: "Links to traces test", + AlertType: "TRACES_BASED_ALERT", + RuleType: RuleTypeThreshold, + EvalWindow: Duration(5 * time.Minute), + Frequency: Duration(1 * time.Minute), + RuleCondition: &RuleCondition{ + CompositeQuery: &v3.CompositeQuery{ + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + StepInterval: 60, + AggregateAttribute: v3.AttributeKey{ + Key: "durationNano", + }, + AggregateOperator: v3.AggregateOperatorAvg, + DataSource: v3.DataSourceTraces, + Expression: "A", + }, + }, + }, + CompareOp: "4", // Not Equals + MatchType: "1", // Once + Target: &[]float64{0.0}[0], + SelectedQuery: "A", + }, + } + fm := featureManager.StartManager() + + rule, err := NewThresholdRule("69", &postableRule, ThresholdRuleOpts{}, fm) + if err != nil { + assert.NoError(t, err) + } + + ts := time.UnixMilli(1705469040000) + + link := rule.prepareLinksToTraces(ts, 
labels.Labels{}) + assert.Contains(t, link, "&timeRange=%7B%22start%22%3A1705468740000000000%2C%22end%22%3A1705469040000000000%2C%22pageSize%22%3A100%7D&startTime=1705468740000000000&endTime=1705469040000000000") +} diff --git a/pkg/query-service/tests/cold_storage_test.go b/pkg/query-service/tests/cold_storage_test.go index 09ee39b94b..87db1b6a93 100644 --- a/pkg/query-service/tests/cold_storage_test.go +++ b/pkg/query-service/tests/cold_storage_test.go @@ -28,6 +28,9 @@ func setTTL(table, coldStorage, toColdTTL, deleteTTL string, jwtToken string) ([ } var bearer = "Bearer " + jwtToken req, err := http.NewRequest("POST", endpoint+"/api/v1/settings/ttl?"+params, nil) + if err != nil { + return nil, err + } req.Header.Add("Authorization", bearer) resp, err := client.Do(req) @@ -129,6 +132,7 @@ func getTTL(t *testing.T, table string, jwtToken string) *model.GetTTLResponseIt var bearer = "Bearer " + jwtToken req, err := http.NewRequest("GET", url, nil) + require.NoError(t, err) req.Header.Add("Authorization", bearer) resp, err := client.Do(req) diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml index 25de92e819..7c9b50199f 100644 --- a/pkg/query-service/tests/test-deploy/docker-compose.yaml +++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml @@ -2,7 +2,7 @@ version: "2.4" x-clickhouse-defaults: &clickhouse-defaults restart: on-failure - image: clickhouse/clickhouse-server:23.11.1-alpine + image: clickhouse/clickhouse-server:24.1.2-alpine tty: true depends_on: - zookeeper-1 @@ -192,7 +192,7 @@ services: <<: *db-depend otel-collector-migrator: - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.11} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12} container_name: otel-migrator command: - "--dsn=tcp://clickhouse:9000" @@ -205,7 +205,7 @@ services: # condition: service_healthy otel-collector: - image: signoz/signoz-otel-collector:0.88.11 + image: 
signoz/signoz-otel-collector:0.88.12 container_name: signoz-otel-collector command: [ diff --git a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml index 8a0e899826..d6ef6fcc35 100644 --- a/pkg/query-service/tests/test-deploy/otel-collector-config.yaml +++ b/pkg/query-service/tests/test-deploy/otel-collector-config.yaml @@ -115,14 +115,7 @@ exporters: clickhouselogsexporter: dsn: tcp://clickhouse:9000/ docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} - timeout: 5s - sending_queue: - queue_size: 100 - retry_on_failure: - enabled: true - initial_interval: 5s - max_interval: 30s - max_elapsed_time: 300s + timeout: 10s service: telemetry: