Merge pull request #4304 from SigNoz/release/v0.36.1

Release/v0.36.1
commit 79c05d8fa8 by Prashant Shahi, 2023-12-29 15:39:38 +05:30 (committed by GitHub)
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
84 changed files with 3474 additions and 185 deletions

.github/CODEOWNERS

@@ -1,7 +1,6 @@
 # CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
 # Owners are automatically requested for review for PRs that changes code
 # that they own.
-* @ankitnayan
 /frontend/ @palashgdev @YounixM
 /frontend/src/container/MetricsApplication @srikanthccv

@@ -146,7 +146,7 @@ services:
 condition: on-failure
 query-service:
-image: signoz/query-service:0.36.0
+image: signoz/query-service:0.36.1
 command:
 [
 "-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
 <<: *db-depend
 frontend:
-image: signoz/frontend:0.36.0
+image: signoz/frontend:0.36.1
 deploy:
 restart_policy:
 condition: on-failure
@@ -199,7 +199,7 @@ services:
 - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
 otel-collector:
-image: signoz/signoz-otel-collector:0.88.4
+image: signoz/signoz-otel-collector:0.88.6
 command:
 [
 "--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
 - query-service
 otel-collector-migrator:
-image: signoz/signoz-schema-migrator:0.88.4
+image: signoz/signoz-schema-migrator:0.88.6
 deploy:
 restart_policy:
 condition: on-failure
@@ -250,7 +250,7 @@ services:
 # - clickhouse-3
 otel-collector-metrics:
-image: signoz/signoz-otel-collector:0.88.4
+image: signoz/signoz-otel-collector:0.88.6
 command:
 [
 "--config=/etc/otel-collector-metrics-config.yaml",

@@ -66,7 +66,7 @@ services:
 - --storage.path=/data
 otel-collector-migrator:
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.4}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
 container_name: otel-migrator
 command:
 - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
 # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
 otel-collector:
 container_name: signoz-otel-collector
-image: signoz/signoz-otel-collector:0.88.4
+image: signoz/signoz-otel-collector:0.88.6
 command:
 [
 "--config=/etc/otel-collector-config.yaml",
@@ -118,7 +118,7 @@ services:
 otel-collector-metrics:
 container_name: signoz-otel-collector-metrics
-image: signoz/signoz-otel-collector:0.88.4
+image: signoz/signoz-otel-collector:0.88.6
 command:
 [
 "--config=/etc/otel-collector-metrics-config.yaml",

@@ -164,7 +164,7 @@ services:
 # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
 query-service:
-image: signoz/query-service:${DOCKER_TAG:-0.36.0}
+image: signoz/query-service:${DOCKER_TAG:-0.36.1}
 container_name: signoz-query-service
 command:
 [
@@ -203,7 +203,7 @@ services:
 <<: *db-depend
 frontend:
-image: signoz/frontend:${DOCKER_TAG:-0.36.0}
+image: signoz/frontend:${DOCKER_TAG:-0.36.1}
 container_name: signoz-frontend
 restart: on-failure
 depends_on:
@@ -215,7 +215,7 @@ services:
 - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
 otel-collector-migrator:
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.4}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
 container_name: otel-migrator
 command:
 - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
 otel-collector:
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.4}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.6}
 container_name: signoz-otel-collector
 command:
 [
@@ -269,7 +269,7 @@ services:
 condition: service_healthy
 otel-collector-metrics:
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.4}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.6}
 container_name: signoz-otel-collector-metrics
 command:
 [
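The hunks above only bump image tags (query-service and frontend to 0.36.1, the collector and schema-migrator images to 0.88.6). A typical way to roll this out on a standalone Docker Compose deployment is a pull-and-recreate; the compose file path below is a placeholder, not taken from this diff:

```bash
# Pull the retagged images and recreate the affected containers
docker compose -f <path-to>/docker-compose.yaml pull
docker compose -f <path-to>/docker-compose.yaml up -d
```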

@@ -7,7 +7,6 @@ import {
 } from '@ant-design/icons';
 import Convert from 'ansi-to-html';
 import { Button, Divider, Row, Typography } from 'antd';
-import LogDetail from 'components/LogDetail';
 import LogsExplorerContext from 'container/LogsExplorerContext';
 import dayjs from 'dayjs';
 import dompurify from 'dompurify';
@@ -95,11 +94,15 @@ function LogSelectedField({
 type ListLogViewProps = {
 logData: ILog;
 selectedFields: IField[];
+onSetActiveLog: (log: ILog) => void;
+onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
 };
 function ListLogView({
 logData,
 selectedFields,
+onSetActiveLog,
+onAddToQuery,
 }: ListLogViewProps): JSX.Element {
 const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -113,12 +116,6 @@ function ListLogView({
 onSetActiveLog: handleSetActiveContextLog,
 onClearActiveLog: handleClearActiveContextLog,
 } = useActiveLog();
-const {
-activeLog,
-onSetActiveLog,
-onClearActiveLog,
-onAddToQuery,
-} = useActiveLog();
 const handleDetailedView = useCallback(() => {
 onSetActiveLog(logData);
@@ -223,12 +220,6 @@ function ListLogView({
 onClose={handleClearActiveContextLog}
 />
 )}
-<LogDetail
-log={activeLog}
-onClose={onClearActiveLog}
-onAddToQuery={onAddToQuery}
-onClickActionItem={onAddToQuery}
-/>
 </Row>
 </Container>
 );

@@ -158,7 +158,7 @@ function Trace(props: TraceProps): JSX.Element {
 isDarkMode={isDarkMode}
 onClick={onClickTreeExpansion}
 >
-<Typography>{totalSpans}</Typography>
+<Typography style={{ wordBreak: 'normal' }}>{totalSpans}</Typography>
 <CaretContainer>{icon}</CaretContainer>
 </CardComponent>
 )}

@@ -1,3 +1,5 @@
+import '../GridCardLayout.styles.scss';
 import { Skeleton, Typography } from 'antd';
 import cx from 'classnames';
 import { ToggleGraphProps } from 'components/Graph/types';

@@ -19,5 +19,13 @@ export const WrapperStyled = styled.div`
 & .ant-table {
 flex: 1;
 overflow: auto;
+> .ant-table-container {
+> .ant-table-content {
+> table {
+min-width: 99% !important;
+}
+}
+}
 }
 `;

@@ -1,4 +1,5 @@
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
 import Spinner from 'components/Spinner';
@@ -10,6 +11,7 @@ import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
 import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
 import { Heading } from 'container/LogsTable/styles';
 import { useOptionsMenu } from 'container/OptionsMenu';
+import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import useFontFaceObserver from 'hooks/useFontObserver';
 import { useEventSource } from 'providers/EventSource';
@@ -31,6 +33,13 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 const { activeLogId } = useCopyLogLink();
+const {
+activeLog,
+onClearActiveLog,
+onAddToQuery,
+onSetActiveLog,
+} = useActiveLog();
 const { options } = useOptionsMenu({
 storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
 dataSource: DataSource.LOGS,
@@ -66,10 +75,22 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 }
 return (
-<ListLogView key={log.id} logData={log} selectedFields={selectedFields} />
+<ListLogView
+key={log.id}
+logData={log}
+selectedFields={selectedFields}
+onAddToQuery={onAddToQuery}
+onSetActiveLog={onSetActiveLog}
+/>
 );
 },
-[options.format, options.maxLines, selectedFields],
+[
+onAddToQuery,
+onSetActiveLog,
+options.format,
+options.maxLines,
+selectedFields,
+],
 );
 useEffect(() => {
@@ -123,6 +144,12 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 )}
 </InfinityWrapperStyled>
 )}
+<LogDetail
+log={activeLog}
+onClose={onClearActiveLog}
+onAddToQuery={onAddToQuery}
+onClickActionItem={onAddToQuery}
+/>
 </>
 );
 }

@@ -1,4 +1,5 @@
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 // components
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
@@ -8,6 +9,7 @@ import { LOCALSTORAGE } from 'constants/localStorage';
 import ExplorerControlPanel from 'container/ExplorerControlPanel';
 import { Heading } from 'container/LogsTable/styles';
 import { useOptionsMenu } from 'container/OptionsMenu';
+import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
 import useFontFaceObserver from 'hooks/useFontObserver';
@@ -37,6 +39,13 @@ function LogsExplorerList({
 const { activeLogId } = useCopyLogLink();
+const {
+activeLog,
+onClearActiveLog,
+onAddToQuery,
+onSetActiveLog,
+} = useActiveLog();
 const { options, config } = useOptionsMenu({
 storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
 dataSource: initialDataSource || DataSource.METRICS,
@@ -76,10 +85,22 @@ function LogsExplorerList({
 }
 return (
-<ListLogView key={log.id} logData={log} selectedFields={selectedFields} />
+<ListLogView
+key={log.id}
+logData={log}
+selectedFields={selectedFields}
+onAddToQuery={onAddToQuery}
+onSetActiveLog={onSetActiveLog}
+/>
 );
 },
-[options.format, options.maxLines, selectedFields],
+[
+onAddToQuery,
+onSetActiveLog,
+options.format,
+options.maxLines,
+selectedFields,
+],
 );
 useEffect(() => {
@@ -149,6 +170,13 @@ function LogsExplorerList({
 )}
 <InfinityWrapperStyled>{renderContent}</InfinityWrapperStyled>
+<LogDetail
+log={activeLog}
+onClose={onClearActiveLog}
+onAddToQuery={onAddToQuery}
+onClickActionItem={onAddToQuery}
+/>
 </>
 );
 }

@@ -1,6 +1,7 @@
 import './logsTable.styles.scss';
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 // components
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
@@ -29,7 +30,12 @@ type LogsTableProps = {
 function LogsTable(props: LogsTableProps): JSX.Element {
 const { viewMode, linesPerRow } = props;
-const { onSetActiveLog } = useActiveLog();
+const {
+activeLog,
+onClearActiveLog,
+onAddToQuery,
+onSetActiveLog,
+} = useActiveLog();
 useFontFaceObserver(
 [
@@ -69,9 +75,17 @@ function LogsTable(props: LogsTableProps): JSX.Element {
 return <RawLogView key={log.id} data={log} linesPerRow={linesPerRow} />;
 }
-return <ListLogView key={log.id} logData={log} selectedFields={selected} />;
+return (
+<ListLogView
+key={log.id}
+logData={log}
+selectedFields={selected}
+onAddToQuery={onAddToQuery}
+onSetActiveLog={onSetActiveLog}
+/>
+);
 },
-[logs, viewMode, selected, linesPerRow],
+[logs, viewMode, selected, onAddToQuery, onSetActiveLog, linesPerRow],
 );
 const renderContent = useMemo(() => {
@@ -110,6 +124,12 @@ function LogsTable(props: LogsTableProps): JSX.Element {
 {isNoLogs && <Typography>No logs lines found</Typography>}
 {renderContent}
+<LogDetail
+log={activeLog}
+onClose={onClearActiveLog}
+onAddToQuery={onAddToQuery}
+onClickActionItem={onAddToQuery}
+/>
 </Container>
 );
 }

@@ -32,19 +32,13 @@ import {
 errorPercentage,
 operationPerSec,
 } from '../MetricsPageQueries/OverviewQueries';
-import {
-Card,
-Col,
-ColApDexContainer,
-ColErrorContainer,
-Row,
-} from '../styles';
+import { Col, ColApDexContainer, ColErrorContainer, Row } from '../styles';
 import ApDex from './Overview/ApDex';
 import ServiceOverview from './Overview/ServiceOverview';
 import TopLevelOperation from './Overview/TopLevelOperations';
 import TopOperation from './Overview/TopOperation';
 import TopOperationMetrics from './Overview/TopOperationMetrics';
-import { Button } from './styles';
+import { Button, Card } from './styles';
 import { IServiceName } from './types';
 import {
 handleNonInQueryRange,
@@ -276,7 +270,7 @@ function Application(): JSX.Element {
 <Col span={12}>
 <Card>
-{isSpanMetricEnabled ? <TopOperationMetrics /> : <TopOperation />}
+{isSpanMetricEnabled ? <TopOperationMetrics /> : <TopOperation />}{' '}
 </Card>
 </Col>
 </Row>

@@ -1,4 +1,4 @@
-import { Button as ButtonComponent } from 'antd';
+import { Button as ButtonComponent, Card as CardComponent } from 'antd';
 import styled from 'styled-components';
 export const Button = styled(ButtonComponent)`
@@ -8,3 +8,9 @@ export const Button = styled(ButtonComponent)`
 display: none;
 }
 `;
+export const Card = styled(CardComponent)`
+.ant-card-body {
+padding: 10px;
+}
+`;

@@ -8,12 +8,13 @@ import styled from 'styled-components';
 export const Card = styled(CardComponent)`
 &&& {
-padding: 10px;
+height: 40vh;
+overflow: hidden;
 }
 .ant-card-body {
+height: calc(100% - 40px);
 padding: 0;
-min-height: 40vh;
 }
 `;
@@ -38,7 +39,8 @@ export const ColErrorContainer = styled(ColComponent)`
 `;
 export const GraphContainer = styled.div`
-height: 40vh;
+min-height: calc(40vh - 40px);
+height: calc(100% - 40px);
 `;
 export const GraphTitle = styled(Typography)`

@@ -28,6 +28,10 @@ export const timeItems: timePreferance[] = [
 name: 'Last 1 day',
 enum: 'LAST_1_DAY',
 },
+{
+name: 'Last 3 days',
+enum: 'LAST_3_DAYS',
+},
 {
 name: 'Last 1 week',
 enum: 'LAST_1_WEEK',
@@ -47,6 +51,7 @@ export type timePreferenceType =
 | LAST_1_HR
 | LAST_6_HR
 | LAST_1_DAY
+| LAST_3_DAYS
 | LAST_1_WEEK;
 type GLOBAL_TIME = 'GLOBAL_TIME';
@@ -56,6 +61,7 @@ type LAST_30_MIN = 'LAST_30_MIN';
 type LAST_1_HR = 'LAST_1_HR';
 type LAST_6_HR = 'LAST_6_HR';
 type LAST_1_DAY = 'LAST_1_DAY';
+type LAST_3_DAYS = 'LAST_3_DAYS';
 type LAST_1_WEEK = 'LAST_1_WEEK';
 export default timeItems;

@@ -178,6 +178,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
 yAxisUnit,
 panelTypes: graphType,
 thresholds,
+fillSpans: isFillSpans,
 },
 ...afterWidgets,
 ],
@@ -212,6 +213,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
 yAxisUnit,
 graphType,
 thresholds,
+isFillSpans,
 afterWidgets,
 updateDashboardMutation,
 setSelectedDashboard,

@@ -0,0 +1,24 @@
## Install otel-collector in your Kubernetes infra
&nbsp;
Add the SigNoz Helm Chart repository
```bash
helm repo add signoz https://charts.signoz.io
```
&nbsp;
If the repository is already added, update it to the latest version:
```bash
helm repo update
```
&nbsp;
Install the Kubernetes Infrastructure chart provided by SigNoz
```bash
helm install my-release signoz/k8s-infra \
--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
--set otelInsecure=false \
--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
--set global.clusterName=<CLUSTER_NAME>
```
- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster.
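If you prefer not to repeat `--set` flags, the same values can live in a file; a minimal sketch using only the keys from the command above:

```yaml
# values.yaml (sketch): mirrors the --set flags shown above
otelCollectorEndpoint: ingest.{{REGION}}.signoz.cloud:443
otelInsecure: false
signozApiKey: "{{SIGNOZ_INGESTION_KEY}}"
global:
  clusterName: <CLUSTER_NAME>
```

Then install with `helm install my-release signoz/k8s-infra -f values.yaml`.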

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```
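For example, a minimal entry point could look like the sketch below; the file name and the `startApp` helper are placeholders, and the only requirement is that the tracing import comes first:

```javascript
// index.js (hypothetical entry point)
import './tracing.js'; // must be the first import so instrumentation loads before anything else

// ...the rest of your application code
import { startApp } from './app'; // placeholder module

startApp();
```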

@@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the following content
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```
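Since the config above enables the `health_check` extension with its defaults, a running collector can be verified with a simple HTTP probe; port 13133 is the extension's usual default and is an assumption here:

```bash
# Prints 200 when the collector is up and healthy
curl -s -o /dev/null -w "%{http_code}\n" http://localhost:13133/
```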

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
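#### (Optional Step): Check that `otelcol` is still running
A small sanity check using the `otel-pid` file written by the command above (assumes a POSIX shell and `ps`):
```bash
# Prints the process entry if the collector is alive
ps -p "$(< otel-pid)"
```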
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the following content
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the following content
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the following content
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: The import below should be the first line in the main file of your application (e.g. `index.js`)
```javascript
import './tracing.js'
```

@@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

@@ -0,0 +1,24 @@
## Install otel-collector in your Kubernetes infra
&nbsp;
Add the SigNoz Helm Chart repository
```bash
helm repo add signoz https://charts.signoz.io
```
&nbsp;
If the repository is already added, update it to the latest version:
```bash
helm repo update
```
&nbsp;
Install the Kubernetes Infrastructure chart provided by SigNoz
```bash
helm install my-release signoz/k8s-infra \
--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
--set otelInsecure=false \
--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
--set global.clusterName=<CLUSTER_NAME>
```
- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster.

@@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
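The `/.+/g` pattern above propagates the `traceparent` header on every cross-origin request. If you only want trace context forwarded to your own backend, you can narrow the regex by replacing the `registerInstrumentations` call shown above; a minimal sketch assuming a hypothetical backend origin of `https://api.example.com`:
```javascript
// Narrow trace-context propagation to a specific backend origin (origin is illustrative)
const backendOrigins = [/^https:\/\/api\.example\.com/];

registerInstrumentations({
  instrumentations: [
    getWebAutoInstrumentations({
      '@opentelemetry/instrumentation-xml-http-request': {
        propagateTraceHeaderCorsUrls: backendOrigins,
      },
      '@opentelemetry/instrumentation-fetch': {
        propagateTraceHeaderCorsUrls: backendOrigins,
      },
    }),
  ],
});
```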
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
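Rather than committing the ingestion key to source control, you can inject it at build time through your bundler's environment handling. A sketch assuming a Create React App-style `REACT_APP_` variable; the variable name and tooling are assumptions, so adapt them to your build setup:
```javascript
// tracing.js (excerpt) - read the ingestion key from a build-time environment variable
const exporter = new OTLPTraceExporter({
  url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
  headers: {
    // the bundler inlines process.env.REACT_APP_SIGNOZ_INGESTION_KEY at build time
    'signoz-access-token': process.env.REACT_APP_SIGNOZ_INGESTION_KEY,
  },
});
```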
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,96 @@
## Set up OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the content below
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```
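To verify the setup end to end, you can emit a manual test span through the OpenTelemetry API once `./tracing.js` has been imported. This assumes the `@opentelemetry/api` package is available in your project; the tracer and span names are arbitrary:
```javascript
// Emit a one-off test span to confirm data reaches SigNoz
import { trace } from '@opentelemetry/api';

const tracer = trace.getTracer('smoke-test');
const span = tracer.startSpan('manual-test-span');
span.setAttribute('test', true);
span.end(); // the BatchSpanProcessor exports it shortly afterwards
```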

View File

@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the Install OTel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
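If your application produces bursts of spans, the batch processor accepts tuning options from `@opentelemetry/sdk-trace-base`. The values below are illustrative starting points, not recommendations:
```javascript
// tracing.js (excerpt) - tune span batching before registering the provider
provider.addSpanProcessor(
  new BatchSpanProcessor(exporter, {
    maxQueueSize: 2048, // spans buffered in memory before older ones are dropped
    maxExportBatchSize: 512, // spans sent per export request
    scheduledDelayMillis: 5000, // how often a batch is flushed
  }),
);
```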
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,96 @@
## Set up OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the content below
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
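While developing locally, it can help to also print spans to the browser console. Both processors can be attached to the same provider; `ConsoleSpanExporter` and `SimpleSpanProcessor` come from `@opentelemetry/sdk-trace-base`, which is already installed above. This is an optional debugging aid, not part of the standard setup:
```javascript
// tracing.js (excerpt) - also log spans to the browser console during development
import {
  BatchSpanProcessor,
  ConsoleSpanExporter,
  SimpleSpanProcessor,
} from '@opentelemetry/sdk-trace-base';

provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter()));
```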
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the Install OTel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
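Both the fetch and XHR instrumentations also accept an `ignoreUrls` option, which keeps noisy requests such as health checks or analytics beacons out of your traces. The patterns below are illustrative:
```javascript
// tracing.js (excerpt) - exclude noisy endpoints from tracing (patterns are illustrative)
registerInstrumentations({
  instrumentations: [
    getWebAutoInstrumentations({
      '@opentelemetry/instrumentation-xml-http-request': {
        propagateTraceHeaderCorsUrls: [/.+/g],
        ignoreUrls: [/\/healthz$/, /analytics\.example\.com/],
      },
      '@opentelemetry/instrumentation-fetch': {
        propagateTraceHeaderCorsUrls: [/.+/g],
        ignoreUrls: [/\/healthz$/, /analytics\.example\.com/],
      },
    }),
  ],
});
```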
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,96 @@
## Set up OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the content below
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
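Alongside the service name, you can attach further resource attributes, for example to tell environments apart in SigNoz. `DEPLOYMENT_ENVIRONMENT` is part of `SemanticResourceAttributes`; the values shown are illustrative:
```javascript
// tracing.js (excerpt) - attach additional resource attributes to every span
const provider = new WebTracerProvider({
  resource: new Resource({
    [SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
    [SemanticResourceAttributes.DEPLOYMENT_ENVIRONMENT]: 'staging', // illustrative
    'app.version': '1.2.3', // custom attributes are plain key/value pairs (illustrative)
  }),
});
```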
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the Install OTel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,71 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'https://ingest.{{REGION}}.signoz.cloud:443/v1/traces',
headers: {
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}",
},
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
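Spans still buffered by the batch processor can be lost if the user closes the tab before the next export. One common mitigation, sketched below, is to flush when the page is hidden; `forceFlush()` is available on the tracer provider, while the event-listener pattern itself is an assumption about how your app handles page lifecycle:
```javascript
// tracing.js (excerpt) - flush pending spans when the page is hidden or closed
document.addEventListener('visibilitychange', () => {
  if (document.visibilityState === 'hidden') {
    provider.forceFlush(); // returns a promise; fire-and-forget is acceptable here
  }
});
```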
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,18 @@
Once you are done instrumenting your JavaScript application, you can run it as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -0,0 +1,96 @@
## Set up OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create `config.yaml` in the `otelcol-contrib` folder with the content below
```yaml
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@ -0,0 +1,68 @@
### Step 1: Install OpenTelemetry packages
```bash
npm install --save @opentelemetry/context-zone
npm install --save @opentelemetry/instrumentation
npm install --save @opentelemetry/auto-instrumentations-web
npm install --save @opentelemetry/sdk-trace-base
npm install --save @opentelemetry/sdk-trace-web
npm install --save @opentelemetry/resources
npm install --save @opentelemetry/semantic-conventions
npm install --save @opentelemetry/exporter-trace-otlp-http
```
&nbsp;
### Step 2: Create tracing.js file
```javascript
// tracing.js
import { ZoneContextManager } from '@opentelemetry/context-zone';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { getWebAutoInstrumentations } from '@opentelemetry/auto-instrumentations-web';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
const provider = new WebTracerProvider({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: '{{MYAPP}}',
}),
});
const exporter = new OTLPTraceExporter({
url: 'http://localhost:4318/v1/traces',
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register({
// Changing default contextManager to use ZoneContextManager - supports asynchronous operations so that traces are not broken
contextManager: new ZoneContextManager(),
});
// Registering instrumentations
registerInstrumentations({
instrumentations: [
getWebAutoInstrumentations({
'@opentelemetry/instrumentation-xml-http-request': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
'@opentelemetry/instrumentation-fetch': {
propagateTraceHeaderCorsUrls: [
/.+/g, //Regex to match your backend urls.
],
},
}),
],
});
```
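Beyond the automatic fetch/XHR spans, you can wrap application operations in custom spans with the OpenTelemetry API; with the `ZoneContextManager` registered above, auto-instrumented requests made inside the callback become child spans. The function, endpoint, and span names below are illustrative:
```javascript
// Custom span around an application operation (names and endpoint are illustrative)
import { trace } from '@opentelemetry/api';

const tracer = trace.getTracer('checkout-flow');

async function loadCart() {
  return tracer.startActiveSpan('load-cart', async (span) => {
    try {
      const res = await fetch('/api/cart'); // auto-instrumented fetch becomes a child span
      return await res.json();
    } finally {
      span.end();
    }
  });
}
```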
### Step 3: Import tracer in main file
**Important Note**: This import must be the first line in your application's main file (for example, `index.js`)
```javascript
import './tracing.js'
```

View File

@ -0,0 +1,43 @@
&nbsp;
Once you are done instrumenting your JavaScript application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the Install OTel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Run your application
Run your JavaScript application as you normally would.
For example:
If you're using `npm`
```bash
npm start
```
&nbsp;
If you're using `yarn`
```bash
yarn start
```
&nbsp;
To view more detailed documentation, check out this [link](https://signoz.io/docs/instrumentation/javascript/)

View File

@ -35,9 +35,7 @@ export default function DataSource(): JSX.Element {
selectedFramework, selectedFramework,
updateSelectedDataSource, updateSelectedDataSource,
updateServiceName, updateServiceName,
updateSelectedEnvironment,
updateSelectedFramework, updateSelectedFramework,
updateErrorDetails,
} = useOnboardingContext(); } = useOnboardingContext();
const [supportedDataSources, setSupportedDataSources] = useState< const [supportedDataSources, setSupportedDataSources] = useState<
@ -55,11 +53,6 @@ export default function DataSource(): JSX.Element {
setSupportedDataSources(dataSource); setSupportedDataSources(dataSource);
} }
updateSelectedEnvironment('');
updateErrorDetails('');
updateServiceName('');
updateSelectedFramework('');
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);

View File

@ -3,7 +3,10 @@ import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import { ApmDocFilePaths } from 'container/OnboardingContainer/constants/apmDocFilePaths'; import { ApmDocFilePaths } from 'container/OnboardingContainer/constants/apmDocFilePaths';
import { InfraMonitoringDocFilePaths } from 'container/OnboardingContainer/constants/infraMonitoringDocFilePaths'; import { InfraMonitoringDocFilePaths } from 'container/OnboardingContainer/constants/infraMonitoringDocFilePaths';
import { LogsManagementDocFilePaths } from 'container/OnboardingContainer/constants/logsManagementDocFilePaths'; import { LogsManagementDocFilePaths } from 'container/OnboardingContainer/constants/logsManagementDocFilePaths';
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext'; import {
OnboardingMethods,
useOnboardingContext,
} from 'container/OnboardingContainer/context/OnboardingContext';
import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer'; import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer';
import useAnalytics from 'hooks/analytics/useAnalytics'; import useAnalytics from 'hooks/analytics/useAnalytics';
import { useEffect, useState } from 'react'; import { useEffect, useState } from 'react';
@ -42,12 +45,12 @@ export default function MarkdownStep(): JSX.Element {
path += `_${selectedEnvironment}`; path += `_${selectedEnvironment}`;
} }
if ( if (selectedModule?.id === ModulesMap.APM) {
selectedModule?.id === ModulesMap.APM && if (selectedEnvironment === 'kubernetes') {
selectedDataSource?.id !== 'kubernetes' && path += `_${OnboardingMethods.RECOMMENDED_STEPS}`;
selectedMethod } else if (selectedEnvironment !== 'kubernetes' && selectedMethod) {
) { path += `_${selectedMethod}`;
path += `_${selectedMethod}`; }
} }
path += `_${step?.id}`; path += `_${step?.id}`;

View File

@ -306,8 +306,72 @@ import APM_javascript_nodejs_macOsARM64_quickStart_runApplication from '../Modul
import APM_javascript_nodejs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-installOtelCollector.md'; import APM_javascript_nodejs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-installOtelCollector.md';
import APM_javascript_nodejs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-instrumentApplication.md'; import APM_javascript_nodejs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-instrumentApplication.md';
import APM_javascript_nodejs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-runApplication.md'; import APM_javascript_nodejs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/NodeJS/MacOsARM64/Recommended/nodejs-macosarm64-recommended-runApplication.md';
/// // JavaScript Others
import APM_javascript_others_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/Others/Kubernetes/others-kubernetes-installOtelCollector.md';
import APM_javascript_others_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/Kubernetes/others-kubernetes-instrumentApplication.md';
import APM_javascript_others_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/Others/Kubernetes/others-kubernetes-runApplication.md';
// Others-JavaScript-LinuxAMD64-quickstart
import APM_javascript_others_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxAMD64/QuickStart/others-linuxamd64-quickStart-instrumentApplication.md';
import APM_javascript_others_linuxAMD64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxAMD64/QuickStart/others-linuxamd64-quickStart-runApplication.md';
// // Others-JavaScript-LinuxAMD64-recommended
import APM_javascript_others_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/Others/LinuxAMD64/Recommended/others-linuxamd64-recommended-installOtelCollector.md';
import APM_javascript_others_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxAMD64/Recommended/others-linuxamd64-recommended-instrumentApplication.md';
import APM_javascript_others_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxAMD64/Recommended/others-linuxamd64-recommended-runApplication.md';
// Others-JavaScript-LinuxARM64-quickstart
import APM_javascript_others_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxARM64/QuickStart/others-linuxarm64-quickStart-instrumentApplication.md';
import APM_javascript_others_linuxARM64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxARM64/QuickStart/others-linuxarm64-quickStart-runApplication.md';
// Others-JavaScript-LinuxARM64-recommended
import APM_javascript_others_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/Others/LinuxARM64/Recommended/others-linuxarm64-recommended-installOtelCollector.md';
import APM_javascript_others_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxARM64/Recommended/others-linuxarm64-recommended-instrumentApplication.md';
import APM_javascript_others_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/Others/LinuxARM64/Recommended/others-linuxarm64-recommended-runApplication.md';
// Others-JavaScript-MacOsAMD64-quickstart
import APM_javascript_others_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsAMD64/QuickStart/others-macosamd64-quickStart-instrumentApplication.md';
import APM_javascript_others_macOsAMD64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsAMD64/QuickStart/others-macosamd64-quickStart-runApplication.md';
// Others-JavaScript-MacOsAMD64-recommended
import APM_javascript_others_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/Others/MacOsAMD64/Recommended/others-macosamd64-recommended-installOtelCollector.md';
import APM_javascript_others_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsAMD64/Recommended/others-macosamd64-recommended-instrumentApplication.md';
import APM_javascript_others_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsAMD64/Recommended/others-macosamd64-recommended-runApplication.md';
// Others-JavaScript-MacOsARM64-quickstart
import APM_javascript_others_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsARM64/QuickStart/others-macosarm64-quickStart-instrumentApplication.md';
import APM_javascript_others_macOsARM64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsARM64/QuickStart/others-macosarm64-quickStart-runApplication.md';
// Others-JavaScript-MacOsARM64-recommended
import APM_javascript_others_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-installOtelCollector.md';
import APM_javascript_others_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-instrumentApplication.md';
import APM_javascript_others_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-runApplication.md';
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
/// ////// Java Done // ReactJS-Kubernetes
import APM_javascript_reactjs_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/Kubernetes/reactjs-kubernetes-installOtelCollector.md';
import APM_javascript_reactjs_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/Kubernetes/reactjs-kubernetes-instrumentApplication.md';
import APM_javascript_reactjs_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/Kubernetes/reactjs-kubernetes-runApplication.md';
// ReactJS-LinuxAMD64-quickstart
import APM_javascript_reactjs_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxAMD64/QuickStart/reactjs-linuxamd64-quickStart-instrumentApplication.md';
import APM_javascript_reactjs_linuxAMD64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxAMD64/QuickStart/reactjs-linuxamd64-quickStart-runApplication.md';
// // ReactJS-LinuxAMD64-recommended
import APM_javascript_reactjs_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxAMD64/Recommended/reactjs-linuxamd64-recommended-installOtelCollector.md';
import APM_javascript_reactjs_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxAMD64/Recommended/reactjs-linuxamd64-recommended-instrumentApplication.md';
import APM_javascript_reactjs_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxAMD64/Recommended/reactjs-linuxamd64-recommended-runApplication.md';
// ReactJS-LinuxARM64-quickstart
import APM_javascript_reactjs_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxARM64/QuickStart/reactjs-linuxarm64-quickStart-instrumentApplication.md';
import APM_javascript_reactjs_linuxARM64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxARM64/QuickStart/reactjs-linuxarm64-quickStart-runApplication.md';
// ReactJS-LinuxARM64-recommended
import APM_javascript_reactjs_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxARM64/Recommended/reactjs-linuxarm64-recommended-installOtelCollector.md';
import APM_javascript_reactjs_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxARM64/Recommended/reactjs-linuxarm64-recommended-instrumentApplication.md';
import APM_javascript_reactjs_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/LinuxARM64/Recommended/reactjs-linuxarm64-recommended-runApplication.md';
// ReactJS-MacOsAMD64-quickstart
import APM_javascript_reactjs_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsAMD64/QuickStart/reactjs-macosamd64-quickStart-instrumentApplication.md';
import APM_javascript_reactjs_macOsAMD64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsAMD64/QuickStart/reactjs-macosamd64-quickStart-runApplication.md';
// ReactJS-MacOsAMD64-recommended
import APM_javascript_reactjs_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsAMD64/Recommended/reactjs-macosamd64-recommended-installOtelCollector.md';
import APM_javascript_reactjs_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsAMD64/Recommended/reactjs-macosamd64-recommended-instrumentApplication.md';
import APM_javascript_reactjs_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsAMD64/Recommended/reactjs-macosamd64-recommended-runApplication.md';
// ReactJS-MacOsARM64-quickstart
import APM_javascript_reactjs_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/QuickStart/reactjs-macosarm64-quickStart-instrumentApplication.md';
import APM_javascript_reactjs_macOsARM64_quickStart_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/QuickStart/reactjs-macosarm64-quickStart-runApplication.md';
// ReactJS-MacOsARM64-recommended
import APM_javascript_reactjs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-installOtelCollector.md';
import APM_javascript_reactjs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-instrumentApplication.md';
import APM_javascript_reactjs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-runApplication.md';
/// ////// Javascript Done
/// ///// Python Start /// ///// Python Start
// Django // Django
// Django-Kubernetes // Django-Kubernetes
@ -1062,6 +1126,91 @@ export const ApmDocFilePaths = {
APM_javascript_nodejs_macOsARM64_quickStart_instrumentApplication, APM_javascript_nodejs_macOsARM64_quickStart_instrumentApplication,
APM_javascript_nodejs_macOsARM64_quickStart_runApplication, APM_javascript_nodejs_macOsARM64_quickStart_runApplication,
/// React JS
// ReactJS-Kubernetes
APM_javascript_reactjs_kubernetes_recommendedSteps_setupOtelCollector,
APM_javascript_reactjs_kubernetes_recommendedSteps_instrumentApplication,
APM_javascript_reactjs_kubernetes_recommendedSteps_runApplication,
// ReactJS-LinuxAMD64-quickstart
APM_javascript_reactjs_linuxAMD64_quickStart_instrumentApplication,
APM_javascript_reactjs_linuxAMD64_quickStart_runApplication,
// // ReactJS-LinuxAMD64-recommended
APM_javascript_reactjs_linuxAMD64_recommendedSteps_setupOtelCollector,
APM_javascript_reactjs_linuxAMD64_recommendedSteps_instrumentApplication,
APM_javascript_reactjs_linuxAMD64_recommendedSteps_runApplication,
// ReactJS-LinuxARM64-quickstart
APM_javascript_reactjs_linuxARM64_quickStart_instrumentApplication,
APM_javascript_reactjs_linuxARM64_quickStart_runApplication,
// ReactJS-LinuxARM64-recommended
APM_javascript_reactjs_linuxARM64_recommendedSteps_setupOtelCollector,
APM_javascript_reactjs_linuxARM64_recommendedSteps_instrumentApplication,
APM_javascript_reactjs_linuxARM64_recommendedSteps_runApplication,
// ReactJS-MacOsAMD64-quickstart
APM_javascript_reactjs_macOsAMD64_quickStart_instrumentApplication,
APM_javascript_reactjs_macOsAMD64_quickStart_runApplication,
// ReactJS-MacOsAMD64-recommended
APM_javascript_reactjs_macOsAMD64_recommendedSteps_setupOtelCollector,
APM_javascript_reactjs_macOsAMD64_recommendedSteps_instrumentApplication,
APM_javascript_reactjs_macOsAMD64_recommendedSteps_runApplication,
// ReactJS-MacOsARM64-quickstart
APM_javascript_reactjs_macOsARM64_quickStart_instrumentApplication,
APM_javascript_reactjs_macOsARM64_quickStart_runApplication,
// ReactJS-MacOsARM64-recommended
APM_javascript_reactjs_macOsARM64_recommendedSteps_setupOtelCollector,
APM_javascript_reactjs_macOsARM64_recommendedSteps_instrumentApplication,
APM_javascript_reactjs_macOsARM64_recommendedSteps_runApplication,
/// // JavaScript Others
APM_javascript_others_kubernetes_recommendedSteps_setupOtelCollector,
APM_javascript_others_kubernetes_recommendedSteps_instrumentApplication,
APM_javascript_others_kubernetes_recommendedSteps_runApplication,
// Others-JavaScript-LinuxAMD64-quickstart
APM_javascript_others_linuxAMD64_quickStart_instrumentApplication,
APM_javascript_others_linuxAMD64_quickStart_runApplication,
// // Others-JavaScript-LinuxAMD64-recommended
APM_javascript_others_linuxAMD64_recommendedSteps_setupOtelCollector,
APM_javascript_others_linuxAMD64_recommendedSteps_instrumentApplication,
APM_javascript_others_linuxAMD64_recommendedSteps_runApplication,
// Others-JavaScript-LinuxARM64-quickstart
APM_javascript_others_linuxARM64_quickStart_instrumentApplication,
APM_javascript_others_linuxARM64_quickStart_runApplication,
// Others-JavaScript-LinuxARM64-recommended
APM_javascript_others_linuxARM64_recommendedSteps_setupOtelCollector,
APM_javascript_others_linuxARM64_recommendedSteps_instrumentApplication,
APM_javascript_others_linuxARM64_recommendedSteps_runApplication,
// Others-JavaScript-MacOsAMD64-quickstart
APM_javascript_others_macOsAMD64_quickStart_instrumentApplication,
APM_javascript_others_macOsAMD64_quickStart_runApplication,
// Others-JavaScript-MacOsAMD64-recommended
APM_javascript_others_macOsAMD64_recommendedSteps_setupOtelCollector,
APM_javascript_others_macOsAMD64_recommendedSteps_instrumentApplication,
APM_javascript_others_macOsAMD64_recommendedSteps_runApplication,
// Others-JavaScript-MacOsARM64-quickstart
APM_javascript_others_macOsARM64_quickStart_instrumentApplication,
APM_javascript_others_macOsARM64_quickStart_runApplication,
// Others-JavaScript-MacOsARM64-recommended
APM_javascript_others_macOsARM64_recommendedSteps_setupOtelCollector,
APM_javascript_others_macOsARM64_recommendedSteps_instrumentApplication,
APM_javascript_others_macOsARM64_recommendedSteps_runApplication,
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
/// //// JavaScript Done /// //// JavaScript Done

View File

@ -104,7 +104,7 @@ function OnboardingContextProvider({
setSelectedDataSource(defaultApplicationDataSource); setSelectedDataSource(defaultApplicationDataSource);
setSelectedEnvironment(''); setSelectedEnvironment('');
setSelectedFramework(''); setSelectedFramework('');
setSelectedMethod(OnboardingMethods.RECOMMENDED_STEPS); setSelectedMethod(OnboardingMethods.QUICK_START);
updateActiveStep(null); updateActiveStep(null);
}; };

View File

@ -34,6 +34,14 @@ export const frameworksMap = {
value: 'nodejs', value: 'nodejs',
label: 'Nodejs', label: 'Nodejs',
}, },
{
value: 'reactjs',
label: 'React JS',
},
{
value: 'others',
label: 'Other Web Instrumentation',
},
], ],
python: [ python: [
{ {

View File

@ -9,6 +9,7 @@ type SixHour = '6hr';
type OneHour = '1hr'; type OneHour = '1hr';
type FourHour = '4hr'; type FourHour = '4hr';
type OneDay = '1day'; type OneDay = '1day';
type ThreeDay = '3days';
type OneWeek = '1week'; type OneWeek = '1week';
type Custom = 'custom'; type Custom = 'custom';
@ -23,7 +24,8 @@ export type Time =
| OneHour | OneHour
| Custom | Custom
| OneWeek | OneWeek
| OneDay; | OneDay
| ThreeDay;
export const Options: Option[] = [ export const Options: Option[] = [
{ value: '5min', label: 'Last 5 min' }, { value: '5min', label: 'Last 5 min' },
@ -32,6 +34,7 @@ export const Options: Option[] = [
{ value: '1hr', label: 'Last 1 hour' }, { value: '1hr', label: 'Last 1 hour' },
{ value: '6hr', label: 'Last 6 hour' }, { value: '6hr', label: 'Last 6 hour' },
{ value: '1day', label: 'Last 1 day' }, { value: '1day', label: 'Last 1 day' },
{ value: '3days', label: 'Last 3 days' },
{ value: '1week', label: 'Last 1 week' }, { value: '1week', label: 'Last 1 week' },
{ value: 'custom', label: 'Custom' }, { value: 'custom', label: 'Custom' },
]; ];
@ -48,6 +51,7 @@ export const RelativeDurationOptions: Option[] = [
{ value: '1hr', label: 'Last 1 hour' }, { value: '1hr', label: 'Last 1 hour' },
{ value: '6hr', label: 'Last 6 hour' }, { value: '6hr', label: 'Last 6 hour' },
{ value: '1day', label: 'Last 1 day' }, { value: '1day', label: 'Last 1 day' },
{ value: '3days', label: 'Last 3 days' },
{ value: '1week', label: 'Last 1 week' }, { value: '1week', label: 'Last 1 week' },
]; ];

View File

@ -6,6 +6,7 @@ import getMinAgo from './getStartAndEndTime/getMinAgo';
const GetMinMax = ( const GetMinMax = (
interval: Time, interval: Time,
dateTimeRange?: [number, number], dateTimeRange?: [number, number],
// eslint-disable-next-line sonarjs/cognitive-complexity
): GetMinMaxPayload => { ): GetMinMaxPayload => {
let maxTime = new Date().getTime(); let maxTime = new Date().getTime();
let minTime = 0; let minTime = 0;
@ -32,6 +33,10 @@ const GetMinMax = (
// one day = 24*60(min) // one day = 24*60(min)
const minTimeAgo = getMinAgo({ minutes: 24 * 60 }).getTime(); const minTimeAgo = getMinAgo({ minutes: 24 * 60 }).getTime();
minTime = minTimeAgo; minTime = minTimeAgo;
} else if (interval === '3days') {
// three day = one day * 3
const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 3 }).getTime();
minTime = minTimeAgo;
} else if (interval === '1week') { } else if (interval === '1week') {
// one week = one day * 7 // one week = one day * 7
const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 7 }).getTime(); const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 7 }).getTime();

View File

@ -3,6 +3,19 @@ import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems
import getMicroSeconds from './getMicroSeconds'; import getMicroSeconds from './getMicroSeconds';
import getMinAgo from './getMinAgo'; import getMinAgo from './getMinAgo';
const calculateStartAndEndTime = (
minutes: number,
endString: string,
): Payload => {
const agodate = getMinAgo({ minutes }).getTime();
const agoString = getMicroSeconds({ time: agodate });
return {
start: agoString,
end: endString,
};
};
const GetStartAndEndTime = ({ const GetStartAndEndTime = ({
type, type,
minTime, minTime,
@ -12,73 +25,35 @@ const GetStartAndEndTime = ({
const endString = getMicroSeconds({ time: end }); const endString = getMicroSeconds({ time: end });
if (type === 'LAST_5_MIN') { if (type === 'LAST_5_MIN') {
const agodate = getMinAgo({ minutes: 5 }).getTime(); return calculateStartAndEndTime(5, endString);
const agoString = getMicroSeconds({ time: agodate });
return {
start: agoString,
end: endString,
};
} }
if (type === 'LAST_30_MIN') { if (type === 'LAST_30_MIN') {
const agodate = getMinAgo({ minutes: 30 }).getTime(); return calculateStartAndEndTime(30, endString);
const agoString = getMicroSeconds({ time: agodate });
return {
start: agoString,
end: endString,
};
} }
if (type === 'LAST_1_HR') { if (type === 'LAST_1_HR') {
const agodate = getMinAgo({ minutes: 60 }).getTime(); return calculateStartAndEndTime(60, endString);
const agoString = getMicroSeconds({ time: agodate });
return {
start: agoString,
end: endString,
};
} }
if (type === 'LAST_15_MIN') { if (type === 'LAST_15_MIN') {
const agodate = getMinAgo({ minutes: 15 }).getTime(); return calculateStartAndEndTime(15, endString);
const agoString = getMicroSeconds({ time: agodate });
return {
start: agoString,
end: endString,
};
} }
if (type === 'LAST_6_HR') { if (type === 'LAST_6_HR') {
const agoDate = getMinAgo({ minutes: 6 * 60 }).getTime(); return calculateStartAndEndTime(6 * 60, endString);
const agoString = getMicroSeconds({ time: agoDate });
return {
start: agoString,
end: endString,
};
} }
if (type === 'LAST_1_DAY') { if (type === 'LAST_1_DAY') {
const agoDate = getMinAgo({ minutes: 24 * 60 }).getTime(); return calculateStartAndEndTime(24 * 60, endString);
const agoString = getMicroSeconds({ time: agoDate }); }
return { if (type === 'LAST_3_DAYS') {
start: agoString, return calculateStartAndEndTime(24 * 60 * 3, endString);
end: endString,
};
} }
if (type === 'LAST_1_WEEK') { if (type === 'LAST_1_WEEK') {
const agoDate = getMinAgo({ minutes: 24 * 60 * 7 }).getTime(); return calculateStartAndEndTime(24 * 60 * 7, endString);
const agoString = getMicroSeconds({ time: agoDate });
return {
start: agoString,
end: endString,
};
} }
return { return {

View File

@ -136,7 +136,18 @@ export const getUPlotChartOptions = ({
			if (threshold.thresholdLabel) {
				const text = threshold.thresholdLabel;
				const textX = plotRight - ctx.measureText(text).width - 20;
-				const textY = yPos - 15;
+				const canvasHeight = ctx.canvas.height;
+				const yposHeight = canvasHeight - yPos;
+				const isHeightGreaterThan90Percent = canvasHeight * 0.9 < yposHeight;
+				// Adjust textY based on the condition
+				let textY;
+				if (isHeightGreaterThan90Percent) {
+					textY = yPos + 15; // Below the threshold line
+				} else {
+					textY = yPos - 15; // Above the threshold line
+				}
				ctx.fillStyle = threshold.thresholdColor || 'red';
				ctx.fillText(text, textX, textY);
			}

2
go.mod
View File

@ -5,7 +5,7 @@ go 1.21
require (
	github.com/ClickHouse/clickhouse-go/v2 v2.15.0
	github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb
-	github.com/SigNoz/signoz-otel-collector v0.88.4
+	github.com/SigNoz/signoz-otel-collector v0.88.6
	github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
	github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
	github.com/antonmedv/expr v1.15.3

4
go.sum
View File

@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFm
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
-github.com/SigNoz/signoz-otel-collector v0.88.4 h1:vwyr26Iz1IkjTJQvcTk6E0StSg8pZGoz6aqXCAJjU8w=
-github.com/SigNoz/signoz-otel-collector v0.88.4/go.mod h1:AkN5EPLaFn9TRS5303LzOEjiApld7TBoMImiRTXAvs8=
+github.com/SigNoz/signoz-otel-collector v0.88.6 h1:rvXm9bz4b9GsYeT8c3+F/g56DHPf0IN8mK8tUfZfnw8=
+github.com/SigNoz/signoz-otel-collector v0.88.6/go.mod h1:6lR8Uy99zBd0JGPg9zt0aEBW4A4GpblUtpcbszGmg8E=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=

View File

@ -2,8 +2,12 @@ package logparsingpipeline
import (
	"fmt"
+	"slices"
	"strings"

+	"github.com/antonmedv/expr"
+	"github.com/antonmedv/expr/ast"
+	"github.com/antonmedv/expr/parser"
	"github.com/pkg/errors"
	"go.signoz.io/signoz/pkg/query-service/constants"
	"go.signoz.io/signoz/pkg/query-service/queryBuilderToExpr"
@ -81,50 +85,97 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
	}
	if operator.Type == "regex_parser" {
-		parseFromParts := strings.Split(operator.ParseFrom, ".")
-		parseFromPath := strings.Join(parseFromParts, "?.")
+		parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for parseFrom of regex op %s: %w", operator.Name, err,
+			)
+		}
		operator.If = fmt.Sprintf(
-			`%s != nil && %s matches "%s"`,
-			parseFromPath,
-			parseFromPath,
+			`%s && %s matches "%s"`,
+			parseFromNotNilCheck,
+			operator.ParseFrom,
			strings.ReplaceAll(
				strings.ReplaceAll(operator.Regex, `\`, `\\`),
				`"`, `\"`,
			),
		)
+	} else if operator.Type == "grok_parser" {
+		parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for parseFrom of grok op %s: %w", operator.Name, err,
+			)
+		}
+		operator.If = parseFromNotNilCheck
	} else if operator.Type == "json_parser" {
-		parseFromParts := strings.Split(operator.ParseFrom, ".")
-		parseFromPath := strings.Join(parseFromParts, "?.")
-		operator.If = fmt.Sprintf(`%s != nil && %s matches "^\\s*{.*}\\s*$"`, parseFromPath, parseFromPath)
+		parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for parseFrom of json parser op %s: %w", operator.Name, err,
+			)
+		}
+		operator.If = fmt.Sprintf(
+			`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
+		)
+	} else if operator.Type == "add" {
+		if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
+			expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
+			fieldsNotNilCheck, err := fieldsReferencedInExprNotNilCheck(expression)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for fields referenced in value expr of add operator %s: %w",
+					operator.Name, err,
+				)
+			}
+			if fieldsNotNilCheck != "" {
+				operator.If = fieldsNotNilCheck
+			}
+		}
	} else if operator.Type == "move" || operator.Type == "copy" {
-		fromParts := strings.Split(operator.From, ".")
-		fromPath := strings.Join(fromParts, "?.")
-		operator.If = fmt.Sprintf(`%s != nil`, fromPath)
+		fromNotNilCheck, err := fieldNotNilCheck(operator.From)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for From field of %s op %s: %w", operator.Type, operator.Name, err,
+			)
+		}
+		operator.If = fromNotNilCheck
	} else if operator.Type == "remove" {
-		fieldParts := strings.Split(operator.Field, ".")
-		fieldPath := strings.Join(fieldParts, "?.")
-		operator.If = fmt.Sprintf(`%s != nil`, fieldPath)
+		fieldNotNilCheck, err := fieldNotNilCheck(operator.Field)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for field to be removed by op %s: %w", operator.Name, err,
+			)
+		}
+		operator.If = fieldNotNilCheck
	} else if operator.Type == "trace_parser" {
		cleanTraceParser(&operator)
	} else if operator.Type == "time_parser" {
-		parseFromParts := strings.Split(operator.ParseFrom, ".")
-		parseFromPath := strings.Join(parseFromParts, "?.")
-		operator.If = fmt.Sprintf(`%s != nil`, parseFromPath)
+		parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for parseFrom of time parser op %s: %w", operator.Name, err,
+			)
+		}
+		operator.If = parseFromNotNilCheck
		if operator.LayoutType == "strptime" {
			regex, err := RegexForStrptimeLayout(operator.Layout)
			if err != nil {
-				return nil, fmt.Errorf("could not generate time_parser processor: %w", err)
+				return nil, fmt.Errorf(
+					"couldn't generate layout regex for time_parser %s: %w", operator.Name, err,
+				)
			}
			operator.If = fmt.Sprintf(
-				`%s && %s matches "%s"`, operator.If, parseFromPath, regex,
+				`%s && %s matches "%s"`, operator.If, operator.ParseFrom, regex,
			)
		} else if operator.LayoutType == "epoch" {
			valueRegex := `^\\s*[0-9]+\\s*$`
@ -133,19 +184,22 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
			}
			operator.If = fmt.Sprintf(
-				`%s && string(%s) matches "%s"`, operator.If, parseFromPath, valueRegex,
+				`%s && string(%s) matches "%s"`, operator.If, operator.ParseFrom, valueRegex,
			)
		}
		// TODO(Raj): Maybe add support for gotime too eventually
	} else if operator.Type == "severity_parser" {
-		parseFromParts := strings.Split(operator.ParseFrom, ".")
-		parseFromPath := strings.Join(parseFromParts, "?.")
+		parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"couldn't generate nil check for parseFrom of severity parser %s: %w", operator.Name, err,
+			)
+		}
		operator.If = fmt.Sprintf(
-			`%s != nil && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`,
-			parseFromPath, parseFromPath, parseFromPath, parseFromPath, parseFromPath,
+			`%s && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`,
+			parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
		)
	}
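For orientation, a minimal sketch of the kind of collector `if` guard this now produces for a `move` operator. It is a hypothetical test assumed to live in the same package as getOperators, reusing that file's existing test imports (testing, testify/require); the expected string is illustrative only.

func TestMoveOperatorNilCheckSketch(t *testing.T) {
	// A move op reading from a dotted attribute accessed via membership syntax.
	ops, err := getOperators([]PipelineOperator{{
		ID:      "move",
		Type:    "move",
		Enabled: true,
		Name:    "move",
		From:    `attributes["http.method"]`,
		To:      `attributes["test.http.method"]`,
	}})
	require.NoError(t, err)
	// The guard should be a plain nil check on the source field, e.g.
	// attributes["http.method"] != nil
	require.Contains(t, ops[0].If, `attributes["http.method"] != nil`)
}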
@ -169,3 +223,151 @@ func cleanTraceParser(operator *PipelineOperator) {
		operator.TraceFlags = nil
	}
}
// Generates an expression checking that `fieldPath` has a non-nil value in a log record.
func fieldNotNilCheck(fieldPath string) (string, error) {
_, err := expr.Compile(fieldPath)
if err != nil {
return "", fmt.Errorf("invalid fieldPath %s: %w", fieldPath, err)
}
// helper for turning `.` into `?.` in field paths.
// Eg: a.b?.c.d -> a?.b?.c?.d
optionalChainedPath := func(path string) string {
return strings.ReplaceAll(
strings.ReplaceAll(path, "?.", "."), ".", "?.",
)
}
// Optional chaining before membership ops is not supported by expr.
// Eg: The field `attributes.test["a.b"].value["c.d"].e` can't be checked using
// the nil check `attributes.test?.["a.b"]?.value?.["c.d"]?.e != nil`
// This needs to be worked around by checking that the target of membership op is not nil first.
// Eg: attributes.test != nil && attributes.test["a.b"]?.value != nil && attributes.test["a.b"].value["c.d"]?.e != nil
// Split once from the right to include the rightmost membership op and everything after it.
// Eg: `attributes.test["a.b"].value["c.d"].e` would result in `attributes.test["a.b"].value` and `["c.d"].e`
parts := rSplitAfterN(fieldPath, "[", 2)
if len(parts) < 2 {
// there is no [] access in fieldPath
return fmt.Sprintf("%s != nil", optionalChainedPath(fieldPath)), nil
}
// recursively generate nil check for target of the rightmost membership op (attributes.test["a.b"].value)
// should come out to be (attributes.test != nil && attributes.test["a.b"]?.value != nil)
collectionNotNilCheck, err := fieldNotNilCheck(parts[0])
if err != nil {
return "", fmt.Errorf("couldn't generate nil check for %s: %w", parts[0], err)
}
// generate nil check for entire path.
suffixParts := strings.SplitAfter(parts[1], "]") // ["c.d"], ".e"
fullPath := parts[0] + suffixParts[0]
if len(suffixParts) > 1 {
// attributes.test["a.b"].value["c.d"]?.e
fullPath += optionalChainedPath(suffixParts[1])
}
fullPathCheck := fmt.Sprintf("%s != nil", fullPath)
// If the membership op is for array/slice indexing, add check ensuring array is long enough
// attributes.test[3] -> len(attributes.test) > 3 && attributes.test[3] != nil
if !(strings.Contains(suffixParts[0], "'") || strings.Contains(suffixParts[0], `"`)) {
fullPathCheck = fmt.Sprintf(
"len(%s) > %s && %s",
parts[0], suffixParts[0][1:len(suffixParts[0])-1], fullPathCheck,
)
}
// If prefix is `attributes` or `resource` there is no need to add a nil check for
// the prefix since all log records have non nil `attributes` and `resource` fields.
if slices.Contains([]string{"attributes", "resource"}, parts[0]) {
return fullPathCheck, nil
}
return fmt.Sprintf("%s && %s", collectionNotNilCheck, fullPathCheck), nil
}
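As a rough illustration of the behaviour described in the comments above, a hypothetical test in the same package (reusing its existing test imports); the expected strings follow the logic of fieldNotNilCheck and are a sketch, not part of this change.

func TestFieldNotNilCheckSketch(t *testing.T) {
	// Plain dotted path: every "." becomes optional chaining.
	check, err := fieldNotNilCheck("attributes.container_id")
	require.NoError(t, err)
	require.Equal(t, "attributes?.container_id != nil", check)

	// Numeric index: a len() guard is added before the membership access.
	check, err = fieldNotNilCheck("attributes.things[3]")
	require.NoError(t, err)
	require.Equal(t, "attributes?.things != nil && len(attributes.things) > 3 && attributes.things[3] != nil", check)

	// Quoted membership directly on attributes: no extra prefix check is needed.
	check, err = fieldNotNilCheck(`attributes["order.id"]`)
	require.NoError(t, err)
	require.Equal(t, `attributes["order.id"] != nil`, check)
}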
// Split `str` after `sep` from the right to create up to `n` parts.
// rSplitAfterN("a.b.c.d", ".", 3) -> ["a.b", ".c", ".d"]
func rSplitAfterN(str string, sep string, n int) []string {
reversedStr := reverseString(str)
parts := strings.SplitAfterN(reversedStr, sep, n)
slices.Reverse(parts)
result := []string{}
for _, p := range parts {
result = append(result, reverseString(p))
}
return result
}
func reverseString(s string) string {
r := []rune(s)
for i := 0; i < len(r)/2; i++ {
j := len(r) - 1 - i
r[i], r[j] = r[j], r[i]
}
return string(r)
}
// Generate expression for checking that all fields referenced in `expr` have a non nil value in log record.
// Eg: `attributes.x + len(resource.y)` will return the expression `attributes.x != nil && resource.y != nil`
func fieldsReferencedInExprNotNilCheck(expr string) (string, error) {
referencedFields, err := logFieldsReferencedInExpr(expr)
if err != nil {
return "", fmt.Errorf("couldn't extract log fields referenced in expr %s: %w", expr, err)
}
// Generating nil check for deepest fields takes care of their prefixes too.
// Eg: `attributes.test.value + len(attributes.test)` needs a nil check only for `attributes.test.value`
deepestFieldRefs := []string{}
for _, field := range referencedFields {
isPrefixOfAnotherReferencedField := slices.ContainsFunc(
referencedFields, func(e string) bool {
return len(e) > len(field) && strings.HasPrefix(e, field)
},
)
if !isPrefixOfAnotherReferencedField {
deepestFieldRefs = append(deepestFieldRefs, field)
}
}
fieldExprChecks := []string{}
for _, field := range deepestFieldRefs {
checkExpr, err := fieldNotNilCheck(field)
if err != nil {
return "", fmt.Errorf("could not create nil check for %s: %w", field, err)
}
fieldExprChecks = append(fieldExprChecks, fmt.Sprintf("(%s)", checkExpr))
}
return strings.Join(fieldExprChecks, " && "), nil
}
// Expr AST visitor for extracting referenced log fields
// See more at https://github.com/expr-lang/expr/blob/master/ast/visitor.go
type logFieldsInExprExtractor struct {
referencedFields []string
}
func (v *logFieldsInExprExtractor) Visit(node *ast.Node) {
if n, ok := (*node).(*ast.MemberNode); ok {
memberRef := n.String()
if strings.HasPrefix(memberRef, "attributes") || strings.HasPrefix(memberRef, "resource") {
v.referencedFields = append(v.referencedFields, memberRef)
}
}
}
func logFieldsReferencedInExpr(expr string) ([]string, error) {
// parse abstract syntax tree for expr
exprAst, err := parser.Parse(expr)
if err != nil {
return nil, fmt.Errorf("could not parse expr: %w", err)
}
// walk ast for expr to collect all member references.
v := &logFieldsInExprExtractor{}
ast.Walk(&exprAst.Node, v)
return v.referencedFields, nil
}
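A small sketch of the extractor in use, matching the example given in the comment on fieldsReferencedInExprNotNilCheck; again a hypothetical same-package test reusing the existing test imports.

func TestLogFieldsReferencedInExprSketch(t *testing.T) {
	fields, err := logFieldsReferencedInExpr(`attributes.x + len(resource.y)`)
	require.NoError(t, err)
	// Both log fields referenced in the expression are collected.
	require.ElementsMatch(t, []string{"attributes.x", "resource.y"}, fields)
}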

View File

@ -386,8 +386,19 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
makeTestLog("mismatching log", map[string]string{ makeTestLog("mismatching log", map[string]string{
"test_timestamp": "not-an-epoch", "test_timestamp": "not-an-epoch",
}), }),
}, {
"grok parser should ignore logs with missing parse from field",
PipelineOperator{
ID: "grok",
Type: "grok_parser",
Enabled: true,
Name: "grok parser",
ParseFrom: "attributes.test",
Pattern: "%{GREEDYDATA}",
ParseTo: "attributes.test_parsed",
},
makeTestLog("test log with missing parse from field", map[string]string{}),
}, },
// TODO(Raj): see if there is an error scenario for grok parser.
// TODO(Raj): see if there is an error scenario for trace parser. // TODO(Raj): see if there is an error scenario for trace parser.
// TODO(Raj): see if there is an error scenario for Add operator. // TODO(Raj): see if there is an error scenario for Add operator.
} }
@ -608,6 +619,184 @@ func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
require.Equal("test", result[0].Attributes_string["$test1"]) require.Equal("test", result[0].Attributes_string["$test1"])
} }
func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
require := require.New(t)
testLogs := []model.SignozLog{
makeTestSignozLog("test log", map[string]interface{}{
"http.method": "GET",
"order.products": `{"ids": ["pid0", "pid1"]}`,
}),
}
testPipeline := Pipeline{
OrderId: 1,
Name: "pipeline1",
Alias: "pipeline1",
Enabled: true,
Filter: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "http.method",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
},
Operator: "=",
Value: "GET",
},
},
},
Config: []PipelineOperator{
{
ID: "move",
Type: "move",
Enabled: true,
Name: "move",
From: `attributes["http.method"]`,
To: `attributes["test.http.method"]`,
}, {
ID: "json",
Type: "json_parser",
Enabled: true,
Name: "json",
ParseFrom: `attributes["order.products"]`,
ParseTo: `attributes["order.products"]`,
}, {
ID: "move1",
Type: "move",
Enabled: true,
Name: "move1",
From: `attributes["order.products"].ids`,
To: `attributes["order.product_ids"]`,
}, {
ID: "move2",
Type: "move",
Enabled: true,
Name: "move2",
From: `attributes.test?.doesnt_exist`,
To: `attributes["test.doesnt_exist"]`,
}, {
ID: "add",
Type: "add",
Enabled: true,
Name: "add",
Field: `attributes["order.pids"].missing_field`,
Value: `EXPR(attributes.a["b.c"].d[4].e + resource.f)`,
}, {
ID: "add2",
Type: "add",
Enabled: true,
Name: "add2",
Field: `attributes["order.pids.pid0"]`,
Value: `EXPR(attributes["order.product_ids"][0])`,
}, {
ID: "add3",
Type: "add",
Enabled: true,
Name: "add3",
Field: `attributes["attrs.test.value"]`,
Value: `EXPR(attributes.test?.value)`,
},
},
}
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(),
[]Pipeline{testPipeline},
testLogs,
)
require.Nil(err)
require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
require.Equal(1, len(result))
_, methodAttrExists := result[0].Attributes_string["http.method"]
require.False(methodAttrExists)
require.Equal("GET", result[0].Attributes_string["test.http.method"])
require.Equal("pid0", result[0].Attributes_string["order.pids.pid0"])
}
func TestContainsFilterIsCaseInsensitive(t *testing.T) {
// The contains and ncontains query builder filters are case insensitive when querying logs.
// Pipeline filter should also behave in the same way.
require := require.New(t)
testLogs := []model.SignozLog{
makeTestSignozLog("test Ecom Log", map[string]interface{}{}),
}
testPipelines := []Pipeline{{
OrderId: 1,
Name: "pipeline1",
Alias: "pipeline1",
Enabled: true,
Filter: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{{
Key: v3.AttributeKey{
Key: "body",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
},
Operator: "contains",
Value: "log",
}},
},
Config: []PipelineOperator{
{
ID: "add",
Type: "add",
Enabled: true,
Name: "add",
Field: "attributes.test1",
Value: "value1",
},
},
}, {
OrderId: 2,
Name: "pipeline2",
Alias: "pipeline2",
Enabled: true,
Filter: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{{
Key: v3.AttributeKey{
Key: "body",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
},
Operator: "ncontains",
Value: "ecom",
}},
},
Config: []PipelineOperator{
{
ID: "add",
Type: "add",
Enabled: true,
Name: "add",
Field: "attributes.test2",
Value: "value2",
},
},
}}
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(), testPipelines, testLogs,
)
require.Nil(err)
require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
require.Equal(1, len(result))
require.Equal(result[0].Attributes_string["test1"], "value1")
_, test2Exists := result[0].Attributes_string["test2"]
require.False(test2Exists)
}
func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
	// TODO(Raj): Remove this after dots are supported

View File

@ -438,15 +438,15 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
	// chart with just the query value.
	switch reduceTo {
	case v3.ReduceToOperatorLast:
-		query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts FROM (%s)", query)
+		query = fmt.Sprintf("SELECT anyLast(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorSum:
-		query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts FROM (%s)", query)
+		query = fmt.Sprintf("SELECT sum(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorAvg:
-		query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts FROM (%s)", query)
+		query = fmt.Sprintf("SELECT avg(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorMax:
-		query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts FROM (%s)", query)
+		query = fmt.Sprintf("SELECT max(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorMin:
-		query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts FROM (%s)", query)
+		query = fmt.Sprintf("SELECT min(value) as value, now() as ts FROM (%s)", query)
	default:
		return "", fmt.Errorf("unsupported reduce operator")
	}

View File

@ -2,6 +2,7 @@ package v3
import (
	"fmt"
+	"math"
	"strings"
	"time"
@ -172,7 +173,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str
return "", err return "", err
} }
samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end) samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
// Select the aggregate value for interval // Select the aggregate value for interval
queryTmpl := queryTmpl :=
@ -427,15 +428,15 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
// chart with just the query value. For the quer // chart with just the query value. For the quer
switch reduceTo { switch reduceTo {
case v3.ReduceToOperatorLast: case v3.ReduceToOperatorLast:
query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT anyLastIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy) query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT anyLastIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
case v3.ReduceToOperatorSum: case v3.ReduceToOperatorSum:
query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT sumIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy) query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT sumIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
case v3.ReduceToOperatorAvg: case v3.ReduceToOperatorAvg:
query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy) query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
case v3.ReduceToOperatorMax: case v3.ReduceToOperatorMax:
query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT maxIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy) query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT maxIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
case v3.ReduceToOperatorMin: case v3.ReduceToOperatorMin:
query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT minIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy) query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT minIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
default: default:
return "", fmt.Errorf("unsupported reduce operator") return "", fmt.Errorf("unsupported reduce operator")
} }
@ -447,10 +448,14 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
// start and end are in milliseconds // start and end are in milliseconds
// step is in seconds // step is in seconds
func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options Options) (string, error) { func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options Options) (string, error) {
// adjust the start and end time to be aligned with the step interval
start = start - (start % (mq.StepInterval * 1000)) start = start - (start % (mq.StepInterval * 1000))
end = end - (end % (mq.StepInterval * 1000)) // if the query is a rate query, we adjust the start time by one more step
// so that we can calculate the rate for the first data point
if mq.AggregateOperator.IsRateOperator() && mq.Temporality != v3.Delta {
start -= mq.StepInterval * 1000
}
adjustStep := int64(math.Min(float64(mq.StepInterval), 60))
end = end - (end % (adjustStep * 1000))
var query string var query string
var err error var err error
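To make the new alignment concrete, a self-contained sketch with hypothetical inputs (timestamps chosen to line up with the comments in the tests below): the start is floored to the step, pushed back one extra step for cumulative rate queries, and the end is floored to min(step, 60s).

package main

import (
	"fmt"
	"math"
)

func main() {
	// Hypothetical range 20:11:42 - 20:41:30 UTC with a 300s step,
	// for a cumulative rate aggregation.
	start := int64(1686082302000) // ms
	end := int64(1686084090000)   // ms
	step := int64(300)            // s

	start = start - (start % (step * 1000)) // 1686082200000 -> 20:10:00
	start -= step * 1000                    // 1686081900000 -> 20:05:00, one extra step so rate() has a previous sample

	adjustStep := int64(math.Min(float64(step), 60))
	end = end - (end % (adjustStep * 1000)) // 1686084060000 -> 20:41:00

	fmt.Println(start, end) // 1686081900000 1686084060000
}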

View File

@ -245,7 +245,7 @@ func TestBuildQueryOperators(t *testing.T) {
func TestBuildQueryXRate(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {
-		tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts`
+		tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts`
		cases := []struct {
			aggregateOperator v3.AggregateOperator
@ -298,7 +298,7 @@ func TestBuildQueryXRate(t *testing.T) {
func TestBuildQueryRPM(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {
-		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)`
+		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)`
		cases := []struct {
			aggregateOperator v3.AggregateOperator
@ -376,8 +376,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:11:00 - 20:41:00
-			expected: "timestamp_ms >= 1686082260000 AND timestamp_ms <= 1686084060000",
+			// 20:10:00 - 20:41:00
+			expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000",
		},
		{
			name: "TestBuildQueryAdjustedTimes start close to 50 seconds",
@ -401,8 +401,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:11:00 - 20:41:00
-			expected: "timestamp_ms >= 1686082260000 AND timestamp_ms <= 1686084060000",
+			// 20:10:00 - 20:41:00
+			expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000",
		},
		{
			name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds",
@ -426,8 +426,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:11:30 - 20:41:00
-			expected: "timestamp_ms >= 1686082290000 AND timestamp_ms <= 1686084060000",
+			// 20:11:00 - 20:41:00
+			expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000",
		},
		{
			name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds and end close to 30 seconds",
@ -451,8 +451,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:11:30 - 20:41:00
-			expected: "timestamp_ms >= 1686082290000 AND timestamp_ms <= 1686084060000",
+			// 20:11:00 - 20:41:00
+			expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000",
		},
		{
			name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 300 seconds and end close to 30 seconds",
@ -476,8 +476,10 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:10:00 - 20:40:00
-			expected: "timestamp_ms >= 1686082200000 AND timestamp_ms <= 1686084000000",
+			// 20:05:00 - 20:41:00
+			// 20:10:00 is the nearest 5 minute interval, but we round down to 20:05:00
+			// as this is a rate query and we want to include the previous value for the first interval
+			expected: "timestamp_ms >= 1686081900000 AND timestamp_ms < 1686084060000",
		},
		{
			name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 180 seconds and end close to 30 seconds",
@ -501,8 +503,10 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
					},
				},
			},
-			// 20:09:00 - 20:39:00
-			expected: "timestamp_ms >= 1686082140000 AND timestamp_ms <= 1686083940000",
+			// 20:06:00 - 20:39:00
+			// 20:09:00 is the nearest 3 minute interval, but we round down to 20:06:00
+			// as this is a rate query and we want to include the previous value for the first interval
+			expected: "timestamp_ms >= 1686081960000 AND timestamp_ms < 1686084060000",
		},
	}

View File

@ -558,8 +558,8 @@ func TestQueryRange(t *testing.T) {
	}
	q := NewQuerier(opts)
	expectedTimeRangeInQueryString := []string{
-		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000, 1675115580000+120*60*1000),
-		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000+120*60*1000, 1675115580000+180*60*1000),
+		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000),
+		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000+120*60*1000, 1675115580000+180*60*1000),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", 1675115580000*1000000, (1675115580000+120*60*1000)*int64(1000000)),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)),
	}
@ -669,7 +669,7 @@ func TestQueryRangeValueType(t *testing.T) {
	q := NewQuerier(opts)
	// No caching
	expectedTimeRangeInQueryString := []string{
-		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000, 1675115580000+120*60*1000),
+		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)),
	}

View File

@ -476,15 +476,15 @@ func reduceToQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator
	var groupBy string
	switch reduceTo {
	case v3.ReduceToOperatorLast:
-		query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
+		query = fmt.Sprintf("SELECT anyLast(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorSum:
-		query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
+		query = fmt.Sprintf("SELECT sum(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorAvg:
-		query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
+		query = fmt.Sprintf("SELECT avg(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorMax:
-		query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
+		query = fmt.Sprintf("SELECT max(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorMin:
-		query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
+		query = fmt.Sprintf("SELECT min(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	default:
		return "", fmt.Errorf("unsupported reduce operator")
	}

View File

@ -73,9 +73,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
	case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
		filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
	default:
		filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
+		if v.Operator == v3.FilterOperatorContains || v.Operator == v3.FilterOperatorNotContains {
+			// `contains` and `ncontains` should be case insensitive to match how they work when querying logs.
+			filter = fmt.Sprintf(
+				"lower(%s) %s lower(%s)",
+				name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value),
+			)
+		}
		// Avoid running operators on nil values
		if v.Operator != v3.FilterOperatorEqual && v.Operator != v3.FilterOperatorNotEqual {
			filter = fmt.Sprintf("%s != nil && %s", name, filter)

View File

@ -6,10 +6,12 @@ import (
"fmt" "fmt"
"math" "math"
"reflect" "reflect"
"regexp"
"sort" "sort"
"sync" "sync"
"text/template" "text/template"
"time" "time"
"unicode"
"go.uber.org/zap" "go.uber.org/zap"
@ -435,7 +437,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer
for i, v := range vars { for i, v := range vars {
colName := columnNames[i] colName := normalizeLabelName(columnNames[i])
switch v := v.(type) { switch v := v.(type) {
case *string: case *string:
@ -764,6 +766,23 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c
return nil, fmt.Errorf("this is unexpected, invalid query label") return nil, fmt.Errorf("this is unexpected, invalid query label")
} }
func normalizeLabelName(name string) string {
// See https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
// Regular expression to match non-alphanumeric characters except underscores
reg := regexp.MustCompile(`[^a-zA-Z0-9_]`)
// Replace all non-alphanumeric characters except underscores with underscores
normalized := reg.ReplaceAllString(name, "_")
// If the first character is not a letter or an underscore, prepend an underscore
if len(normalized) > 0 && !unicode.IsLetter(rune(normalized[0])) && normalized[0] != '_' {
normalized = "_" + normalized
}
return normalized
}
func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (interface{}, error) {
	valueFormatter := formatter.FromUnit(r.Unit())
@ -829,7 +848,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie
	annotations := make(labels.Labels, 0, len(r.annotations))
	for _, a := range r.annotations {
-		annotations = append(annotations, labels.Label{Name: a.Name, Value: expand(a.Value)})
+		annotations = append(annotations, labels.Label{Name: normalizeLabelName(a.Name), Value: expand(a.Value)})
	}
	lbs := lb.Labels()

View File

@ -295,3 +295,43 @@ func TestThresholdRuleCombinations(t *testing.T) {
		}
	}
}
func TestNormalizeLabelName(t *testing.T) {
cases := []struct {
labelName string
expected string
}{
{
labelName: "label",
expected: "label",
},
{
labelName: "label.with.dots",
expected: "label_with_dots",
},
{
labelName: "label-with-dashes",
expected: "label_with_dashes",
},
{
labelName: "labelwithnospaces",
expected: "labelwithnospaces",
},
{
labelName: "label with spaces",
expected: "label_with_spaces",
},
{
labelName: "label with spaces and .dots",
expected: "label_with_spaces_and__dots",
},
{
labelName: "label with spaces and -dashes",
expected: "label_with_spaces_and__dashes",
},
}
for _, c := range cases {
assert.Equal(t, c.expected, normalizeLabelName(c.labelName))
}
}

View File

@ -192,7 +192,7 @@ services:
    <<: *db-depend
  otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.4}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.6}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
@ -205,7 +205,7 @@ services:
      # condition: service_healthy
  otel-collector:
-    image: signoz/signoz-otel-collector:0.88.4
+    image: signoz/signoz-otel-collector:0.88.6
    container_name: signoz-otel-collector
    command:
      [
@ -245,7 +245,7 @@ services:
        condition: service_healthy
  otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.88.4
+    image: signoz/signoz-otel-collector:0.88.6
    container_name: signoz-otel-collector-metrics
    command:
      [