commit 4a467435e9
@@ -137,7 +137,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.20.0
+    image: signoz/query-service:0.20.2
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
     #   - "6060:6060"     # pprof port
@@ -166,7 +166,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.20.0
+    image: signoz/frontend:0.20.2
     deploy:
       restart_policy:
         condition: on-failure
@@ -153,7 +153,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.20.0}
+    image: signoz/query-service:${DOCKER_TAG:-0.20.2}
     container_name: query-service
     command: ["-config=/root/config/prometheus.yml"]
     # ports:
@@ -181,7 +181,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.20.0}
+    image: signoz/frontend:${DOCKER_TAG:-0.20.2}
     container_name: frontend
     restart: on-failure
     depends_on:
@@ -146,7 +146,6 @@ export const initialQueryPromQLData: IPromQLQuery = {

 export const initialClickHouseData: IClickHouseQuery = {
 	name: createNewBuilderItemName({ existNames: [], sourceNames: alphabet }),
-	rawQuery: '',
 	legend: '',
 	disabled: false,
 	query: '',
@@ -39,7 +39,6 @@ export const alertDefaults: AlertDef = {
 		A: {
 			name: 'A',
 			query: ``,
-			rawQuery: ``,
 			legend: '',
 			disabled: false,
 		},
@@ -73,7 +72,6 @@ export const logAlertDefaults: AlertDef = {
 		A: {
 			name: 'A',
 			query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
-			rawQuery: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`,
 			legend: '',
 			disabled: false,
 		},
@@ -107,7 +105,6 @@ export const traceAlertDefaults: AlertDef = {
 	chQueries: {
 		A: {
 			name: 'A',
-			rawQuery: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
 			query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
 			legend: '',
 			disabled: false,
@@ -142,7 +139,6 @@ export const exceptionAlertDefaults: AlertDef = {
 	chQueries: {
 		A: {
 			name: 'A',
-			rawQuery: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
 			query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`,
 			legend: '',
 			disabled: false,
@@ -64,7 +64,7 @@ function ChartPreview({
 		case EQueryType.CLICKHOUSE:
 			return (
 				query.clickhouse_sql?.length > 0 &&
-				query.clickhouse_sql[0].rawQuery?.length > 0
+				query.clickhouse_sql[0].query?.length > 0
 			);
 		case EQueryType.QUERY_BUILDER:
 			return (
@@ -162,7 +162,7 @@ function FormAlertRules({
 		}

 		currentQuery.clickhouse_sql.forEach((item) => {
-			if (item.rawQuery === '') {
+			if (item.query === '') {
 				notifications.error({
 					message: 'Error',
 					description: t('chquery_required'),
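Note: a minimal sketch (not part of this commit, helper name and types are illustrative) of the validation this hunk now performs on the renamed field — every ClickHouse item must carry a non-empty query before the alert form is accepted.

	// Sketch only: validateChQueries is a hypothetical helper, not SigNoz code.
	interface ChQueryItem {
		name: string;
		query: string;
		legend: string;
		disabled: boolean;
	}

	// Returns the names of ClickHouse queries that would trigger the
	// 'chquery_required' error notification shown in the hunk above.
	function validateChQueries(items: ChQueryItem[]): string[] {
		return items
			.filter((item) => item.query.trim() === '')
			.map((item) => item.name);
	}

	// Example: item A is empty, so only 'A' is reported.
	console.log(
		validateChQueries([
			{ name: 'A', query: '', legend: '', disabled: false },
			{ name: 'B', query: 'SELECT 1', legend: '', disabled: false },
		]),
	); // ['A']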
@@ -50,7 +50,7 @@ function ClickHouseQueryBuilder({

 	const handleUpdateEditor = useCallback(
 		(value: string) => {
-			handleUpdateQuery('rawQuery', value);
+			handleUpdateQuery('query', value);
 		},
 		[handleUpdateQuery],
 	);
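Note: the editor wiring reduces to the sketch below (a standalone hook with an assumed handleUpdateQuery signature, not the component's real props): the editor's onChange callback now writes the query field instead of rawQuery, and the callback is memoised so the editor does not re-bind on every render.

	// Sketch only; handleUpdateQuery's shape is inferred from this hunk.
	import { useCallback } from 'react';

	type ChField = 'query' | 'legend';

	function useEditorBinding(
		handleUpdateQuery: (field: ChField, value: string) => void,
	): (value: string) => void {
		return useCallback(
			(value: string) => {
				handleUpdateQuery('query', value);
			},
			[handleUpdateQuery],
		);
	}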
@@ -75,7 +75,7 @@ function ClickHouseQueryBuilder({
 					language="sql"
 					height="200px"
 					onChange={handleUpdateEditor}
-					value={queryData.rawQuery}
+					value={queryData.query}
 					options={{
 						scrollbar: {
 							alwaysConsumeMouseWheel: false,
@@ -2,7 +2,7 @@ import { IClickHouseQuery } from 'types/api/queryBuilder/queryBuilderData';

 export interface IClickHouseQueryHandleChange {
 	queryIndex: number | string;
-	rawQuery?: IClickHouseQuery['rawQuery'];
+	query?: IClickHouseQuery['query'];
 	legend?: IClickHouseQuery['legend'];
 	toggleDisable?: IClickHouseQuery['disabled'];
 	toggleDelete?: boolean;
@@ -22,7 +22,7 @@ export const mapQueryDataFromApi = (
 		? Object.keys(compositeQuery.chQueries).map((key) => ({
 				...compositeQuery.chQueries[key],
 				name: key,
-				rawQuery: compositeQuery.chQueries[key].query,
+				query: compositeQuery.chQueries[key].query,
 			}))
 		: initialQuery.clickhouse_sql;

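Note: a minimal sketch of the mapping this hunk changes, using assumed local type names rather than the repo's real imports — each chQueries entry from the API is flattened into the UI shape keyed by query instead of rawQuery.

	// Sketch only; ApiChQuery/UiChQuery are illustrative names, not SigNoz types.
	interface ApiChQuery {
		query: string;
		legend: string;
		disabled: boolean;
	}

	interface UiChQuery extends ApiChQuery {
		name: string;
	}

	function mapChQueries(chQueries: Record<string, ApiChQuery>): UiChQuery[] {
		return Object.keys(chQueries).map((key) => ({
			...chQueries[key],
			name: key,
			// The UI field is now `query`, so the API value maps straight across.
			query: chQueries[key].query,
		}));
	}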
@@ -67,9 +67,9 @@ export async function GetMetricQueryRange({
 		case EQueryType.CLICKHOUSE: {
 			const chQueries = {};
 			queryData.map((query) => {
-				if (!query.rawQuery) return;
+				if (!query.query) return;
 				chQueries[query.name] = {
-					query: query.rawQuery,
+					query: query.query,
 					disabled: query.disabled,
 				};
 				legendMap[query.name] = query.legend;
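Note: the request payload built here can be sketched as a standalone function (assumed types and name; the real code mutates chQueries and legendMap in an outer scope). Items with an empty query are skipped, and legends are tracked separately by name.

	// Sketch only; ClickHouseItem and buildChQueriesPayload are illustrative.
	interface ClickHouseItem {
		name: string;
		query: string;
		legend: string;
		disabled: boolean;
	}

	function buildChQueriesPayload(queryData: ClickHouseItem[]): {
		chQueries: Record<string, { query: string; disabled: boolean }>;
		legendMap: Record<string, string>;
	} {
		const chQueries: Record<string, { query: string; disabled: boolean }> = {};
		const legendMap: Record<string, string> = {};
		queryData.forEach((query) => {
			if (!query.query) return; // skip empty editors, mirroring the guard above
			chQueries[query.name] = { query: query.query, disabled: query.disabled };
			legendMap[query.name] = query.legend;
		});
		return { chQueries, legendMap };
	}

The sketch uses forEach instead of the original Array.prototype.map, since the loop is run only for its side effects and no mapped array is needed.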
@@ -63,7 +63,6 @@ export type IBuilderQuery = {

 export interface IClickHouseQuery {
 	name: string;
-	rawQuery: string;
 	legend: string;
 	disabled: boolean;
 	query: string;
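Note: after this change the ClickHouse query shape carries a single query string. A minimal sketch of the resulting interface and a default value mirroring initialClickHouseData, with the name helper stubbed out:

	// Sketch only; createNewBuilderItemName is replaced by a literal for illustration.
	interface IClickHouseQuery {
		name: string;
		legend: string;
		disabled: boolean;
		query: string;
	}

	const defaultClickHouseQuery: IClickHouseQuery = {
		name: 'A', // the real code derives this via createNewBuilderItemName
		legend: '',
		disabled: false,
		query: '',
	};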
@@ -3879,7 +3879,7 @@ func (r *ClickHouseReader) GetLogAggregateAttributes(ctx context.Context, req *v
 	}
 	// add other attributes
 	for _, field := range constants.StaticFieldsLogsV3 {
-		if !stringAllowed && field.DataType == v3.AttributeKeyDataTypeString && (v3.AttributeKey{} == field) {
+		if (!stringAllowed && field.DataType == v3.AttributeKeyDataTypeString) || (v3.AttributeKey{} == field) {
 			continue
 		} else if len(req.SearchText) == 0 || strings.Contains(field.Key, req.SearchText) {
 			response.AttributeKeys = append(response.AttributeKeys, field)
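Note on the logic fix: the old condition required all three checks to hold before skipping a field, so empty fields and disallowed string fields were effectively never filtered. A sketch of the corrected grouping (written in TypeScript with illustrative names; the precedence of AND/OR is the same as in the Go hunk):

	// Sketch only; field/stringAllowed mirror the Go hunk's intent, not its types.
	interface StaticField {
		key: string;
		dataType: 'string' | 'int64' | 'float64' | 'bool' | '';
	}

	function shouldSkip(field: StaticField, stringAllowed: boolean): boolean {
		const isEmpty = field.key === '' && field.dataType === '';
		const disallowedString = !stringAllowed && field.dataType === 'string';
		// Old code effectively required disallowedString && isEmpty;
		// the fix skips when either condition holds on its own.
		return disallowedString || isEmpty;
	}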
@@ -293,7 +293,7 @@ func (a AttributeKey) Validate() error {

 	if a.IsColumn {
 		switch a.Type {
-		case AttributeKeyTypeResource, AttributeKeyTypeTag:
+		case AttributeKeyTypeResource, AttributeKeyTypeTag, AttributeKeyTypeUnspecified:
 			break
 		default:
 			return fmt.Errorf("invalid attribute type: %s", a.Type)