Chore: restrict logs connection test for integrations to use log attributes for identifying logs (#4977)

* chore: change logs connection test spec to be based on an attribute value

* chore: disallow unknown fields while unmarshalling JSON for an integration

* chore: add description field to collected metric spec

* chore: update logs connection test for builtin integrations

* chore: update logic for calculating logs connection status
Raj Kamal Singh 2024-05-15 14:36:52 +05:30 committed by GitHub
parent 0cbaa17d9f
commit 3efd9801a1
9 changed files with 48 additions and 95 deletions

View File

@@ -2347,13 +2347,28 @@ func (ah *APIHandler) calculateConnectionStatus(
 func (ah *APIHandler) calculateLogsConnectionStatus(
     ctx context.Context,
-    logsConnectionTest *v3.FilterSet,
+    logsConnectionTest *integrations.LogsConnectionTest,
     lookbackSeconds int64,
 ) (*integrations.SignalConnectionStatus, *model.ApiError) {
     if logsConnectionTest == nil {
         return nil, nil
     }
+    logsConnTestFilter := &v3.FilterSet{
+        Operator: "AND",
+        Items: []v3.FilterItem{
+            {
+                Key: v3.AttributeKey{
+                    Key:      logsConnectionTest.AttributeKey,
+                    DataType: v3.AttributeKeyDataTypeString,
+                    Type:     v3.AttributeKeyTypeTag,
+                },
+                Operator: "=",
+                Value:    logsConnectionTest.AttributeValue,
+            },
+        },
+    }
     qrParams := &v3.QueryRangeParamsV3{
         Start: time.Now().UnixMilli() - (lookbackSeconds * 1000),
         End:   time.Now().UnixMilli(),
@@ -2363,7 +2378,7 @@ func (ah *APIHandler) calculateLogsConnectionStatus(
             BuilderQueries: map[string]*v3.BuilderQuery{
                 "A": {
                     PageSize:   1,
-                    Filters:    logsConnectionTest,
+                    Filters:    logsConnTestFilter,
                     QueryName:  "A",
                     DataSource: v3.DataSourceLogs,
                     Expression: "A",

View File

@@ -1,6 +1,7 @@
 package integrations
 import (
+    "bytes"
     "context"
     "embed"
     "strings"
@@ -120,7 +121,9 @@ func readBuiltInIntegration(dirpath string) (
     }
     var integration IntegrationDetails
-    err = json.Unmarshal(hydratedSpecJson, &integration)
+    decoder := json.NewDecoder(bytes.NewReader(hydratedSpecJson))
+    decoder.DisallowUnknownFields()
+    err = decoder.Decode(&integration)
     if err != nil {
         return nil, fmt.Errorf(
             "couldn't parse hydrated JSON spec read from %s: %w",

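For context: replacing json.Unmarshal with a json.Decoder that has DisallowUnknownFields set means a misspelled or unexpected key in an integration spec now fails the parse instead of being silently dropped. A minimal, self-contained sketch of the difference; the struct and JSON snippet below are illustrative stand-ins, not taken from a shipped spec:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

// Illustrative stand-in for a fragment of an integration spec.
type connectionTests struct {
    Logs struct {
        AttributeKey   string `json:"attribute_key"`
        AttributeValue string `json:"attribute_value"`
    } `json:"logs"`
}

func main() {
    // "attribute_val" is a deliberate typo of "attribute_value".
    spec := []byte(`{"logs": {"attribute_key": "source", "attribute_val": "nginx"}}`)

    // Lenient parse: the unknown key is silently ignored.
    var lenient connectionTests
    fmt.Println(json.Unmarshal(spec, &lenient)) // <nil>

    // Strict parse: the unknown key is reported as an error.
    var strict connectionTests
    dec := json.NewDecoder(bytes.NewReader(spec))
    dec.DisallowUnknownFields()
    fmt.Println(dec.Decode(&strict)) // json: unknown field "attribute_val"
}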
View File

@@ -41,18 +41,8 @@
   },
   "connection_tests": {
     "logs": {
-      "op": "AND",
-      "items": [
-        {
-          "key": {
-            "type": "tag",
-            "key": "source",
-            "dataType": "string"
-          },
-          "op": "=",
-          "value": "clickhouse"
-        }
-      ]
+      "attribute_key": "source",
+      "attribute_value": "clickhouse"
     }
   },
   "data_collected": "file://data-collected.json"

View File

@@ -37,18 +37,8 @@
   },
   "connection_tests": {
     "logs": {
-      "op": "AND",
-      "items": [
-        {
-          "key": {
-            "type": "tag",
-            "key": "source",
-            "dataType": "string"
-          },
-          "op": "=",
-          "value": "mongo"
-        }
-      ]
+      "attribute_key": "source",
+      "attribute_value": "mongodb"
     }
   },
   "data_collected": {

View File

@@ -32,18 +32,8 @@
   },
   "connection_tests": {
     "logs": {
-      "op": "AND",
-      "items": [
-        {
-          "key": {
-            "type": "tag",
-            "key": "source",
-            "dataType": "string"
-          },
-          "op": "=",
-          "value": "nginx"
-        }
-      ]
+      "attribute_key": "source",
+      "attribute_value": "nginx"
     }
   },
   "data_collected": {

View File

@@ -37,18 +37,8 @@
   },
   "connection_tests": {
     "logs": {
-      "op": "AND",
-      "items": [
-        {
-          "key": {
-            "type": "tag",
-            "key": "source",
-            "dataType": "string"
-          },
-          "op": "=",
-          "value": "postgres"
-        }
-      ]
+      "attribute_key": "source",
+      "attribute_value": "postgres"
     }
   },
   "data_collected": {

View File

@@ -37,18 +37,8 @@
   },
   "connection_tests": {
     "logs": {
-      "op": "AND",
-      "items": [
-        {
-          "key": {
-            "type": "tag",
-            "key": "source",
-            "dataType": "string"
-          },
-          "op": "=",
-          "value": "redis"
-        }
-      ]
+      "attribute_key": "source",
+      "attribute_value": "redis"
     }
   },
   "data_collected": {

View File

@@ -12,7 +12,6 @@ import (
     "go.signoz.io/signoz/pkg/query-service/app/dashboards"
     "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
     "go.signoz.io/signoz/pkg/query-service/model"
-    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
     "go.signoz.io/signoz/pkg/query-service/rules"
     "go.signoz.io/signoz/pkg/query-service/utils"
 )
@@ -63,6 +62,7 @@ type CollectedMetric struct {
     Name string `json:"name"`
     Type string `json:"type"`
     Unit string `json:"unit"`
+    Description string `json:"description"`
 }
 type SignalConnectionStatus struct {
@@ -75,9 +75,14 @@ type IntegrationConnectionStatus struct {
     Metrics *SignalConnectionStatus `json:"metrics"`
 }
+// log attribute value to use for finding logs for the integration.
+type LogsConnectionTest struct {
+    AttributeKey string `json:"attribute_key"`
+    AttributeValue string `json:"attribute_value"`
+}
 type IntegrationConnectionTests struct {
     // Filter to use for finding logs for the integration.
-    Logs *v3.FilterSet `json:"logs"`
+    Logs *LogsConnectionTest `json:"logs"`
     // Metric names expected to have been received for the integration.
     Metrics []string `json:"metrics"`

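One addition not illustrated by the spec diffs above is the new Description field on CollectedMetric. A small sketch of how a data-collected metric entry with a description would decode, using a trimmed stand-in struct that mirrors only the fields shown in the diff (the JSON entry itself is illustrative, not copied from a shipped spec):

package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed stand-in mirroring the CollectedMetric fields in the diff above.
type CollectedMetric struct {
    Name        string `json:"name"`
    Type        string `json:"type"`
    Unit        string `json:"unit"`
    Description string `json:"description"`
}

func main() {
    // Illustrative metric entry carrying the new "description" key.
    entry := []byte(`{
        "name": "nginx.requests",
        "type": "sum",
        "unit": "1",
        "description": "Total number of requests handled by the server"
    }`)

    var m CollectedMetric
    if err := json.Unmarshal(entry, &m); err != nil {
        panic(err)
    }
    fmt.Printf("%s (%s, unit=%s): %s\n", m.Name, m.Type, m.Unit, m.Description)
}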
View File

@@ -96,19 +96,9 @@ func (t *TestAvailableIntegrationsRepo) list(
             Alerts: []rules.PostableRule{},
         },
         ConnectionTests: &IntegrationConnectionTests{
-            Logs: &v3.FilterSet{
-                Operator: "AND",
-                Items: []v3.FilterItem{
-                    {
-                        Key: v3.AttributeKey{
-                            Key:      "source",
-                            DataType: v3.AttributeKeyDataTypeString,
-                            Type:     v3.AttributeKeyTypeTag,
-                        },
-                        Operator: "=",
-                        Value:    "nginx",
-                    },
-                },
+            Logs: &LogsConnectionTest{
+                AttributeKey:   "source",
+                AttributeValue: "nginx",
             },
         },
     }, {
@@ -174,19 +164,9 @@ func (t *TestAvailableIntegrationsRepo) list(
             Alerts: []rules.PostableRule{},
         },
         ConnectionTests: &IntegrationConnectionTests{
-            Logs: &v3.FilterSet{
-                Operator: "AND",
-                Items: []v3.FilterItem{
-                    {
-                        Key: v3.AttributeKey{
-                            Key:      "source",
-                            DataType: v3.AttributeKeyDataTypeString,
-                            Type:     v3.AttributeKeyTypeTag,
-                        },
-                        Operator: "=",
-                        Value:    "nginx",
-                    },
-                },
+            Logs: &LogsConnectionTest{
+                AttributeKey:   "source",
+                AttributeValue: "nginx",
             },
         },
     },