fix: pipelines postgres support and multitenancy (#7371)

* fix: pipelines postgres support and multitenancy

* fix: minor fixes

* fix: address minor comments

* fix: rename package pipelinetypes
Nityananda Gohain 2025-03-24 10:17:12 +05:30 committed by GitHub
parent b36d2ec4c6
commit 1dfebed93a
32 changed files with 815 additions and 628 deletions


@@ -217,7 +217,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	// ingestion pipelines manager
 	logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
-		serverOptions.SigNoz.SQLStore.SQLxDB(), integrationsController.GetPipelinesForInstalledIntegrations,
+		serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
 	)
 	if err != nil {
 		return nil, err
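The significant change here: the controller now receives the whole sqlstore.SQLStore instead of a *sqlx.DB dereferenced at the call site. That indirection is what lets the rewritten pipelines repo further down run unchanged on both SQLite and Postgres via bun. A sketch of the assumed dependency surface, inferred only from the accessors this diff uses (the real definition lives in pkg/sqlstore):

	// Assumed shape, for orientation only; inferred from usage in this diff.
	type SQLStore interface {
		BunDB() *bun.DB   // used by the rewritten pipelines Repo below
		SQLxDB() *sqlx.DB // legacy accessor the old wiring dereferenced here
		// ... other accessors elided
	}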


@@ -106,9 +106,7 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (
 			return nil, "", errors.Wrap(apiErr.ToError(), "failed to get latest agent config version")
 		}

-		updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(
-			recommendation, latestConfig,
-		)
+		updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(recommendation, latestConfig)
 		if apiErr != nil {
 			return nil, "", errors.Wrap(apiErr.ToError(), fmt.Sprintf(
 				"failed to generate agent config recommendation for %s", featureType,


@@ -60,6 +60,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/postprocess"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"go.uber.org/zap"

@@ -4462,6 +4463,11 @@ func (aH *APIHandler) PreviewLogsPipelinesHandler(w http.ResponseWriter, r *http
 }

 func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Request) {
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
 	version, err := parseAgentConfigVersion(r)
 	if err != nil {

@@ -4473,9 +4479,9 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
 	var apierr *model.ApiError
 	if version != -1 {
-		payload, apierr = aH.listLogsPipelinesByVersion(context.Background(), version)
+		payload, apierr = aH.listLogsPipelinesByVersion(context.Background(), claims.OrgID, version)
 	} else {
-		payload, apierr = aH.listLogsPipelines(context.Background())
+		payload, apierr = aH.listLogsPipelines(context.Background(), claims.OrgID)
 	}

 	if apierr != nil {

@@ -4486,7 +4492,7 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
 }

 // listLogsPipelines lists logs pipelines for latest version
-func (aH *APIHandler) listLogsPipelines(ctx context.Context) (
+func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID string) (
 	*logparsingpipeline.PipelinesResponse, *model.ApiError,
 ) {
 	// get latest agent config

@@ -4516,7 +4522,7 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context) (
 }

 // listLogsPipelinesByVersion lists pipelines along with config version history
-func (aH *APIHandler) listLogsPipelinesByVersion(ctx context.Context, version int) (
+func (aH *APIHandler) listLogsPipelinesByVersion(ctx context.Context, orgID string, version int) (
 	*logparsingpipeline.PipelinesResponse, *model.ApiError,
 ) {
 	payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, version)

@@ -4537,7 +4543,13 @@ func (aH *APIHandler) listLogsPipelinesByVersion(ctx context.Context, version in
 func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request) {
-	req := logparsingpipeline.PostablePipelines{}
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
+		return
+	}
+
+	req := pipelinetypes.PostablePipelines{}

 	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
 		RespondError(w, model.BadRequest(err), nil)

@@ -4546,7 +4558,7 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)
 	createPipeline := func(
 		ctx context.Context,
-		postable []logparsingpipeline.PostablePipeline,
+		postable []pipelinetypes.PostablePipeline,
 	) (*logparsingpipeline.PipelinesResponse, *model.ApiError) {
 		if len(postable) == 0 {
 			zap.L().Warn("found no pipelines in the http request, this will delete all the pipelines")

@@ -4557,7 +4569,7 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)
 			return nil, validationErr
 		}

-		return aH.LogsParsingPipelineController.ApplyPipelines(ctx, postable)
+		return aH.LogsParsingPipelineController.ApplyPipelines(ctx, claims.OrgID, postable)
 	}

 	res, err := createPipeline(r.Context(), req.Pipelines)
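All of the handler changes above follow one tenancy pattern: resolve JWT claims from the request context before doing any work, fail closed if they are missing, then thread claims.OrgID into every downstream call. In isolation (the handler name is hypothetical; authtypes, render, and errorsV2 are the packages used above, and aH.Respond is assumed to be the handler's usual success path):

	func (aH *APIHandler) someOrgScopedHandler(w http.ResponseWriter, r *http.Request) {
		// Fail closed: no claims in context means the request is unauthenticated.
		claims, ok := authtypes.ClaimsFromContext(r.Context())
		if !ok {
			render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
			return
		}
		// claims.OrgID scopes every downstream query to the caller's tenant.
		payload, apierr := aH.listLogsPipelines(r.Context(), claims.OrgID)
		if apierr != nil {
			RespondError(w, apierr, payload)
			return
		}
		aH.Respond(w, payload)
	}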


@@ -5,10 +5,10 @@ import (
 	"fmt"

 	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
-	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 )

 type Controller struct {

@@ -124,7 +124,7 @@ func (c *Controller) Uninstall(
 func (c *Controller) GetPipelinesForInstalledIntegrations(
 	ctx context.Context,
-) ([]logparsingpipeline.Pipeline, *model.ApiError) {
+) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
 	return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
 }


@@ -7,11 +7,11 @@ import (
 	"strings"
 	"time"

-	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
 	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/google/uuid"
 	"github.com/jmoiron/sqlx"
 )

@@ -39,7 +39,7 @@ type IntegrationAssets struct {
 }

 type LogsAssets struct {
-	Pipelines []logparsingpipeline.PostablePipeline `json:"pipelines"`
+	Pipelines []pipelinetypes.PostablePipeline `json:"pipelines"`
 }

 type IntegrationConfigStep struct {

@@ -257,33 +257,34 @@ func (m *Manager) UninstallIntegration(
 func (m *Manager) GetPipelinesForInstalledIntegrations(
 	ctx context.Context,
-) ([]logparsingpipeline.Pipeline, *model.ApiError) {
+) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
 	installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
 	if apiErr != nil {
 		return nil, apiErr
 	}

-	pipelines := []logparsingpipeline.Pipeline{}
+	gettablePipelines := []pipelinetypes.GettablePipeline{}
 	for _, ii := range installedIntegrations {
 		for _, p := range ii.Assets.Logs.Pipelines {
-			pp := logparsingpipeline.Pipeline{
+			gettablePipelines = append(gettablePipelines, pipelinetypes.GettablePipeline{
 				// Alias is used for identifying integration pipelines. Id can't be used for this
 				// since versioning while saving pipelines requires a new id for each version
 				// to avoid altering history when pipelines are edited/reordered etc
-				Alias:       AliasForIntegrationPipeline(ii.Id, p.Alias),
-				Id:          uuid.NewString(),
-				OrderId:     p.OrderId,
-				Enabled:     p.Enabled,
-				Name:        p.Name,
-				Description: &p.Description,
+				StoreablePipeline: pipelinetypes.StoreablePipeline{
+					Alias:       AliasForIntegrationPipeline(ii.Id, p.Alias),
+					ID:          uuid.NewString(),
+					OrderID:     p.OrderID,
+					Enabled:     p.Enabled,
+					Name:        p.Name,
+					Description: p.Description,
+				},
 				Filter: p.Filter,
 				Config: p.Config,
-			}
-			pipelines = append(pipelines, pp)
+			})
 		}
 	}
-	return pipelines, nil
+	return gettablePipelines, nil
 }

 func (m *Manager) dashboardUuid(integrationId string, dashboardId string) string {
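The type split driving this rewrite: the old flat Pipeline becomes a persisted core (StoreablePipeline, one field per column) embedded in a read-side wrapper that carries the parsed filter and config. Reconstructed from the fields used across this diff; an approximation, the real definitions live in pkg/types/pipelinetypes:

	// Approximate shape, reconstructed from usage in this diff.
	type StoreablePipeline struct {
		OrgID        string // new: every row is tenant-scoped
		ID           string
		OrderID      int
		Enabled      bool
		Name         string
		Alias        string
		Description  string // plain string now, no longer *string
		FilterString string // filter persisted as JSON text
		ConfigJSON   string // operator config persisted as JSON text
		types.TimeAuditable // CreatedAt, ...
		types.UserAuditable // CreatedBy, ...
	}

	type GettablePipeline struct {
		StoreablePipeline
		Filter *v3.FilterSet      // parsed from FilterString
		Config []PipelineOperator // parsed from ConfigJSON
	}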


@@ -3,8 +3,8 @@ package integrations
 import (
 	"strings"

-	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 )

 const IntegrationPipelineIdSeparator string = "--"

@@ -20,7 +20,7 @@ func AliasForIntegrationPipeline(
 // Returns ptr to integration_id string if `p` is a pipeline for an installed integration.
 // Returns null otherwise.
-func IntegrationIdForPipeline(p logparsingpipeline.Pipeline) *string {
+func IntegrationIdForPipeline(p pipelinetypes.GettablePipeline) *string {
 	if strings.HasPrefix(p.Alias, constants.IntegrationPipelineIdPrefix) {
 		parts := strings.Split(p.Alias, IntegrationPipelineIdSeparator)
 		if len(parts) < 2 {
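For orientation, the convention this function depends on: an integration pipeline's alias is built by AliasForIntegrationPipeline from a fixed prefix, the integration id, and the pipeline's own alias, joined with the "--" separator. A rough illustration; the exact segment layout is an assumption based on the prefix check and split above:

	// Hypothetical helper showing the assumed alias layout:
	// <IntegrationPipelineIdPrefix>--<integrationId>--<alias>
	func exampleIntegrationIdFromAlias() *string {
		alias := AliasForIntegrationPipeline("nginx", "access-logs")
		if strings.HasPrefix(alias, constants.IntegrationPipelineIdPrefix) {
			parts := strings.Split(alias, IntegrationPipelineIdSeparator)
			return &parts[1] // assumed: second segment is the integration id
		}
		return nil
	}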


@@ -5,12 +5,12 @@ import (
 	"slices"
 	"testing"

-	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
 	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 )

 func NewTestIntegrationsManager(t *testing.T) *Manager {

@@ -59,7 +59,7 @@ func (t *TestAvailableIntegrationsRepo) list(
 			},
 			Assets: IntegrationAssets{
 				Logs: LogsAssets{
-					Pipelines: []logparsingpipeline.PostablePipeline{
+					Pipelines: []pipelinetypes.PostablePipeline{
 						{
 							Name:  "pipeline1",
 							Alias: "pipeline1",

@@ -78,7 +78,7 @@ func (t *TestAvailableIntegrationsRepo) list(
 								},
 							},
 						},
-						Config: []logparsingpipeline.PipelineOperator{
+						Config: []pipelinetypes.PipelineOperator{
 							{
 								OrderId: 1,
 								ID:      "add",

@@ -127,7 +127,7 @@ func (t *TestAvailableIntegrationsRepo) list(
 			},
 			Assets: IntegrationAssets{
 				Logs: LogsAssets{
-					Pipelines: []logparsingpipeline.PostablePipeline{
+					Pipelines: []pipelinetypes.PostablePipeline{
 						{
 							Name:  "pipeline2",
 							Alias: "pipeline2",

@@ -146,7 +146,7 @@ func (t *TestAvailableIntegrationsRepo) list(
 								},
 							},
 						},
-						Config: []logparsingpipeline.PipelineOperator{
+						Config: []pipelinetypes.PipelineOperator{
 							{
 								OrderId: 1,
 								ID:      "add",


@@ -10,6 +10,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	coreModel "github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/pkg/errors"
 	"go.uber.org/zap"
 )

@@ -164,7 +165,7 @@ func checkDuplicateString(pipeline []string) bool {
 func GenerateCollectorConfigWithPipelines(
 	config []byte,
-	pipelines []Pipeline,
+	pipelines []pipelinetypes.GettablePipeline,
 ) ([]byte, *coreModel.ApiError) {
 	var collectorConf map[string]interface{}
 	err := yaml.Unmarshal([]byte(config), &collectorConf)
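Only the element type changed here; the contract of GenerateCollectorConfigWithPipelines is untouched: take the current collector YAML plus the effective pipelines, return YAML with the pipeline processors spliced in. A hedged usage sketch (renderCollectorConf and its inputs are hypothetical):

	// Sketch: splice the effective pipelines into a collector config.
	func renderCollectorConf(currentYAML []byte, pipelines []pipelinetypes.GettablePipeline) ([]byte, *coreModel.ApiError) {
		newYAML, apiErr := GenerateCollectorConfigWithPipelines(currentYAML, pipelines)
		if apiErr != nil {
			return nil, apiErr
		}
		// newYAML now carries one processor (named with the LogsPPLPfx
		// prefix) per enabled pipeline, ready for agentConf rollout.
		return newYAML, nil
	}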


@@ -8,6 +8,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	. "github.com/smartystreets/goconvey/convey"
 	"github.com/stretchr/testify/require"
 	"gopkg.in/yaml.v3"

@@ -230,12 +231,14 @@ func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *test
       - memory
 	`)

-	makeTestPipeline := func(name string, alias string) Pipeline {
-		return Pipeline{
-			OrderId: 1,
+	makeTestPipeline := func(name string, alias string) pipelinetypes.GettablePipeline {
+		return pipelinetypes.GettablePipeline{
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				OrderID: 1,
 				Name:    name,
 				Alias:   alias,
 				Enabled: true,
+			},
 			Filter: &v3.FilterSet{
 				Operator: "AND",
 				Items: []v3.FilterItem{

@@ -250,7 +253,7 @@ func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *test
 					},
 				},
 			},
-			Config: []PipelineOperator{
+			Config: []pipelinetypes.PipelineOperator{
 				{
 					ID:   "regex",
 					Type: "regex_parser",

@@ -264,7 +267,7 @@ func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *test
 		}
 	}

-	testPipelines := []Pipeline{
+	testPipelines := []pipelinetypes.GettablePipeline{
 		makeTestPipeline("test pipeline 1", "pipeline-alias"),
 		makeTestPipeline("test pipeline 2", "pipeline-alias"),
 	}

@@ -299,12 +302,14 @@ func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *test
 func TestPipelineRouterWorksEvenIfFirstOpIsDisabled(t *testing.T) {
 	require := require.New(t)

-	testPipelines := []Pipeline{
+	testPipelines := []pipelinetypes.GettablePipeline{
 		{
-			OrderId: 1,
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				OrderID: 1,
 				Name:    "pipeline1",
 				Alias:   "pipeline1",
 				Enabled: true,
+			},
 			Filter: &v3.FilterSet{
 				Operator: "AND",
 				Items: []v3.FilterItem{

@@ -319,7 +324,7 @@ func TestPipelineRouterWorksEvenIfFirstOpIsDisabled(t *testing.T) {
 					},
 				},
 			},
-			Config: []PipelineOperator{
+			Config: []pipelinetypes.PipelineOperator{
 				{
 					OrderId: 1,
 					ID:      "add",

@@ -370,12 +375,14 @@ func TestPipelineRouterWorksEvenIfFirstOpIsDisabled(t *testing.T) {
 func TestPipeCharInAliasDoesntBreakCollectorConfig(t *testing.T) {
 	require := require.New(t)

-	testPipelines := []Pipeline{
+	testPipelines := []pipelinetypes.GettablePipeline{
 		{
-			OrderId: 1,
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				OrderID: 1,
 				Name:    "test | pipeline",
 				Alias:   "test|pipeline",
 				Enabled: true,
+			},
 			Filter: &v3.FilterSet{
 				Operator: "AND",
 				Items: []v3.FilterItem{

@@ -390,7 +397,7 @@ func TestPipeCharInAliasDoesntBreakCollectorConfig(t *testing.T) {
 					},
 				},
 			},
-			Config: []PipelineOperator{
+			Config: []pipelinetypes.PipelineOperator{
 				{
 					OrderId: 1,
 					ID:      "add",


@@ -11,9 +11,10 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/google/uuid"
-	"github.com/jmoiron/sqlx"
 	"github.com/pkg/errors"
 	"go.uber.org/zap"
 )

@@ -22,14 +23,14 @@ import (
 type LogParsingPipelineController struct {
 	Repo

-	GetIntegrationPipelines func(context.Context) ([]Pipeline, *model.ApiError)
+	GetIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError)
 }

 func NewLogParsingPipelinesController(
-	db *sqlx.DB,
-	getIntegrationPipelines func(context.Context) ([]Pipeline, *model.ApiError),
+	sqlStore sqlstore.SQLStore,
+	getIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError),
 ) (*LogParsingPipelineController, error) {
-	repo := NewRepo(db)
+	repo := NewRepo(sqlStore)
 	return &LogParsingPipelineController{
 		Repo:                    repo,
 		GetIntegrationPipelines: getIntegrationPipelines,

@@ -40,14 +41,15 @@ func NewLogParsingPipelinesController(
 type PipelinesResponse struct {
 	*agentConf.ConfigVersion

-	Pipelines []Pipeline                `json:"pipelines"`
+	Pipelines []pipelinetypes.GettablePipeline `json:"pipelines"`
 	History   []agentConf.ConfigVersion `json:"history"`
 }

 // ApplyPipelines stores new or changed pipelines and initiates a new config update
 func (ic *LogParsingPipelineController) ApplyPipelines(
 	ctx context.Context,
-	postable []PostablePipeline,
+	orgID string,
+	postable []pipelinetypes.PostablePipeline,
 ) (*PipelinesResponse, *model.ApiError) {
 	// get user id from context
 	claims, ok := authtypes.ClaimsFromContext(ctx)

@@ -55,7 +57,7 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
 		return nil, model.UnauthorizedError(fmt.Errorf("failed to get userId from context"))
 	}

-	var pipelines []Pipeline
+	var pipelines []pipelinetypes.GettablePipeline

 	// scan through postable pipelines, to select the existing pipelines or insert missing ones
 	for idx, r := range postable {

@@ -67,9 +69,9 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
 		// For versioning, pipelines get stored with unique ids each time they are saved.
 		// This ensures updating a pipeline doesn't alter historical versions that referenced
 		// the same pipeline id.
-		r.Id = uuid.NewString()
-		r.OrderId = idx + 1
-		pipeline, apiErr := ic.insertPipeline(ctx, &r)
+		r.ID = uuid.NewString()
+		r.OrderID = idx + 1
+		pipeline, apiErr := ic.insertPipeline(ctx, orgID, &r)
 		if apiErr != nil {
 			return nil, model.WrapApiError(apiErr, "failed to insert pipeline")
 		}

@@ -80,7 +82,7 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
 	// prepare config elements
 	elements := make([]string, len(pipelines))
 	for i, p := range pipelines {
-		elements[i] = p.Id
+		elements[i] = p.ID
 	}

 	// prepare config by calling gen func

@@ -94,7 +96,7 @@
 func (ic *LogParsingPipelineController) ValidatePipelines(
 	ctx context.Context,
-	postedPipelines []PostablePipeline,
+	postedPipelines []pipelinetypes.PostablePipeline,
 ) *model.ApiError {
 	for _, p := range postedPipelines {
 		if err := p.IsValid(); err != nil {

@@ -104,15 +106,17 @@ func (ic *LogParsingPipelineController) ValidatePipelines(
 	// Also run a collector simulation to ensure config is fit
 	// for e2e use with a collector
-	pipelines := []Pipeline{}
+	gettablePipelines := []pipelinetypes.GettablePipeline{}
 	for _, pp := range postedPipelines {
-		pipelines = append(pipelines, Pipeline{
-			Id:          uuid.New().String(),
-			OrderId:     pp.OrderId,
-			Description: &pp.Description,
+		gettablePipelines = append(gettablePipelines, pipelinetypes.GettablePipeline{
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				ID:          uuid.New().String(),
+				OrderID:     pp.OrderID,
 				Enabled:     pp.Enabled,
 				Name:        pp.Name,
 				Alias:       pp.Alias,
+				Description: pp.Description,
+			},
 			Filter: pp.Filter,
 			Config: pp.Config,
 		})

@@ -120,7 +124,7 @@ func (ic *LogParsingPipelineController) ValidatePipelines(
 	sampleLogs := []model.SignozLog{{Body: ""}}
 	_, _, simulationErr := SimulatePipelinesProcessing(
-		ctx, pipelines, sampleLogs,
+		ctx, gettablePipelines, sampleLogs,
 	)
 	if simulationErr != nil {
 		return model.BadRequest(fmt.Errorf(

@@ -135,14 +139,22 @@ func (ic *LogParsingPipelineController) ValidatePipelines(
 // pipelines and pipelines for installed integrations
 func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 	ctx context.Context, version int,
-) ([]Pipeline, *model.ApiError) {
-	result := []Pipeline{}
+) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
+	result := []pipelinetypes.GettablePipeline{}
+
+	// todo(nitya): remove this once we fix agents in multitenancy
+	defaultOrgID, err := ic.GetDefaultOrgID(ctx)
+	if err != nil {
+		return nil, model.WrapApiError(err, "failed to get default org ID")
+	}
+	fmt.Println("defaultOrgID", defaultOrgID)

 	if version >= 0 {
-		savedPipelines, errors := ic.getPipelinesByVersion(ctx, version)
+		savedPipelines, errors := ic.getPipelinesByVersion(ctx, defaultOrgID, version)
 		if errors != nil {
 			zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Errors("errors", errors))
-			return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version"))
+			return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
 		}
 		result = savedPipelines
 	}

@@ -156,10 +168,10 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 	// Filter out any integration pipelines included in pipelines saved by user
 	// if the corresponding integration is no longer installed.
-	ipAliases := utils.MapSlice(integrationPipelines, func(p Pipeline) string {
+	ipAliases := utils.MapSlice(integrationPipelines, func(p pipelinetypes.GettablePipeline) string {
 		return p.Alias
 	})
-	result = utils.FilterSlice(result, func(p Pipeline) bool {
+	result = utils.FilterSlice(result, func(p pipelinetypes.GettablePipeline) bool {
 		if !strings.HasPrefix(p.Alias, constants.IntegrationPipelineIdPrefix) {
 			return true
 		}

@@ -170,7 +182,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 	// Users are allowed to enable/disable and reorder integration pipelines while
 	// saving the pipeline list.
 	for _, ip := range integrationPipelines {
-		userPipelineIdx := slices.IndexFunc(result, func(p Pipeline) bool {
+		userPipelineIdx := slices.IndexFunc(result, func(p pipelinetypes.GettablePipeline) bool {
 			return p.Alias == ip.Alias
 		})
 		if userPipelineIdx >= 0 {

@@ -183,7 +195,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 	}

 	for idx := range result {
-		result[idx].OrderId = idx + 1
+		result[idx].OrderID = idx + 1
 	}

 	return result, nil

@@ -193,10 +205,11 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 func (ic *LogParsingPipelineController) GetPipelinesByVersion(
 	ctx context.Context, version int,
 ) (*PipelinesResponse, *model.ApiError) {
 	pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, version)
 	if errors != nil {
 		zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(errors))
-		return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version"))
+		return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
 	}

 	var configVersion *agentConf.ConfigVersion

@@ -216,7 +229,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion(
 }

 type PipelinesPreviewRequest struct {
-	Pipelines []Pipeline        `json:"pipelines"`
+	Pipelines []pipelinetypes.GettablePipeline `json:"pipelines"`
 	Logs      []model.SignozLog `json:"logs"`
 }
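Two things worth noting in this file. First, the GetDefaultOrgID fallback in getEffectivePipelinesByVersion, flagged with a todo, exists because agent-facing code paths have no claims to read a tenant from yet, so those reads are pinned to the first organization for now. Second, history immutability comes from never reusing row ids: every save mints fresh UUIDs, recomputes display order, and records the new ids as agent-config elements. The linkage, sketched (the agentConf call that consumes elements sits outside these hunks):

	// Sketch of the save-then-version linkage in ApplyPipelines.
	elements := make([]string, len(pipelines))
	for i, p := range pipelines {
		elements[i] = p.ID // freshly minted uuid, unique to this save
	}
	// elements are handed to agentConf to cut a new config version, so
	// version N keeps resolving to exactly the rows written for it, even
	// after pipelines are later edited or reordered.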


@@ -7,31 +7,33 @@ import (
 	"time"

 	"github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/google/uuid"
-	"github.com/jmoiron/sqlx"
 	"github.com/pkg/errors"
 	"go.uber.org/zap"
 )

 // Repo handles DDL and DML ops on ingestion pipeline
 type Repo struct {
-	db *sqlx.DB
+	sqlStore sqlstore.SQLStore
 }

 const logPipelines = "log_pipelines"

 // NewRepo initiates a new ingestion repo
-func NewRepo(db *sqlx.DB) Repo {
+func NewRepo(sqlStore sqlstore.SQLStore) Repo {
 	return Repo{
-		db: db,
+		sqlStore: sqlStore,
 	}
 }

 // insertPipeline stores a given postable pipeline to database
 func (r *Repo) insertPipeline(
-	ctx context.Context, postable *PostablePipeline,
-) (*Pipeline, *model.ApiError) {
+	ctx context.Context, orgID string, postable *pipelinetypes.PostablePipeline,
+) (*pipelinetypes.GettablePipeline, *model.ApiError) {
 	if err := postable.IsValid(); err != nil {
 		return nil, model.BadRequest(errors.Wrap(err,
 			"pipeline is not valid",

@@ -44,44 +46,43 @@ func (r *Repo) insertPipeline(
 			"failed to unmarshal postable pipeline config",
 		))
 	}

+	filter, err := json.Marshal(postable.Filter)
+	if err != nil {
+		return nil, model.BadRequest(errors.Wrap(err,
+			"failed to marshal postable pipeline filter",
+		))
+	}

 	claims, ok := authtypes.ClaimsFromContext(ctx)
 	if !ok {
 		return nil, model.UnauthorizedError(fmt.Errorf("failed to get email from context"))
 	}

-	insertRow := &Pipeline{
-		Id:          uuid.New().String(),
-		OrderId:     postable.OrderId,
-		Enabled:     postable.Enabled,
-		Name:        postable.Name,
-		Alias:       postable.Alias,
-		Description: &postable.Description,
-		Filter:      postable.Filter,
-		Config:      postable.Config,
-		RawConfig:   string(rawConfig),
-		Creator: Creator{
-			CreatedBy: claims.Email,
-			CreatedAt: time.Now(),
-		},
-	}
+	insertRow := &pipelinetypes.GettablePipeline{
+		StoreablePipeline: pipelinetypes.StoreablePipeline{
+			OrgID:        orgID,
+			ID:           uuid.New().String(),
+			OrderID:      postable.OrderID,
+			Enabled:      postable.Enabled,
+			Name:         postable.Name,
+			Alias:        postable.Alias,
+			Description:  postable.Description,
+			FilterString: string(filter),
+			ConfigJSON:   string(rawConfig),
+			TimeAuditable: types.TimeAuditable{
+				CreatedAt: time.Now(),
+			},
+			UserAuditable: types.UserAuditable{
+				CreatedBy: claims.Email,
+			},
+		},
+		Filter: postable.Filter,
+		Config: postable.Config,
+	}

-	insertQuery := `INSERT INTO pipelines
-		(id, order_id, enabled, created_by, created_at, name, alias, description, filter, config_json)
-		VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`
-	_, err = r.db.ExecContext(ctx,
-		insertQuery,
-		insertRow.Id,
-		insertRow.OrderId,
-		insertRow.Enabled,
-		insertRow.Creator.CreatedBy,
-		insertRow.Creator.CreatedAt,
-		insertRow.Name,
-		insertRow.Alias,
-		insertRow.Description,
-		insertRow.Filter,
-		insertRow.RawConfig)
+	_, err = r.sqlStore.BunDB().NewInsert().
+		Model(&insertRow.StoreablePipeline).
+		Exec(ctx)
 	if err != nil {
 		zap.L().Error("error in inserting pipeline data", zap.Error(err))

@@ -93,102 +94,105 @@ func (r *Repo) insertPipeline(
 // getPipelinesByVersion returns pipelines associated with a given version
 func (r *Repo) getPipelinesByVersion(
-	ctx context.Context, version int,
-) ([]Pipeline, []error) {
+	ctx context.Context, orgID string, version int,
+) ([]pipelinetypes.GettablePipeline, []error) {
 	var errors []error
-	pipelines := []Pipeline{}
-
-	versionQuery := `SELECT r.id,
-		r.name,
-		r.config_json,
-		r.alias,
-		r.description,
-		r.filter,
-		r.order_id,
-		r.created_by,
-		r.created_at,
-		r.enabled
-		FROM pipelines r,
-		agent_config_elements e,
-		agent_config_versions v
-		WHERE r.id = e.element_id
-		AND v.id = e.version_id
-		AND e.element_type = $1
-		AND v.version = $2
-		ORDER BY order_id asc`
-
-	err := r.db.SelectContext(ctx, &pipelines, versionQuery, logPipelines, version)
+	storablePipelines := []pipelinetypes.StoreablePipeline{}
+	err := r.sqlStore.BunDB().NewSelect().
+		Model(&storablePipelines).
+		Join("JOIN agent_config_elements e ON p.id = e.element_id").
+		Join("JOIN agent_config_versions v ON v.id = e.version_id").
+		Where("e.element_type = ?", logPipelines). // TODO: nitya - add org_id to this as well
+		Where("v.version = ?", version).           // TODO: nitya - add org_id to this as well
+		Where("p.org_id = ?", orgID).
+		Order("p.order_id ASC").
+		Scan(ctx)
 	if err != nil {
-		return nil, []error{fmt.Errorf("failed to get drop pipelines from db: %v", err)}
+		return nil, []error{fmt.Errorf("failed to get pipelines from db: %v", err)}
 	}

-	if len(pipelines) == 0 {
-		return pipelines, nil
+	gettablePipelines := make([]pipelinetypes.GettablePipeline, len(storablePipelines))
+	if len(storablePipelines) == 0 {
+		return gettablePipelines, nil
 	}

-	for i := range pipelines {
-		if err := pipelines[i].ParseRawConfig(); err != nil {
+	for i := range storablePipelines {
+		gettablePipelines[i].StoreablePipeline = storablePipelines[i]
+		if err := gettablePipelines[i].ParseRawConfig(); err != nil {
+			errors = append(errors, err)
+		}
+		if err := gettablePipelines[i].ParseFilter(); err != nil {
 			errors = append(errors, err)
 		}
 	}

-	return pipelines, errors
+	return gettablePipelines, errors
+}
+
+func (r *Repo) GetDefaultOrgID(ctx context.Context) (string, *model.ApiError) {
+	var orgs []types.Organization
+	err := r.sqlStore.BunDB().NewSelect().
+		Model(&orgs).
+		Scan(ctx)
+	if err != nil {
+		return "", model.InternalError(errors.Wrap(err, "failed to get default org ID"))
+	}
+	if len(orgs) == 0 {
+		return "", model.InternalError(errors.New("no orgs found"))
+	}
+	return orgs[0].ID, nil
 }

 // GetPipelines returns pipeline and errors (if any)
 func (r *Repo) GetPipeline(
-	ctx context.Context, id string,
-) (*Pipeline, *model.ApiError) {
-	pipelines := []Pipeline{}
-
-	pipelineQuery := `SELECT id,
-		name,
-		config_json,
-		alias,
-		description,
-		filter,
-		order_id,
-		created_by,
-		created_at,
-		enabled
-		FROM pipelines
-		WHERE id = $1`
-
-	err := r.db.SelectContext(ctx, &pipelines, pipelineQuery, id)
+	ctx context.Context, orgID string, id string,
+) (*pipelinetypes.GettablePipeline, *model.ApiError) {
+	storablePipelines := []pipelinetypes.StoreablePipeline{}
+	err := r.sqlStore.BunDB().NewSelect().
+		Model(&storablePipelines).
+		Where("id = ?", id).
+		Where("org_id = ?", orgID).
+		Scan(ctx)
 	if err != nil {
 		zap.L().Error("failed to get ingestion pipeline from db", zap.Error(err))
 		return nil, model.InternalError(errors.Wrap(err, "failed to get ingestion pipeline from db"))
 	}

-	if len(pipelines) == 0 {
+	if len(storablePipelines) == 0 {
 		zap.L().Warn("No row found for ingestion pipeline id", zap.String("id", id))
 		return nil, model.NotFoundError(fmt.Errorf("no row found for ingestion pipeline id %v", id))
 	}

-	if len(pipelines) == 1 {
-		err := pipelines[0].ParseRawConfig()
-		if err != nil {
+	if len(storablePipelines) == 1 {
+		gettablePipeline := pipelinetypes.GettablePipeline{}
+		gettablePipeline.StoreablePipeline = storablePipelines[0]
+		if err := gettablePipeline.ParseRawConfig(); err != nil {
 			zap.L().Error("invalid pipeline config found", zap.String("id", id), zap.Error(err))
 			return nil, model.InternalError(
 				errors.Wrap(err, "found an invalid pipeline config"),
 			)
 		}
-		return &pipelines[0], nil
+		if err := gettablePipeline.ParseFilter(); err != nil {
+			zap.L().Error("invalid pipeline filter found", zap.String("id", id), zap.Error(err))
+			return nil, model.InternalError(
+				errors.Wrap(err, "found an invalid pipeline filter"),
+			)
+		}
+		return &gettablePipeline, nil
 	}

 	return nil, model.InternalError(fmt.Errorf("multiple pipelines with same id"))
 }

-func (r *Repo) DeletePipeline(ctx context.Context, id string) error {
-	deleteQuery := `DELETE
-		FROM pipelines
-		WHERE id = $1`
-
-	_, err := r.db.ExecContext(ctx, deleteQuery, id)
+func (r *Repo) DeletePipeline(ctx context.Context, orgID string, id string) error {
+	_, err := r.sqlStore.BunDB().NewDelete().
+		Model(&pipelinetypes.StoreablePipeline{}).
+		Where("id = ?", id).
+		Where("org_id = ?", orgID).
+		Exec(ctx)
 	if err != nil {
 		return model.BadRequest(err)
 	}
 	return nil
 }
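With the hand-written SQL gone, bun derives dialect-appropriate SQL from the model. For getPipelinesByVersion above, the generated query should be roughly the following; the pipelines table name is carried over from the old SQL, and the p alias is an assumption implied by the p.org_id / p.order_id references:

	// Rough equivalent of the bun query in getPipelinesByVersion:
	//
	//   SELECT p.* FROM pipelines AS p
	//   JOIN agent_config_elements e ON p.id = e.element_id
	//   JOIN agent_config_versions v ON v.id = e.version_id
	//   WHERE e.element_type = ? AND v.version = ? AND p.org_id = ?
	//   ORDER BY p.order_id ASC
	//
	// bun rewrites the placeholders per dialect ($1, $2, ... on Postgres),
	// which is what makes the same repo code work on sqlite and postgres.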


@@ -1,104 +0,0 @@
-package logparsingpipeline
-
-import (
-	"encoding/json"
-	"time"
-
-	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
-	"github.com/pkg/errors"
-)
-
-// Pipeline is stored and also deployed finally to collector config
-type Pipeline struct {
-	Id          string        `json:"id,omitempty" db:"id"`
-	OrderId     int           `json:"orderId" db:"order_id"`
-	Name        string        `json:"name,omitempty" db:"name"`
-	Alias       string        `json:"alias" db:"alias"`
-	Description *string       `json:"description" db:"description"`
-	Enabled     bool          `json:"enabled" db:"enabled"`
-	Filter      *v3.FilterSet `json:"filter" db:"filter"`
-
-	// configuration for pipeline
-	RawConfig string             `db:"config_json" json:"-"`
-	Config    []PipelineOperator `json:"config"`
-
-	// Updater not required as any change will result in new version
-	Creator
-}
-
-type Creator struct {
-	CreatedBy string    `json:"createdBy" db:"created_by"`
-	CreatedAt time.Time `json:"createdAt" db:"created_at"`
-}
-
-type Processor struct {
-	Operators []PipelineOperator `json:"operators" yaml:"operators"`
-}
-
-type PipelineOperator struct {
-	Type    string `json:"type" yaml:"type"`
-	ID      string `json:"id,omitempty" yaml:"id,omitempty"`
-	Output  string `json:"output,omitempty" yaml:"output,omitempty"`
-	OnError string `json:"on_error,omitempty" yaml:"on_error,omitempty"`
-	If      string `json:"if,omitempty" yaml:"if,omitempty"`
-
-	// don't need the following in the final config
-	OrderId int    `json:"orderId" yaml:"-"`
-	Enabled bool   `json:"enabled" yaml:"-"`
-	Name    string `json:"name,omitempty" yaml:"-"`
-
-	// optional keys depending on the type
-	ParseTo   string `json:"parse_to,omitempty" yaml:"parse_to,omitempty"`
-	Pattern   string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
-	Regex     string `json:"regex,omitempty" yaml:"regex,omitempty"`
-	ParseFrom string `json:"parse_from,omitempty" yaml:"parse_from,omitempty"`
-
-	*TraceParser `yaml:",inline,omitempty"`
-
-	Field   string   `json:"field,omitempty" yaml:"field,omitempty"`
-	Value   string   `json:"value,omitempty" yaml:"value,omitempty"`
-	From    string   `json:"from,omitempty" yaml:"from,omitempty"`
-	To      string   `json:"to,omitempty" yaml:"to,omitempty"`
-	Expr    string   `json:"expr,omitempty" yaml:"expr,omitempty"`
-	Routes  *[]Route `json:"routes,omitempty" yaml:"routes,omitempty"`
-	Fields  []string `json:"fields,omitempty" yaml:"fields,omitempty"`
-	Default string   `json:"default,omitempty" yaml:"default,omitempty"`
-
-	// time_parser fields.
-	Layout     string `json:"layout,omitempty" yaml:"layout,omitempty"`
-	LayoutType string `json:"layout_type,omitempty" yaml:"layout_type,omitempty"`
-
-	// severity parser fields
-	SeverityMapping       map[string][]string `json:"mapping,omitempty" yaml:"mapping,omitempty"`
-	OverwriteSeverityText bool                `json:"overwrite_text,omitempty" yaml:"overwrite_text,omitempty"`
-}
-
-type TimestampParser struct {
-	Layout     string `json:"layout" yaml:"layout"`
-	LayoutType string `json:"layout_type" yaml:"layout_type"`
-	ParseFrom  string `json:"parse_from" yaml:"parse_from"`
-}
-
-type TraceParser struct {
-	TraceId    *ParseFrom `json:"trace_id,omitempty" yaml:"trace_id,omitempty"`
-	SpanId     *ParseFrom `json:"span_id,omitempty" yaml:"span_id,omitempty"`
-	TraceFlags *ParseFrom `json:"trace_flags,omitempty" yaml:"trace_flags,omitempty"`
-}
-
-type ParseFrom struct {
-	ParseFrom string `json:"parse_from" yaml:"parse_from"`
-}
-
-type Route struct {
-	Output string `json:"output" yaml:"output"`
-	Expr   string `json:"expr" yaml:"expr"`
-}
-
-func (i *Pipeline) ParseRawConfig() error {
-	c := []PipelineOperator{}
-	err := json.Unmarshal([]byte(i.RawConfig), &c)
-	if err != nil {
-		return errors.Wrap(err, "failed to parse ingestion rule config")
-	}
-	i.Config = c
-	return nil
-}
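Nothing here was dropped outright: per the commit message these types moved into pkg/types/pipelinetypes, with the persisted struct split out as StoreablePipeline and the filter now stored as a JSON string next to config_json. The ParseFilter calls in the repo above imply a counterpart to ParseRawConfig along these lines, as an assumption rather than code copied from the new package:

	// Assumed sketch of GettablePipeline.ParseFilter, mirroring the
	// deleted ParseRawConfig above.
	func (p *GettablePipeline) ParseFilter() error {
		f := v3.FilterSet{}
		if err := json.Unmarshal([]byte(p.FilterString), &f); err != nil {
			return errors.Wrap(err, "failed to parse pipeline filter")
		}
		p.Filter = &f
		return nil
	}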


@@ -8,6 +8,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/queryBuilderToExpr"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/antonmedv/expr"
 	"github.com/antonmedv/expr/ast"
 	"github.com/antonmedv/expr/parser"

@@ -22,15 +23,15 @@ const (
 // only alphabets, digits and `-` are used when translating pipeline identifiers
 var badCharsForCollectorConfName = regexp.MustCompile("[^a-zA-Z0-9-]")

-func CollectorConfProcessorName(p Pipeline) string {
+func CollectorConfProcessorName(p pipelinetypes.GettablePipeline) string {
 	normalizedAlias := badCharsForCollectorConfName.ReplaceAllString(p.Alias, "-")
 	return constants.LogsPPLPfx + normalizedAlias
 }

-func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
+func PreparePipelineProcessor(gettablePipelines []pipelinetypes.GettablePipeline) (map[string]interface{}, []string, error) {
 	processors := map[string]interface{}{}
 	names := []string{}
-	for pipelineIdx, v := range pipelines {
+	for pipelineIdx, v := range gettablePipelines {
 		if !v.Enabled {
 			continue
 		}

@@ -49,11 +50,11 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
 			return nil, nil, errors.Wrap(err, "failed to parse pipeline filter")
 		}

-		router := []PipelineOperator{
+		router := []pipelinetypes.PipelineOperator{
 			{
 				ID:   "router_signoz",
 				Type: "router",
-				Routes: &[]Route{
+				Routes: &[]pipelinetypes.Route{
 					{
 						Output: operators[0].ID,
 						Expr:   filterExpr,

@@ -66,13 +67,13 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
 		v.Config = append(router, operators...)

 		// noop operator is needed as the default operator so that logs are not dropped
-		noop := PipelineOperator{
+		noop := pipelinetypes.PipelineOperator{
 			ID:   NOOP,
 			Type: NOOP,
 		}
 		v.Config = append(v.Config, noop)

-		processor := Processor{
+		processor := pipelinetypes.Processor{
 			Operators: v.Config,
 		}
 		name := CollectorConfProcessorName(v)

@@ -88,8 +89,8 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
 	return processors, names, nil
 }

-func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
-	filteredOp := []PipelineOperator{}
+func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.PipelineOperator, error) {
+	filteredOp := []pipelinetypes.PipelineOperator{}
 	for i, operator := range ops {
 		if operator.Enabled {
 			if len(filteredOp) > 0 {

@@ -179,7 +180,7 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
 			operator.If = parseFromNotNilCheck

 			if operator.LayoutType == "strptime" {
-				regex, err := RegexForStrptimeLayout(operator.Layout)
+				regex, err := pipelinetypes.RegexForStrptimeLayout(operator.Layout)
 				if err != nil {
 					return nil, fmt.Errorf(
 						"couldn't generate layout regex for time_parser %s: %w", operator.Name, err,

@@ -224,7 +225,7 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
 	return filteredOp, nil
 }

-func cleanTraceParser(operator *PipelineOperator) {
+func cleanTraceParser(operator *pipelinetypes.PipelineOperator) {
 	if operator.TraceId != nil && len(operator.TraceId.ParseFrom) < 1 {
 		operator.TraceId = nil
 	}
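The compiled processor shape is unchanged by this diff: every enabled pipeline still becomes one collector processor whose operator chain opens with a router and closes with a noop. Summarized (a description of the assembled result, not code from the repo):

	// Operator chain each enabled pipeline compiles to:
	//
	//   router_signoz -- routes logs matching the pipeline filter to the
	//                    first user operator, everything else to NOOP
	//   <user operators, disabled ones dropped and outputs re-linked>
	//   noop          -- default route, so non-matching logs pass through
	//
	// i.e. v.Config = append(append(router, operators...), noop)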


@@ -10,6 +10,8 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
+	"github.com/google/uuid"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
 	. "github.com/smartystreets/goconvey/convey"
 	"github.com/stretchr/testify/require"

@@ -17,12 +19,12 @@ import (
 var prepareProcessorTestData = []struct {
 	Name      string
-	Operators []PipelineOperator
-	Output    []PipelineOperator
+	Operators []pipelinetypes.PipelineOperator
+	Output    []pipelinetypes.PipelineOperator
 }{
 	{
 		Name: "Last operator disabled",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -35,7 +37,7 @@ var prepareProcessorTestData = []struct {
 				Enabled: false,
 			},
 		},
-		Output: []PipelineOperator{
+		Output: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -45,7 +47,7 @@ var prepareProcessorTestData = []struct {
 	},
 	{
 		Name: "Operator in middle disabled",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -64,7 +66,7 @@ var prepareProcessorTestData = []struct {
 				Enabled: true,
 			},
 		},
-		Output: []PipelineOperator{
+		Output: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -80,7 +82,7 @@ var prepareProcessorTestData = []struct {
 	},
 	{
 		Name: "Single operator disabled",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -88,18 +90,18 @@ var prepareProcessorTestData = []struct {
 				Enabled: false,
 			},
 		},
-		Output: []PipelineOperator{},
+		Output: []pipelinetypes.PipelineOperator{},
 	},
 	{
 		Name: "Single operator enabled",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:      "t1",
 				Name:    "t1",
 				Enabled: true,
 			},
 		},
-		Output: []PipelineOperator{
+		Output: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "t1",
 				Name: "t1",

@@ -109,12 +111,12 @@ var prepareProcessorTestData = []struct {
 	},
 	{
 		Name:      "Empty operator",
-		Operators: []PipelineOperator{},
-		Output:    []PipelineOperator{},
+		Operators: []pipelinetypes.PipelineOperator{},
+		Output:    []pipelinetypes.PipelineOperator{},
 	},
 	{
 		Name: "new test",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:     "move_filename",
 				Output: "move_function",

@@ -145,7 +147,7 @@ var prepareProcessorTestData = []struct {
 				Name: "move_lwp",
 			},
 		},
-		Output: []PipelineOperator{
+		Output: []pipelinetypes.PipelineOperator{
 			{
 				ID:     "move_filename",
 				Output: "move_line",

@@ -173,7 +175,7 @@ var prepareProcessorTestData = []struct {
 	},
 	{
 		Name: "first op disabled",
-		Operators: []PipelineOperator{
+		Operators: []pipelinetypes.PipelineOperator{
 			{
 				ID:     "move_filename",
 				Output: "move_function",

@@ -186,7 +188,7 @@ var prepareProcessorTestData = []struct {
 				Name: "move_function",
 			},
 		},
-		Output: []PipelineOperator{
+		Output: []pipelinetypes.PipelineOperator{
 			{
 				ID:      "move_function",
 				Enabled: true,

@@ -223,12 +225,15 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			},
 		},
 	}

-	makeTestPipeline := func(config []PipelineOperator) Pipeline {
-		return Pipeline{
-			OrderId: 1,
+	makeTestPipeline := func(config []pipelinetypes.PipelineOperator) pipelinetypes.GettablePipeline {
+		return pipelinetypes.GettablePipeline{
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				ID:      uuid.New().String(),
+				OrderID: 1,
 				Name:    "pipeline1",
 				Alias:   "pipeline1",
 				Enabled: true,
+			},
 			Filter: testPipelineFilter,
 			Config: config,
 		}

@@ -260,14 +265,14 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 	type pipelineTestCase struct {
 		Name           string
-		Operator       PipelineOperator
+		Operator       pipelinetypes.PipelineOperator
 		NonMatchingLog model.SignozLog
 	}

 	testCases := []pipelineTestCase{
 		{
 			"regex processor should ignore log with missing field",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "regex",
 				Type:    "regex_parser",
 				Enabled: true,

@@ -279,7 +284,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"regex processor should ignore non-matching log",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "regex",
 				Type:    "regex_parser",
 				Enabled: true,

@@ -291,7 +296,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"json parser should ignore logs with missing field.",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "json",
 				Type:    "json_parser",
 				Enabled: true,

@@ -303,7 +308,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 		},
 		{
 			"json parser should ignore log with non JSON target field value",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "json",
 				Type:    "json_parser",
 				Enabled: true,

@@ -316,7 +321,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			}),
 		}, {
 			"move parser should ignore non matching logs",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "move",
 				Type:    "move",
 				Enabled: true,

@@ -327,7 +332,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"copy parser should ignore non matching logs",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "copy",
 				Type:    "copy",
 				Enabled: true,

@@ -338,7 +343,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"remove parser should ignore non matching logs",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "remove",
 				Type:    "remove",
 				Enabled: true,

@@ -348,7 +353,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"time parser should ignore logs with missing field.",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "time",
 				Type:    "time_parser",
 				Enabled: true,

@@ -360,7 +365,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			makeTestLog("mismatching log", map[string]string{}),
 		}, {
 			"time parser should ignore logs timestamp values that don't contain expected strptime layout.",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "time",
 				Type:    "time_parser",
 				Enabled: true,

@@ -374,7 +379,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			}),
 		}, {
 			"time parser should ignore logs timestamp values that don't contain an epoch",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "time",
 				Type:    "time_parser",
 				Enabled: true,

@@ -388,7 +393,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 			}),
 		}, {
 			"grok parser should ignore logs with missing parse from field",
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "grok",
 				Type:    "grok_parser",
 				Enabled: true,

@@ -417,7 +422,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 				"time parser should ignore log with timestamp value %s that doesn't match layout type %s",
 				testValue, epochLayout,
 			),
-			PipelineOperator{
+			pipelinetypes.PipelineOperator{
 				ID:      "time",
 				Type:    "time_parser",
 				Enabled: true,

@@ -434,7 +439,7 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 	}

 	for _, testCase := range testCases {
-		testPipelines := []Pipeline{makeTestPipeline([]PipelineOperator{testCase.Operator})}
+		testPipelines := []pipelinetypes.GettablePipeline{makeTestPipeline([]pipelinetypes.PipelineOperator{testCase.Operator})}

 		result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
 			context.Background(),

@@ -450,11 +455,14 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
 func TestResourceFiltersWork(t *testing.T) {
 	require := require.New(t)

-	testPipeline := Pipeline{
-		OrderId: 1,
+	testPipeline := pipelinetypes.GettablePipeline{
+		StoreablePipeline: pipelinetypes.StoreablePipeline{
+			ID:      uuid.New().String(),
+			OrderID: 1,
 			Name:    "pipeline1",
 			Alias:   "pipeline1",
 			Enabled: true,
+		},
 		Filter: &v3.FilterSet{
 			Operator: "AND",
 			Items: []v3.FilterItem{

@@ -469,7 +477,7 @@ func TestResourceFiltersWork(t *testing.T) {
 				},
 			},
 		},
-		Config: []PipelineOperator{
+		Config: []pipelinetypes.PipelineOperator{
 			{
 				ID:   "add",
 				Type: "add",

@@ -496,7 +504,7 @@ func TestResourceFiltersWork(t *testing.T) {
 	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
 		context.Background(),
-		[]Pipeline{testPipeline},
+		[]pipelinetypes.GettablePipeline{testPipeline},
 		[]model.SignozLog{testLog},
 	)
 	require.Nil(err)

@@ -515,11 +523,14 @@ func TestPipelineFilterWithStringOpsShouldNotSpamWarningsIfAttributeIsMissing(t
 		v3.FilterOperatorRegex,
 		v3.FilterOperatorNotRegex,
 	} {
-		testPipeline := Pipeline{
-			OrderId: 1,
+		testPipeline := pipelinetypes.GettablePipeline{
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				ID:      uuid.New().String(),
+				OrderID: 1,
 				Name:    "pipeline1",
 				Alias:   "pipeline1",
 				Enabled: true,
+			},
 			Filter: &v3.FilterSet{
 				Operator: "AND",
 				Items: []v3.FilterItem{

@@ -534,7 +545,7 @@ func TestPipelineFilterWithStringOpsShouldNotSpamWarningsIfAttributeIsMissing(t
 					},
 				},
 			},
-			Config: []PipelineOperator{
+			Config: []pipelinetypes.PipelineOperator{
 				{
 					ID:   "add",
 					Type: "add",

@@ -559,7 +570,7 @@ func TestPipelineFilterWithStringOpsShouldNotSpamWarningsIfAttributeIsMissing(t
 		result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
 			context.Background(),
-			[]Pipeline{testPipeline},
+			[]pipelinetypes.GettablePipeline{testPipeline},
 			[]model.SignozLog{testLog},
 		)
 		require.Nil(err)

@@ -571,11 +582,14 @@ func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
 func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipeline := Pipeline{ testPipeline := pipelinetypes.GettablePipeline{
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
ID: uuid.New().String(),
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -590,7 +604,7 @@ func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
ID: "move", ID: "move",
Type: "move", Type: "move",
@ -610,7 +624,7 @@ func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing( result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(), context.Background(),
[]Pipeline{testPipeline}, []pipelinetypes.GettablePipeline{testPipeline},
testLogs, testLogs,
) )
require.Nil(err) require.Nil(err)
@ -629,11 +643,14 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
}), }),
} }
testPipeline := Pipeline{ testPipeline := pipelinetypes.GettablePipeline{
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
ID: uuid.New().String(),
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -648,7 +665,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
ID: "move", ID: "move",
Type: "move", Type: "move",
@ -711,7 +728,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing( result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(), context.Background(),
[]Pipeline{testPipeline}, []pipelinetypes.GettablePipeline{testPipeline},
testLogs, testLogs,
) )
require.Nil(err) require.Nil(err)
@ -733,11 +750,14 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
makeTestSignozLog("test Ecom Log", map[string]interface{}{}), makeTestSignozLog("test Ecom Log", map[string]interface{}{}),
} }
testPipelines := []Pipeline{{ testPipelines := []pipelinetypes.GettablePipeline{{
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
ID: uuid.New().String(),
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{{ Items: []v3.FilterItem{{
@ -751,7 +771,7 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
Value: "log", Value: "log",
}}, }},
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
ID: "add", ID: "add",
Type: "add", Type: "add",
@ -762,10 +782,13 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
}, },
}, },
}, { }, {
OrderId: 2, StoreablePipeline: pipelinetypes.StoreablePipeline{
ID: uuid.New().String(),
OrderID: 2,
Name: "pipeline2", Name: "pipeline2",
Alias: "pipeline2", Alias: "pipeline2",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{{ Items: []v3.FilterItem{{
@ -779,7 +802,7 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
Value: "ecom", Value: "ecom",
}}, }},
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
ID: "add", ID: "add",
Type: "add", Type: "add",


@ -10,6 +10,7 @@ import (
_ "github.com/SigNoz/signoz-otel-collector/pkg/parser/grok" _ "github.com/SigNoz/signoz-otel-collector/pkg/parser/grok"
"github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor" "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/pkg/errors" "github.com/pkg/errors"
"go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog"
@ -18,7 +19,7 @@ import (
func SimulatePipelinesProcessing( func SimulatePipelinesProcessing(
ctx context.Context, ctx context.Context,
pipelines []Pipeline, pipelines []pipelinetypes.GettablePipeline,
logs []model.SignozLog, logs []model.SignozLog,
) ( ) (
output []model.SignozLog, collectorWarnAndErrorLogs []string, apiErr *model.ApiError, output []model.SignozLog, collectorWarnAndErrorLogs []string, apiErr *model.ApiError,
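
For illustration, a minimal sketch of the updated call shape (wrapper name hypothetical; the signature matches the change above):

package logparsingpipeline

import (
	"context"

	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
)

// simulateOne is a hypothetical wrapper showing that pipelines are now
// passed as pipelinetypes.GettablePipeline instead of the old Pipeline type.
func simulateOne(pipeline pipelinetypes.GettablePipeline, log model.SignozLog) ([]model.SignozLog, []string, *model.ApiError) {
	return SimulatePipelinesProcessing(
		context.Background(),
		[]pipelinetypes.GettablePipeline{pipeline},
		[]model.SignozLog{log},
	)
}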


@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -16,12 +17,14 @@ import (
func TestPipelinePreview(t *testing.T) { func TestPipelinePreview(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -36,7 +39,7 @@ func TestPipelinePreview(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -49,10 +52,12 @@ func TestPipelinePreview(t *testing.T) {
}, },
}, },
{ {
OrderId: 2, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 2,
Name: "pipeline2", Name: "pipeline2",
Alias: "pipeline2", Alias: "pipeline2",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -67,7 +72,7 @@ func TestPipelinePreview(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -146,12 +151,14 @@ func TestPipelinePreview(t *testing.T) {
func TestGrokParsingProcessor(t *testing.T) { func TestGrokParsingProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -166,7 +173,7 @@ func TestGrokParsingProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "grok", ID: "grok",


@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -21,12 +22,14 @@ import (
func TestRegexProcessor(t *testing.T) { func TestRegexProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -41,11 +44,11 @@ func TestRegexProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -86,12 +89,14 @@ func TestRegexProcessor(t *testing.T) {
func TestGrokProcessor(t *testing.T) { func TestGrokProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -106,11 +111,11 @@ func TestGrokProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -151,12 +156,14 @@ func TestGrokProcessor(t *testing.T) {
func TestJSONProcessor(t *testing.T) { func TestJSONProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -171,11 +178,11 @@ func TestJSONProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -215,12 +222,14 @@ func TestJSONProcessor(t *testing.T) {
func TestTraceParsingProcessor(t *testing.T) { func TestTraceParsingProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -235,12 +244,12 @@ func TestTraceParsingProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
// Start with JSON serialized trace parser to validate deserialization too // Start with JSON serialized trace parser to validate deserialization too
var traceParserOp PipelineOperator var traceParserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -322,12 +331,14 @@ func TestTraceParsingProcessor(t *testing.T) {
func TestAddProcessor(t *testing.T) { func TestAddProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -342,11 +353,11 @@ func TestAddProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -385,12 +396,14 @@ func TestAddProcessor(t *testing.T) {
func TestRemoveProcessor(t *testing.T) { func TestRemoveProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -405,11 +418,11 @@ func TestRemoveProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -448,12 +461,14 @@ func TestRemoveProcessor(t *testing.T) {
func TestCopyProcessor(t *testing.T) { func TestCopyProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -468,11 +483,11 @@ func TestCopyProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -512,12 +527,14 @@ func TestCopyProcessor(t *testing.T) {
func TestMoveProcessor(t *testing.T) { func TestMoveProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -532,11 +549,11 @@ func TestMoveProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var parserOp PipelineOperator var parserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,


@ -8,18 +8,21 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestSeverityParsingProcessor(t *testing.T) { func TestSeverityParsingProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -34,11 +37,11 @@ func TestSeverityParsingProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var severityParserOp PipelineOperator var severityParserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,
@ -152,12 +155,14 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) {
}, },
}, },
} }
makeTestPipeline := func(config []PipelineOperator) Pipeline { makeTestPipeline := func(config []pipelinetypes.PipelineOperator) pipelinetypes.GettablePipeline {
return Pipeline{ return pipelinetypes.GettablePipeline{
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: testPipelineFilter, Filter: testPipelineFilter,
Config: config, Config: config,
} }
@ -165,14 +170,14 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) {
type pipelineTestCase struct { type pipelineTestCase struct {
Name string Name string
Operator PipelineOperator Operator pipelinetypes.PipelineOperator
NonMatchingLog model.SignozLog NonMatchingLog model.SignozLog
} }
testCases := []pipelineTestCase{ testCases := []pipelineTestCase{
{ {
"severity parser should ignore logs with missing field", "severity parser should ignore logs with missing field",
PipelineOperator{ pipelinetypes.PipelineOperator{
ID: "severity", ID: "severity",
Type: "severity_parser", Type: "severity_parser",
Enabled: true, Enabled: true,
@ -188,7 +193,7 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) {
}), }),
}, { }, {
"severity parser should ignore logs with invalid values.", "severity parser should ignore logs with invalid values.",
PipelineOperator{ pipelinetypes.PipelineOperator{
ID: "severity", ID: "severity",
Type: "severity_parser", Type: "severity_parser",
Enabled: true, Enabled: true,
@ -207,7 +212,7 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) {
} }
for _, testCase := range testCases { for _, testCase := range testCases {
testPipelines := []Pipeline{makeTestPipeline([]PipelineOperator{testCase.Operator})} testPipelines := []pipelinetypes.GettablePipeline{makeTestPipeline([]pipelinetypes.PipelineOperator{testCase.Operator})}
result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing( result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
context.Background(), context.Background(),


@ -3,76 +3,27 @@ package logparsingpipeline
import ( import (
"context" "context"
"encoding/json" "encoding/json"
"fmt"
"strings" "strings"
"testing" "testing"
"time" "time"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/antonmedv/expr" "github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestRegexForStrptimeLayout(t *testing.T) {
require := require.New(t)
var testCases = []struct {
strptimeLayout string
str string
shouldMatch bool
}{
{
strptimeLayout: "%Y-%m-%dT%H:%M:%S.%f%z",
str: "2023-11-26T12:03:28.239907+0530",
shouldMatch: true,
}, {
strptimeLayout: "%d-%m-%Y",
str: "26-11-2023",
shouldMatch: true,
}, {
strptimeLayout: "%d-%m-%Y",
str: "26-11-2023",
shouldMatch: true,
}, {
strptimeLayout: "%d/%m/%y",
str: "11/03/02",
shouldMatch: true,
}, {
strptimeLayout: "%A, %d. %B %Y %I:%M%p",
str: "Tuesday, 21. November 2006 04:30PM11/03/02",
shouldMatch: true,
}, {
strptimeLayout: "%A, %d. %B %Y %I:%M%p",
str: "some random text",
shouldMatch: false,
},
}
for _, test := range testCases {
regex, err := RegexForStrptimeLayout(test.strptimeLayout)
require.Nil(err, test.strptimeLayout)
code := fmt.Sprintf(`"%s" matches "%s"`, test.str, regex)
program, err := expr.Compile(code)
require.Nil(err, test.strptimeLayout)
output, err := expr.Run(program, map[string]string{})
require.Nil(err, test.strptimeLayout)
require.Equal(output, test.shouldMatch, test.strptimeLayout)
}
}
func TestTimestampParsingProcessor(t *testing.T) { func TestTimestampParsingProcessor(t *testing.T) {
require := require.New(t) require := require.New(t)
testPipelines := []Pipeline{ testPipelines := []pipelinetypes.GettablePipeline{
{ {
OrderId: 1, StoreablePipeline: pipelinetypes.StoreablePipeline{
OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
},
Filter: &v3.FilterSet{ Filter: &v3.FilterSet{
Operator: "AND", Operator: "AND",
Items: []v3.FilterItem{ Items: []v3.FilterItem{
@ -87,11 +38,11 @@ func TestTimestampParsingProcessor(t *testing.T) {
}, },
}, },
}, },
Config: []PipelineOperator{}, Config: []pipelinetypes.PipelineOperator{},
}, },
} }
var timestampParserOp PipelineOperator var timestampParserOp pipelinetypes.PipelineOperator
err := json.Unmarshal([]byte(` err := json.Unmarshal([]byte(`
{ {
"orderId": 1, "orderId": 1,


@ -251,6 +251,9 @@ func (agent *Agent) processStatusUpdate(
if agentDescrChanged { if agentDescrChanged {
// Agent description is changed. // Agent description is changed.
// Get the default org ID
// We need to recalculate the config. // We need to recalculate the config.
configChanged = agent.updateRemoteConfig(configProvider) configChanged = agent.updateRemoteConfig(configProvider)
} }


@ -183,7 +183,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
} }
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController( logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
serverOptions.SigNoz.SQLStore.SQLxDB(), integrationsController.GetPipelinesForInstalledIntegrations, serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
) )
if err != nil { if err != nil {
return nil, err return nil, err


@ -22,6 +22,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/gorilla/mux" "github.com/gorilla/mux"
"github.com/knadh/koanf/parsers/yaml" "github.com/knadh/koanf/parsers/yaml"
@ -62,15 +63,15 @@ func TestLogPipelinesLifecycle(t *testing.T) {
}, },
} }
postablePipelines := logparsingpipeline.PostablePipelines{ postablePipelines := pipelinetypes.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{ Pipelines: []pipelinetypes.PostablePipeline{
{ {
OrderId: 1, OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: pipelineFilterSet, Filter: pipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -82,12 +83,12 @@ func TestLogPipelinesLifecycle(t *testing.T) {
}, },
}, },
}, { }, {
OrderId: 2, OrderID: 2,
Name: "pipeline2", Name: "pipeline2",
Alias: "pipeline2", Alias: "pipeline2",
Enabled: true, Enabled: true,
Filter: pipelineFilterSet, Filter: pipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "remove", ID: "remove",
@ -179,10 +180,10 @@ func TestLogPipelinesHistory(t *testing.T) {
getPipelinesResp := testbed.GetPipelinesFromQS() getPipelinesResp := testbed.GetPipelinesFromQS()
require.Equal(0, len(getPipelinesResp.History)) require.Equal(0, len(getPipelinesResp.History))
postablePipelines := logparsingpipeline.PostablePipelines{ postablePipelines := pipelinetypes.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{ Pipelines: []pipelinetypes.PostablePipeline{
{ {
OrderId: 1, OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -200,7 +201,7 @@ func TestLogPipelinesHistory(t *testing.T) {
}, },
}, },
}, },
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -222,7 +223,7 @@ func TestLogPipelinesHistory(t *testing.T) {
postablePipelines.Pipelines[0].Config = append( postablePipelines.Pipelines[0].Config = append(
postablePipelines.Pipelines[0].Config, postablePipelines.Pipelines[0].Config,
logparsingpipeline.PipelineOperator{ pipelinetypes.PipelineOperator{
OrderId: 2, OrderId: 2,
ID: "remove", ID: "remove",
Type: "remove", Type: "remove",
@ -259,18 +260,18 @@ func TestLogPipelinesValidation(t *testing.T) {
testCases := []struct { testCases := []struct {
Name string Name string
Pipeline logparsingpipeline.PostablePipeline Pipeline pipelinetypes.PostablePipeline
ExpectedResponseStatusCode int ExpectedResponseStatusCode int
}{ }{
{ {
Name: "Valid Pipeline", Name: "Valid Pipeline",
Pipeline: logparsingpipeline.PostablePipeline{ Pipeline: pipelinetypes.PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: validPipelineFilterSet, Filter: validPipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -286,13 +287,13 @@ func TestLogPipelinesValidation(t *testing.T) {
}, },
{ {
Name: "Invalid orderId", Name: "Invalid orderId",
Pipeline: logparsingpipeline.PostablePipeline{ Pipeline: pipelinetypes.PostablePipeline{
OrderId: 0, OrderID: 0,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: validPipelineFilterSet, Filter: validPipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -308,13 +309,13 @@ func TestLogPipelinesValidation(t *testing.T) {
}, },
{ {
Name: "Invalid filter", Name: "Invalid filter",
Pipeline: logparsingpipeline.PostablePipeline{ Pipeline: pipelinetypes.PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: &v3.FilterSet{}, Filter: &v3.FilterSet{},
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -330,13 +331,13 @@ func TestLogPipelinesValidation(t *testing.T) {
}, },
{ {
Name: "Invalid operator field", Name: "Invalid operator field",
Pipeline: logparsingpipeline.PostablePipeline{ Pipeline: pipelinetypes.PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: validPipelineFilterSet, Filter: validPipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -351,13 +352,13 @@ func TestLogPipelinesValidation(t *testing.T) {
ExpectedResponseStatusCode: 400, ExpectedResponseStatusCode: 400,
}, { }, {
Name: "Invalid from field path", Name: "Invalid from field path",
Pipeline: logparsingpipeline.PostablePipeline{ Pipeline: pipelinetypes.PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
Filter: validPipelineFilterSet, Filter: validPipelineFilterSet,
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "move", ID: "move",
@ -377,8 +378,8 @@ func TestLogPipelinesValidation(t *testing.T) {
t.Run(tc.Name, func(t *testing.T) { t.Run(tc.Name, func(t *testing.T) {
testbed := NewLogPipelinesTestBed(t, nil) testbed := NewLogPipelinesTestBed(t, nil)
testbed.PostPipelinesToQSExpectingStatusCode( testbed.PostPipelinesToQSExpectingStatusCode(
logparsingpipeline.PostablePipelines{ pipelinetypes.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{tc.Pipeline}, Pipelines: []pipelinetypes.PostablePipeline{tc.Pipeline},
}, },
tc.ExpectedResponseStatusCode, tc.ExpectedResponseStatusCode,
) )
@ -394,10 +395,10 @@ func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) {
require.Equal(0, len(getPipelinesResp.Pipelines)) require.Equal(0, len(getPipelinesResp.Pipelines))
require.Equal(0, len(getPipelinesResp.History)) require.Equal(0, len(getPipelinesResp.History))
postablePipelines := logparsingpipeline.PostablePipelines{ postablePipelines := pipelinetypes.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{ Pipelines: []pipelinetypes.PostablePipeline{
{ {
OrderId: 1, OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -415,7 +416,7 @@ func TestCanSavePipelinesWithoutConnectedAgents(t *testing.T) {
}, },
}, },
}, },
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -454,13 +455,16 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli
sqlStore = utils.NewQueryServiceDBForTests(t) sqlStore = utils.NewQueryServiceDBForTests(t)
} }
// create test org: utils.CreateTestOrg(t, sqlStore)
ic, err := integrations.NewController(sqlStore) ic, err := integrations.NewController(sqlStore)
if err != nil { if err != nil {
t.Fatalf("could not create integrations controller: %v", err) t.Fatalf("could not create integrations controller: %v", err)
} }
controller, err := logparsingpipeline.NewLogParsingPipelinesController( controller, err := logparsingpipeline.NewLogParsingPipelinesController(
sqlStore.SQLxDB(), ic.GetPipelinesForInstalledIntegrations, sqlStore, ic.GetPipelinesForInstalledIntegrations,
) )
if err != nil { if err != nil {
t.Fatalf("could not create a logparsingpipelines controller: %v", err) t.Fatalf("could not create a logparsingpipelines controller: %v", err)
@ -529,7 +533,7 @@ func NewLogPipelinesTestBed(t *testing.T, testDB sqlstore.SQLStore) *LogPipeline
} }
func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode( func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode(
postablePipelines logparsingpipeline.PostablePipelines, postablePipelines pipelinetypes.PostablePipelines,
expectedStatusCode int, expectedStatusCode int,
) *logparsingpipeline.PipelinesResponse { ) *logparsingpipeline.PipelinesResponse {
req, err := AuthenticatedRequestForTest( req, err := AuthenticatedRequestForTest(
@ -579,7 +583,7 @@ func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode(
} }
func (tb *LogPipelinesTestBed) PostPipelinesToQS( func (tb *LogPipelinesTestBed) PostPipelinesToQS(
postablePipelines logparsingpipeline.PostablePipelines, postablePipelines pipelinetypes.PostablePipelines,
) *logparsingpipeline.PipelinesResponse { ) *logparsingpipeline.PipelinesResponse {
return tb.PostPipelinesToQSExpectingStatusCode( return tb.PostPipelinesToQSExpectingStatusCode(
postablePipelines, 200, postablePipelines, 200,
@ -628,7 +632,7 @@ func (tb *LogPipelinesTestBed) GetPipelinesFromQS() *logparsingpipeline.Pipeline
} }
func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient(
pipelines []logparsingpipeline.Pipeline, pipelines []pipelinetypes.GettablePipeline,
) { ) {
lastMsg := tb.opampClientConn.LatestMsgFromServer() lastMsg := tb.opampClientConn.LatestMsgFromServer()
assertPipelinesRecommendedInRemoteConfig( assertPipelinesRecommendedInRemoteConfig(
@ -639,7 +643,7 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient(
func assertPipelinesRecommendedInRemoteConfig( func assertPipelinesRecommendedInRemoteConfig(
t *testing.T, t *testing.T,
msg *protobufs.ServerToAgent, msg *protobufs.ServerToAgent,
pipelines []logparsingpipeline.Pipeline, gettablePipelines []pipelinetypes.GettablePipeline,
) { ) {
collectorConfigFiles := msg.RemoteConfig.Config.ConfigMap collectorConfigFiles := msg.RemoteConfig.Config.ConfigMap
require.Equal( require.Equal(
@ -669,7 +673,7 @@ func assertPipelinesRecommendedInRemoteConfig(
} }
} }
_, expectedLogProcessorNames, err := logparsingpipeline.PreparePipelineProcessor(pipelines) _, expectedLogProcessorNames, err := logparsingpipeline.PreparePipelineProcessor(gettablePipelines)
require.NoError(t, err) require.NoError(t, err)
require.Equal( require.Equal(
t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames, t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames,
@ -698,12 +702,12 @@ func assertPipelinesRecommendedInRemoteConfig(
// find logparsingpipeline.Pipeline whose processor is being validated here // find logparsingpipeline.Pipeline whose processor is being validated here
pipelineIdx := slices.IndexFunc( pipelineIdx := slices.IndexFunc(
pipelines, func(p logparsingpipeline.Pipeline) bool { gettablePipelines, func(p pipelinetypes.GettablePipeline) bool {
return logparsingpipeline.CollectorConfProcessorName(p) == procName return logparsingpipeline.CollectorConfProcessorName(p) == procName
}, },
) )
require.GreaterOrEqual(t, pipelineIdx, 0) require.GreaterOrEqual(t, pipelineIdx, 0)
expectedExpr, err := queryBuilderToExpr.Parse(pipelines[pipelineIdx].Filter) expectedExpr, err := queryBuilderToExpr.Parse(gettablePipelines[pipelineIdx].Filter)
require.Nil(t, err) require.Nil(t, err)
require.Equal(t, expectedExpr, pipelineFilterExpr) require.Equal(t, expectedExpr, pipelineFilterExpr)
} }
@ -724,7 +728,7 @@ func (tb *LogPipelinesTestBed) simulateOpampClientAcknowledgementForLatestConfig
} }
func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection( func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection(
pipelines []logparsingpipeline.Pipeline, pipelines []pipelinetypes.GettablePipeline,
) { ) {
newAgentConn := &opamp.MockOpAmpConnection{} newAgentConn := &opamp.MockOpAmpConnection{}
tb.opampServer.OnMessage( tb.opampServer.OnMessage(
@ -762,7 +766,7 @@ func unmarshalPipelinesResponse(apiResponse *app.ApiResponse) (
func assertPipelinesResponseMatchesPostedPipelines( func assertPipelinesResponseMatchesPostedPipelines(
t *testing.T, t *testing.T,
postablePipelines logparsingpipeline.PostablePipelines, postablePipelines pipelinetypes.PostablePipelines,
pipelinesResp *logparsingpipeline.PipelinesResponse, pipelinesResp *logparsingpipeline.PipelinesResponse,
) { ) {
require.Equal( require.Equal(
@ -772,7 +776,7 @@ func assertPipelinesResponseMatchesPostedPipelines(
for i, pipeline := range pipelinesResp.Pipelines { for i, pipeline := range pipelinesResp.Pipelines {
postable := postablePipelines.Pipelines[i] postable := postablePipelines.Pipelines[i]
require.Equal(t, postable.Name, pipeline.Name, "pipeline.Name mismatch") require.Equal(t, postable.Name, pipeline.Name, "pipeline.Name mismatch")
require.Equal(t, postable.OrderId, pipeline.OrderId, "pipeline.OrderId mismatch") require.Equal(t, postable.OrderID, pipeline.OrderID, "pipeline.OrderId mismatch")
require.Equal(t, postable.Enabled, pipeline.Enabled, "pipeline.Enabled mismatch") require.Equal(t, postable.Enabled, pipeline.Enabled, "pipeline.Enabled mismatch")
require.Equal(t, postable.Config, pipeline.Config, "pipeline.Config mismatch") require.Equal(t, postable.Config, pipeline.Config, "pipeline.Config mismatch")
} }


@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations" "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/auth" "github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/dao" "github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/featureManager" "github.com/SigNoz/signoz/pkg/query-service/featureManager"
@ -21,6 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
mockhouse "github.com/srikanthccv/ClickHouse-go-mock" mockhouse "github.com/srikanthccv/ClickHouse-go-mock"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"go.uber.org/zap" "go.uber.org/zap"
@ -176,10 +176,10 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
// After saving a user created pipeline, pipelines response should include // After saving a user created pipeline, pipelines response should include
// both user created pipelines and pipelines for installed integrations. // both user created pipelines and pipelines for installed integrations.
postablePipelines := logparsingpipeline.PostablePipelines{ postablePipelines := pipelinetypes.PostablePipelines{
Pipelines: []logparsingpipeline.PostablePipeline{ Pipelines: []pipelinetypes.PostablePipeline{
{ {
OrderId: 1, OrderID: 1,
Name: "pipeline1", Name: "pipeline1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -197,7 +197,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
}, },
}, },
}, },
Config: []logparsingpipeline.PipelineOperator{ Config: []pipelinetypes.PipelineOperator{
{ {
OrderId: 1, OrderId: 1,
ID: "add", ID: "add",
@ -223,7 +223,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
postable := postableFromPipelines(getPipelinesResp.Pipelines) postable := postableFromPipelines(getPipelinesResp.Pipelines)
slices.Reverse(postable.Pipelines) slices.Reverse(postable.Pipelines)
for i := range postable.Pipelines { for i := range postable.Pipelines {
postable.Pipelines[i].OrderId = i + 1 postable.Pipelines[i].OrderID = i + 1
} }
pipelinesTB.PostPipelinesToQS(postable) pipelinesTB.PostPipelinesToQS(postable)
@ -256,7 +256,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
// should not be able to edit integrations pipeline. // should not be able to edit integrations pipeline.
require.Greater(len(postable.Pipelines[0].Config), 0) require.Greater(len(postable.Pipelines[0].Config), 0)
postable.Pipelines[0].Config = []logparsingpipeline.PipelineOperator{} postable.Pipelines[0].Config = []pipelinetypes.PipelineOperator{}
pipelinesTB.PostPipelinesToQS(postable) pipelinesTB.PostPipelinesToQS(postable)
getPipelinesResp = pipelinesTB.GetPipelinesFromQS() getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
@ -270,7 +270,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
require.Greater(len(firstPipeline.Config), 0) require.Greater(len(firstPipeline.Config), 0)
// should not be able to delete integrations pipeline // should not be able to delete integrations pipeline
postable.Pipelines = []logparsingpipeline.PostablePipeline{postable.Pipelines[1]} postable.Pipelines = []pipelinetypes.PostablePipeline{postable.Pipelines[1]}
pipelinesTB.PostPipelinesToQS(postable) pipelinesTB.PostPipelinesToQS(postable)
getPipelinesResp = pipelinesTB.GetPipelinesFromQS() getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
@ -596,21 +596,21 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
} }
} }
func postableFromPipelines(pipelines []logparsingpipeline.Pipeline) logparsingpipeline.PostablePipelines { func postableFromPipelines(gettablePipelines []pipelinetypes.GettablePipeline) pipelinetypes.PostablePipelines {
result := logparsingpipeline.PostablePipelines{} result := pipelinetypes.PostablePipelines{}
for _, p := range pipelines { for _, p := range gettablePipelines {
postable := logparsingpipeline.PostablePipeline{ postable := pipelinetypes.PostablePipeline{
Id: p.Id, ID: p.ID,
OrderId: p.OrderId, OrderID: p.OrderID,
Name: p.Name, Name: p.Name,
Alias: p.Alias, Alias: p.Alias,
Enabled: p.Enabled, Enabled: p.Enabled,
Config: p.Config, Config: p.Config,
} }
if p.Description != nil { if p.Description != "" {
postable.Description = *p.Description postable.Description = p.Description
} }
if p.Filter != nil { if p.Filter != nil {


@ -207,6 +207,13 @@ func AuthenticatedRequestForTest(
} }
req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt) req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt)
ctx, err := jwt.ContextFromRequest(req.Context(), req.Header.Get("Authorization"))
if err != nil {
return nil, err
}
req = req.WithContext(ctx)
return req, nil return req, nil
} }
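
With the claims now attached to the request context, downstream handlers can recover the caller's org. A minimal sketch (package and helper names assumed for illustration):

package tests // package name assumed for illustration

import (
	"net/http"
	"testing"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
)

// assertClaimsOnRequest is a hypothetical helper: requests built by
// AuthenticatedRequestForTest should carry JWT claims on their context.
func assertClaimsOnRequest(t *testing.T, req *http.Request) {
	claims, ok := authtypes.ClaimsFromContext(req.Context())
	if !ok {
		t.Fatal("expected auth claims on the request context")
	}
	_ = claims.OrgID // org-scoped handlers read this
}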


@ -50,6 +50,7 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s
sqlmigration.NewUpdateOrganizationFactory(sqlStore), sqlmigration.NewUpdateOrganizationFactory(sqlStore),
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore), sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore), sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
sqlmigration.NewUpdatePipelines(sqlStore),
), ),
) )
if err != nil { if err != nil {


@ -61,6 +61,7 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
sqlmigration.NewAddAlertmanagerFactory(sqlstore), sqlmigration.NewAddAlertmanagerFactory(sqlstore),
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlstore), sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlstore),
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlstore), sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlstore),
sqlmigration.NewUpdatePipelines(sqlstore),
) )
} }


@ -0,0 +1,96 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type updatePipelines struct {
store sqlstore.SQLStore
}
func NewUpdatePipelines(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("update_pipelines"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdatePipelines(ctx, ps, c, sqlstore)
})
}
func newUpdatePipelines(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
return &updatePipelines{
store: store,
}, nil
}
func (migration *updatePipelines) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *updatePipelines) Up(ctx context.Context, db *bun.DB) error {
// begin transaction
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback() //nolint:errcheck
// get all org ids
var orgIDs []string
if err := migration.store.BunDB().NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs); err != nil {
return err
}
// add org id to pipelines table
if exists, err := migration.store.Dialect().ColumnExists(ctx, tx, "pipelines", "org_id"); err != nil {
return err
} else if !exists {
if _, err := tx.NewAddColumn().Table("pipelines").ColumnExpr("org_id TEXT REFERENCES organizations(id) ON DELETE CASCADE").Exec(ctx); err != nil {
return err
}
// if there is exactly one org ID, set it on all existing pipelines
if len(orgIDs) == 1 {
orgID := orgIDs[0]
if _, err := tx.NewUpdate().Table("pipelines").Set("org_id = ?", orgID).Where("org_id IS NULL").Exec(ctx); err != nil {
return err
}
}
}
// add updated_by to pipelines table
if exists, err := migration.store.Dialect().ColumnExists(ctx, tx, "pipelines", "updated_by"); err != nil {
return err
} else if !exists {
if _, err := tx.NewAddColumn().Table("pipelines").ColumnExpr("updated_by TEXT").Exec(ctx); err != nil {
return err
}
}
// add updated_at to pipelines table
if exists, err := migration.store.Dialect().ColumnExists(ctx, tx, "pipelines", "updated_at"); err != nil {
return err
} else if !exists {
if _, err := tx.NewAddColumn().Table("pipelines").ColumnExpr("updated_at TIMESTAMP").Exec(ctx); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
return nil
}
func (migration *updatePipelines) Down(ctx context.Context, db *bun.DB) error {
return nil
}
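
With org_id in place, pipeline reads can be scoped per tenant. A minimal sketch, assuming a bun.IDB handle (helper name hypothetical; table and column names follow the migration above):

package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
)

// listPipelineIDsForOrg is a hypothetical helper illustrating the per-tenant
// reads the new org_id column enables.
func listPipelineIDsForOrg(ctx context.Context, db bun.IDB, orgID string) ([]string, error) {
	var ids []string
	err := db.NewSelect().
		Table("pipelines").
		Column("id").
		Where("org_id = ?", orgID).
		Order("order_id ASC").
		Scan(ctx, &ids)
	return ids, err
}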


@ -1,22 +0,0 @@
package types
import (
"time"
"github.com/uptrace/bun"
)
type Pipeline struct {
bun.BaseModel `bun:"table:pipelines"`
ID string `bun:"id,pk,type:text"`
OrderID int `bun:"order_id"`
Enabled bool `bun:"enabled"`
CreatedBy string `bun:"created_by,type:text"`
CreatedAt time.Time `bun:"created_at,default:current_timestamp"`
Name string `bun:"name,type:varchar(400),notnull"`
Alias string `bun:"alias,type:varchar(20),notnull"`
Description string `bun:"description,type:text"`
Filter string `bun:"filter,type:text,notnull"`
ConfigJSON string `bun:"config_json,type:text"`
}


@ -1,17 +1,122 @@
package logparsingpipeline package pipelinetypes
import ( import (
"errors" "encoding/json"
"fmt" "fmt"
"regexp" "regexp"
"slices"
"strings" "strings"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/queryBuilderToExpr" "github.com/SigNoz/signoz/pkg/query-service/queryBuilderToExpr"
"golang.org/x/exp/slices" "github.com/SigNoz/signoz/pkg/types"
"github.com/pkg/errors"
"github.com/uptrace/bun"
) )
// PostablePipelines are a list of user-defined pipelines type StoreablePipeline struct {
bun.BaseModel `bun:"table:pipelines,alias:p"`
types.UserAuditable
types.TimeAuditable
OrgID string `json:"-" bun:"org_id,notnull"`
ID string `json:"id" bun:"id,pk,type:text"`
OrderID int `json:"orderId" bun:"order_id"`
Enabled bool `json:"enabled" bun:"enabled"`
Name string `json:"name" bun:"name,type:varchar(400),notnull"`
Alias string `json:"alias" bun:"alias,type:varchar(20),notnull"`
Description string `json:"description" bun:"description,type:text"`
FilterString string `json:"-" bun:"filter,type:text,notnull"`
ConfigJSON string `json:"-" bun:"config_json,type:text"`
}
type GettablePipeline struct {
StoreablePipeline
Filter *v3.FilterSet `json:"filter"`
Config []PipelineOperator `json:"config"`
}
func (i *GettablePipeline) ParseRawConfig() error {
c := []PipelineOperator{}
err := json.Unmarshal([]byte(i.ConfigJSON), &c)
if err != nil {
return errors.Wrap(err, "failed to parse ingestion rule config")
}
i.Config = c
return nil
}
func (i *GettablePipeline) ParseFilter() error {
f := v3.FilterSet{}
err := json.Unmarshal([]byte(i.FilterString), &f)
if err != nil {
return errors.Wrap(err, "failed to parse filter")
}
i.Filter = &f
return nil
}
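
Taken together, a stored row is hydrated into the API-facing shape roughly as follows (helper name hypothetical; it only composes the types and methods defined above):

package pipelinetypes

// hydrate is a hypothetical helper: the embedded StoreablePipeline carries
// the raw JSON columns, and the two Parse methods populate Filter and Config.
func hydrate(row StoreablePipeline) (GettablePipeline, error) {
	gp := GettablePipeline{StoreablePipeline: row}
	if err := gp.ParseRawConfig(); err != nil {
		return gp, err
	}
	if err := gp.ParseFilter(); err != nil {
		return gp, err
	}
	return gp, nil
}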
type Processor struct {
Operators []PipelineOperator `json:"operators" yaml:"operators"`
}
type PipelineOperator struct {
Type string `json:"type" yaml:"type"`
ID string `json:"id,omitempty" yaml:"id,omitempty"`
Output string `json:"output,omitempty" yaml:"output,omitempty"`
OnError string `json:"on_error,omitempty" yaml:"on_error,omitempty"`
If string `json:"if,omitempty" yaml:"if,omitempty"`
// don't need the following in the final config
OrderId int `json:"orderId" yaml:"-"`
Enabled bool `json:"enabled" yaml:"-"`
Name string `json:"name,omitempty" yaml:"-"`
// optional keys depending on the type
ParseTo string `json:"parse_to,omitempty" yaml:"parse_to,omitempty"`
Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"`
Regex string `json:"regex,omitempty" yaml:"regex,omitempty"`
ParseFrom string `json:"parse_from,omitempty" yaml:"parse_from,omitempty"`
*TraceParser `yaml:",inline,omitempty"`
Field string `json:"field,omitempty" yaml:"field,omitempty"`
Value string `json:"value,omitempty" yaml:"value,omitempty"`
From string `json:"from,omitempty" yaml:"from,omitempty"`
To string `json:"to,omitempty" yaml:"to,omitempty"`
Expr string `json:"expr,omitempty" yaml:"expr,omitempty"`
Routes *[]Route `json:"routes,omitempty" yaml:"routes,omitempty"`
Fields []string `json:"fields,omitempty" yaml:"fields,omitempty"`
Default string `json:"default,omitempty" yaml:"default,omitempty"`
// time_parser fields.
Layout string `json:"layout,omitempty" yaml:"layout,omitempty"`
LayoutType string `json:"layout_type,omitempty" yaml:"layout_type,omitempty"`
// severity parser fields
SeverityMapping map[string][]string `json:"mapping,omitempty" yaml:"mapping,omitempty"`
OverwriteSeverityText bool `json:"overwrite_text,omitempty" yaml:"overwrite_text,omitempty"`
}
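
The JSON tags above are exercised by the test fixtures elsewhere in this change; a minimal add operator deserializes as sketched below (values illustrative):

package pipelinetypes

import "encoding/json"

// exampleAddOperator is a hypothetical sketch mirroring the test fixtures:
// operators arrive as JSON and deserialize via the struct tags above.
func exampleAddOperator() (PipelineOperator, error) {
	var op PipelineOperator
	err := json.Unmarshal([]byte(`{
		"orderId": 1,
		"id": "add",
		"type": "add",
		"enabled": true,
		"name": "add",
		"field": "attributes.test",
		"value": "test-value"
	}`), &op)
	return op, err
}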
type TimestampParser struct {
Layout string `json:"layout" yaml:"layout"`
LayoutType string `json:"layout_type" yaml:"layout_type"`
ParseFrom string `json:"parse_from" yaml:"parse_from"`
}
type TraceParser struct {
TraceId *ParseFrom `json:"trace_id,omitempty" yaml:"trace_id,omitempty"`
SpanId *ParseFrom `json:"span_id,omitempty" yaml:"span_id,omitempty"`
TraceFlags *ParseFrom `json:"trace_flags,omitempty" yaml:"trace_flags,omitempty"`
}
type ParseFrom struct {
ParseFrom string `json:"parse_from" yaml:"parse_from"`
}
type Route struct {
Output string `json:"output" yaml:"output"`
Expr string `json:"expr" yaml:"expr"`
}
type PostablePipelines struct { type PostablePipelines struct {
Pipelines []PostablePipeline `json:"pipelines"` Pipelines []PostablePipeline `json:"pipelines"`
} }
@ -19,8 +124,8 @@ type PostablePipelines struct {
// PostablePipeline captures user inputs in setting the pipeline // PostablePipeline captures user inputs in setting the pipeline
type PostablePipeline struct { type PostablePipeline struct {
Id string `json:"id"` ID string `json:"id"`
OrderId int `json:"orderId"` OrderID int `json:"orderId"`
Name string `json:"name"` Name string `json:"name"`
Alias string `json:"alias"` Alias string `json:"alias"`
Description string `json:"description"` Description string `json:"description"`
@ -31,7 +136,7 @@ type PostablePipeline struct {
// IsValid checks if postable pipeline has all the required params // IsValid checks if postable pipeline has all the required params
func (p *PostablePipeline) IsValid() error { func (p *PostablePipeline) IsValid() error {
if p.OrderId == 0 { if p.OrderID == 0 {
return fmt.Errorf("orderId with value > 1 is required") return fmt.Errorf("orderId with value > 1 is required")
} }
if p.Name == "" { if p.Name == "" {


@ -1,4 +1,4 @@
package logparsingpipeline package pipelinetypes
import ( import (
"testing" "testing"
@ -42,7 +42,7 @@ func TestIsValidPostablePipeline(t *testing.T) {
{ {
Name: "Invalid orderId", Name: "Invalid orderId",
Pipeline: PostablePipeline{ Pipeline: PostablePipeline{
OrderId: 0, OrderID: 0,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -54,7 +54,7 @@ func TestIsValidPostablePipeline(t *testing.T) {
{ {
Name: "Valid orderId", Name: "Valid orderId",
Pipeline: PostablePipeline{ Pipeline: PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -66,7 +66,7 @@ func TestIsValidPostablePipeline(t *testing.T) {
{ {
Name: "Invalid filter", Name: "Invalid filter",
Pipeline: PostablePipeline{ Pipeline: PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,
@ -90,7 +90,7 @@ func TestIsValidPostablePipeline(t *testing.T) {
{ {
Name: "Valid filter", Name: "Valid filter",
Pipeline: PostablePipeline{ Pipeline: PostablePipeline{
OrderId: 1, OrderID: 1,
Name: "pipeline 1", Name: "pipeline 1",
Alias: "pipeline1", Alias: "pipeline1",
Enabled: true, Enabled: true,


@ -1,4 +1,4 @@
package logparsingpipeline package pipelinetypes
import ( import (
"errors" "errors"


@ -0,0 +1,55 @@
package pipelinetypes
import (
"fmt"
"testing"
"github.com/antonmedv/expr"
"github.com/stretchr/testify/require"
)
func TestRegexForStrptimeLayout(t *testing.T) {
require := require.New(t)
var testCases = []struct {
strptimeLayout string
str string
shouldMatch bool
}{
{
strptimeLayout: "%Y-%m-%dT%H:%M:%S.%f%z",
str: "2023-11-26T12:03:28.239907+0530",
shouldMatch: true,
}, {
strptimeLayout: "%d-%m-%Y",
str: "26-11-2023",
shouldMatch: true,
}, {
strptimeLayout: "%d/%m/%y",
str: "11/03/02",
shouldMatch: true,
}, {
strptimeLayout: "%A, %d. %B %Y %I:%M%p",
str: "Tuesday, 21. November 2006 04:30PM11/03/02",
shouldMatch: true,
}, {
strptimeLayout: "%A, %d. %B %Y %I:%M%p",
str: "some random text",
shouldMatch: false,
},
}
for _, test := range testCases {
regex, err := RegexForStrptimeLayout(test.strptimeLayout)
require.Nil(err, test.strptimeLayout)
code := fmt.Sprintf(`"%s" matches "%s"`, test.str, regex)
program, err := expr.Compile(code)
require.Nil(err, test.strptimeLayout)
output, err := expr.Run(program, map[string]string{})
require.Nil(err, test.strptimeLayout)
require.Equal(test.shouldMatch, output, test.strptimeLayout)
}
}
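
The generated regex is consumed through expr's `matches` operator, as the test compiles it; a standalone sketch (wrapper name hypothetical):

package pipelinetypes

import (
	"fmt"

	"github.com/antonmedv/expr"
)

// timestampMatchesLayout is a hypothetical wrapper: it builds the regex for
// a strptime layout and evaluates an expr `matches` check against a value,
// mirroring what the test above does case by case.
func timestampMatchesLayout(value, strptimeLayout string) (bool, error) {
	regex, err := RegexForStrptimeLayout(strptimeLayout)
	if err != nil {
		return false, err
	}
	program, err := expr.Compile(fmt.Sprintf("%q matches %q", value, regex))
	if err != nil {
		return false, err
	}
	output, err := expr.Run(program, map[string]string{})
	if err != nil {
		return false, err
	}
	matched, _ := output.(bool)
	return matched, nil
}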